diff --git a/.asf.yaml b/.asf.yaml
index 6b1bb1599f463..a9e6961c7e600 100644
--- a/.asf.yaml
+++ b/.asf.yaml
@@ -44,6 +44,9 @@ github:
     master:
       required_pull_request_reviews:
         required_approving_review_count: 1
+    main:
+      required_pull_request_reviews:
+        required_approving_review_count: 1
     v1-10-stable:
       required_pull_request_reviews:
         required_approving_review_count: 1
diff --git a/.dockerignore b/.dockerignore
index 70494e637813c..b5a61d35e5a2e 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -52,6 +52,7 @@
 !.flake8
 !.dockerignore
 !pylintrc
+!pylintrc-tests
 !pytest.ini
 !CHANGELOG.txt
 !LICENSE
diff --git a/.github/actions/cancel-workflow-runs b/.github/actions/cancel-workflow-runs
index 953e057dc81d3..8248bc1feff04 160000
--- a/.github/actions/cancel-workflow-runs
+++ b/.github/actions/cancel-workflow-runs
@@ -1 +1 @@
-Subproject commit 953e057dc81d3458935a18d1184c386b0f6b5738
+Subproject commit 8248bc1feff049e98c0e6a96889b147199c38203
diff --git a/.github/actions/label-when-approved-action b/.github/actions/label-when-approved-action
index 4c5190fec5661..0058d0094da27 160000
--- a/.github/actions/label-when-approved-action
+++ b/.github/actions/label-when-approved-action
@@ -1 +1 @@
-Subproject commit 4c5190fec5661e98d83f50bbd4ef9ebb48bd1194
+Subproject commit 0058d0094da27e116fad6e0da516ebe1107f26de
diff --git a/.github/workflows/build-images-workflow-run.yml b/.github/workflows/build-images-workflow-run.yml
index 21ca332e4e8ff..e00cdc5fd0d0b 100644
--- a/.github/workflows/build-images-workflow-run.yml
+++ b/.github/workflows/build-images-workflow-run.yml
@@ -22,18 +22,21 @@ on:  # yamllint disable-line rule:truthy
     workflows: ["CI Build"]
     types: ['requested']
 env:
-  MOUNT_LOCAL_SOURCES: "false"
+  MOUNT_SELECTED_LOCAL_SOURCES: "false"
   FORCE_ANSWER_TO_QUESTIONS: "yes"
-  FORCE_PULL_IMAGES: "true"
+  FORCE_PULL_IMAGES: "false"
   CHECK_IMAGE_FOR_REBUILD: "true"
   SKIP_CHECK_REMOTE_IMAGE: "true"
   DB_RESET: "true"
   VERBOSE: "true"
   USE_GITHUB_REGISTRY: "true"
-  # Might be either 'ghcr.io' or 'docker.pkg.github.com'
-  GITHUB_REGISTRY: "docker.pkg.github.com"
   GITHUB_REPOSITORY: ${{ github.repository }}
   GITHUB_USERNAME: ${{ github.actor }}
+  # You can override CONSTRAINTS_GITHUB_REPOSITORY by setting secret in your repo but by default the
+  # Airflow one is going to be used
+  CONSTRAINTS_GITHUB_REPOSITORY: >-
+    ${{ secrets.CONSTRAINTS_GITHUB_REPOSITORY != '' &&
+    secrets.CONSTRAINTS_GITHUB_REPOSITORY || github.repository }}
   # This token is WRITE one - workflow_run type of events always have the WRITE token
   GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
   # This token should not be empty in workflow_run type of event.
@@ -42,13 +45,14 @@ env:
   GITHUB_REGISTRY_WAIT_FOR_IMAGE: "false"
   BUILD_IMAGES: ${{ secrets.AIRFLOW_GITHUB_REGISTRY_WAIT_FOR_IMAGE != 'false' }}
   INSTALL_PROVIDERS_FROM_SOURCES: "true"
+  GITHUB_REGISTRY: ${{ secrets.OVERRIDE_GITHUB_REGISTRY }}

 jobs:

   cancel-workflow-runs:
     timeout-minutes: 10
     name: "Cancel workflow runs"
-    runs-on: ubuntu-20.04
+    runs-on: ${{ github.repository == 'apache/airflow' && 'self-hosted' || 'ubuntu-20.04' }}
     outputs:
       sourceHeadRepo: ${{ steps.source-run-info.outputs.sourceHeadRepo }}
       sourceHeadBranch: ${{ steps.source-run-info.outputs.sourceHeadBranch }}
@@ -61,6 +65,7 @@
       sourceEvent: ${{ steps.source-run-info.outputs.sourceEvent }}
       cacheDirective: ${{ steps.cache-directive.outputs.docker-cache }}
       buildImages: ${{ steps.build-images.outputs.buildImages }}
+      runsOn: ${{ github.repository == 'apache/airflow' && '["self-hosted"]' || '["ubuntu-20.04"]' }}
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
@@ -115,7 +120,7 @@
           sourceRunId: ${{ github.event.workflow_run.id }}
           notifyPRCancel: true
           jobNameRegexps: >
-            ["^Pylint$", "^Static checks", "^Build docs$", "^Spell check docs$", "^Backport packages$",
+            ["^Pylint$", "^Static checks", "^Build docs$", "^Spell check docs$", "^Provider packages",
             "^Checks: Helm tests$", "^Test OpenAPI*"]
       - name: "Extract canceled failed runs"
         # We use this step to build regexp that will be used to match the Source Run id in
@@ -191,7 +196,7 @@
       Source Sha: ${{ needs.cancel-workflow-runs.outputs.sourceHeadSha }}
       Merge commit Sha: ${{ needs.cancel-workflow-runs.outputs.mergeCommitSha }}
       Target commit Sha: ${{ needs.cancel-workflow-runs.outputs.targetCommitSha }}
-    runs-on: ubuntu-20.04
+    runs-on: ${{ fromJson(needs.cancel-workflow-runs.outputs.runsOn) }}
     needs: [cancel-workflow-runs]
     env:
       GITHUB_CONTEXT: ${{ toJson(github) }}
@@ -255,7 +260,7 @@
   build-ci-images:
     timeout-minutes: 80
     name: "Build CI images ${{matrix.python-version}}"
-    runs-on: ubuntu-20.04
+    runs-on: ${{ fromJson(needs.cancel-workflow-runs.outputs.runsOn) }}
     needs: [build-info, cancel-workflow-runs]
     strategy:
       matrix:
@@ -267,12 +272,15 @@
       needs.build-info.outputs.image-build == 'true' &&
       needs.cancel-workflow-runs.outputs.buildImages == 'true'
     env:
+      RUNS_ON: ${{ fromJson(needs.cancel-workflow-runs.outputs.runsOn)[0] }}
       BACKEND: postgres
       PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
       GITHUB_REGISTRY_PUSH_IMAGE_TAG: ${{ github.event.workflow_run.id }}
       UPGRADE_TO_NEWER_DEPENDENCIES: ${{ needs.build-info.outputs.upgradeToNewerDependencies }}
       CONTINUE_ON_PIP_CHECK_FAILURE: "true"
       DOCKER_CACHE: ${{ needs.cancel-workflow-runs.outputs.cacheDirective }}
+      FORCE_PULL_BASE_PYTHON_IMAGE: >
+        ${{ needs.cancel-workflow-runs.sourceEvent == 'schedule' && 'true' || 'false' }}
     steps:
       - name: >
          Checkout [${{ needs.cancel-workflow-runs.outputs.sourceEvent }}]
@@ -380,7 +388,7 @@
   build-prod-images:
     timeout-minutes: 80
     name: "Build PROD images ${{matrix.python-version}}"
-    runs-on: ubuntu-20.04
+    runs-on: ${{ fromJson(needs.cancel-workflow-runs.outputs.runsOn) }}
     needs: [build-info, cancel-workflow-runs, build-ci-images]
     strategy:
       matrix:
@@ -392,12 +400,17 @@
       needs.build-info.outputs.image-build == 'true' &&
       needs.cancel-workflow-runs.outputs.buildImages == 'true'
     env:
+      RUNS_ON: ${{ fromJson(needs.cancel-workflow-runs.outputs.runsOn)[0] }}
       BACKEND: postgres
       PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
       GITHUB_REGISTRY_PUSH_IMAGE_TAG: ${{ github.event.workflow_run.id }}
       GITHUB_REGISTRY_PULL_IMAGE_TAG: ${{ github.event.workflow_run.id }}
       UPGRADE_TO_NEWER_DEPENDENCIES: ${{ needs.build-info.outputs.upgradeToNewerDependencies }}
       DOCKER_CACHE: ${{ needs.cancel-workflow-runs.outputs.cacheDirective }}
+      FORCE_PULL_BASE_PYTHON_IMAGE: >
+        ${{ needs.cancel-workflow-runs.sourceEvent == 'schedule' && 'true' || 'false' }}
+      VERSION_SUFFIX_FOR_PYPI: "dev"
+      VERSION_SUFFIX_FOR_SVN: "dev"
     steps:
       - name: >
          Checkout [${{ needs.cancel-workflow-runs.outputs.sourceEvent }}]
@@ -509,9 +522,9 @@

   cancel-on-build-cancel:
     name: "Cancel 'CI Build' jobs on build image cancelling."
-    runs-on: ubuntu-20.04
+    runs-on: ${{ fromJson(needs.cancel-workflow-runs.outputs.runsOn) }}
     if: cancelled()
-    needs: [build-ci-images, build-prod-images]
+    needs: [cancel-workflow-runs, build-ci-images, build-prod-images]
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
@@ -529,9 +542,9 @@

   cancel-on-build-failure:
     name: "Cancel 'CI Build' jobs on build image failing."
-    runs-on: ubuntu-20.04
+    runs-on: ${{ fromJson(needs.cancel-workflow-runs.outputs.runsOn) }}
     if: failure()
-    needs: [build-ci-images, build-prod-images]
+    needs: [cancel-workflow-runs, build-ci-images, build-prod-images]
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
@@ -545,5 +558,5 @@
           cancelMode: self
           notifyPRCancel: true
           notifyPRCancelMessage: |
-            Building images for the PR has failed. Follow the the workflow link to check the reason.
+            Building images for the PR has failed. Follow the workflow link to check the reason.
           sourceRunId: ${{ github.event.workflow_run.id }}
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 941c12925d6e1..6311b40001671 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -26,10 +26,9 @@ on:  # yamllint disable-line rule:truthy
     branches: ['master', 'v1-10-test', 'v1-10-stable', 'v2-0-test']

 env:
-
-  MOUNT_LOCAL_SOURCES: "false"
+  MOUNT_SELECTED_LOCAL_SOURCES: "false"
   FORCE_ANSWER_TO_QUESTIONS: "yes"
-  FORCE_PULL_IMAGES: "true"
+  FORCE_PULL_IMAGES: "false"
   CHECK_IMAGE_FOR_REBUILD: "true"
   SKIP_CHECK_REMOTE_IMAGE: "true"
   DB_RESET: "true"
@@ -40,6 +39,11 @@ env:
   GITHUB_REGISTRY: "docker.pkg.github.com"
   GITHUB_REPOSITORY: ${{ github.repository }}
   GITHUB_USERNAME: ${{ github.actor }}
+  # You can override CONSTRAINTS_GITHUB_REPOSITORY by setting secret in your repo but by default the
+  # Airflow one is going to be used
+  CONSTRAINTS_GITHUB_REPOSITORY: >-
+    ${{ secrets.CONSTRAINTS_GITHUB_REPOSITORY != '' &&
+    secrets.CONSTRAINTS_GITHUB_REPOSITORY || github.repository }}
   # In builds from forks, this token is read-only. For scheduler/direct push it is WRITE one
   GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
   # In builds from forks, this token is empty, and this is good because such builds do not even try
@@ -62,14 +66,63 @@ env:
   #
   # You can also switch back to building images locally and disabling the "Build Images" workflow
   # by defining AIRFLOW_GITHUB_REGISTRY_WAIT_FOR_IMAGE secret with value set to "false"
-
   GITHUB_REGISTRY_WAIT_FOR_IMAGE: ${{ secrets.AIRFLOW_GITHUB_REGISTRY_WAIT_FOR_IMAGE != 'false' }}

 jobs:

   build-info:
     name: "Build info"
-    runs-on: ubuntu-20.04
+    # The runs-on cannot refer to env. or secrets. context, so we have no
+    # option but to specify a hard-coded list here. This is "safe", as the list
+    # is checked again by the runner using its own list, so a PR author cannot
+    # change this and get access to our self-hosted runners
+    #
+    # When changing this list, ensure that it is kept in sync with the
+    # configOverride parameter in AWS SSM (which is what the runner uses)
+    runs-on: >-
+      ${{ (
+      (
+        github.event_name == 'push' ||
+        github.event_name == 'schedule' ||
+        contains(fromJSON('[
+          "BasPH",
+          "Fokko",
+          "KevinYang21",
+          "XD-DENG",
+          "aijamalnk",
+          "alexvanboxel",
+          "aoen",
+          "artwr",
+          "ashb",
+          "bolkedebruin",
+          "criccomini",
+          "dimberman",
+          "feng-tao",
+          "houqp",
+          "jghoman",
+          "jmcarp",
+          "kaxil",
+          "leahecole",
+          "mik-laj",
+          "milton0825",
+          "mistercrunch",
+          "msumit",
+          "potiuk",
+          "r39132",
+          "ryanahamilton",
+          "ryw",
+          "saguziel",
+          "sekikn",
+          "turbaszek",
+          "zhongjiajie",
+          "ephraimbuddy",
+          "jhtimmins",
+          "dstandish",
+          "xinbinhuang",
+          "yuqian"
+        ]'), github.actor)
+      ) && github.repository == 'apache/airflow'
+      ) && 'self-hosted' || 'ubuntu-20.04' }}
     env:
       GITHUB_CONTEXT: ${{ toJson(github) }}
     outputs:
@@ -79,6 +132,7 @@ jobs:
       pythonVersionsListAsString: ${{ steps.selective-checks.outputs.python-versions-list-as-string }}
       defaultPythonVersion: ${{ steps.selective-checks.outputs.default-python-version }}
       kubernetesVersions: ${{ steps.selective-checks.outputs.kubernetes-versions }}
+      kubernetesVersionsListAsString: ${{ steps.selective-checks.outputs.kubernetes-versions-list-as-string }}
       defaultKubernetesVersion: ${{ steps.selective-checks.outputs.default-kubernetes-version }}
       kubernetesModes: ${{ steps.selective-checks.outputs.kubernetes-modes }}
       defaultKubernetesMode: ${{ steps.selective-checks.outputs.default-kubernetes-mode }}
@@ -94,7 +148,6 @@ jobs:
       postgresExclude: ${{ steps.selective-checks.outputs.postgres-exclude }}
       mysqlExclude: ${{ steps.selective-checks.outputs.mysql-exclude }}
       sqliteExclude: ${{ steps.selective-checks.outputs.sqlite-exclude }}
-      kubernetesExclude: ${{ steps.selective-checks.outputs.kubernetes-exclude }}
       run-tests: ${{ steps.selective-checks.outputs.run-tests }}
       run-kubernetes-tests: ${{ steps.selective-checks.outputs.run-kubernetes-tests }}
       basic-checks-only: ${{ steps.selective-checks.outputs.basic-checks-only }}
@@ -103,9 +156,22 @@ jobs:
       needs-helm-tests: ${{ steps.selective-checks.outputs.needs-helm-tests }}
       needs-api-tests: ${{ steps.selective-checks.outputs.needs-api-tests }}
       needs-api-codegen: ${{ steps.selective-checks.outputs.needs-api-codegen }}
+      default-branch: ${{ steps.selective-checks.outputs.default-branch }}
       pullRequestNumber: ${{ steps.source-run-info.outputs.pullRequestNumber }}
       pullRequestLabels: ${{ steps.source-run-info.outputs.pullRequestLabels }}
+      runsOn: ${{ steps.set-runs-on.outputs.runsOn }}
     steps:
+      # Avoid having to specify the runs-on logic every time. We use the custom
+      # env var AIRFLOW_SELF_HOSTED_RUNNER set only on our runners, but never
+      # on the public runners
+      - name: Set runs-on
+        id: set-runs-on
+        run: |
+          if [[ ${AIRFLOW_SELF_HOSTED_RUNNER} != "" ]]; then
+            echo "::set-output name=runsOn::\"self-hosted\""
+          else
+            echo "::set-output name=runsOn::\"ubuntu-20.04\""
+          fi
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
         with:
@@ -142,21 +208,20 @@ jobs:
       - name: Selective checks
         id: selective-checks
         env:
-          EVENT_NAME: ${{ github.event_name }}
-          TARGET_COMMIT_SHA: ${{ github.sha }}
           PR_LABELS: "${{ steps.source-run-info.outputs.pullRequestLabels }}"
         run: |
-          if [[ ${EVENT_NAME} == "pull_request" ]]; then
+          if [[ ${GITHUB_EVENT_NAME} == "pull_request" ]]; then
             # Run selective checks
-            ./scripts/ci/selective_ci_checks.sh "${TARGET_COMMIT_SHA}"
+            ./scripts/ci/selective_ci_checks.sh "${GITHUB_SHA}"
           else
             # Run all checks
             ./scripts/ci/selective_ci_checks.sh
           fi

   test-openapi-client-generation:
+    timeout-minutes: 10
     name: "Test OpenAPI client generation"
-    runs-on: ubuntu-20.04
+    runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
     needs: [build-info]
     if: needs.build-info.outputs.needs-api-codegen == 'true'
     steps:
@@ -168,15 +233,42 @@ jobs:
       - name: "Generate client codegen diff"
         run: ./scripts/ci/openapi/client_codegen_diff.sh

+  test-examples-of-prod-image-building:
+    timeout-minutes: 60
+    name: "Test examples of production image building"
+    runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
+    needs: [build-info]
+    if: needs.build-info.outputs.image-build == 'true'
+    steps:
+      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
+        uses: actions/checkout@v2
+        with:
+          fetch-depth: 2
+          persist-credentials: false
+      - name: "Free space"
+        run: ./scripts/ci/tools/ci_free_space_on_ci.sh
+        if: |
+          needs.build-info.outputs.waitForImage == 'true'
+      - name: "Setup python"
+        uses: actions/setup-python@v2
+        with:
+          python-version: ${{needs.build-info.outputs.defaultPythonVersion}}
+      - name: "Test examples of PROD image building"
+        run: ./scripts/ci/images/ci_test_examples_of_prod_image_building.sh
+
   ci-images:
     timeout-minutes: 120
     name: "Wait for CI images"
-    runs-on: ubuntu-20.04
+    runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
     needs: [build-info]
     if: needs.build-info.outputs.image-build == 'true'
     env:
+      RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
       BACKEND: sqlite
       UPGRADE_TO_NEWER_DEPENDENCIES: ${{ needs.build-info.outputs.upgradeToNewerDependencies }}
+      WAIT_FOR_IMAGE: ${{ needs.build-info.outputs.waitForImage }}
+    outputs:
+      githubRegistry: ${{ steps.wait-for-images.outputs.githubRegistry }}
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
@@ -190,56 +282,37 @@ jobs:
         if: needs.build-info.outputs.waitForImage == 'true'
       - name: "Free space"
         run: ./scripts/ci/tools/ci_free_space_on_ci.sh
-        if: needs.build-info.outputs.waitForImage == 'true'
+        if: |
+          needs.build-info.outputs.waitForImage == 'true'
       - name: >
           Wait for CI images
           ${{ needs.build-info.outputs.pythonVersions }}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}
+        id: wait-for-images
         env:
           CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING: >
             ${{needs.build-info.outputs.pythonVersionsListAsString}}
         # We wait for the images to be available either from the build-ci-image step or from
-        # "build-images-workflow-run.yml' run as pull_request_target (it has the write
-        # permissions in case pull_request from fork is run.
+        # "build-images-workflow-run.yml' run as pull_request_target.
         # We are utilising single job to wait for all images because this job merely waits
-        # For the images to be available. The test jobs wait for it to complete!
-        run: ./scripts/ci/images/ci_wait_for_all_ci_images.sh
-        if: needs.build-info.outputs.waitForImage == 'true'
+        # for the images to be available.
+        # The test jobs wait for it to complete if WAIT_FOR_IMAGE is 'true'!
+        # The job will set the output "githubRegistry" - result of auto-detecting which registry has
+        # been used by checking where the image can be downloaded from.
+        #
+        run: ./scripts/ci/images/ci_wait_for_and_verify_all_ci_images.sh

-  verify-ci-images:
-    timeout-minutes: 20
-    name: "Verify CI Image Py${{matrix.python-version}}"
-    runs-on: ubuntu-20.04
-    needs: [build-info, ci-images]
-    strategy:
-      matrix:
-        python-version: ${{ fromJson(needs.build-info.outputs.pythonVersions) }}
-    env:
-      BACKEND: sqlite
-    if: needs.build-info.outputs.image-build == 'true'
-    steps:
-      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
-        uses: actions/checkout@v2
-        if: needs.build-info.outputs.waitForImage == 'true'
-        with:
-          persist-credentials: false
-      - name: "Free space"
-        run: ./scripts/ci/tools/ci_free_space_on_ci.sh
-        if: needs.build-info.outputs.waitForImage == 'true'
-      - name: "Prepare CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}"
-        run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
-      - name: "Verify CI image Py${{matrix.python-version}}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}"
-        run: ./scripts/ci/images/ci_verify_ci_image.sh
-        if: needs.build-info.outputs.waitForImage == 'true'

   static-checks:
     timeout-minutes: 30
     name: "Static checks"
-    runs-on: ubuntu-20.04
+    runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
     needs: [build-info, ci-images]
     env:
+      RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
       SKIP: "pylint,identity"
-      MOUNT_LOCAL_SOURCES: "true"
+      MOUNT_SELECTED_LOCAL_SOURCES: "true"
       PYTHON_MAJOR_MINOR_VERSION: ${{needs.build-info.outputs.defaultPythonVersion}}
+      GITHUB_REGISTRY: ${{ needs.ci-images.outputs.githubRegistry }}
     if: needs.build-info.outputs.basic-checks-only == 'false'
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
@@ -250,16 +323,29 @@ jobs:
         uses: actions/setup-python@v2
         with:
           python-version: ${{needs.build-info.outputs.defaultPythonVersion}}
-      - name: Cache pre-commit env
-        uses: actions/cache@v2
-        with:
-          path: ~/.cache/pre-commit
-          key: pre-commit-no-pylint-${{ hashFiles('.pre-commit-config.yaml') }}
-          restore-keys: pre-commit-no-pylint-
       - name: "Free space"
         run: ./scripts/ci/tools/ci_free_space_on_ci.sh
       - name: "Prepare CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}"
         run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
+      - name: "Get Python version"
+        run: "echo \"::set-output name=host-python-version::$(python -c
+          'import platform; print(platform.python_version())')\""
+        id: host-python-version
+      - name: "Cache pre-commit local-installation"
+        uses: actions/cache@v2
+        with:
+          path: ~/.local
+          key: "pre-commit-local-installation-${{steps.host-python-version.outputs.host-python-version}}-\
+${{ hashFiles('setup.py', 'setup.cfg') }}"
+          restore-keys: "\
+pre-commit-local-installation-${{steps.host-python-version.outputs.host-python-version}}-"
+      - name: "Cache pre-commit envs: no-pylint"
+        uses: actions/cache@v2
+        with:
+          path: ~/.cache/pre-commit
+          key: "pre-commit-no-pylint-${{steps.host-python-version.outputs.host-python-version}}-\
+${{ hashFiles('.pre-commit-config.yaml') }}"
+          restore-keys: pre-commit-no-pylint-${{steps.host-python-version.outputs.host-python-version}}
       - name: "Static checks: except pylint"
         run: ./scripts/ci/static_checks/run_static_checks.sh
         env:
@@ -271,11 +357,12 @@ jobs:
   static-checks-basic-checks-only:
     timeout-minutes: 30
     name: "Static checks: basic checks only"
-    runs-on: ubuntu-20.04
+    runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
     needs: [build-info]
     env:
+      RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
       SKIP: "build,mypy,flake8,pylint,bats-in-container-tests,identity"
-      MOUNT_LOCAL_SOURCES: "true"
+      MOUNT_SELECTED_LOCAL_SOURCES: "true"
       PYTHON_MAJOR_MINOR_VERSION: ${{needs.build-info.outputs.defaultPythonVersion}}
     if: needs.build-info.outputs.basic-checks-only == 'true'
     steps:
@@ -287,12 +374,6 @@ jobs:
         uses: actions/setup-python@v2
         with:
           python-version: ${{needs.build-info.outputs.defaultPythonVersion}}
-      - name: Cache pre-commit env
-        uses: actions/cache@v2
-        with:
-          path: ~/.cache/pre-commit
-          key: pre-commit-basic-${{ hashFiles('.pre-commit-config.yaml') }}
-          restore-keys: pre-commit-no-basic-
       - name: >
           Fetch incoming commit ${{ github.sha }} with its parent
         uses: actions/checkout@v2
         with:
@@ -300,9 +381,27 @@ jobs:
           ref: ${{ github.sha }}
           fetch-depth: 2
           persist-credentials: false
+      - name: "Get Python version"
+        run: "echo \"::set-output name=host-python-version::$(python -c
+          'import platform; print(platform.python_version())')\""
+        id: host-python-version
+      - name: "Cache pre-commit local-installation"
+        uses: actions/cache@v2
+        with:
+          path: ~/.local
+          key: "pre-commit-local-installation-${{steps.host-python-version.outputs.host-python-version}}-\
+${{ hashFiles('setup.py', 'setup.cfg') }}"
+          restore-keys: "\
+pre-commit-local-installation-${{steps.host-python-version.outputs.host-python-version}}-"
+      - name: "Cache pre-commit envs: no-pylint"
+        uses: actions/cache@v2
+        with:
+          path: ~/.cache/pre-commit
+          key: "pre-commit-no-pylint-${{steps.host-python-version.outputs.host-python-version}}-\
+${{ hashFiles('.pre-commit-config.yaml') }}"
+          restore-keys: pre-commit-no-pylint-${{steps.host-python-version.outputs.host-python-version}}
       - name: "Static checks: basic checks only"
-        run: |
-          ./scripts/ci/static_checks/run_basic_static_checks.sh "${{ github.sha }}"
+        run: ./scripts/ci/static_checks/run_basic_static_checks.sh "${{ github.sha }}"
         env:
           VERBOSE: false

@@ -310,14 +409,16 @@ jobs:
   static-checks-pylint:
     timeout-minutes: 30
     name: "Pylint"
-    runs-on: ubuntu-20.04
+    runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
     needs: [build-info, ci-images]
     if: needs.build-info.outputs.basic-checks-only == 'false'
     env:
+      RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
       # We want to make sure we have latest sources as only in_container scripts are added
       # to the image but we want to static-check all of them
-      MOUNT_LOCAL_SOURCES: "true"
+      MOUNT_SELECTED_LOCAL_SOURCES: "true"
       PYTHON_MAJOR_MINOR_VERSION: ${{needs.build-info.outputs.defaultPythonVersion}}
+      GITHUB_REGISTRY: ${{ needs.ci-images.outputs.githubRegistry }}
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
@@ -327,16 +428,29 @@ jobs:
        uses: actions/setup-python@v2
         with:
           python-version: ${{needs.build-info.outputs.defaultPythonVersion}}
-      - name: "Cache pre-commit env"
-        uses: actions/cache@v2
-        with:
-          path: ~/.cache/pre-commit
-          key: pre-commit-pylint-${{ hashFiles('.pre-commit-config.yaml') }}
-          restore-keys: pre-commit-pylint-
       - name: "Free space"
         run: ./scripts/ci/tools/ci_free_space_on_ci.sh
       - name: "Prepare CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}"
         run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
+      - name: "Get Python version"
+        run: "echo \"::set-output name=host-python-version::$(python -c
+          'import platform; print(platform.python_version())')\""
+        id: host-python-version
+      - name: "Cache pre-commit local-installation"
+        uses: actions/cache@v2
+        with:
+          path: ~/.local
+          key: "pre-commit-local-installation-${{steps.host-python-version.outputs.host-python-version}}-\
+${{ hashFiles('setup.py', 'setup.cfg') }}"
+          restore-keys: "\
+pre-commit-local-installation-${{steps.host-python-version.outputs.host-python-version}}-"
+      - name: "Cache pre-commit envs - pylint"
+        uses: actions/cache@v2
+        with:
+          path: ~/.cache/pre-commit
+          key: "pre-commit-pylint-${{steps.host-python-version.outputs.host-python-version}}-\
+${{ hashFiles('.pre-commit-config.yaml') }}"
+          restore-keys: pre-commit-pylint-${{steps.host-python-version.outputs.host-python-version}}
       - name: "Static checks: pylint"
         run: ./scripts/ci/static_checks/run_static_checks.sh pylint
         env:
@@ -345,26 +459,24 @@ jobs:
   docs:
     timeout-minutes: 45
     name: "Build docs"
-    runs-on: ubuntu-20.04
+    runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
     needs: [build-info, ci-images]
     if: needs.build-info.outputs.docs-build == 'true'
+    env:
+      RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
+      GITHUB_REGISTRY: ${{ needs.ci-images.outputs.githubRegistry }}
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
         with:
           persist-credentials: false
           submodules: recursive
+      - name: "Free space"
+        run: ./scripts/ci/tools/ci_free_space_on_ci.sh
       - name: "Prepare CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}"
         run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
       - name: "Build docs"
         run: ./scripts/ci/docs/ci_docs.sh
-      - name: "Upload documentation"
-        uses: actions/upload-artifact@v2
-        if: always() && github.event_name == 'pull_request'
-        with:
-          name: airflow-documentation
-          path: "./files/documentation"
-          retention-days: 7
       - name: Configure AWS credentials
         uses: ./.github/actions/configure-aws-credentials
         if: >
@@ -378,31 +490,27 @@ jobs:
         if: >
           github.ref == 'refs/heads/master' && github.repository == 'apache/airflow' &&
           github.event_name == 'push'
-        run: aws s3 sync --delete ./files/documentation s3://apache-airflow-docs
+        run: aws s3 sync --delete ./docs/_build s3://apache-airflow-docs

-  prepare-backport-provider-packages:
-    timeout-minutes: 30
-    name: "Backport packages: ${{ matrix.package-format }}"
-    runs-on: ubuntu-20.04
+  prepare-test-provider-packages-wheel:
+    timeout-minutes: 40
+    name: "Build and test provider packages wheel"
+    runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
     needs: [build-info, ci-images]
-    strategy:
-      matrix:
-        package-format: ['wheel', 'sdist']
     env:
-      # In this case we want to install airflow from the latest released 1.10 version
-      # all provider packages are installed from wheels or .tar.gz files
-      INSTALL_AIRFLOW_VERSION: "1.10.14"
+      RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
       AIRFLOW_EXTRAS: "all"
       PYTHON_MAJOR_MINOR_VERSION: ${{needs.build-info.outputs.defaultPythonVersion}}
-      BACKPORT_PACKAGES: "true"
       VERSION_SUFFIX_FOR_PYPI: "dev"
-      PACKAGE_FORMAT: ${{ matrix.package-format }}
-    if: needs.build-info.outputs.image-build == 'true'
+      VERSION_SUFFIX_FOR_SVN: "dev"
+      GITHUB_REGISTRY: ${{ needs.ci-images.outputs.githubRegistry }}
+    if: needs.build-info.outputs.image-build == 'true' && needs.build-info.outputs.default-branch == 'master'
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
         with:
           persist-credentials: false
+        if: needs.build-info.outputs.default-branch == 'master'
       - name: "Setup python"
         uses: actions/setup-python@v2
         with:
@@ -411,100 +519,46 @@ jobs:
         run: ./scripts/ci/tools/ci_free_space_on_ci.sh
       - name: "Prepare CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}"
         run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
-      - name: "Prepare provider readmes"
-        run: ./scripts/ci/provider_packages/ci_prepare_provider_readmes.sh
-      - name: "Prepare provider packages: ${{ matrix.package-format }}"
+      - name: "Prepare provider documentation"
+        run: ./scripts/ci/provider_packages/ci_prepare_provider_documentation.sh
+      - name: "Prepare provider packages: wheel"
         run: ./scripts/ci/provider_packages/ci_prepare_provider_packages.sh
-      - name: "Install and test provider packages and airflow via ${{ matrix.package-format }} files"
-        run: ./scripts/ci/provider_packages/ci_install_and_test_provider_packages.sh
         env:
-          INSTALL_PROVIDERS_FROM_SOURCES: "false"
-      - name: "Upload package artifacts"
-        uses: actions/upload-artifact@v2
-        if: always()
-        with:
-          name: airflow-backport-packages
-          path: "./dist/apache*"
-          retention-days: 7
-      - name: "Upload readme artifacts"
-        uses: actions/upload-artifact@v2
-        if: always() && matrix.package-format == 'wheel'
-        with:
-          name: airflow-backport-readmes
-          path: "./files/airflow-readme-*"
-          retention-days: 7
-
-  prepare-provider-packages:
-    timeout-minutes: 30
-    name: "Provider packages ${{ matrix.package-format }}"
-    runs-on: ubuntu-20.04
-    needs: [build-info, ci-images]
-    env:
-      INSTALL_AIRFLOW_VERSION: "${{ matrix.package-format }}"
-      AIRFLOW_EXTRAS: "all"
-      PYTHON_MAJOR_MINOR_VERSION: ${{needs.build-info.outputs.defaultPythonVersion}}
-      VERSION_SUFFIX_FOR_PYPI: "dev"
-      PACKAGE_FORMAT: ${{ matrix.package-format }}
-    strategy:
-      matrix:
-        package-format: ['wheel', 'sdist']
-    if: needs.build-info.outputs.image-build == 'true'
-    steps:
-      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
-        uses: actions/checkout@v2
-        with:
-          persist-credentials: false
-      - name: "Setup python"
-        uses: actions/setup-python@v2
-        with:
-          python-version: ${{ env.PYTHON_MAJOR_MINOR_VERSION }}
-      - name: "Free space"
-        run: ./scripts/ci/tools/ci_free_space_on_ci.sh
-      - name: "Prepare CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}"
-        run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
-      - name: "Prepare provider readmes"
-        run: ./scripts/ci/provider_packages/ci_prepare_provider_readmes.sh
-      - name: "Prepare provider packages: ${{ matrix.package-format }}"
-        run: ./scripts/ci/provider_packages/ci_prepare_provider_packages.sh
-      - name: "Prepare airflow packages: ${{ matrix.package-format }}"
+          PACKAGE_FORMAT: "wheel"
+      - name: "Prepare airflow package: wheel"
         run: ./scripts/ci/build_airflow/ci_build_airflow_package.sh
-      - name: "Install and test provider packages and airflow via ${{ matrix.package-format }} files"
+        env:
+          PACKAGE_FORMAT: "wheel"
+      - name: "Install and test provider packages and airflow via wheel files"
         run: ./scripts/ci/provider_packages/ci_install_and_test_provider_packages.sh
-      - name: "Upload package artifacts"
-        uses: actions/upload-artifact@v2
-        if: always()
-        with:
-          name: airflow-provider-packages
-          path: "./dist/apache-*"
-          retention-days: 7
-      - name: "Upload readme artifacts"
-        uses: actions/upload-artifact@v2
-        if: always() && matrix.package-format == 'wheel'
-        with:
-          name: airflow-provider-readmes
-          path: "./files/airflow-readme-*"
-          retention-days: 7
+        env:
+          INSTALL_AIRFLOW_VERSION: "wheel"
+          PACKAGE_FORMAT: "wheel"
+      - name: "Install and test provider packages and airflow on Airflow 2.0 files"
+        run: ./scripts/ci/provider_packages/ci_install_and_test_provider_packages.sh
+        env:
+          INSTALL_AIRFLOW_VERSION: "2.0.0"
+          PACKAGE_FORMAT: "wheel"

-  test-provider-packages-released-airflow:
-    timeout-minutes: 30
-    name: "Test Provider packages with 2.0.0 version ${{ matrix.package-format }}"
-    runs-on: ubuntu-20.04
+  prepare-test-provider-packages-sdist:
+    timeout-minutes: 40
+    name: "Build and test provider packages sdist"
+    runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
     needs: [build-info, ci-images]
     env:
-      INSTALL_AIRFLOW_VERSION: "2.0.0"
+      RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
       AIRFLOW_EXTRAS: "all"
       PYTHON_MAJOR_MINOR_VERSION: ${{needs.build-info.outputs.defaultPythonVersion}}
       VERSION_SUFFIX_FOR_PYPI: "dev"
-      PACKAGE_FORMAT: ${{ matrix.package-format }}
-    strategy:
-      matrix:
-        package-format: ['wheel', 'sdist']
-    if: needs.build-info.outputs.image-build == 'true'
+      VERSION_SUFFIX_FOR_SVN: "dev"
+      GITHUB_REGISTRY: ${{ needs.ci-images.outputs.githubRegistry }}
+    if: needs.build-info.outputs.image-build == 'true' && needs.build-info.outputs.default-branch == 'master'
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
         with:
           persist-credentials: false
+        if: needs.build-info.outputs.default-branch == 'master'
       - name: "Setup python"
         uses: actions/setup-python@v2
         with:
@@ -513,24 +567,32 @@ jobs:
         run: ./scripts/ci/tools/ci_free_space_on_ci.sh
       - name: "Prepare CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}"
         run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
-      - name: "Prepare provider readmes"
-        run: ./scripts/ci/provider_packages/ci_prepare_provider_readmes.sh
-      - name: "Prepare provider packages: ${{ matrix.package-format }}"
+      - name: "Prepare provider packages: sdist"
         run: ./scripts/ci/provider_packages/ci_prepare_provider_packages.sh
-      - name: "Install and test provider packages and airflow via ${{ matrix.package-format }} files"
+        env:
+          PACKAGE_FORMAT: "sdist"
+      - name: "Prepare airflow package: sdist"
+        run: ./scripts/ci/build_airflow/ci_build_airflow_package.sh
+        env:
+          PACKAGE_FORMAT: "sdist"
+      - name: "Install and test provider packages and airflow via sdist files"
         run: ./scripts/ci/provider_packages/ci_install_and_test_provider_packages.sh
+        env:
+          INSTALL_AIRFLOW_VERSION: "sdist"
+          PACKAGE_FORMAT: "sdist"

   tests-helm:
     timeout-minutes: 20
     name: "Python unit tests for helm chart"
-    runs-on: ubuntu-20.04
+    runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
     needs: [build-info, ci-images]
     env:
-      MOUNT_LOCAL_SOURCES: "true"
-      RUN_TESTS: true
+      RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
+      MOUNT_SELECTED_LOCAL_SOURCES: "true"
       TEST_TYPES: "Helm"
       BACKEND: "sqlite"
       PYTHON_MAJOR_MINOR_VERSION: ${{needs.build-info.outputs.defaultPythonVersion}}
+      GITHUB_REGISTRY: ${{ needs.ci-images.outputs.githubRegistry }}
     if: >
       needs.build-info.outputs.needs-helm-tests == 'true' &&
       (github.repository == 'apache/airflow' || github.event_name != 'schedule')
@@ -568,7 +630,7 @@ jobs:
         with:
           name: >
             coverage-helm
-          path: "./files/coverage.xml"
+          path: "./files/coverage*.xml"
           retention-days: 7

   tests-postgres:
@@ -576,7 +638,7 @@
     name: >
       Postgres${{matrix.postgres-version}},Py${{matrix.python-version}}:
       ${{needs.build-info.outputs.testTypes}}
-    runs-on: ubuntu-20.04
+    runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
     needs: [build-info, ci-images]
     strategy:
       matrix:
@@ -585,12 +647,12 @@
         exclude: ${{ fromJson(needs.build-info.outputs.postgresExclude) }}
       fail-fast: false
     env:
+      RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
       BACKEND: postgres
       PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
       POSTGRES_VERSION: ${{ matrix.postgres-version }}
-      RUN_TESTS: true
       TEST_TYPES: "${{needs.build-info.outputs.testTypes}}"
-      TEST_TYPE: ""
+      GITHUB_REGISTRY: ${{ needs.ci-images.outputs.githubRegistry }}
     if: needs.build-info.outputs.run-tests == 'true'
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
@@ -626,14 +688,14 @@
         with:
           name: >
             coverage-postgres-${{matrix.python-version}}-${{matrix.postgres-version}}
-          path: "./files/coverage.xml"
+          path: "./files/coverage*.xml"
           retention-days: 7

   tests-mysql:
     timeout-minutes: 130
     name: >
       MySQL${{matrix.mysql-version}}, Py${{matrix.python-version}}: ${{needs.build-info.outputs.testTypes}}
-    runs-on: ubuntu-20.04
+    runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
     needs: [build-info, ci-images]
     strategy:
       matrix:
@@ -642,12 +704,12 @@
         exclude: ${{ fromJson(needs.build-info.outputs.mysqlExclude) }}
       fail-fast: false
     env:
+      RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
       BACKEND: mysql
       PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
       MYSQL_VERSION: ${{ matrix.mysql-version }}
-      RUN_TESTS: true
       TEST_TYPES: "${{needs.build-info.outputs.testTypes}}"
-      TEST_TYPE: ""
+      GITHUB_REGISTRY: ${{ needs.ci-images.outputs.githubRegistry }}
     if: needs.build-info.outputs.run-tests == 'true'
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
@@ -682,14 +744,14 @@
         uses: actions/upload-artifact@v2
         with:
           name: coverage-mysql-${{matrix.python-version}}-${{matrix.mysql-version}}
-          path: "./files/coverage.xml"
+          path: "./files/coverage*.xml"
           retention-days: 7

   tests-sqlite:
     timeout-minutes: 130
     name: >
       Sqlite Py${{matrix.python-version}}: ${{needs.build-info.outputs.testTypes}}
-    runs-on: ubuntu-20.04
+    runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
     needs: [build-info, ci-images]
     strategy:
       matrix:
@@ -697,11 +759,11 @@
         exclude: ${{ fromJson(needs.build-info.outputs.sqliteExclude) }}
       fail-fast: false
     env:
+      RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
       BACKEND: sqlite
       PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
-      RUN_TESTS: true
       TEST_TYPES: "${{needs.build-info.outputs.testTypes}}"
-      TEST_TYPE: ""
+      GITHUB_REGISTRY: ${{ needs.ci-images.outputs.githubRegistry }}
     if: needs.build-info.outputs.run-tests == 'true'
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
@@ -736,31 +798,24 @@
         uses: actions/upload-artifact@v2
         with:
           name: coverage-sqlite-${{matrix.python-version}}
-          path: ./files/coverage.xml
+          path: ./files/coverage*.xml
           retention-days: 7

   tests-quarantined:
     timeout-minutes: 60
     name: "Quarantined tests"
-    runs-on: ubuntu-20.04
+    runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
     continue-on-error: true
     needs: [build-info, ci-images]
-    strategy:
-      matrix:
-        include:
-          - backend: mysql
-          - backend: postgres
-          - backend: sqlite
     env:
-      BACKEND: ${{ matrix.backend }}
+      RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
       PYTHON_MAJOR_MINOR_VERSION: ${{ needs.build-info.outputs.defaultPythonVersion }}
       MYSQL_VERSION: ${{needs.build-info.outputs.defaultMySQLVersion}}
       POSTGRES_VERSION: ${{needs.build-info.outputs.defaultPostgresVersion}}
-      RUN_TESTS: true
       TEST_TYPES: "Quarantined"
-      TEST_TYPE: ""
       NUM_RUNS: 10
       GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+      GITHUB_REGISTRY: ${{ needs.ci-images.outputs.githubRegistry }}
     if: needs.build-info.outputs.run-tests == 'true'
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
@@ -788,13 +843,13 @@
       - name: "Prepare CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}"
         run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
       - name: "Tests: Quarantined"
-        run: ./scripts/ci/testing/ci_run_airflow_testing.sh
+        run: ./scripts/ci/testing/ci_run_quarantined_tests.sh
       - name: "Upload Quarantine test results"
         uses: actions/upload-artifact@v2
         if: always()
         with:
           name: quarantined_tests
-          path: "files/test_result.xml"
+          path: "files/test_result-*.xml"
           retention-days: 7
       - name: "Upload airflow logs"
         uses: actions/upload-artifact@v2
@@ -814,20 +869,23 @@
         uses: actions/upload-artifact@v2
         with:
           name: coverage-quarantined-${{ matrix.backend }}
-          path: "./files/coverage.xml"
+          path: "./files/coverage*.xml"
           retention-days: 7

   upload-coverage:
-    timeout-minutes: 5
+    timeout-minutes: 15
     name: "Upload coverage"
-    runs-on: ubuntu-20.04
+    runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
     continue-on-error: true
     needs:
+      - build-info
      - tests-kubernetes
      - tests-postgres
      - tests-sqlite
      - tests-mysql
      - tests-quarantined
+    env:
+      RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
@@ -848,13 +906,16 @@
   prod-images:
     timeout-minutes: 120
     name: "Wait for PROD images"
-    runs-on: ubuntu-20.04
+    runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
     needs: [build-info, ci-images]
+    if: needs.build-info.outputs.image-build == 'true'
     env:
+      RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
       BACKEND: sqlite
       PYTHON_MAJOR_MINOR_VERSION: ${{ needs.build-info.outputs.defaultPythonVersion }}
       UPGRADE_TO_NEWER_DEPENDENCIES: ${{ needs.build-info.outputs.upgradeToNewerDependencies }}
-    if: needs.build-info.outputs.image-build == 'true'
+    outputs:
+      githubRegistry: ${{ steps.wait-for-images.outputs.githubRegistry }}
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
@@ -868,62 +929,42 @@
         if: needs.build-info.outputs.waitForImage == 'true'
       - name: "Free space"
         run: ./scripts/ci/tools/ci_free_space_on_ci.sh
-        if: needs.build-info.outputs.waitForImage == 'true'
+        if: |
+          needs.build-info.outputs.waitForImage == 'true'
       - name: >
           Wait for PROD images
           ${{ needs.build-info.outputs.pythonVersions }}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}
+        # We wait for the images to be available either from the build-ci-image step or from
+        # "build-images-workflow-run.yml' run as pull_request_target.
+        # We are utilising single job to wait for all images because this job merely waits
+        # for the images to be available. The test jobs wait for it to complete!
+        # The job will set the output "githubRegistry" - result of auto-detecting which registry has
+        # been used by checking where the image can be downloaded from.
+        #
+        id: wait-for-images
         env:
           CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING: >
             ${{needs.build-info.outputs.pythonVersionsListAsString}}
-        run: ./scripts/ci/images/ci_wait_for_all_prod_images.sh
-        if: needs.build-info.outputs.waitForImage == 'true'
-
-  verify-prod-images:
-    timeout-minutes: 20
-    name: "Verify Prod Image Py${{matrix.python-version}}"
-    runs-on: ubuntu-20.04
-    needs: [build-info, prod-images]
-    strategy:
-      matrix:
-        python-version: ${{ fromJson(needs.build-info.outputs.pythonVersions) }}
-    env:
-      BACKEND: sqlite
-    steps:
-      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
-        uses: actions/checkout@v2
-        if: needs.build-info.outputs.waitForImage == 'true'
-        with:
-          persist-credentials: false
-      - name: "Free space"
-        run: ./scripts/ci/tools/ci_free_space_on_ci.sh
-        if: needs.build-info.outputs.waitForImage == 'true'
-      - name: "Prepare PROD Image"
-        run: ./scripts/ci/images/ci_prepare_prod_image_on_ci.sh
-      - name: "Verify PROD image Py${{matrix.python-version}}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}"
-        run: ./scripts/ci/images/ci_verify_prod_image.sh
-        if: needs.build-info.outputs.waitForImage == 'true'
+        run: ./scripts/ci/images/ci_wait_for_and_verify_all_prod_images.sh

   tests-kubernetes:
     timeout-minutes: 50
-    name: K8s ${{matrix.python-version}} ${{matrix.kubernetes-version}} ${{matrix.kubernetes-mode}}
-    runs-on: ubuntu-20.04
+    name: K8s tests
+    runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
     needs: [build-info, prod-images]
-    strategy:
-      matrix:
-        python-version: ${{ fromJson(needs.build-info.outputs.pythonVersions) }}
-        kubernetes-version: ${{ fromJson(needs.build-info.outputs.kubernetesVersions) }}
-        kubernetes-mode: ${{ fromJson(needs.build-info.outputs.kubernetesModes) }}
-        exclude: ${{ fromJson(needs.build-info.outputs.kubernetesExclude) }}
-      fail-fast: false
     env:
+      RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
       BACKEND: postgres
       RUN_TESTS: "true"
       RUNTIME: "kubernetes"
-      PYTHON_MAJOR_MINOR_VERSION: "${{ matrix.python-version }}"
-      KUBERNETES_MODE: "${{ matrix.kubernetes-mode }}"
-      KUBERNETES_VERSION: "${{ matrix.kubernetes-version }}"
+      KUBERNETES_MODE: "image"
       KIND_VERSION: "${{ needs.build-info.outputs.defaultKindVersion }}"
       HELM_VERSION: "${{ needs.build-info.outputs.defaultHelmVersion }}"
+      GITHUB_REGISTRY: ${{ needs.prod-images.outputs.githubRegistry }}
+      CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING: >
+        ${{needs.build-info.outputs.pythonVersionsListAsString}}
+      CURRENT_KUBERNETES_VERSIONS_AS_STRING: >
+        ${{needs.build-info.outputs.kubernetesVersionsListAsString}}
     if: needs.build-info.outputs.run-kubernetes-tests == 'true'
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
@@ -936,51 +977,49 @@
           python-version: ${{ needs.build-info.outputs.defaultPythonVersion }}
       - name: "Free space"
         run: ./scripts/ci/tools/ci_free_space_on_ci.sh
-      - name: "Prepare PROD Image"
-        run: ./scripts/ci/images/ci_prepare_prod_image_on_ci.sh
-      - name: "Setup cluster and deploy Airflow"
-        id: setp-cluster-deploy-app
-        run: ./scripts/ci/kubernetes/ci_setup_cluster_and_deploy_airflow_to_kubernetes.sh
-        env:
-          # We have the right image pulled already by the previous step
-          SKIP_BUILDING_PROD_IMAGE: "true"
+      - name: "Get all PROD images"
+        run: ./scripts/ci/images/ci_wait_for_and_verify_all_prod_images.sh
       - name: "Cache virtualenv for kubernetes testing"
         uses: actions/cache@v2
         with:
-          path: ".build/.kubernetes_venv_ ${{ needs.build-info.outputs.defaultPythonVersion }}"
+          path: ".build/.kubernetes_venv"
           key: "kubernetes-${{ needs.build-info.outputs.defaultPythonVersion }}\
+-${{needs.build-info.outputs.kubernetesVersionsListAsString}}
+-${{needs.build-info.outputs.pythonVersionsListAsString}}
 -${{ hashFiles('setup.py','setup.cfg') }}"
-          restore-keys: "kubernetes-${{ needs.build-info.outputs.defaultPythonVersion }}-"
+          restore-keys: "kubernetes-${{ needs.build-info.outputs.defaultPythonVersion }}-\
+-${{needs.build-info.outputs.kubernetesVersionsListAsString}}
+-${{needs.build-info.outputs.pythonVersionsListAsString}}"
       - name: "Cache bin folder with tools for kubernetes testing"
         uses: actions/cache@v2
         with:
-          path: ".build/bin"
-          key: "bin-${{ matrix.kubernetes-version }}\
+          path: ".build/kubernetes-bin"
+          key: "kubernetes-binaries
 -${{ needs.build-info.outputs.defaultKindVersion }}\
 -${{ needs.build-info.outputs.defaultHelmVersion }}"
-          restore-keys: "bin-${{ matrix.kubernetes-version }}"
+          restore-keys: "kubernetes-binaries"
       - name: "Kubernetes Tests"
-        run: ./scripts/ci/kubernetes/ci_run_kubernetes_tests.sh
+        run: ./scripts/ci/kubernetes/ci_setup_clusters_and_run_kubernetes_tests_in_parallel.sh
       - name: "Upload KinD logs"
        uses: actions/upload-artifact@v2
        if: failure()
        with:
          name: >
-            kind-logs-${{matrix.kubernetes-mode}}-${{matrix.python-version}}-${{matrix.kubernetes-version}}
+            kind-logs-
          path: /tmp/kind_logs_*
          retention-days: 7
       - name: "Upload artifact for coverage"
         uses: actions/upload-artifact@v2
         with:
           name: >
-            coverage-k8s-${{matrix.kubernetes-mode}}-${{matrix.python-version}}-${{matrix.kubernetes-version}}
-          path: "./files/coverage.xml"
+            coverage-k8s-
+          path: "./files/coverage*.xml"
           retention-days: 7

   push-prod-images-to-github-registry:
     timeout-minutes: 10
     name: "Push PROD images as cache to GitHub Registry"
-    runs-on: ubuntu-20.04
+    runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
     needs:
       - build-info
       - static-checks
@@ -989,11 +1028,7 @@
       - tests-postgres
       - tests-mysql
       - tests-kubernetes
-      - prepare-backport-provider-packages
-      - prepare-provider-packages
-      - test-provider-packages-released-airflow
       - prod-images
-      - verify-prod-images
       - docs
     if: >
       (github.ref == 'refs/heads/master' || github.ref == 'refs/heads/v1-10-test' ||
@@ -1003,8 +1038,10 @@
       matrix:
         python-version: ${{ fromJson(needs.build-info.outputs.pythonVersions) }}
     env:
+      RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
       PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
       GITHUB_REGISTRY_PUSH_IMAGE_TAG: "latest"
+      GITHUB_REGISTRY: ${{ needs.prod-images.outputs.githubRegistry }}
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
@@ -1016,6 +1053,16 @@
           python-version: ${{ env.PYTHON_MAJOR_MINOR_VERSION }}
       - name: "Free space"
         run: ./scripts/ci/tools/ci_free_space_on_ci.sh
+      - name: Set push-python-image
+        id: push-python-image
+        run: |
+          if [[ "${REF}" == 'refs/head/master' || "${REF}" == 'refs/head/main' ]]; then
+            echo "::set-output name=wanted::true"
+          else
+            echo "::set-output name=wanted::false"
+          fi
+        env:
+          REF: ${{ github.ref }}
       - name: "Prepare PROD image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}"
         run: ./scripts/ci/images/ci_prepare_prod_image_on_ci.sh
@@ -1023,13 +1070,16 @@
           # Since we are going to push both final image and build image segment, we need to pull the
           # build image, in case we are pulling from registry rather than building.
           WAIT_FOR_PROD_BUILD_IMAGE: "true"
+          WAIT_FOR_PYTHON_BASE_IMAGE: ${{ steps.push-python-image.outputs.wanted}}
       - name: "Push PROD images ${{ matrix.python-version }}:${{ env.GITHUB_REGISTRY_PUSH_IMAGE_TAG }}"
         run: ./scripts/ci/images/ci_push_production_images.sh
+        env:
+          PUSH_PYTHON_BASE_IMAGE: ${{ steps.push-python-image.outputs.wanted}}

   push-ci-images-to-github-registry:
     timeout-minutes: 10
     name: "Push CI images as cache to GitHub Registry"
-    runs-on: ubuntu-20.04
+    runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
     needs:
       - build-info
       - static-checks
@@ -1038,9 +1088,7 @@
       - tests-postgres
       - tests-mysql
       - tests-kubernetes
-      - prepare-backport-provider-packages
       - ci-images
-      - verify-ci-images
       - docs
     if: >
       (github.ref == 'refs/heads/master' || github.ref == 'refs/heads/v1-10-test' ||
@@ -1050,8 +1098,10 @@
       matrix:
         python-version: ${{ fromJson(needs.build-info.outputs.pythonVersions) }}
     env:
+      RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
       PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
       GITHUB_REGISTRY_PUSH_IMAGE_TAG: "latest"
+      GITHUB_REGISTRY: ${{ needs.ci-images.outputs.githubRegistry }}
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
@@ -1071,16 +1121,23 @@
   constraints:
     timeout-minutes: 10
     name: "Constraints"
-    runs-on: ubuntu-20.04
-    strategy:
-      matrix:
-        python-version: ${{ fromJson(needs.build-info.outputs.pythonVersions) }}
-      fail-fast: false
+    runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
     needs:
       - build-info
       - ci-images
+      - prod-images
+      - static-checks
+      - static-checks-pylint
+      - tests-sqlite
+      - tests-mysql
+      - tests-postgres
+      - tests-kubernetes
     env:
+      RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
       PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
+      GITHUB_REGISTRY: ${{ needs.ci-images.outputs.githubRegistry }}
+      CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING: ${{needs.build-info.outputs.pythonVersionsListAsString}}
+    # Only run it for direct pushes
     if: >
       github.ref == 'refs/heads/master' || github.ref == 'refs/heads/v1-10-test' ||
       github.ref == 'refs/heads/v2-0-test'
@@ -1089,47 +1146,29 @@
         uses: actions/checkout@v2
         with:
           persist-credentials: false
+          submodules: recursive
       - name: "Setup python"
         uses: actions/setup-python@v2
         with:
           python-version: ${{ env.PYTHON_MAJOR_MINOR_VERSION }}
       - name: "Free space"
         run: ./scripts/ci/tools/ci_free_space_on_ci.sh
-      - name: "Prepare CI image ${{env.PYTHON_MAJOR_MINOR_VERSION}}:${{ github.sha }}"
-        run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
-      - name: "Generate constraints"
-        run: ./scripts/ci/constraints/ci_generate_constraints.sh
-      - name: "Upload constraint artifacts"
-        uses: actions/upload-artifact@v2
-        with:
-          name: 'constraints-${{matrix.python-version}}'
-          path: './files/constraints-${{matrix.python-version}}/constraints-${{matrix.python-version}}.txt'
-          retention-days: 7
-
-  constraints-push:
-    timeout-minutes: 10
-    name: "Constraints push"
-    runs-on: ubuntu-20.04
-    needs:
-      - build-info
-      - constraints
-      - verify-ci-images
-      - verify-prod-images
-      - static-checks
-      - static-checks-pylint
-      - tests-sqlite
-      - tests-mysql
-      - tests-postgres
-      - tests-kubernetes
-    if: >
-      github.ref == 'refs/heads/master' || github.ref == 'refs/heads/v1-10-test' ||
-      github.ref == 'refs/heads/v2-0-test'
-    steps:
-      - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
-        uses: actions/checkout@v2
-        with:
-          persist-credentials: false
-          submodules: recursive
+      - name: >
+          Wait for CI images
+          ${{ needs.build-info.outputs.pythonVersions }}:${{ env.GITHUB_REGISTRY_PULL_IMAGE_TAG }}
+        run: ./scripts/ci/images/ci_wait_for_and_verify_all_ci_images.sh
+      - name: "Generate constraints with PyPI providers"
+        run: ./scripts/ci/constraints/ci_generate_all_constraints.sh
+        env:
+          GENERATE_CONSTRAINTS_MODE: "pypi-providers"
+      - name: "Generate constraints with source providers"
+        run: ./scripts/ci/constraints/ci_generate_all_constraints.sh
+        env:
+          GENERATE_CONSTRAINTS_MODE: "source-providers"
+      - name: "Generate constraints without providers"
+        run: ./scripts/ci/constraints/ci_generate_all_constraints.sh
+        env:
+          GENERATE_CONSTRAINTS_MODE: "no-providers"
       - name: "Set constraints branch name"
         id: constraints-branch
         run: ./scripts/ci/constraints/ci_branch_constraints.sh
@@ -1139,10 +1178,6 @@
           path: "repo"
           ref: ${{ steps.constraints-branch.outputs.branch }}
           persist-credentials: false
-      - name: "Get all artifacts (constraints)"
-        uses: actions/download-artifact@v2
-        with:
-          path: 'artifacts'
       - name: "Commit changed constraint files for ${{needs.build-info.outputs.pythonVersions}}"
         run: ./scripts/ci/constraints/ci_commit_constraints.sh
       - name: "Push changes"
@@ -1155,18 +1190,22 @@
   tag-repo-nightly:
     timeout-minutes: 10
     name: "Tag repo nightly"
-    runs-on: ubuntu-20.04
+    runs-on: ${{ fromJson(needs.build-info.outputs.runsOn) }}
     needs:
       - docs
+      - build-info
       - static-checks
       - static-checks-pylint
       - tests-sqlite
       - tests-postgres
       - tests-mysql
       - tests-kubernetes
-      - constraints-push
-      - prepare-provider-packages
+      - constraints
+      - prepare-test-provider-packages-wheel
+      - prepare-test-provider-packages-sdist
     if: github.event_name == 'schedule' && github.repository == 'apache/airflow'
+    env:
+      RUNS_ON: ${{ fromJson(needs.build-info.outputs.runsOn) }}
     steps:
       - name: "Checkout ${{ github.ref }} ( ${{ github.sha }} )"
         uses: actions/checkout@v2
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index 8bdd809b5d19e..0c35b14855615 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -20,7 +20,7 @@ name: "CodeQL"

 on:  # yamllint disable-line rule:truthy
   push:
-    branches: [master]
+    branches: [master, main]
   schedule:
     - cron: '0 2 * * *'

@@ -66,22 +66,11 @@ jobs:
     - name: Checkout repository
       uses: actions/checkout@v2
       with:
-        # We must fetch at least the immediate parents so that if this is
-        # a pull request then we can checkout the head.
-        fetch-depth: 2
         persist-credentials: false
       if: |
         matrix.language == 'python' && needs.selective-checks.outputs.needs-python-scans == 'true' ||
         matrix.language == 'javascript' && needs.selective-checks.outputs.needs-javascript-scans == 'true'

-    # If this run was triggered by a pull request event, then checkout
-    # the head of the pull request instead of the merge commit.
-    - run: git checkout HEAD^2
-      if: |
-        github.event_name == 'pull_request' &&
-        (matrix.language == 'python' && needs.selective-checks.outputs.needs-python-scans == 'true' ||
-        matrix.language == 'javascript' && needs.selective-checks.outputs.needs-javascript-scans == 'true')
-
     # Initializes the CodeQL tools for scanning.
     - name: Initialize CodeQL
       uses: github/codeql-action/init@v1
diff --git a/.github/workflows/label_when_reviewed_workflow_run.yml b/.github/workflows/label_when_reviewed_workflow_run.yml
index 1ed50dd95f10e..59bde48284a00 100644
--- a/.github/workflows/label_when_reviewed_workflow_run.yml
+++ b/.github/workflows/label_when_reviewed_workflow_run.yml
@@ -102,6 +102,7 @@ jobs:
           token: ${{ secrets.GITHUB_TOKEN }}
           label: 'full tests needed'
           require_committers_approval: 'true'
+          remove_label_when_approval_missing: 'false'
           pullRequestNumber: ${{ steps.source-run-info.outputs.pullRequestNumber }}
           comment: >
             The PR most likely needs to run full matrix of tests because it modifies parts of the core
diff --git a/.github/workflows/scheduled_quarantined.yml b/.github/workflows/scheduled_quarantined.yml
index ce2630d1d338b..2a5c4a24e20fb 100644
--- a/.github/workflows/scheduled_quarantined.yml
+++ b/.github/workflows/scheduled_quarantined.yml
@@ -23,9 +23,9 @@ on:  # yamllint disable-line rule:truthy
     - cron: '12 */6 * * *'

 env:
-  MOUNT_LOCAL_SOURCES: "false"
+  MOUNT_SELECTED_LOCAL_SOURCES: "false"
   FORCE_ANSWER_TO_QUESTIONS: "yes"
-  FORCE_PULL_IMAGES: "true"
+  FORCE_PULL_IMAGES: "false"
   CHECK_IMAGE_FOR_REBUILD: "true"
   SKIP_CHECK_REMOTE_IMAGE: "true"
   DB_RESET: "true"
@@ -33,8 +33,6 @@ env:
   UPGRADE_TO_NEWER_DEPENDENCIES: false
   PYTHON_MAJOR_MINOR_VERSION: 3.6
   USE_GITHUB_REGISTRY: "true"
-  # Might be either 'ghcr.io' or 'docker.pkg.github.com'
-  GITHUB_REGISTRY: "docker.pkg.github.com"
   GITHUB_REPOSITORY: ${{ github.repository }}
   GITHUB_USERNAME: ${{ github.actor }}
   # This token is WRITE one - schedule type of events always have the WRITE token
@@ -46,6 +44,7 @@ env:
   GITHUB_REGISTRY_PULL_IMAGE_TAG: "latest"
   GITHUB_REGISTRY_PUSH_IMAGE_TAG: "latest"
   GITHUB_REGISTRY_WAIT_FOR_IMAGE: "false"
+  GITHUB_REGISTRY: ${{ secrets.OVERRIDE_GITHUB_REGISTRY }}

 jobs:

@@ -62,7 +61,7 @@ jobs:
       PYTHON_MAJOR_MINOR_VERSION: ${{ matrix.python-version }}
       POSTGRES_VERSION: ${{ matrix.postgres-version }}
       RUN_TESTS: "true"
-      TEST_TYPE: Quarantined
+      FORCE_TEST_TYPE: Quarantined
       NUM_RUNS: 20
       GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
     steps:
@@ -73,7 +72,7 @@ jobs:
        with:
          python-version: '3.7'
      - name: "Set issue id for master"
-        if: github.ref == 'refs/heads/master'
+        if: github.ref == 'refs/heads/master' || github.ref == 'refs/heads/main'
        run: |
          echo "ISSUE_ID=10118" >> $GITHUB_ENV
      - name: "Set issue id for v1-10-stable"
@@ -88,6 +87,8 @@ jobs:
        run: ./scripts/ci/tools/ci_free_space_on_ci.sh
      - name: "Build CI image ${{ matrix.python-version }}"
        run: ./scripts/ci/images/ci_prepare_ci_image_on_ci.sh
+        env:
+          GITHUB_REGISTRY: ${{ steps.determine-github-registry.outputs.githubRegistry }}
      - name: "Tests"
        run: ./scripts/ci/testing/ci_run_airflow_testing.sh
      - uses: actions/upload-artifact@v2
@@ -95,7 +96,7 @@ jobs:
        if: always()
        with:
          name: 'quarantined_tests'
-          path: 'files/test_result.xml'
+          path: 'files/test_result-*.xml'
          retention-days: 7
      - uses: actions/upload-artifact@v2
        name: Upload airflow logs
diff --git a/.gitignore b/.gitignore
index 67dfd80baba8b..0454790d2b6da 100644
--- a/.gitignore
+++ b/.gitignore
@@ -64,7 +64,7 @@ htmlcov/
 .coverage.*
 .cache
 nosetests.xml
-coverage.xml
+coverage*.xml
 *,cover
 .hypothesis/
 .pytest_cache
@@ -215,3 +215,8 @@ Chart.lock
 pip-wheel-metadata

 .pypirc
+/.docs-venv
+
+# Dev files
+/dev/packages.txt
+/dev/Dockerfile.pmc
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 6392499d1d3c3..e30b2f241a2e7 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -233,6 +233,7 @@ repos:
entry: "./scripts/ci/pre_commit/pre_commit_lint_dockerfile.sh" files: Dockerfile.*$ pass_filenames: true + require_serial: true - id: setup-order name: Check order of dependencies in setup.py and setup.cfg language: python @@ -407,6 +408,12 @@ repos: language: system files: ^.pre-commit-config.yaml$|^INTHEWILD.md$ require_serial: true + - id: sort-spelling-wordlist + name: Sort alphabetically and uniquify spelling_wordlist.txt + entry: ./scripts/ci/pre_commit/pre_commit_sort_spelling_wordlist.sh + language: system + files: ^.pre-commit-config.yaml$|^docs/spelling_wordlist.txt$ + require_serial: true - id: helm-lint name: Lint Helm Chart entry: ./scripts/ci/pre_commit/pre_commit_helm_lint.sh @@ -566,26 +573,33 @@ repos: language: system entry: "./scripts/ci/pre_commit/pre_commit_mypy.sh" files: ^chart/.*\.py$ - require_serial: true + require_serial: false - id: mypy name: Run mypy for /docs/ folder language: system entry: "./scripts/ci/pre_commit/pre_commit_mypy.sh" files: ^docs/.*\.py$ exclude: rtd-deprecation - require_serial: true + require_serial: false - id: pylint - name: Run pylint + name: Run pylint for main code language: system entry: "./scripts/ci/pre_commit/pre_commit_pylint.sh" files: \.py$ - exclude: ^scripts/.*\.py$|^dev|^provider_packages|^chart + exclude: ^scripts/.*\.py$|^dev|^provider_packages|^chart|^tests|^kubernetes_tests + pass_filenames: true + require_serial: true + - id: pylint + name: Run pylint for tests + language: system + entry: "env PYLINTRC=pylintrc-tests ./scripts/ci/pre_commit/pre_commit_pylint.sh" + files: ^tests/.*\.py$ pass_filenames: true require_serial: true - id: pylint name: Run pylint for helm chart tests language: system - entry: "./scripts/ci/pre_commit/pre_commit_pylint.sh" + entry: "env PYLINTRC=pylintrc-tests ./scripts/ci/pre_commit/pre_commit_pylint.sh" files: ^chart/.*\.py$ pass_filenames: true require_serial: true diff --git a/.rat-excludes b/.rat-excludes index 148f4743f9580..125ed46ca1642 100644 --- a/.rat-excludes +++ b/.rat-excludes @@ -71,14 +71,13 @@ node_modules/* coverage/* git_version flake8_diff.sh -coverage.xml +coverage*.xml _sources/* rat-results.txt apache-airflow-.*\+source.tar.gz.* apache-airflow-.*\+bin.tar.gz.* PULL_REQUEST_TEMPLATE.md -BACKPORT_PROVIDER_CHANGES*.md PROVIDER_CHANGES*.md manifests/* redirects.txt diff --git a/BREEZE.rst b/BREEZE.rst index 964d4fca58f3e..66532bd6071ef 100644 --- a/BREEZE.rst +++ b/BREEZE.rst @@ -36,6 +36,8 @@ We called it *Airflow Breeze* as **It's a Breeze to contribute to Airflow**. The advantages and disadvantages of using the Breeze environment vs. other ways of testing Airflow are described in `CONTRIBUTING.rst `_. +All the output from the last ./breeze command is automatically logged to the ``logs/breeze.out`` file. + Watch the video below about Airflow Breeze. It explains the motivation for Breeze and screencasts all its uses. @@ -155,10 +157,29 @@ If you use zsh, run this command and re-login: echo 'export PATH="/usr/local/opt/gnu-getopt/bin:$PATH"' >> ~/.zprofile . ~/.zprofile + +Let's confirm that ``getopt`` and ``gstat`` utilities are successfully installed + +.. code-block:: bash + + $ getopt --version + getopt from util-linux * + $ gstat --version + stat (GNU coreutils) * + Copyright (C) 2020 Free Software Foundation, Inc. + License GPLv3+: GNU GPL version 3 or later . + This is free software: you are free to change and redistribute it. + There is NO WARRANTY, to the extent permitted by law. + + Written by Michael Meskes. 
+
+Resources required
+==================
+
 Memory
 ------
 
-Minimum 4GB RAM is required to run the full Breeze environment.
+Minimum 4GB RAM for Docker Engine is required to run the full Breeze environment.
 
 On macOS, 2GB of RAM are available for your Docker containers by default, but more memory is recommended
 (4GB should be comfortable). For details see
@@ -166,6 +187,18 @@ On Windows WSL 2 expect the Linux Distro and Docker containers to use 7 - 8 GB
 of RAM.
 
+Disk
+----
+
+Minimum 40GB free disk space is required for your Docker containers.
+
+On macOS, this might deteriorate over time so you might need to increase the disk allocated to Docker
+or run ``docker system prune`` periodically. For details see
+`Docker for Mac - Advanced tab `_.
+
+On WSL2 you might want to increase your Virtual Hard Disk by following:
+`Expanding the size of your WSL 2 Virtual Hard Disk `_
+
 Cleaning the environment
 ------------------------
 
@@ -175,7 +208,7 @@ them, you may end up with some unused image data.
 
 To clean up the Docker environment:
 
-1. Stop Breeze with ``./breeze stop``.
+1. Stop Breeze with ``./breeze stop`` (if Breeze is already running).
 
 2. Run the ``docker system prune`` command.
 
@@ -294,7 +327,7 @@ can check whether your problem is fixed.
 
 1. If you are on macOS, check if you have enough disk space for Docker.
 2. Restart Breeze with ``./breeze restart``.
-3. Delete the ``.build`` directory and run ``./breeze build-image --force-pull-images``.
+3. Delete the ``.build`` directory and run ``./breeze build-image``.
 4. Clean up Docker images via ``breeze cleanup-image`` command.
 5. Restart your Docker Engine and try again.
 6. Restart your machine and try again.
 
@@ -328,10 +361,10 @@ Managing CI environment:
 
   * Stop running interactive environment with ``breeze stop`` command
   * Restart running interactive environment with ``breeze restart`` command
   * Run test specified with ``breeze tests`` command
-  * Generate constraints with ``breeze generate-constraints`` command
+  * Generate constraints with ``breeze generate-constraints``
   * Execute arbitrary command in the test environment with ``breeze shell`` command
   * Execute arbitrary docker-compose command with ``breeze docker-compose`` command
-  * Push docker images with ``breeze push-image`` command (require committer's rights to push images)
+  * Push docker images with ``breeze push-image`` command (requires committer rights to push images)
 
 You can optionally reset the Airflow metadata database if specified as extra ``--db-reset`` flag and for
 CI image you can also start integrations (separate Docker images) if specified as extra ``--integration``
 flags. You can also
@@ -354,7 +387,7 @@ Managing Prod environment (with ``--production-image`` flag):
 
   * Restart running interactive environment with ``breeze restart`` command
   * Execute arbitrary command in the test environment with ``breeze shell`` command
   * Execute arbitrary docker-compose command with ``breeze docker-compose`` command
-  * Push docker images with ``breeze push-image`` command (require committer's rights to push images)
+  * Push docker images with ``breeze push-image`` command (requires committer rights to push images)
 
 You can optionally reset database if specified as extra ``--db-reset`` flag. You can also choose which
 backend database should be used with ``--backend`` flag and python version with ``--python`` flag.
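+
+For example, a typical invocation combining those flags might look like this (a sketch only -
+the flag values are illustrative and should be adjusted to your setup):
+
+.. code-block:: bash
+
+    # Enter the production-image environment with a Postgres backend,
+    # Python 3.8, and a reset of the metadata database
+    ./breeze --production-image --backend postgres --python 3.8 --db-reset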
@@ -543,9 +576,6 @@ There are several commands that you can run in Breeze to manage and build packages
 Preparing provider readme files is part of the release procedure by the release managers and it is
 described in detail in `dev `_.
 
-You can prepare provider packages - by default regular provider packages are prepared, but with
-``--backport`` flag you can prepare backport packages.
-
 The packages are prepared in ``dist`` folder. Note that this command cleans up the ``dist`` folder
 before running, so you should run it before generating airflow package below as it will be removed.
 
 The below example builds provider packages in the wheel format.
 
 .. code-block:: bash
 
     ./breeze prepare-provider-packages
 
 If you run this command without packages, you will prepare all packages; you can, however, specify
-providers that you would like to build. By default only ``wheel`` packages are prepared,
-but you can change it providing optional --package-format flag.
-
-
-.. code-block:: bash
-
-    ./breeze prepare-provider-packages --package-format=both google amazon
-
-You can also prepare backport provider packages, if you specify ``--backport`` flag. You can read more
-about backport packages in `dev `_
+providers that you would like to build. By default ``both`` types of packages are prepared
+(``wheel`` and ``sdist``), but you can change it by providing the optional ``--package-format`` flag.
 
 .. code-block:: bash
 
-    ./breeze prepare-provider-packages --backports --package-format=both google amazon
+    ./breeze prepare-provider-packages google amazon
 
 You can see all providers available by running this command:
 
@@ -586,11 +608,12 @@ You can also prepare airflow packages using breeze:
 
 This prepares airflow .whl package in the dist folder.
 
-Again, you can specify optional ``--package-format`` flag to build airflow packages.
+Again, you can specify the optional ``--package-format`` flag to build selected formats of airflow
+packages; the default is to build ``both`` types of packages: ``sdist`` and ``wheel``.
 
 .. code-block:: bash
 
-    ./breeze prepare-airflow-packages --package-format=bot
+    ./breeze prepare-airflow-packages --package-format=wheel
 
 
 Building Production images
@@ -785,31 +808,46 @@ Generating constraints
 ----------------------
 
 Whenever setup.py gets modified, the CI master job will re-generate constraint files. Those constraint
-files are stored in separated orphan branches: ``constraints-master``, ``constraints-2-0`` and ``constraints-1-10``.
-They are stored separately for each python version. Those are
-constraint files as described in detail in the
+files are stored in separated orphan branches: ``constraints-master``, ``constraints-2-0``
+and ``constraints-1-10``.
+
+Those are constraint files as described in detail in the
 ``_ contributing documentation.
 
-In case someone modifies setup.py, the ``CRON`` scheduled CI build automatically upgrades and
-pushes changed to the constraint files, however you can also perform test run of this locally using
-``generate-constraints`` command of Breeze.
+You can use the ``./breeze generate-constraints`` command to manually generate constraints for a single
+python version and a single constraint mode like this:
 
 .. code-block:: bash
 
-    ./breeze generate-constraints --python 3.6
+    ./breeze generate-constraints --generate-constraints-mode pypi-providers
 
-.. code-block:: bash
-
-    ./breeze generate-constraints --python 3.7
+Constraints are generated separately for each python version and there are separate constraints modes:
 
-.. code-block:: bash
+* "constraints" - those are constraints generated by matching the current airflow version from sources
+  and providers that are installed from PyPI. Those are the constraints used by users who want to
+  install airflow with pip. Use ``pypi-providers`` mode for that.
 
-    ./breeze generate-constraints --python 3.8
+* "constraints-source-providers" - those are constraints generated by using providers installed from
+  current sources. While adding new providers, their dependencies might change, so this set of providers
+  is the current set of the constraints for airflow and providers from the current master sources.
+  Those are used by the CI system to keep a "stable" set of constraints. Use
+  ``source-providers`` mode for that.
+
+* "constraints-no-providers" - those are constraints generated from only Apache Airflow, without any
+  providers. If you want to manage airflow separately and then add providers individually, you can
+  use those. Use ``no-providers`` mode for that.
+
+In case someone modifies setup.py, the ``CRON`` scheduled CI build automatically upgrades and
+pushes changes to the constraint files; however, you can also perform a test run of this locally using
+the procedure described in ``_ which utilises
+multiple processors on your local machine to generate such constraints faster.
 
 This bumps the constraint files to latest versions and stores hash of setup.py. The generated constraint
 and setup.py hash files are stored in the ``files`` folder and while generating the constraints
 diff of changes vs the previous constraint files is printed.
 
+
 Using local virtualenv environment in Your Host IDE
 ---------------------------------------------------
 
@@ -1151,12 +1189,12 @@ This is the current syntax for `./breeze <./breeze>`_:
 
   Commands with arguments:
 
-  docker-compose                Executes specified docker-compose command
-  kind-cluster                  Manages KinD cluster on the host
-  prepare-provider-readme       Prepares provider packages readme files
-  prepare-provider-packages     Prepares provider packages
-  static-check                  Performs selected static check for changed files
-  tests                         Runs selected tests in the container
+  docker-compose                     Executes specified docker-compose command
+  kind-cluster                       Manages KinD cluster on the host
+  prepare-provider-documentation     Prepares provider packages documentation
+  prepare-provider-packages          Prepares provider packages
+  static-check                       Performs selected static check for changed files
+  tests                              Runs selected tests in the container
 
   Help commands:
 
@@ -1239,16 +1277,24 @@ This is the current syntax for `./breeze <./breeze>`_:
 
   breeze build-image [FLAGS]
 
        Builds docker image (CI or production) without entering the container. You can pass
-       additional options to this command, such as '--force-build-image',
-       '--force-pull-image', '--python', '--build-cache-local' or '-build-cache-pulled'
-       in order to modify build behaviour.
+       additional options to this command, such as:
+
+       Choosing python version:
+         '--python'
+
+       Choosing cache option:
+         '--build-cache-local' or '-build-cache-pulled', or '--build-cache-none'
+
+       Choosing whether to force pull images or force build the image:
+         '--force-build-image',
+         '--force-pull-image', '--force-pull-base-python-image'
 
        You can also pass '--production-image' flag to build production image rather than CI image.
 
-       For DockerHub pull --dockerhub-user and --dockerhub-repo flags can be used to specify
-       the repository to pull from. For GitHub repository, the --github-repository
+       For DockerHub pull.
'--dockerhub-user' and '--dockerhub-repo' flags can be used to specify + the repository to pull from. For GitHub repository, the '--github-repository' flag can be used for the same purpose. You can also use - --github-image-id | in case you want to pull the image with + '--github-image-id |' in case you want to pull the image with specific COMMIT_SHA tag or RUN_ID. Flags: @@ -1264,10 +1310,15 @@ This is the current syntax for `./breeze <./breeze>`_: 2.7 3.5 3.6 3.7 3.8 -a, --install-airflow-version INSTALL_AIRFLOW_VERSION - If specified, installs Airflow directly from PIP released version. This happens at - image building time in production image and at container entering time for CI image. One of: + In CI image, installs Airflow (in entrypoint) from PIP released version or using + the installation method specified (sdist, wheel, none). + + In PROD image the installation of selected method or version happens during image building. + For PROD image, the 'none' options is not valid. + + One of: - 2.0.0 1.10.14 1.10.12 1.10.11 1.10.10 1.10.9 none wheel sdist + 2.0.1 2.0.0 1.10.15 1.10.14 1.10.12 1.10.11 1.10.10 1.10.9 none wheel sdist When 'none' is used, you can install airflow from local packages. When building image, airflow package should be added to 'docker-context-files' and @@ -1280,8 +1331,9 @@ This is the current syntax for `./breeze <./breeze>`_: This can be a GitHub branch like master or v1-10-test, or a tag like 2.0.0a1. --installation-method INSTALLATION_METHOD - Method of installing airflow - either from the sources ('.') or from package - 'apache-airflow' to install from PyPI. Default in Breeze is to install from sources. One of: + Method of installing airflow for production image - either from the sources ('.') + or from package 'apache-airflow' to install from PyPI. + Default in Breeze is to install from sources. One of: . apache-airflow @@ -1312,6 +1364,13 @@ This is the current syntax for `./breeze <./breeze>`_: images are pulled by default only for the first time you run the environment, later the locally build images are used as cache. + --force-pull-base-python-image + Forces pulling of Python base image from DockerHub before building to + populate cache. This should only be run in case we need to update to latest available + Python base image. This should be a rare and manually triggered event. Also this flag + is used in the scheduled run in CI when we rebuild all the images from the scratch + and run the tests to see if the latest python images do not fail our tests. + Customization options: -E, --extras EXTRAS @@ -1445,7 +1504,7 @@ This is the current syntax for `./breeze <./breeze>`_: and you need to be committer to push to Apache Airflow' GitHub registry. --github-registry GITHUB_REGISTRY - Github registry used. GitHub has legacy Packages registry and Public Beta Container + GitHub registry used. GitHub has legacy Packages registry and Public Beta Container registry. Default: docker.pkg.github.com. @@ -1480,6 +1539,10 @@ This is the current syntax for `./breeze <./breeze>`_: Note that you can further increase verbosity and see all the commands executed by breeze by running 'export VERBOSE_COMMANDS="true"' before running breeze. + --dry-run-docker + Only show docker commands to execute instead of actually executing them. The docker + commands are printed in yellow color. 
+ #################################################################################################### @@ -1516,6 +1579,10 @@ This is the current syntax for `./breeze <./breeze>`_: Note that you can further increase verbosity and see all the commands executed by breeze by running 'export VERBOSE_COMMANDS="true"' before running breeze. + --dry-run-docker + Only show docker commands to execute instead of actually executing them. The docker + commands are printed in yellow color. + #################################################################################################### @@ -1539,16 +1606,28 @@ This is the current syntax for `./breeze <./breeze>`_: breeze generate-constraints [FLAGS] - Generates pinned constraint files from setup.py. Those files are generated in files folder - - separate files for different python version. Those constraint files when pushed to orphan - constraints-master, constraints-2-0 and constraints-1-10 branches are used to generate - repeatable CI builds as well as run repeatable production image builds. You can use those + Generates pinned constraint files with all extras from setup.py. Those files are generated in + files folder - separate files for different python version. Those constraint files when + pushed to orphan constraints-master, constraints-2-0 and constraints-1-10 branches are used + to generate repeatable CI builds as well as run repeatable production image builds and + upgrades when you want to include installing or updating some of the released providers + released at the time particular airflow version was released. You can use those constraints to predictably install released Airflow versions. This is mainly used to test - the constraint generation - constraints are pushed to the orphan branches by a - successful scheduled CRON job in CI automatically. + the constraint generation or manually fix them - constraints are pushed to the orphan + branches by a successful scheduled CRON job in CI automatically, but sometimes manual fix + might be needed. Flags: + --generate-constraints-mode GENERATE_CONSTRAINTS_MODE + Mode of generating constraints - determines whether providers are installed when generating + constraints and which version of them (either the ones from sources are used or the ones + from pypi. + + One of: + + source-providers pypi-providers no-providers + -p, --python PYTHON_MAJOR_MINOR_VERSION Python version used for the image. This is always major/minor version. @@ -1567,6 +1646,10 @@ This is the current syntax for `./breeze <./breeze>`_: Note that you can further increase verbosity and see all the commands executed by breeze by running 'export VERBOSE_COMMANDS="true"' before running breeze. + --dry-run-docker + Only show docker commands to execute instead of actually executing them. The docker + commands are printed in yellow color. + #################################################################################################### @@ -1615,7 +1698,7 @@ This is the current syntax for `./breeze <./breeze>`_: and you need to be committer to push to Apache Airflow' GitHub registry. --github-registry GITHUB_REGISTRY - Github registry used. GitHub has legacy Packages registry and Public Beta Container + GitHub registry used. GitHub has legacy Packages registry and Public Beta Container registry. Default: docker.pkg.github.com. 
@@ -1650,6 +1733,10 @@ This is the current syntax for `./breeze <./breeze>`_: Note that you can further increase verbosity and see all the commands executed by breeze by running 'export VERBOSE_COMMANDS="true"' before running breeze. + --dry-run-docker + Only show docker commands to execute instead of actually executing them. The docker + commands are printed in yellow color. + #################################################################################################### @@ -1703,9 +1790,9 @@ This is the current syntax for `./breeze <./breeze>`_: One of: - wheel,sdist,both + both,sdist,wheel - Default: wheel + Default: both -v, --verbose Show verbose information about executed docker, kind, kubectl, helm commands. Useful for @@ -1715,6 +1802,10 @@ This is the current syntax for `./breeze <./breeze>`_: Note that you can further increase verbosity and see all the commands executed by breeze by running 'export VERBOSE_COMMANDS="true"' before running breeze. + --dry-run-docker + Only show docker commands to execute instead of actually executing them. The docker + commands are printed in yellow color. + #################################################################################################### @@ -1873,6 +1964,10 @@ This is the current syntax for `./breeze <./breeze>`_: Note that you can further increase verbosity and see all the commands executed by breeze by running 'export VERBOSE_COMMANDS="true"' before running breeze. + --dry-run-docker + Only show docker commands to execute instead of actually executing them. The docker + commands are printed in yellow color. + #################################################################################################### @@ -1920,6 +2015,13 @@ This is the current syntax for `./breeze <./breeze>`_: images are pulled by default only for the first time you run the environment, later the locally build images are used as cache. + --force-pull-base-python-image + Forces pulling of Python base image from DockerHub before building to + populate cache. This should only be run in case we need to update to latest available + Python base image. This should be a rare and manually triggered event. Also this flag + is used in the scheduled run in CI when we rebuild all the images from the scratch + and run the tests to see if the latest python images do not fail our tests. + Customization options: -E, --extras EXTRAS @@ -2045,30 +2147,26 @@ This is the current syntax for `./breeze <./breeze>`_: #################################################################################################### - Detailed usage for command: prepare-provider-readme + Detailed usage for command: prepare-provider-documentation - breeze prepare-provider-readme [FLAGS] [YYYY.MM.DD] [PACKAGE_ID ...] + breeze prepare-provider-documentation [FLAGS] [PACKAGE_ID ...] - Prepares README.md files for backport packages. You can provide (after --) optional version - in the form of YYYY.MM.DD, optionally followed by the list of packages to generate readme for. + Prepares documentation files for provider packages. + + The command is optionally followed by the list of packages to generate readme for. If the first parameter is not formatted as a date, then today is regenerated. If no packages are specified, readme for all packages are generated. If no date is specified, current date + 3 days is used (allowing for PMC votes to pass). 
Examples: - 'breeze prepare-provider-readme' or - 'breeze prepare-provider-readme 2020.05.10' or - 'breeze prepare-provider-readme 2020.05.10 https google amazon' + 'breeze prepare-provider-documentation' or + 'breeze prepare-provider-documentation --version-suffix-for-pypi rc1' General form: - 'breeze prepare-provider-readme YYYY.MM.DD ...' - - * YYYY.MM.DD - is the CALVER version of the package to prepare. Note that this date - cannot be earlier than the already released version (the script will fail if it - will be). It can be set in the future anticipating the future release date. + 'breeze prepare-provider-documentation ...' * is usually directory in the airflow/providers folder (for example 'google' but in several cases, it might be one level deeper separated with @@ -2076,6 +2174,24 @@ This is the current syntax for `./breeze <./breeze>`_: Flags: + -S, --version-suffix-for-pypi SUFFIX + Adds optional suffix to the version in the generated provider package. It can be used + to generate rc1/rc2 ... versions of the packages to be uploaded to PyPI. + + -N, --version-suffix-for-svn SUFFIX + Adds optional suffix to the generated names of package. It can be used to generate + rc1/rc2 ... versions of the packages to be uploaded to SVN. + + --package-format PACKAGE_FORMAT + + Chooses format of packages to prepare. + + One of: + + both,sdist,wheel + + Default: both + -v, --verbose Show verbose information about executed docker, kind, kubectl, helm commands. Useful for debugging - when you run breeze with --verbose flags you will be able to see the commands @@ -2084,6 +2200,10 @@ This is the current syntax for `./breeze <./breeze>`_: Note that you can further increase verbosity and see all the commands executed by breeze by running 'export VERBOSE_COMMANDS="true"' before running breeze. + --dry-run-docker + Only show docker commands to execute instead of actually executing them. The docker + commands are printed in yellow color. + #################################################################################################### @@ -2093,7 +2213,7 @@ This is the current syntax for `./breeze <./breeze>`_: breeze prepare-provider-packages [FLAGS] [PACKAGE_ID ...] - Prepares backport packages. You can provide (after --) optional list of packages to prepare. + Prepares provider packages. You can provide (after --) optional list of packages to prepare. If no packages are specified, readme for all packages are generated. You can specify optional --version-suffix-for-svn flag to generate rc candidate packages to upload to SVN or --version-suffix-for-pypi flag to generate rc candidates for PyPI packages. You can also @@ -2107,7 +2227,7 @@ This is the current syntax for `./breeze <./breeze>`_: 'breeze prepare-provider-packages' or 'breeze prepare-provider-packages google' or - 'breeze prepare-provider-packages --package-format both google' or + 'breeze prepare-provider-packages --package-format wheel google' or 'breeze prepare-provider-packages --version-suffix-for-svn rc1 http google amazon' or 'breeze prepare-provider-packages --version-suffix-for-pypi rc1 http google amazon' 'breeze prepare-provider-packages --version-suffix-for-pypi a1 @@ -2130,12 +2250,12 @@ This is the current syntax for `./breeze <./breeze>`_: One of: - wheel,sdist,both + both,sdist,wheel - Default: wheel + Default: both -S, --version-suffix-for-pypi SUFFIX - Adds optional suffix to the version in the generated backport package. It can be used + Adds optional suffix to the version in the generated provider package. 
It can be used to generate rc1/rc2 ... versions of the packages to be uploaded to PyPI. -N, --version-suffix-for-svn SUFFIX @@ -2150,6 +2270,10 @@ This is the current syntax for `./breeze <./breeze>`_: Note that you can further increase verbosity and see all the commands executed by breeze by running 'export VERBOSE_COMMANDS="true"' before running breeze. + --dry-run-docker + Only show docker commands to execute instead of actually executing them. The docker + commands are printed in yellow color. + #################################################################################################### @@ -2175,10 +2299,11 @@ This is the current syntax for `./breeze <./breeze>`_: pre-commit-hook-names provide-create-sessions providers-init-file provider-yamls pydevd pydocstyle pylint pylint-tests python-no-log-warn pyupgrade restrict-start_date rst-backticks setup-order setup-extra-packages shellcheck - sort-in-the-wild stylelint trailing-whitespace update-breeze-file update-extras - update-local-yml-file update-setup-cfg-file version-sync yamllint + sort-in-the-wild sort-spelling-wordlist stylelint trailing-whitespace + update-breeze-file update-extras update-local-yml-file update-setup-cfg-file + version-sync yamllint - You can pass extra arguments including options to to the pre-commit framework as + You can pass extra arguments including options to the pre-commit framework as passed after --. For example: 'breeze static-check mypy' or @@ -2220,7 +2345,7 @@ This is the current syntax for `./breeze <./breeze>`_: --test-type TEST_TYPE Type of the test to run. One of: - All,Core,Providers,API,CLI,Integration,Other,WWW,Heisentests,Postgres,MySQL,Helm + All,Core,Providers,API,CLI,Integration,Other,WWW,Postgres,MySQL,Helm,Quarantined Default: All @@ -2319,7 +2444,7 @@ This is the current syntax for `./breeze <./breeze>`_: start all integrations. Selected integrations are not saved for future execution. One of: - cassandra kerberos mongo openldap pinot presto rabbitmq redis all + cassandra kerberos mongo openldap pinot rabbitmq redis statsd trino all --init-script INIT_SCRIPT_FILE Initialization script name - Sourced from files/airflow-breeze-config. Default value @@ -2361,17 +2486,17 @@ This is the current syntax for `./breeze <./breeze>`_: Kubernetes version - only used in case one of kind-cluster commands is used. One of: - v1.18.6 v1.17.5 v1.16.9 + v1.20.2 v1.19.7 v1.18.15 - Default: v1.18.6 + Default: v1.20.2 --kind-version KIND_VERSION Kind version - only used in case one of kind-cluster commands is used. One of: - v0.8.0 + v0.10.0 - Default: v0.8.0 + Default: v0.10.0 --helm-version HELM_VERSION Helm version - only used in case one of kind-cluster commands is used. @@ -2404,10 +2529,15 @@ This is the current syntax for `./breeze <./breeze>`_: Choose different Airflow version to install or run -a, --install-airflow-version INSTALL_AIRFLOW_VERSION - If specified, installs Airflow directly from PIP released version. This happens at - image building time in production image and at container entering time for CI image. One of: + In CI image, installs Airflow (in entrypoint) from PIP released version or using + the installation method specified (sdist, wheel, none). - 2.0.0 1.10.14 1.10.12 1.10.11 1.10.10 1.10.9 none wheel sdist + In PROD image the installation of selected method or version happens during image building. + For PROD image, the 'none' options is not valid. 
+ + One of: + + 2.0.1 2.0.0 1.10.15 1.10.14 1.10.12 1.10.11 1.10.10 1.10.9 none wheel sdist When 'none' is used, you can install airflow from local packages. When building image, airflow package should be added to 'docker-context-files' and @@ -2420,8 +2550,9 @@ This is the current syntax for `./breeze <./breeze>`_: This can be a GitHub branch like master or v1-10-test, or a tag like 2.0.0a1. --installation-method INSTALLATION_METHOD - Method of installing airflow - either from the sources ('.') or from package - 'apache-airflow' to install from PyPI. Default in Breeze is to install from sources. One of: + Method of installing airflow for production image - either from the sources ('.') + or from package 'apache-airflow' to install from PyPI. + Default in Breeze is to install from sources. One of: . apache-airflow @@ -2459,6 +2590,13 @@ This is the current syntax for `./breeze <./breeze>`_: images are pulled by default only for the first time you run the environment, later the locally build images are used as cache. + --force-pull-base-python-image + Forces pulling of Python base image from DockerHub before building to + populate cache. This should only be run in case we need to update to latest available + Python base image. This should be a rare and manually triggered event. Also this flag + is used in the scheduled run in CI when we rebuild all the images from the scratch + and run the tests to see if the latest python images do not fail our tests. + Customization options: -E, --extras EXTRAS @@ -2595,7 +2733,7 @@ This is the current syntax for `./breeze <./breeze>`_: and you need to be committer to push to Apache Airflow' GitHub registry. --github-registry GITHUB_REGISTRY - Github registry used. GitHub has legacy Packages registry and Public Beta Container + GitHub registry used. GitHub has legacy Packages registry and Public Beta Container registry. Default: docker.pkg.github.com. @@ -2628,15 +2766,15 @@ This is the current syntax for `./breeze <./breeze>`_: --test-type TEST_TYPE Type of the test to run. One of: - All,Core,Providers,API,CLI,Integration,Other,WWW,Heisentests,Postgres,MySQL,Helm + All,Core,Providers,API,CLI,Integration,Other,WWW,Postgres,MySQL,Helm,Quarantined Default: All **************************************************************************************************** - Flags for generation of the backport packages + Flags for generation of the provider packages -S, --version-suffix-for-pypi SUFFIX - Adds optional suffix to the version in the generated backport package. It can be used + Adds optional suffix to the version in the generated provider package. It can be used to generate rc1/rc2 ... versions of the packages to be uploaded to PyPI. -N, --version-suffix-for-svn SUFFIX @@ -2654,6 +2792,10 @@ This is the current syntax for `./breeze <./breeze>`_: Note that you can further increase verbosity and see all the commands executed by breeze by running 'export VERBOSE_COMMANDS="true"' before running breeze. + --dry-run-docker + Only show docker commands to execute instead of actually executing them. The docker + commands are printed in yellow color. 
+ **************************************************************************************************** Print detailed help message diff --git a/CHANGELOG.txt b/CHANGELOG.txt index bc93e57bcfd0c..d687c810d98d2 100644 --- a/CHANGELOG.txt +++ b/CHANGELOG.txt @@ -1,5 +1,100 @@ +Airflow 2.0.2, 2021-04-19 +------------------------- + +Bug Fixes +""""""""" + +* Bugfix: ``TypeError`` when Serializing & sorting iterable properties of DAGs (#15395) +* Fix missing ``on_load`` trigger for folder-based plugins (#15208) +* ``kubernetes cleanup-pods`` subcommand will only clean up Airflow-created Pods (#15204) +* Fix password masking in CLI action_logging (#15143) +* Fix url generation for TriggerDagRunOperatorLink (#14990) +* Restore base lineage backend (#14146) +* Unable to trigger backfill or manual jobs with Kubernetes executor. (#14160) +* Bugfix: Task docs are not shown in the Task Instance Detail View (#15191) +* Bugfix: Fix overriding ``pod_template_file`` in KubernetesExecutor (#15197) +* Bugfix: resources in ``executor_config`` breaks Graph View in UI (#15199) +* Fix celery executor bug trying to call len on map (#14883) +* Fix bug in airflow.stats timing that broke dogstatsd mode (#15132) +* Avoid scheduler/parser manager deadlock by using non-blocking IO (#15112) +* Re-introduce ``dagrun.schedule_delay`` metric (#15105) +* Compare string values, not if strings are the same object in Kube executor(#14942) +* Pass queue to BaseExecutor.execute_async like in airflow 1.10 (#14861) +* Scheduler: Remove TIs from starved pools from the critical path. (#14476) +* Remove extra/needless deprecation warnings from airflow.contrib module (#15065) +* Fix support for long dag_id and task_id in KubernetesExecutor (#14703) +* Sort lists, sets and tuples in Serialized DAGs (#14909) +* Simplify cleaning string passed to origin param (#14738) (#14905) +* Fix error when running tasks with Sentry integration enabled. 
(#13929) +* Webserver: Sanitize string passed to origin param (#14738) +* Fix losing duration < 1 secs in tree (#13537) +* Pin SQLAlchemy to <1.4 due to breakage of sqlalchemy-utils (#14812) +* Fix KubernetesExecutor issue with deleted pending pods (#14810) +* Default to Celery Task model when backend model does not exist (#14612) +* Bugfix: Plugins endpoint was unauthenticated (#14570) +* BugFix: fix DAG doc display (especially for TaskFlow DAGs) (#14564) +* BugFix: TypeError in airflow.kubernetes.pod_launcher's monitor_pod (#14513) +* Bugfix: Fix wrong output of tags and owners in dag detail API endpoint (#14490) +* Fix logging error with task error when JSON logging is enabled (#14456) +* Fix statsd metrics not sending when using daemon mode (#14454) +* Gracefully handle missing start_date and end_date for DagRun (#14452) +* BugFix: Serialize max_retry_delay as a timedelta (#14436) +* Fix crash when user clicks on "Task Instance Details" caused by start_date being None (#14416) +* BugFix: Fix TaskInstance API call fails if a task is removed from running DAG (#14381) +* Scheduler should not fail when invalid ``executor_config`` is passed (#14323) +* Fix bug allowing task instances to survive when dagrun_timeout is exceeded (#14321) +* Fix bug where DAG timezone was not always shown correctly in UI tooltips (#14204) +* Use ``Lax`` for ``cookie_samesite`` when empty string is passed (#14183) +* [AIRFLOW-6076] fix ``dag.cli()`` KeyError (#13647) +* Fix running child tasks in a subdag after clearing a successful subdag (#14776) + +Improvements +"""""""""""" + +* Remove unused JS packages causing false security alerts (#15383) +* Change default of ``[kubernetes] enable_tcp_keepalive`` for new installs to ``True`` (#15338) +* Fixed #14270: Add error message in OOM situations (#15207) +* Better compatibility/diagnostics for arbitrary UID in docker image (#15162) +* Updates 3.6 limits for latest versions of a few libraries (#15209) +* Adds Blinker dependency which is missing after recent changes (#15182) +* Remove 'conf' from search_columns in DagRun View (#15099) +* More proper default value for namespace in K8S cleanup-pods CLI (#15060) +* Faster default role syncing during webserver start (#15017) +* Speed up webserver start when there are many DAGs (#14993) +* Much easier to use and better documented Docker image (#14911) +* Use ``libyaml`` C library when available. (#14577) +* Don't create unittest.cfg when not running in unit test mode (#14420) +* Webserver: Allow Filtering TaskInstances by queued_dttm (#14708) +* Update Flask-AppBuilder dependency to allow 3.2 (and all 3.x series) (#14665) +* Remember expanded task groups in browser local storage (#14661) +* Add plain format output to cli tables (#14546) +* Make ``airflow dags show`` command display TaskGroups (#14269) +* Increase maximum size of ``extra`` connection field. 
(#12944)
+* Speed up clear_task_instances by doing a single sql delete for TaskReschedule (#14048)
+* Add more flexibility with FAB menu links (#13903)
+* Add better description and guidance in case of sqlite version mismatch (#14209)
+
+Doc only changes
+""""""""""""""""
+
+* Add documentation create/update community providers (#15061)
+* Fix mistake and typos in airflow.utils.timezone docstrings (#15180)
+* Replace new url for Stable Airflow Docs (#15169)
+* Docs: Clarify behavior of delete_worker_pods_on_failure (#14958)
+* Create a documentation package for Docker image (#14846)
+* Multiple minor doc (OpenAPI) fixes (#14917)
+* Replace Graph View Screenshot to show Auto-refresh (#14571)
+
+Misc/Internal
+"""""""""""""
+
+* Import Connection lazily in hooks to avoid cycles (#15361)
+* Rename last_scheduler_run into last_parsed_time, and ensure it's updated in DB (#14581)
+* Make TaskInstance.pool_slots not nullable with a default of 1 (#14406)
+* Log migrations info in consistent way (#14158)
+
 Airflow 2.0.1, 2021-02-08
------------------------------
+-------------------------
 
 Bug Fixes
 """""""""
diff --git a/CI.rst b/CI.rst
index 8d99e519708c7..beae70f6d9169 100644
--- a/CI.rst
+++ b/CI.rst
@@ -33,8 +33,6 @@ environments we use. Most of our CI jobs are written as bash scripts which are e
 the CI jobs. And we have a number of variables that determine build behaviour.
 
-
-
 GitHub Actions runs
 -------------------
 
@@ -53,14 +51,23 @@ techniques have been implemented that use efficiently cache from the GitHub Dock
 this brings down the time needed to rebuild the image to ~4 minutes. In some cases (when dependencies
 change) it can be ~6-7 minutes and in case base image of Python releases new patch-level, it can be
 ~12 minutes.
 
+Container Registry used as cache
+--------------------------------
+
+For our CI builds, we are using the Container Registry to store the results of the "Build Image" workflow
+and pass them to the "CI Build" workflow.
+
 Currently in master version of Airflow we run tests in 3 different versions of Python (3.6, 3.7, 3.8)
 which means that we have to build 6 images (3 CI ones and 3 PROD ones). Yet we run around 12 jobs
 with each of the CI images. That is a lot of time to just build the environment to run. Therefore
-we are utilising ``workflow_run`` feature of GitHub Actions. This feature allows to run a separate,
-independent workflow, when the main workflow is run - this separate workflow is different than the main
-one, because by default it runs using ``master`` version of the sources but also - and most of all - that
-it has WRITE access to the repository. This is especially important in our case where Pull Requests to
-Airflow might come from any repository, and it would be a huge security issue if anyone from outside could
+we are utilising the ``workflow_run`` feature of GitHub Actions.
+
+This feature allows us to run a separate, independent workflow when the main workflow is run -
+this separate workflow is different from the main one because, by default, it runs using the ``master``
+version of the sources but also - and most of all - because it has WRITE access to the repository.
+
+This is especially important in our case where Pull Requests to Airflow might come from any repository,
+and it would be a huge security issue if anyone from outside could
 utilise the WRITE access to Apache Airflow repository via an external Pull Request.
 Thanks to the WRITE access and the fact that the 'workflow_run' by default uses the 'master' version of the
@@ -71,9 +78,56 @@ this image can be built only once and used by all the jobs running tests. The im
 rather than build it from scratch. Pulling such an image takes ~1 minute, thanks to that we are saving
 a lot of precious time for jobs.
 
-
-Local runs
-----------
+We can use either of the two available GitHub Container registries as cache:
+
+* Legacy `GitHub Package Registry `_ which is not very
+  stable, uses old infrastructure of GitHub and lacks certain features - notably it does not allow
+  us to delete the old image. The benefit of using GitHub Package Registry is that it works
+  out-of-the-box (write authentication is done using ``GITHUB_TOKEN`` and users do not have to take any
+  action to make it work in case they want to run builds using their own forks). Also, those images
+  do not provide public access, so you need to log in to the ``docker.pkg.github.com`` docker registry
+  using your username and personal token to be able to pull those images.
+
+* The new `GitHub Container Registry `_
+  which is in Public Beta, has many more features (including permission management, public access and
+  image retention possibility). It also has the drawback (at least as of January 2021) that you need to
+  have a separate personal access token created as the ``PAT_CR`` secret in your repository with write
+  access to the registry in order to make it work. You also have to manually manage permissions of the
+  images, i.e. after creating images for the first time, you need to set their visibility to "Public" and
+  add ``Admin`` permissions to the group of people managing the images (in our case the
+  ``airflow-committers`` group). This makes the GitHub Container Registry less suitable if you want to
+  run builds of Airflow in your own forks (note - it does not affect pull requests from forks to Airflow).
+
+Those two registries use different image naming schemas. See `Images documentation `_ for details.
+
+You can choose which registry should be used by the repository by setting the ``OVERRIDE_GITHUB_REGISTRY``
+secret to either ``docker.pkg.github.com`` for GitHub Package Registry or ``ghcr.io`` for GitHub Container
+Registry. The default is the GitHub Package Registry one. The Pull Request forks have no access to the
+secret but they auto-detect the registry used when they wait for the images.
+
+You can interact with the GitHub Registry images (pull/push) via `Breeze `_ - you can
+pass the ``--github-registry`` flag with either ``docker.pkg.github.com`` for GitHub Package Registry or
+``ghcr.io`` for GitHub Container Registry and pull/push operations will be performed using the chosen
+registry, using the appropriate naming convention. This allows committers who have access to push/pull
+those images to build and push them locally.
+
+
+GitHub Container Registry Token
+-------------------------------
+
+Unlike GitHub Packages, GitHub Registry requires a personal access token added as the ``PAT_CR`` secret
+in order to make it work. This token has to have "Registry Write" scope. Ideally, you should not use a
+token of a person who has access to many repositories, because this token allows writing packages in
+ANY repository where the person has write access (including private organisations). Instead, you should
+have a separate account with access only to that repository and generate a Personal Access Token with
+Package Registry write permission for that account.
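+
+For illustration, logging in to the registry with such a token might look like this (a sketch -
+``ghcr-ci-account`` is a hypothetical dedicated account, and the token is assumed to be exported
+in the ``PAT_CR`` environment variable):
+
+.. code-block:: bash
+
+    # Authenticate to GitHub Container Registry using the token on stdin
+    echo "${PAT_CR}" | docker login ghcr.io -u ghcr-ci-account --password-stdin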
Discussion about setting up such account is opened at +`ASF Jira `_. More info about +the token for GitHub Container Registry can be found +`here `_ + + +Locally replicating CI failures +------------------------------- The main goal of the CI philosophy we have that no matter how complex the test and integration infrastructure, as a developer you should be able to reproduce and re-run any of the failed checks @@ -124,7 +178,7 @@ You can use those variables when you try to reproduce the build locally. +-----------------------------------------+----------------------------------------+-------------------------------------------------+ | Mount variables | +-----------------------------------------+-------------+-------------+------------+-------------------------------------------------+ -| ``MOUNT_LOCAL_SOURCES`` | true | false | false | Determines whether local sources are | +| ``MOUNT_SELECTED_LOCAL_SOURCES`` | true | false | false | Determines whether local sources are | | | | | | mounted to inside the container. Useful for | | | | | | local development, as changes you make | | | | | | locally can be immediately tested in | @@ -135,6 +189,15 @@ You can use those variables when you try to reproduce the build locally. | | | | | directories) generated locally on the | | | | | | host during development. | +-----------------------------------------+-------------+-------------+------------+-------------------------------------------------+ +| ``MOUNT_ALL_LOCAL_SOURCES`` | false | false | false | Determines whether all local sources are | +| | | | | mounted to inside the container. Useful for | +| | | | | local development when you need to access .git | +| | | | | folders and other folders excluded when | +| | | | | ``MOUNT_SELECTED_LOCAL_SOURCES`` is true. | +| | | | | You might need to manually delete egg-info | +| | | | | folder when you enter breeze and the folder was | +| | | | | generated using different python versions. | ++-----------------------------------------+-------------+-------------+------------+-------------------------------------------------+ | Force variables | +-----------------------------------------+-------------+-------------+------------+-------------------------------------------------+ | ``FORCE_PULL_IMAGES`` | true | true | true | Determines if images are force-pulled, | @@ -202,10 +265,10 @@ You can use those variables when you try to reproduce the build locally. +-----------------------------------------+-------------+-------------+------------+-------------------------------------------------+ | Version suffix variables | +-----------------------------------------+-------------+-------------+------------+-------------------------------------------------+ -| ``VERSION_SUFFIX_FOR_PYPI`` | | | | Version suffix used during backport | +| ``VERSION_SUFFIX_FOR_PYPI`` | | | | Version suffix used during provider | | | | | | package preparation for PyPI builds. | +-----------------------------------------+-------------+-------------+------------+-------------------------------------------------+ -| ``VERSION_SUFFIX_FOR_SVN`` | | | | Version suffix used during backport | +| ``VERSION_SUFFIX_FOR_SVN`` | | | | Version suffix used during provider | | | | | | package preparation for SVN builds. | +-----------------------------------------+-------------+-------------+------------+-------------------------------------------------+ | Git variables | @@ -245,7 +308,7 @@ You can use those variables when you try to reproduce the build locally. 
| Image build variables | +-----------------------------------------+-------------+-------------+------------+-------------------------------------------------+ | ``UPGRADE_TO_NEWER_DEPENDENCIES`` | false | false | false\* | Determines whether the build should | -| | | | | attempt to upgrade all | +| | | | | attempt to upgrade python base image and all | | | | | | PIP dependencies to latest ones matching | | | | | | ``setup.py`` limits. This tries to replicate | | | | | | the situation of "fresh" user who just installs | @@ -367,7 +430,7 @@ DockerHub registry or change the GitHub registry to interact with and use your o ``GITHUB_REPOSITORY`` and providing your own GitHub Username and Token. Currently we are using GitHub Packages to cache images for the build. GitHub Packages are "legacy" -storage of binary artifacts for GitHub and as of September 2020 they introduced Github Container Registry +storage of binary artifacts for GitHub and as of September 2020 they introduced GitHub Container Registry as more stable, easier to manage replacement for container storage. It includes complete self-management of the images including permission management, public access, retention management and many more. @@ -375,9 +438,9 @@ More about it here: https://github.blog/2020-09-01-introducing-github-container-registry/ -Recently we started to experience unstable behaviour of the Github Packages ('unknown blob' +Recently we started to experience unstable behaviour of the GitHub Packages ('unknown blob' and manifest v1 vs. v2 when pushing images to it). So together with ASF we proposed to -enable Github Container Registry and it happened as of January 2020. +enable GitHub Container Registry and it happened as of January 2020. More about it in https://issues.apache.org/jira/browse/INFRA-20959 @@ -423,10 +486,10 @@ the model of permission management is not the same for Container Registry as it | | | default, can be commit SHA or RUN_ID. | +--------------------------------+---------------------------+----------------------------------------------+ -Authentication in Github Registry +Authentication in GitHub Registry ================================= -We are currently in the process of testing using Github Container Registry as cache for our images during +We are currently in the process of testing using GitHub Container Registry as cache for our images during the CI process. The default registry is set to "GitHub Packages", but we are testing the GitHub Container Registry. In case of GitHub Packages, authentication uses GITHUB_TOKEN mechanism. Authentication is needed for both pushing the images (WRITE) and pulling them (READ) - which means that GitHub token @@ -643,8 +706,6 @@ This workflow is a regular workflow that performs all checks of Airflow code. 
+---------------------------+----------------------------------------------+-------+-------+------+ | Spell check docs | Spell check for documentation | Yes | Yes | Yes | +---------------------------+----------------------------------------------+-------+-------+------+ -| Backport packages | Prepares Backport Packages for 1.10 Airflow | Yes | Yes | Yes | -+---------------------------+----------------------------------------------+-------+-------+------+ | Trigger tests | Checks if tests should be triggered | Yes | Yes | Yes | +---------------------------+----------------------------------------------+-------+-------+------+ | Tests [Pg/Msql/Sqlite] | Run all the Pytest tests for Python code | Yes(2)| Yes | Yes | @@ -800,7 +861,7 @@ you need to reproduce a MySQL environment with kerberos integration enabled for .. code-block:: bash - ./breeze --github-image-id 210056909 --python 3.8 --integration kerberos + ./breeze --github-image-id 210056909 --github-registry docker.pkg.github.com --python 3.8 You will be dropped into a shell with the exact version that was used during the CI run and you will be able to run pytest tests manually, easily reproducing the environment that was used in CI. Note that in @@ -834,3 +895,51 @@ Scheduled build flow .. image:: images/ci/scheduled_ci_flow.png :align: center :alt: Scheduled build flow + + +Adding new Python versions to CI +-------------------------------- + +In 2.0 line we currently support Python 3.6, 3.7, 3.8. + +In order to add a new version the following operations should be done (example uses python 3.9) + +* copy the latest constraints in ``constraints-master`` branch from previous versions and name it + using the new Python version (``constraints-3.9.txt``). Commit and push + +* add the new python version to `breeze-complete `_ and + `_initialization.sh `_ - tests will fail if they are not + in sync. + +* build image locally for both prod and CI locally using Breeze: + +.. code-block:: bash + + ./breeze build-image --python 3.9 + +* push image as cache to DockerHub and both registries: + +.. code-block:: bash + + ./breeze push-image --python 3.9 + ./breeze push-image --python 3.9 --github-registry ghcr.io + ./breeze push-image --python 3.9 --github-registry docker.pkg.github.com + +* Find the 3 new images (main, ci, build) created in + `GitHub Container registry`_ + go to Package Settings and turn on ``Public Visibility`` and add ``airflow-committers`` + group as ``Admin Role`` to all of them. 
+ +* In `DockerHub `_ create three entries + for automatically built nightly-tag and release images: + + ++-------------+----------------+-----------------------+---------------------+---------------+-----------+---------------+------------------------------------------------------------------------+ +| Source type | Source | Docker Tag | Dockerfile location | Build Context | Autobuild | Build caching | Comment | ++=============+================+=======================+=====================+===============+===========+===============+========================================================================+ +| Tag | nightly-master | master-python3.9 | Dockerfile | / | x | - | Nightly CI/PROD images from successful scheduled master nightly builds | ++-------------+----------------+-----------------------+---------------------+---------------+-----------+---------------+------------------------------------------------------------------------+ +| Branch | v2-0-stable | v2-0-stable-python3.9 | Dockerfile | / | x | | CI/PROD images automatically built pushed stable branch | ++-------------+----------------+-----------------------+---------------------+---------------+-----------+---------------+------------------------------------------------------------------------+ +| Tag | /^([1-2].*)$/ | {\1}-python3.9 | Dockerfile | / | x | | CI/PROD images automatically built from pushed release tags | ++-------------+----------------+-----------------------+---------------------+---------------+-----------+---------------+------------------------------------------------------------------------+ diff --git a/COMMITTERS.rst b/COMMITTERS.rst index 3e0c735fb7ff7..0ca8769a352c4 100644 --- a/COMMITTERS.rst +++ b/COMMITTERS.rst @@ -139,7 +139,7 @@ Prerequisites General prerequisites that we look for in all candidates: 1. Consistent contribution over last few months -2. Visibility on discussions on the dev mailing list, Slack channels or Github issues/discussions +2. Visibility on discussions on the dev mailing list, Slack channels or GitHub issues/discussions 3. Contributions to community health and project's sustainability for the long-term 4. Understands contributor/committer guidelines: `Contributors' Guide `__ diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 4965a289dcaa5..7ac115cc39d36 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -63,6 +63,14 @@ Fix Bugs Look through the GitHub issues for bugs. Anything is open to whoever wants to implement it. +Issue reporting and resolution process +-------------------------------------- + +The Apache Airflow project uses a set of labels for tracking and triaging issues, as +well as a set of priorities and milestones to track how and when the enhancements and bug +fixes make it into an Airflow release. This is documented as part of +the `Issue reporting and resolution process `_, + Implement Features ------------------ @@ -117,7 +125,7 @@ Committers/Maintainers Committers are community members that have write access to the project’s repositories, i.e., they can modify the code, documentation, and website by themselves and also accept other contributions. -The official list of committers can be found `here `__. +The official list of committers can be found `here `__. 
Additionally, committers are listed in a few other places (some of these may only be visible to existing committers):
@@ -188,9 +196,14 @@ From the `apache/airflow `_ repo,
 Step 2: Configure Your Environment
 ----------------------------------
-Configure the Docker-based Breeze development environment and run tests.
-You can use the default Breeze configuration as follows:
+You can use either a local virtual env or a Docker-based env. The differences
+between the two are explained `here `_.
+
+Full instructions for the local env can be found in the `LOCAL_VIRTUALENV.rst `_ file.
+The Docker env is here to maintain a consistent and common development environment, so that you can
+replicate CI failures locally and work on solving them locally rather than by pushing to CI.
+
+You can configure the Docker-based Breeze development environment as follows:
 1. Install the latest versions of the Docker Community Edition
    and Docker Compose and add them to the PATH.
@@ -245,7 +258,7 @@ Step 4: Prepare PR
    For example, to address this example issue, do the following:
-   * Read about `email configuration in Airflow `__.
+   * Read about `email configuration in Airflow `__.
   * Find the class you should modify. For the example GitHub issue,
     this is `email.py `__.
@@ -297,7 +310,7 @@ Step 4: Prepare PR
    and send it through the right path:
    * In case of a "no-code" change, approval will generate a comment that the PR can be merged and no
-     tests are needed. This is usually when the change modifies some non-documentation related rst
+     tests are needed. This is usually when the change modifies some non-documentation related RST
      files (such as this file). No python tests are run and no CI images are built for such PR. Usually
     it can be approved and merged a few minutes after it is submitted (unless there is a big queue of jobs).
@@ -368,7 +381,7 @@ these guidelines:
   of the same PR. Doc string is often sufficient. Make sure to follow the
   Sphinx compatible standards.
-- Make sure your code fulfils all the
+- Make sure your code fulfills all the
  `static code checks `__ we have in our code. The easiest way
  to make sure of that is to use `pre-commit hooks `__
@@ -414,7 +427,7 @@ The production images are build in DockerHub from:
 * ``2.0.*``, ``2.0.*rc*`` releases from the ``v2-0-stable`` branch when we prepare release candidates and
   final releases. There are no production images prepared from v2-0-stable branch.
-Similar rules apply to ``1.10.x`` releases until June 2020. We have ``v1-10-test`` and ``v1-10-stable``
+Similar rules apply to ``1.10.x`` releases until June 2021. We have ``v1-10-test`` and ``v1-10-stable``
 branches there.
 Development Environments
@@ -572,17 +585,17 @@ This is the full list of those extras:
 ..
START EXTRAS HERE -all, all_dbs, amazon, apache.atlas, apache.beam, apache.cassandra, apache.druid, apache.hdfs, -apache.hive, apache.kylin, apache.livy, apache.pig, apache.pinot, apache.spark, apache.sqoop, -apache.webhdfs, async, atlas, aws, azure, cassandra, celery, cgroups, cloudant, cncf.kubernetes, -crypto, dask, databricks, datadog, devel, devel_all, devel_ci, devel_hadoop, dingding, discord, doc, -docker, druid, elasticsearch, exasol, facebook, ftp, gcp, gcp_api, github_enterprise, google, -google_auth, grpc, hashicorp, hdfs, hive, http, imap, jdbc, jenkins, jira, kerberos, kubernetes, -ldap, microsoft.azure, microsoft.mssql, microsoft.winrm, mongo, mssql, mysql, odbc, openfaas, -opsgenie, oracle, pagerduty, papermill, password, pinot, plexus, postgres, presto, qds, qubole, -rabbitmq, redis, s3, salesforce, samba, segment, sendgrid, sentry, sftp, singularity, slack, -snowflake, spark, sqlite, ssh, statsd, tableau, telegram, vertica, virtualenv, webhdfs, winrm, -yandex, zendesk +airbyte, all, all_dbs, amazon, apache.atlas, apache.beam, apache.cassandra, apache.druid, +apache.hdfs, apache.hive, apache.kylin, apache.livy, apache.pig, apache.pinot, apache.spark, +apache.sqoop, apache.webhdfs, async, atlas, aws, azure, cassandra, celery, cgroups, cloudant, +cncf.kubernetes, crypto, dask, databricks, datadog, devel, devel_all, devel_ci, devel_hadoop, +dingding, discord, doc, docker, druid, elasticsearch, exasol, facebook, ftp, gcp, gcp_api, +github_enterprise, google, google_auth, grpc, hashicorp, hdfs, hive, http, imap, jdbc, jenkins, +jira, kerberos, kubernetes, ldap, microsoft.azure, microsoft.mssql, microsoft.winrm, mongo, mssql, +mysql, neo4j, odbc, openfaas, opsgenie, oracle, pagerduty, papermill, password, pinot, plexus, +postgres, presto, qds, qubole, rabbitmq, redis, s3, salesforce, samba, segment, sendgrid, sentry, +sftp, singularity, slack, snowflake, spark, sqlite, ssh, statsd, tableau, telegram, trino, vertica, +virtualenv, webhdfs, winrm, yandex, zendesk .. END EXTRAS HERE @@ -616,8 +629,8 @@ The dependency list is automatically used during PyPI packages generation. Cross-dependencies between provider packages are converted into extras - if you need functionality from the other provider package you can install it adding [extra] after the -``apache-airflow-backport-providers-PROVIDER`` for example: -``pip install apache-airflow-backport-providers-google[amazon]`` in case you want to use GCP +``apache-airflow-providers-PROVIDER`` for example: +``pip install apache-airflow-providers-google[amazon]`` in case you want to use GCP transfer operators from Amazon ECS. 
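For example, a sketch of such an installation, combining the extra with a constraints file
(the constraints URL follows the pattern used elsewhere in this document and assumes Python 3.6):

.. code-block:: bash

    # the amazon extra pulls in the cross-provider dependency; constraints pin the versions
    pip install "apache-airflow-providers-google[amazon]" \
      --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-master/constraints-3.6.txt"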
If you add a new dependency between different providers packages, it will be detected automatically during
@@ -640,19 +653,22 @@ Here is the list of packages and their extras:
 ==========================  ===========================
 Package                     Extras
 ==========================  ===========================
+airbyte                     http
 amazon                      apache.hive,google,imap,mongo,mysql,postgres,ssh
+apache.beam                 google
 apache.druid                apache.hive
 apache.hive                 amazon,microsoft.mssql,mysql,presto,samba,vertica
 apache.livy                 http
 dingding                    http
 discord                     http
-google                      amazon,apache.cassandra,cncf.kubernetes,facebook,microsoft.azure,microsoft.mssql,mysql,postgres,presto,salesforce,sftp,ssh
+google                      amazon,apache.beam,apache.cassandra,cncf.kubernetes,facebook,microsoft.azure,microsoft.mssql,mysql,postgres,presto,salesforce,sftp,ssh,trino
 hashicorp                   google
 microsoft.azure             google,oracle
 microsoft.mssql             odbc
-mysql                       amazon,presto,vertica
+mysql                       amazon,presto,trino,vertica
 opsgenie                    http
 postgres                    amazon
+salesforce                  tableau
 sftp                        ssh
 slack                       http
 snowflake                   slack
@@ -740,7 +756,7 @@ providers.
   not only "green path"

* Integration tests where 'local' integration with a component is possible (for example tests with
-  MySQL/Postgres DB/Presto/Kerberos all have integration tests which run with real, dockerised components
+  MySQL/Postgres DB/Trino/Kerberos all have integration tests which run with real, dockerized components

* System Tests which provide end-to-end testing, usually testing together several operators, sensors,
  transfers connecting to a real external system
@@ -748,26 +764,13 @@ providers.
 You can read more about our approach for tests in `TESTING.rst `_ but here
 are some highlights.
-
-Backport providers
-------------------
-
-You can also build backport provider packages for Airflow 1.10. They aim to provide a bridge when users
-of Airflow 1.10 want to migrate to Airflow 2.0. The backport packages are named similarly to the
-provider packages, but with "backport" added:
-
-* ``apache-airflow-backport-provider-*``
-
-Those backport providers are automatically refactored to work with Airflow 1.10.* and have a few
-limitations described in those packages.
-
 Dependency management
 =====================
 Airflow is not a standard python project. Most of the python projects fall into one of two types -
 application or library. As described in
-[StackOverflow Question](https://stackoverflow.com/questions/28509481/should-i-pin-my-python-dependencies-versions)
-decision whether to pin (freeze) dependency versions for a python project depends on the type. For
+`this StackOverflow question `_,
+the decision whether to pin (freeze) dependency versions for a python project depends on the type. For
 applications, dependencies should be pinned, but for libraries, they should be open.
 For applications, pinning the dependencies makes it more stable to install in the future - because new
@@ -810,11 +813,26 @@ install in case a direct or transitive dependency is released that breaks the in
 when installing ``apache-airflow``, you might need to provide additional constraints (for
 example ``pip install apache-airflow==1.10.2 Werkzeug<1.0.0``)
-However we now have ``constraints-.txt`` files generated
-automatically and committed to orphan ``constraints-master``, ``constraints-2-0` and ``constraints-1-10`` branches based on
-the set of all latest working and tested dependency versions. Those
-``constraints-.txt`` files can be used as
-constraints file when installing Apache Airflow - either from the sources:
+There are several sets of constraints we keep:
+
+* 'constraints' - those are constraints generated by matching the current airflow version from sources
+  and providers that are installed from PyPI. Those are the constraints used by users who want to
+  install airflow with pip; they are named ``constraints-.txt``.
+
+* "constraints-source-providers" - those are constraints generated by using providers installed from
+  current sources. While adding new providers, their dependencies might change, so this set
+  is the current set of constraints for airflow and providers from the current master sources.
+  Those constraints are used by the CI system to keep a "stable" set of constraints. They are named
+  ``constraints-source-providers-.txt``
+
+* "constraints-no-providers" - those are constraints generated from only Apache Airflow, without any
+  providers. If you want to manage airflow separately and then add providers individually, you can
+  use those. Those constraints are named ``constraints-no-providers-.txt``.
+
+The first set can be used as a constraints file when installing Apache Airflow in a repeatable way.
+It can be done from the sources:

.. code-block:: bash
@@ -850,9 +868,57 @@ fixed valid constraints 1.10.12 can be used by using ``constraints-1.10.12`` tag
 There are different sets of fixed constraint files for different python major/minor versions and you
 should use the right file for the right python version.

-The ``constraints-.txt`` will be automatically regenerated by CI cron job
-every time after the ``setup.py`` is updated and pushed if the tests are successful. There are separate
-jobs for each python version.
+If you want to update just airflow dependencies, without paying attention to providers, you can do it
+using the ``constraints-no-providers`` constraint files as well.
+
+.. code-block:: bash
+
+    pip install . --upgrade \
+      --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-master/constraints-no-providers-3.6.txt"
+
+
+The ``constraints-.txt`` and ``constraints-no-providers-.txt``
+will be automatically regenerated by the CI job every time after the ``setup.py`` is updated and pushed,
+if the tests are successful.
+
+Manually generating constraint files
+------------------------------------
+
+The constraint files are generated automatically by the CI job. Sometimes, however, they need to be
+regenerated manually (committers only), for example when the master build did not succeed for quite
+some time. This can be done by running the following (it utilizes parallel preparation of the constraints):
+
+.. code-block:: bash
+
+    export CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING="3.6 3.7 3.8"
+    for python_version in $(echo "${CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING}")
+    do
+      ./breeze build-image --upgrade-to-newer-dependencies --python ${python_version} --build-cache-local
+    done
+
+    GENERATE_CONSTRAINTS_MODE="pypi-providers" ./scripts/ci/constraints/ci_generate_all_constraints.sh
+    GENERATE_CONSTRAINTS_MODE="source-providers" ./scripts/ci/constraints/ci_generate_all_constraints.sh
+    GENERATE_CONSTRAINTS_MODE="no-providers" ./scripts/ci/constraints/ci_generate_all_constraints.sh
+
+    AIRFLOW_SOURCES=$(pwd)
+
+
+The constraints will be generated in the ``files/constraints-PYTHON_VERSION/constraints-*.txt`` files.
+You need to check out the right 'constraints-' branch in a separate repository, and then you can copy,
+commit and push the generated files:
+
+.. code-block:: bash
+
+    cd 
+    git pull
+    cp ${AIRFLOW_SOURCES}/files/constraints-*/constraints*.txt .
+    git diff
+    git add .
+    git commit -m "Your commit message here" --no-verify
+    git push
+
 Documentation
 =============
@@ -912,8 +978,8 @@ If this function is designed to be called by "end-users" (i.e. DAG authors) then
     ...
     # You SHOULD not commit the session here. The wrapper will take care of commit()/rollback() if exception
-Don't use time() for duration calcuations
------------------------------------------
+Don't use time() for duration calculations
+------------------------------------------
 If you wish to compute the time difference between two events within the same process, use
 ``time.monotonic()``, not ``time.time()`` nor ``timezone.utcnow()``.
@@ -959,7 +1025,7 @@ Naming Conventions for provider packages
 In Airflow 2.0 we standardized and enforced naming for provider packages, modules and classes.
 Those rules (introduced as AIP-21) were not only introduced but enforced using automated checks
 that verify if the naming conventions are followed. Here is a brief summary of the rules, for
-detailed discussion you can go to [AIP-21 Changes in import paths](https://cwiki.apache.org/confluence/display/AIRFLOW/AIP-21%3A+Changes+in+import+paths)
+detailed discussion you can go to `AIP-21 Changes in import paths `_
 The rules are as follows:
diff --git a/CONTRIBUTORS_QUICK_START.rst b/CONTRIBUTORS_QUICK_START.rst
index 3d6ca501384e9..9a8398e0862b8 100644
--- a/CONTRIBUTORS_QUICK_START.rst
+++ b/CONTRIBUTORS_QUICK_START.rst
@@ -621,7 +621,7 @@ All Tests are inside ./tests directory.
    entrypoint_exec.sh*                  run_install_and_test_provider_packages.sh*
    _in_container_script_init.sh*        run_mypy.sh*
    prod/                                run_prepare_provider_packages.sh*
-   refresh_pylint_todo.sh*              run_prepare_provider_readme.sh*
+   refresh_pylint_todo.sh*              run_prepare_provider_documentation.sh*
    run_ci_tests.sh*                     run_pylint.sh*
    run_clear_tmp.sh*                    run_system_tests.sh*
    run_docs_build.sh*                   run_tmux_welcome.sh*
@@ -634,13 +634,6 @@ All Tests are inside ./tests directory.
 - Types of tests
-  .. code-block:: bash
-
-    $ breeze --backend mysql --mysql-version 5.7 --python 3.8 --db-reset --test-type
-    All CLI Heisentests Integration Other Providers
-    API Core Helm MySQL Postgres WWW
-
-
 - Running specific type of Test
   .. code-block:: bash
@@ -650,16 +643,6 @@ All Tests are inside ./tests directory.
 - Running Integration test for specific test type
-
-  - Types of Integration Tests
-
-  ..
code-block:: bash - - $ breeze --backend mysql --mysql-version 5.7 --python 3.8 --db-reset --test-type Core --integration - - all kerberos openldap presto redis - cassandra mongo pinot rabbitmq - - Running an Integration Test .. code-block:: bash @@ -750,7 +733,7 @@ tests are applied when you commit your code.
CI tests Github + alt="CI tests GitHub">
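The hooks that run when you commit your code can also be exercised directly with the standard
``pre-commit`` CLI (a generic sketch, not a repo-specific recipe):

.. code-block:: bash

    # install the git hook scripts once per clone
    pre-commit install
    # run all hooks against all files, not only the staged ones
    pre-commit run --all-files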
@@ -836,7 +819,7 @@ To avoid burden on CI infrastructure and to save time, Pre-commit hooks can be r entrypoint_exec.sh* run_install_and_test_provider_packages.sh* _in_container_script_init.sh* run_mypy.sh* prod/ run_prepare_provider_packages.sh* - refresh_pylint_todo.sh* run_prepare_provider_readme.sh* + refresh_pylint_todo.sh* run_prepare_provider_documentation.sh* run_ci_tests.sh* run_pylint.sh* run_clear_tmp.sh* run_system_tests.sh* run_docs_build.sh* run_tmux_welcome.sh* @@ -939,7 +922,7 @@ Contribution guide Raising Pull Request -------------------- -1. Go to your Github account and open your fork project and click on Branches +1. Go to your GitHub account and open your fork project and click on Branches .. raw:: html diff --git a/Dockerfile b/Dockerfile index 8e61ada090f43..2a05964f6f840 100644 --- a/Dockerfile +++ b/Dockerfile @@ -33,7 +33,7 @@ # all the build essentials. This makes the image # much smaller. # -ARG AIRFLOW_VERSION="2.0.0.dev0" +ARG AIRFLOW_VERSION="2.0.1" ARG AIRFLOW_EXTRAS="async,amazon,celery,cncf.kubernetes,docker,dask,elasticsearch,ftp,grpc,hashicorp,http,ldap,google,microsoft.azure,mysql,postgres,redis,sendgrid,sftp,slack,ssh,statsd,virtualenv" ARG ADDITIONAL_AIRFLOW_EXTRAS="" ARG ADDITIONAL_PYTHON_DEPS="" @@ -45,10 +45,12 @@ ARG AIRFLOW_GID="50000" ARG CASS_DRIVER_BUILD_CONCURRENCY="8" ARG PYTHON_BASE_IMAGE="python:3.6-slim-buster" -ARG PYTHON_MAJOR_MINOR_VERSION="3.6" ARG AIRFLOW_PIP_VERSION=20.2.4 +# By default PIP has progress bar but you can disable it. +ARG PIP_PROGRESS_BAR="on" + ############################################################################################## # This is the build image where we build all dependencies ############################################################################################## @@ -58,9 +60,6 @@ SHELL ["/bin/bash", "-o", "pipefail", "-e", "-u", "-x", "-c"] ARG PYTHON_BASE_IMAGE ENV PYTHON_BASE_IMAGE=${PYTHON_BASE_IMAGE} -ARG PYTHON_MAJOR_MINOR_VERSION -ENV PYTHON_MAJOR_MINOR_VERSION=${PYTHON_MAJOR_MINOR_VERSION} - # Make sure noninteractive debian install is used and language variables set ENV DEBIAN_FRONTEND=noninteractive LANGUAGE=C.UTF-8 LANG=C.UTF-8 LC_ALL=C.UTF-8 \ LC_CTYPE=C.UTF-8 LC_MESSAGES=C.UTF-8 @@ -158,10 +157,20 @@ ARG AIRFLOW_EXTRAS ARG ADDITIONAL_AIRFLOW_EXTRAS="" ENV AIRFLOW_EXTRAS=${AIRFLOW_EXTRAS}${ADDITIONAL_AIRFLOW_EXTRAS:+,}${ADDITIONAL_AIRFLOW_EXTRAS} -ARG AIRFLOW_CONSTRAINTS_REFERENCE="constraints-2-0" -ARG AIRFLOW_CONSTRAINTS_LOCATION="https://raw.githubusercontent.com/apache/airflow/${AIRFLOW_CONSTRAINTS_REFERENCE}/constraints-${PYTHON_MAJOR_MINOR_VERSION}.txt" +# Allows to override constraints source +ARG CONSTRAINTS_GITHUB_REPOSITORY="apache/airflow" +ENV CONSTRAINTS_GITHUB_REPOSITORY=${CONSTRAINTS_GITHUB_REPOSITORY} + +ARG AIRFLOW_CONSTRAINTS="constraints-2.0" +ENV AIRFLOW_CONSTRAINTS=${AIRFLOW_CONSTRAINTS} +ARG AIRFLOW_CONSTRAINTS_REFERENCE="" +ENV AIRFLOW_CONSTRAINTS_REFERENCE=${AIRFLOW_CONSTRAINTS_REFERENCE} +ARG AIRFLOW_CONSTRAINTS_LOCATION="" ENV AIRFLOW_CONSTRAINTS_LOCATION=${AIRFLOW_CONSTRAINTS_LOCATION} +ARG DEFAULT_CONSTRAINTS_BRANCH="constraints-master" +ENV DEFAULT_CONSTRAINTS_BRANCH=${DEFAULT_CONSTRAINTS_BRANCH} + ENV PATH=${PATH}:/root/.local/bin RUN mkdir -p /root/.local/bin @@ -172,13 +181,17 @@ RUN if [[ -f /docker-context-files/.pypirc ]]; then \ ARG AIRFLOW_PIP_VERSION ENV AIRFLOW_PIP_VERSION=${AIRFLOW_PIP_VERSION} +# By default PIP has progress bar but you can disable it. 
+ARG PIP_PROGRESS_BAR +ENV PIP_PROGRESS_BAR=${PIP_PROGRESS_BAR} + # Install Airflow with "--user" flag, so that we can copy the whole .local folder to the final image # from the build image and always in non-editable mode ENV AIRFLOW_INSTALL_USER_FLAG="--user" ENV AIRFLOW_INSTALL_EDITABLE_FLAG="" # Upgrade to specific PIP version -RUN pip install --upgrade "pip==${AIRFLOW_PIP_VERSION}" +RUN pip install --no-cache-dir --upgrade "pip==${AIRFLOW_PIP_VERSION}" # By default we do not use pre-cached packages, but in CI/Breeze environment we override this to speed up # builds in case setup.py/setup.cfg changed. This is pure optimisation of CI/Breeze builds. @@ -191,8 +204,28 @@ ENV AIRFLOW_PRE_CACHED_PIP_PACKAGES=${AIRFLOW_PRE_CACHED_PIP_PACKAGES} ARG INSTALL_PROVIDERS_FROM_SOURCES="false" ENV INSTALL_PROVIDERS_FROM_SOURCES=${INSTALL_PROVIDERS_FROM_SOURCES} -# Only copy install_airflow_from_latest_master.sh to not invalidate cache on other script changes -COPY scripts/docker/install_airflow_from_latest_master.sh /scripts/docker/install_airflow_from_latest_master.sh +# This is airflow version that is put in the label of the image build +ARG AIRFLOW_VERSION +ENV AIRFLOW_VERSION=${AIRFLOW_VERSION} + +# Determines the way airflow is installed. By default we install airflow from PyPI `apache-airflow` package +# But it also can be `.` from local installation or GitHub URL pointing to specific branch or tag +# Of Airflow. Note That for local source installation you need to have local sources of +# Airflow checked out together with the Dockerfile and AIRFLOW_SOURCES_FROM and AIRFLOW_SOURCES_TO +# set to "." and "/opt/airflow" respectively. +ARG AIRFLOW_INSTALLATION_METHOD="apache-airflow" +ENV AIRFLOW_INSTALLATION_METHOD=${AIRFLOW_INSTALLATION_METHOD} + +# By default latest released version of airflow is installed (when empty) but this value can be overridden +# and we can install version according to specification (For example ==2.0.2 or <3.0.0). +ARG AIRFLOW_VERSION_SPECIFICATION="" +ENV AIRFLOW_VERSION_SPECIFICATION=${AIRFLOW_VERSION_SPECIFICATION} + +# Only copy common.sh to not invalidate cache on other script changes +COPY scripts/docker/common.sh /scripts/docker/common.sh + +# Only copy install_airflow_from_branch_tip.sh to not invalidate cache on other script changes +COPY scripts/docker/install_airflow_from_branch_tip.sh /scripts/docker/install_airflow_from_branch_tip.sh # By default we do not upgrade to latest dependencies ARG UPGRADE_TO_NEWER_DEPENDENCIES="false" @@ -206,7 +239,7 @@ ENV UPGRADE_TO_NEWER_DEPENDENCIES=${UPGRADE_TO_NEWER_DEPENDENCIES} # account for removed dependencies (we do not install them in the first place) RUN if [[ ${AIRFLOW_PRE_CACHED_PIP_PACKAGES} == "true" && \ ${UPGRADE_TO_NEWER_DEPENDENCIES} == "false" ]]; then \ - bash /scripts/docker/install_airflow_from_latest_master.sh; \ + bash /scripts/docker/install_airflow_from_branch_tip.sh; \ fi # By default we install latest airflow from PyPI so we do not need to copy sources of Airflow @@ -223,28 +256,11 @@ COPY ${AIRFLOW_SOURCES_FROM} ${AIRFLOW_SOURCES_TO} ARG CASS_DRIVER_BUILD_CONCURRENCY ENV CASS_DRIVER_BUILD_CONCURRENCY=${CASS_DRIVER_BUILD_CONCURRENCY} -# This is airflow version that is put in the label of the image build -ARG AIRFLOW_VERSION -ENV AIRFLOW_VERSION=${AIRFLOW_VERSION} - # Add extra python dependencies ARG ADDITIONAL_PYTHON_DEPS="" ENV ADDITIONAL_PYTHON_DEPS=${ADDITIONAL_PYTHON_DEPS} -# Determines the way airflow is installed. 
By default we install airflow from PyPI `apache-airflow` package -# But it also can be `.` from local installation or GitHub URL pointing to specific branch or tag -# Of Airflow. Note That for local source installation you need to have local sources of -# Airflow checked out together with the Dockerfile and AIRFLOW_SOURCES_FROM and AIRFLOW_SOURCES_TO -# set to "." and "/opt/airflow" respectively. -ARG AIRFLOW_INSTALLATION_METHOD="apache-airflow" -ENV AIRFLOW_INSTALLATION_METHOD=${AIRFLOW_INSTALLATION_METHOD} - -# By default latest released version of airflow is installed (when empty) but this value can be overridden -# and we can install specific version of airflow this way. -ARG AIRFLOW_INSTALL_VERSION="" -ENV AIRFLOW_INSTALL_VERSION=${AIRFLOW_INSTALL_VERSION} - -# We can seet this value to true in case we want to install .whl .tar.gz packages placed in the +# We can set this value to true in case we want to install .whl .tar.gz packages placed in the # docker-context-files folder. This can be done for both - additional packages you want to install # and for airflow as well (you have to set INSTALL_FROM_PYPI to false in this case) ARG INSTALL_FROM_DOCKER_CONTEXT_FILES="" @@ -257,9 +273,11 @@ ENV INSTALL_FROM_PYPI=${INSTALL_FROM_PYPI} # Those are additional constraints that are needed for some extras but we do not want to # Force them on the main Airflow package. -# * urllib3 - required to keep boto3 happy # * chardet<4 - required to keep snowflake happy -ARG EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS="urllib3<1.26 chardet<4" +# * urllib3 - required to keep boto3 happy +# * pyjwt<2.0.0: flask-jwt-extended requires it +# * dill<0.3.3 required by apache-beam +ARG EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS="chardet<4 urllib3<1.26 pyjwt<2.0.0 dill<0.3.3" WORKDIR /opt/airflow @@ -269,11 +287,10 @@ ARG CONTINUE_ON_PIP_CHECK_FAILURE="false" COPY scripts/docker/install*.sh /scripts/docker/ # hadolint ignore=SC2086, SC2010 -RUN if [[ ${INSTALL_FROM_PYPI} == "true" ]]; then \ - bash /scripts/docker/install_airflow.sh; \ - fi; \ - if [[ ${INSTALL_FROM_DOCKER_CONTEXT_FILES} == "true" ]]; then \ +RUN if [[ ${INSTALL_FROM_DOCKER_CONTEXT_FILES} == "true" ]]; then \ bash /scripts/docker/install_from_docker_context_files.sh; \ + elif [[ ${INSTALL_FROM_PYPI} == "true" ]]; then \ + bash /scripts/docker/install_airflow.sh; \ fi; \ if [[ -n "${ADDITIONAL_PYTHON_DEPS}" ]]; then \ bash /scripts/docker/install_additional_dependencies.sh; \ @@ -457,7 +474,7 @@ COPY --chown=airflow:root scripts/in_container/prod/entrypoint_prod.sh /entrypoi COPY --chown=airflow:root scripts/in_container/prod/clean-logs.sh /clean-logs RUN chmod a+x /entrypoint /clean-logs -RUN pip install --upgrade "pip==${AIRFLOW_PIP_VERSION}" +RUN pip install --no-cache-dir --upgrade "pip==${AIRFLOW_PIP_VERSION}" # Make /etc/passwd root-group-writeable so that user can be dynamically added by OpenShift # See https://github.com/apache/airflow/issues/9248 @@ -470,6 +487,8 @@ WORKDIR ${AIRFLOW_HOME} EXPOSE 8080 +RUN usermod -g 0 airflow -G ${AIRFLOW_GID} + USER ${AIRFLOW_UID} # Having the variable in final image allows to disable providers manager warnings when @@ -509,6 +528,9 @@ LABEL org.apache.airflow.distro="debian" \ org.opencontainers.image.title="Production Airflow Image" \ org.opencontainers.image.description="Installed Apache Airflow" +# By default PIP will install everything in ~/.local +ARG PIP_USER="true" +ENV PIP_USER=${PIP_USER} ENTRYPOINT ["/usr/bin/dumb-init", "--", "/entrypoint"] CMD ["--help"] diff --git a/Dockerfile.ci b/Dockerfile.ci index 
646adcf01808f..692a08426a97a 100644 --- a/Dockerfile.ci +++ b/Dockerfile.ci @@ -26,9 +26,6 @@ ENV PYTHON_BASE_IMAGE=${PYTHON_BASE_IMAGE} ARG AIRFLOW_VERSION="2.0.0.dev0" ENV AIRFLOW_VERSION=$AIRFLOW_VERSION -ARG PYTHON_MAJOR_MINOR_VERSION="3.6" -ENV PYTHON_MAJOR_MINOR_VERSION=${PYTHON_MAJOR_MINOR_VERSION} - # Print versions RUN echo "Base image: ${PYTHON_BASE_IMAGE}" RUN echo "Airflow version: ${AIRFLOW_VERSION}" @@ -237,11 +234,21 @@ ENV AIRFLOW_EXTRAS=${AIRFLOW_EXTRAS}${ADDITIONAL_AIRFLOW_EXTRAS:+,}${ADDITIONAL_ RUN echo "Installing with extras: ${AIRFLOW_EXTRAS}." -ARG AIRFLOW_CONSTRAINTS_REFERENCE="constraints-master" -ARG AIRFLOW_CONSTRAINTS_LOCATION="https://raw.githubusercontent.com/apache/airflow/${AIRFLOW_CONSTRAINTS_REFERENCE}/constraints-${PYTHON_MAJOR_MINOR_VERSION}.txt" +# Allows to override constraints source +ARG CONSTRAINTS_GITHUB_REPOSITORY="apache/airflow" +ENV CONSTRAINTS_GITHUB_REPOSITORY=${CONSTRAINTS_GITHUB_REPOSITORY} + +ARG AIRFLOW_CONSTRAINTS="constraints" +ENV AIRFLOW_CONSTRAINTS=${AIRFLOW_CONSTRAINTS} +ARG AIRFLOW_CONSTRAINTS_REFERENCE="" +ENV AIRFLOW_CONSTRAINTS_REFERENCE=${AIRFLOW_CONSTRAINTS_REFERENCE} +ARG AIRFLOW_CONSTRAINTS_LOCATION="" ENV AIRFLOW_CONSTRAINTS_LOCATION=${AIRFLOW_CONSTRAINTS_LOCATION} -# By changing the CI build epoch we can force reinstalling Airflow from the current master +ARG DEFAULT_CONSTRAINTS_BRANCH="constraints-master" +ENV DEFAULT_CONSTRAINTS_BRANCH=${DEFAULT_CONSTRAINTS_BRANCH} + +# By changing the CI build epoch we can force reinstalling Airflow and pip all dependencies # It can also be overwritten manually by setting the AIRFLOW_CI_BUILD_EPOCH environment variable. ARG AIRFLOW_CI_BUILD_EPOCH="3" ENV AIRFLOW_CI_BUILD_EPOCH=${AIRFLOW_CI_BUILD_EPOCH} @@ -273,7 +280,7 @@ ENV INSTALL_MYSQL_CLIENT="true" ENV AIRFLOW_INSTALLATION_METHOD="." ENV AIRFLOW_INSTALL_USER_FLAG="" ENV AIRFLOW_INSTALL_EDITABLE_FLAG="--editable" -ENV AIRFLOW_INSTALL_VERSION="" +ENV AIRFLOW_VERSION_SPECIFICATION="" # Setup PIP # By default PIP install run without cache to make image smaller @@ -281,10 +288,17 @@ ARG PIP_NO_CACHE_DIR="true" ENV PIP_NO_CACHE_DIR=${PIP_NO_CACHE_DIR} RUN echo "Pip no cache dir: ${PIP_NO_CACHE_DIR}" -RUN pip install --upgrade "pip==${AIRFLOW_PIP_VERSION}" +# By default PIP has progress bar but you can disable it. 
+ARG PIP_PROGRESS_BAR="on" +ENV PIP_PROGRESS_BAR=${PIP_PROGRESS_BAR} + +RUN pip install --no-cache-dir --upgrade "pip==${AIRFLOW_PIP_VERSION}" + +# Only copy common.sh to not invalidate further layers +COPY scripts/docker/common.sh /scripts/docker/common.sh -# Only copy install_airflow_from_latest_master.sh to not invalidate cache on other script changes -COPY scripts/docker/install_airflow_from_latest_master.sh /scripts/docker/install_airflow_from_latest_master.sh +# Only copy install_airflow_from_branch_tip.sh to not invalidate cache on other script changes +COPY scripts/docker/install_airflow_from_branch_tip.sh /scripts/docker/install_airflow_from_branch_tip.sh ARG UPGRADE_TO_NEWER_DEPENDENCIES="false" ENV UPGRADE_TO_NEWER_DEPENDENCIES=${UPGRADE_TO_NEWER_DEPENDENCIES} @@ -298,7 +312,7 @@ ENV UPGRADE_TO_NEWER_DEPENDENCIES=${UPGRADE_TO_NEWER_DEPENDENCIES} # account for removed dependencies (we do not install them in the first place) RUN if [[ ${AIRFLOW_PRE_CACHED_PIP_PACKAGES} == "true" && \ ${UPGRADE_TO_NEWER_DEPENDENCIES} == "false" ]]; then \ - bash /scripts/docker/install_airflow_from_latest_master.sh; \ + bash /scripts/docker/install_airflow_from_branch_tip.sh; \ fi # Generate random hex dump file so that we can determine whether it's faster to rebuild the image @@ -328,12 +342,15 @@ COPY setup.cfg ${AIRFLOW_SOURCES}/setup.cfg COPY airflow/__init__.py ${AIRFLOW_SOURCES}/airflow/__init__.py # Those are additional constraints that are needed for some extras but we do not want to -# Force them on the main Airflow package. Those limitations are: -# * chardet,<4: required by snowflake provider +# force them on the main Airflow package. Those limitations are: +# * chardet<4: required by snowflake provider # * lazy-object-proxy<1.5.0: required by astroid -# * pyOpenSSL: Imposed by snowflake provider https://github.com/snowflakedb/snowflake-connector-python/blob/v2.3.6/setup.py#L201 +# * pyOpenSSL: required by snowflake provider https://github.com/snowflakedb/snowflake-connector-python/blob/v2.3.6/setup.py#L201 # * urllib3<1.26: Required to keep boto3 happy -ARG EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS="chardet<4 lazy-object-proxy<1.5.0 pyOpenSSL<20.0.0 urllib3<1.26" +# * pyjwt<2.0.0: flask-jwt-extended requires it +# * dill<0.3.3 required by apache-beam +ARG EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS="chardet<4 lazy-object-proxy<1.5.0 pyOpenSSL<20.0.0 urllib3<1.26 pyjwt<2.0.0 dill<0.3.3" +ENV EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS=${EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS} ARG CONTINUE_ON_PIP_CHECK_FAILURE="false" @@ -401,7 +418,7 @@ RUN SYSTEM=$(uname -s | tr '[:upper:]' '[:lower:]') \ ARG ADDITIONAL_PYTHON_DEPS="" RUN if [[ -n "${ADDITIONAL_PYTHON_DEPS}" ]]; then \ - pip install ${ADDITIONAL_PYTHON_DEPS}; \ + pip install --no-cache-dir ${ADDITIONAL_PYTHON_DEPS}; \ fi WORKDIR ${AIRFLOW_SOURCES} diff --git a/IMAGES.rst b/IMAGES.rst index fd1497ab48af4..23012a2ecf230 100644 --- a/IMAGES.rst +++ b/IMAGES.rst @@ -116,7 +116,7 @@ parameter to Breeze: .. 
code-block:: bash

-    ./breeze build-image --python 3.7 --additional-extras=presto \
+    ./breeze build-image --python 3.7 --additional-extras=trino \
         --production-image --install-airflow-version=2.0.0
@@ -158,14 +160,16 @@ HEAD of development for constraints):

    pip install "https://github.com/apache/airflow/archive/.tar.gz#egg=apache-airflow" \
      --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-master/constraints-3.6.txt"

-You can also skip installing airflow by providing ``--install-airflow-version none`` parameter to Breeze:
+You can also skip installing airflow and install it from locally provided files by using the
+``--install-from-local-files-when-building`` and ``--disable-pypi-when-building`` parameters to Breeze:

 .. code-block:: bash

-    ./breeze build-image --python 3.7 --additional-extras=presto \
-        --production-image --install-airflow-version=none --install-from-local-files-when-building
+    ./breeze build-image --python 3.7 --additional-extras=trino \
+        --production-image --disable-pypi-when-building --install-from-local-files-when-building
+
+In this case airflow and all packages (.whl files) should be placed in the ``docker-context-files`` folder.

-In this case you usually install airflow and all packages in ``docker-context-files`` folder.

 Using cache during builds
 =========================
@@ -226,7 +228,12 @@ Choosing image registry
 =======================
 By default images are pulled and pushed from and to DockerHub registry when you use Breeze's push-image
-or build commands.
+or build commands. But as described in `CI Documentation `_, you can choose a different image
+registry by setting ``GITHUB_REGISTRY`` to ``docker.pkg.github.com`` for GitHub Package Registry or
+``ghcr.io`` for GitHub Container Registry.
+
+The default is the GitHub Package Registry. Pull Request forks have no access to the secret, but they
+auto-detect the registry used when they wait for the images.

 Our images are named like that:
@@ -264,7 +271,7 @@ image name if DockerHub is not used as registry). Also GitHub has its own struct
 each project has its own registry naming convention that should be followed. The name of
 images for GitHub registry are different as they must follow limitation of the registry used.
-We are still using Github Packages as registry, but we are in the process of testing and switching
+We are still using GitHub Packages as registry, but we are in the process of testing and switching
 to GitHub Container Registry, and the naming conventions are slightly different (GitHub Packages
 required all packages to have "organization/repository/" URL prefix ("apache/airflow/",
 where in GitHub Container Registry, all images are in "organization" not in "repository" and they are all
@@ -347,6 +354,60 @@ GitHub Container Registry

     docker login ghcr.io
+
+Interacting with container registries
+=====================================
+
+Since there are different naming conventions used for Airflow images and there are multiple images used,
+`Breeze `_ provides an easy-to-use management interface for the images. Our
+`CI system `_ is designed to automatically refresh caches, rebuild
+the images periodically and update them whenever a new version of base Python is released.
+However, occasionally, you might need to rebuild images locally and push them directly to the registries
+to refresh them.
+
+This can be done with the ``Breeze`` command line, which has an easy-to-use tool to manage those
+images. For
For +example: + + +Force building Python 3.6 CI image using local cache and pushing it container registry: + +.. code-block:: bash + + ./breeze build-image --python 3.6 --force-build-images --build-cache-local + ./breeze push-image --python 3.6 --github-registry ghcr.io + + +Building Python 3.7 PROD images (both build and final image) using cache pulled +from ``docker.pkg.github.com`` and pushing it back: + +.. code-block:: bash + + ./breeze build-image --production-image --python 3.7 --github-registry docker.pkg.github.com + ./breeze push-image --production-image --python 3.7 --github-registry docker.pkg.github.com + + +Building Python 3.8 CI image using cache pulled from DockerHub and pushing it back: + +.. code-block:: bash + + ./breeze build-image --python 3.8 + ./breeze push-image --python 3.8 + +You can also pull and run images being result of a specific CI run in GitHub Actions. This is a powerful +tool that allows to reproduce CI failures locally, enter the images and fix them much faster. It is enough +to pass ``--github-image-id`` and the registry and Breeze will download and execute commands using +the same image that was used during the CI build. + +For example this command will run the same Python 3.8 image as was used in 210056909 +run with enabled Kerberos integration (assuming docker.pkg.github.com was used as build cache). + +.. code-block:: bash + + ./breeze --github-image-id 210056909 \ + --github-registry docker.pkg.github.com \ + --python 3.8 --integration kerberos + +You can see more details and examples in `Breeze `_ + Technical details of Airflow images =================================== @@ -371,11 +432,11 @@ It first pre-installs them from the right GitHub branch and only after that fina done from either local sources or remote location (PIP or GitHub repository). Customizing the image -..................... +--------------------- Customizing the image is an alternative way of adding your own dependencies to the image. -The easiest way to build the image image is to use ``breeze`` script, but you can also build such customized +The easiest way to build the image is to use ``breeze`` script, but you can also build such customized image by running appropriately crafted docker build in which you specify all the ``build-args`` that you need to add to customize it. You can read about all the args and ways you can build the image in the `<#ci-image-build-arguments>`_ chapter below. @@ -389,11 +450,9 @@ additional apt dev and runtime dependencies. docker build . -f Dockerfile.ci \ --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \ - --build-arg PYTHON_MAJOR_MINOR_VERSION=3.7 \ --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow" \ --build-arg AIRFLOW_VERSION="2.0.0" \ - --build-arg AIRFLOW_INSTALL_VERSION="==2.0.0" \ - --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-2-0" \ + --build-arg AIRFLOW_VERSION_SPECIFICATION="==2.0.0" \ --build-arg AIRFLOW_SOURCES_FROM="empty" \ --build-arg AIRFLOW_SOURCES_TO="/empty" \ --build-arg ADDITIONAL_AIRFLOW_EXTRAS="jdbc" @@ -424,15 +483,13 @@ based on example in `this comment `_ for details but those are: * Integration - tests that require external integration images running in docker-compose -* Heisentests - tests that are vulnerable to some side effects and are better to be run on their own * Quarantined - tests that are flaky and need to be fixed * Postgres - tests that require Postgres database. They are only run when backend is Postgres * MySQL - tests that require MySQL database. 
  They are only run when backend is MySQL
@@ -126,7 +125,8 @@ The logic implemented for the changes works as follows:
 1) In case of direct push (so when PR gets merged) or scheduled run, we always run all tests and checks.
    This is in order to make sure that the merge did not miss anything important. The remainder of the logic
-   is executed only in case of Pull Requests.
+   is executed only in case of Pull Requests. We do not add providers tests in case DEFAULT_BRANCH is
+   different from master, because providers only matter in the master branch and in PRs to the master branch.
 2) We retrieve which files have changed in the incoming Merge Commit (github.sha is a merge commit
    automatically prepared by GitHub in case of Pull Request, so we can retrieve the list of changed
@@ -134,7 +134,9 @@ The logic implemented for the changes works as follows:
 3) If any of the important, environment files changed (Dockerfile, ci scripts, setup.py, GitHub
    workflow files), then we again run all tests and checks. Those are cases where the logic of the checks changed
-   or the environment for the checks changed so we want to make sure to check everything.
+   or the environment for the checks changed so we want to make sure to check everything. We do not add
+   providers tests in case DEFAULT_BRANCH is different from master, because providers only
+   matter in the master branch and in PRs to the master branch.
 4) If any of py files changed: we need to have CI image and run full static checks so we enable image building
@@ -158,14 +160,14 @@ The logic implemented for the changes works as follows:
    b) if any of the Airflow API files changed we enable ``API`` test type
    c) if any of the Airflow CLI files changed we enable ``CLI`` test type and Kubernetes tests
       (the K8S tests depend on CLI changes as helm chart uses CLI to run Airflow).
-   d) if any of the Provider files changed we enable ``Providers`` test type
+   d) if this is the master branch and any of the Provider files changed we enable ``Providers`` test type
    e) if any of the WWW files changed we enable ``WWW`` test type
    f) if any of the Kubernetes files changed we enable ``Kubernetes`` test type
    g) Then we subtract count of all the ``specific`` above per-type changed files from the count of
      all changed files. In case there are any files changed, then we assume that some unknown files
      changed (likely from the core of airflow) and in this case we enable all test types above and the
      Core test types - simply because we do not want to risk to miss anything.
-   h) In all cases where tests are enabled we also add Heisentests, Integration and - depending on
+   h) In all cases where tests are enabled we also add Integration and - depending on
      the backend used - Postgres or MySQL types of tests.
 10) Quarantined tests are always run when tests are run - we need to run them often to observe how
@@ -237,7 +239,7 @@ As explained above the approval and matrix tests workflow works according to the
     :align: center
     :alt: Full tests are needed for the PR
-4) If this or another committer "request changes" in in a previously approved PR with "full tests needed"
+4) If this or another committer "request changes" in a previously approved PR with "full tests needed"
    label, the bot automatically removes the label, moving it back to "run only default set of parameters"
    mode. For PRs touching core of airflow once the PR gets approved back, the label will be restored.
    If it was manually set by the committer, it has to be restored manually.
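To make the per-type selection described above concrete, here is a much-simplified bash sketch of
the idea (illustrative only - the variable names and path globs are assumptions, not the actual CI
scripts):

.. code-block:: bash

    # Illustrative only: enable test types based on which files changed in the PR.
    changed_files=$(git diff --name-only "origin/master...HEAD")
    test_types=()
    grep -q "^airflow/api" <<<"${changed_files}" && test_types+=("API")
    # CLI changes also trigger Kubernetes tests (the helm chart uses the CLI to run Airflow)
    grep -q "^airflow/cli" <<<"${changed_files}" && test_types+=("CLI" "Kubernetes")
    grep -q "^airflow/providers" <<<"${changed_files}" && test_types+=("Providers")
    grep -q "^airflow/www" <<<"${changed_files}" && test_types+=("WWW")
    echo "Selected test types: ${test_types[*]:-all}"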
@@ -248,7 +250,7 @@ As explained above the approval and matrix tests workflow works according to the for the PRs and they provide good "notification" for the committer to act on a PR that was recently approved. -The PR approval workflow is possible thanks two two custom GitHub Actions we've developed: +The PR approval workflow is possible thanks to two custom GitHub Actions we've developed: * `Get workflow origin `_ * `Label when approved `_ diff --git a/README.md b/README.md index 80e77895ee96a..02701312a701d 100644 --- a/README.md +++ b/README.md @@ -84,7 +84,7 @@ Apache Airflow is tested with: | PostgreSQL | 9.6, 10, 11, 12, 13 | 9.6, 10, 11, 12, 13 | 9.6, 10, 11, 12, 13 | | MySQL | 5.7, 8 | 5.7, 8 | 5.6, 5.7 | | SQLite | 3.15.0+ | 3.15.0+ | 3.15.0+ | -| Kubernetes | 1.16.9, 1.17.5, 1.18.6 | 1.16.9, 1.17.5, 1.18.6 | 1.16.9, 1.17.5, 1.18.6 | +| Kubernetes | 1.20, 1.19, 1.18 | 1.20, 1.19, 1.18 | 1.18, 1.17, 1.16 | **Note:** MySQL 5.x versions are unable to or have limitations with running multiple schedulers -- please see the "Scheduler" docs. MariaDB is not tested/recommended. @@ -174,7 +174,8 @@ pip install apache-airflow[postgres,google]==2.0.1 \ --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.0.1/constraints-3.7.txt" ``` -For information on installing backport providers check [backport-providers.rst](docs/apache-airflow/backport-providers.rst). +For information on installing provider packages check +[providers](http://airflow.apache.org/docs/apache-airflow-providers/index.html). ## Official source code diff --git a/STATIC_CODE_CHECKS.rst b/STATIC_CODE_CHECKS.rst index 55674bf950e8a..13ac4e99cd84e 100644 --- a/STATIC_CODE_CHECKS.rst +++ b/STATIC_CODE_CHECKS.rst @@ -162,6 +162,8 @@ require Breeze Docker images to be installed locally: ----------------------------------- ---------------------------------------------------------------- ------------ ``sort-in-the-wild`` Sort INTHEWILD.md alphabetically. ----------------------------------- ---------------------------------------------------------------- ------------ +``sort-spelling-wordlist`` Sort alphabetically and uniquify spelling_wordlist.txt. +----------------------------------- ---------------------------------------------------------------- ------------ ``stylelint`` Checks CSS files with stylelint. ----------------------------------- ---------------------------------------------------------------- ------------ ``trailing-whitespace`` Removes trailing whitespace at end of line. diff --git a/TESTING.rst b/TESTING.rst index 1efc63c3ee34a..24d1e72628825 100644 --- a/TESTING.rst +++ b/TESTING.rst @@ -177,7 +177,7 @@ kinds of test types: ./breeze --test-type Providers --db-reset tests -* Special kinds of tests - Integration, Heisentests, Quarantined, Postgres, MySQL, which are marked with pytest +* Special kinds of tests - Integration, Quarantined, Postgres, MySQL, which are marked with pytest marks and for those you need to select the type using test-type switch. If you want to run such tests using breeze, you need to pass appropriate ``--test-type`` otherwise the test will be skipped. 
Similarly to the per-directory tests if you do not specify the test or tests to run,
@@ -281,12 +281,12 @@ The following integrations are available:
     - Integration required for OpenLDAP hooks
   * - pinot
     - Integration required for Apache Pinot hooks
-  * - presto
-    - Integration required for Presto hooks
   * - rabbitmq
     - Integration required for Celery executor tests
   * - redis
     - Integration required for Celery executor tests
+  * - trino
+    - Integration required for Trino hooks

 To start the ``mongo`` integration only, enter:
@@ -418,16 +418,85 @@ Those tests are marked with ``@pytest.mark.quarantined`` annotation.
 Those tests are skipped by default. You can enable them with ``--include-quarantined`` flag. You
 can also decide to only run tests with ``-m quarantined`` flag to run only those tests.

-Heisen tests
-------------
-Some of our tests are Heisentests. This means that they run fine in isolation but when they run together with
-others they might fail the tests (this is likely due to resource consumptions). Therefore we run those tests
-in isolation.
+Airflow test types
+==================
+
+Airflow tests in the CI environment are split into several test types:
+
+* Always - those are tests that should always be executed (always folder)
+* Core - for the core Airflow functionality (core folder)
+* API - Tests for the Airflow API (api and api_connexion folders)
+* CLI - Tests for the Airflow CLI (cli folder)
+* WWW - Tests for the Airflow webserver (www folder, and www_rbac in 1.10)
+* Providers - Tests for all Providers of Airflow (providers folder)
+* Other - all other tests (all other folders that are not part of any of the above)
+
+This is done for two reasons:
+
+1. in order to selectively run only a subset of the test types for some PRs
+2. in order to allow parallel execution of the tests on Self-Hosted runners
+
+For case 1, see `Pull Request Workflow `_ for details.
+
+For case 2, we can utilise memory and CPUs available on both CI and local development machines to run
+tests in parallel. This way we can decrease the time of running all tests on self-hosted runners from
+60 minutes to ~15 minutes.
+
+.. note::
+
+  We need to split tests manually into separate suites rather than utilise
+  ``pytest-xdist`` or ``pytest-parallel``, which could be a simpler and much more "native" parallelization
+  mechanism. Unfortunately, we cannot utilise those tools because our tests are not truly ``unit`` tests that
+  can run in parallel. A lot of our tests rely on shared databases - and they update/reset/cleanup the
+  databases while they are executing. They are also exercising features of the Database such as locking, which
+  further increases cross-dependency between tests. Until we make all our tests truly unit tests (not
+  touching the database), or until we isolate all such tests to a separate test type, we cannot really rely on
+  frameworks that run tests in parallel. In our solution each of the test types is run in parallel with its
+  own database (!) so when we have 8 test types running in parallel, there are in fact 8 databases running
+  behind the scenes to support them and each of the test types executes its own tests sequentially.
+
+
+Running full Airflow test suite in parallel
+===========================================
+
+If you run ``./scripts/ci/testing/ci_run_airflow_testing.sh``, tests run in parallel
+on your development machine - maxing out the number of parallel runs at the number of cores you
+have available in your Docker engine.
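+
+The number of cores (and the memory) that bound this parallelism can be checked with the standard
+docker CLI, for example:
+
+.. code-block:: bash
+
+    # CPUs and total memory visible to the Docker engine
+    docker info --format '{{.NCPU}} CPUs, {{.MemTotal}} bytes of memory'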
+
+In case you do not have enough memory available to your Docker (~32 GB), the ``Integration`` test type
+is always run sequentially - after all other tests have completed (docker cleanup is performed in-between).
+
+This allows for massive speedup in full test execution. On an 8 CPU machine with 16 cores, 64 GB memory
+and a fast SSD disk, the whole suite of tests completes in about 5 minutes (!). The same suite of tests takes
+more than 30 minutes on the same machine when tests are run sequentially.
+
+.. note::
+
+  On MacOS you might have fewer CPUs and less memory available to run the tests than you have on the host,
+  simply because your Docker engine runs in a Linux Virtual Machine under-the-hood. If you want to make
+  use of the parallelism and memory usage for the CI tests you might want to increase the resources available
+  to your docker engine. See the `Resources `_ chapter
+  in the ``Docker for Mac`` documentation on how to do it.
+
+You can also limit the parallelism by specifying the maximum number of parallel jobs via the
+``MAX_PARALLEL_TEST_JOBS`` variable. If you set it to "1", all the test types will be run sequentially.
+
+.. code-block:: bash
+
+    MAX_PARALLEL_TEST_JOBS="1" ./scripts/ci/testing/ci_run_airflow_testing.sh
+
+.. note::
+
+  In case you would like to clean up after execution of such tests, you might have to clean up
+  some of the running docker containers if you used Ctrl-C to stop execution. You can easily do it by
+  running this command (it will kill all running docker containers, so do not use it if you want to keep
+  some of them running):
+
+  .. code-block:: bash
+
+      docker kill $(docker ps -q)

-Those tests are marked with ``@pytest.mark.heisentests`` annotation.
-Those tests are skipped by default. You can enable them with ``--include-heisentests`` flag. You
-can also decide to only run tests with ``-m heisentests`` flag to run only those tests.

 Running Tests with provider packages
 ====================================
@@ -560,6 +629,12 @@ Entering shell with Kubernetes Cluster
 This shell is prepared to run Kubernetes tests interactively. It has ``kubectl`` and ``kind`` cli tools
 available in the path, it has also activated virtualenv environment that allows you to run tests via pytest.
+The binaries are available in the ./.build/kubernetes-bin/``KUBERNETES_VERSION`` path.
+The virtualenv is available in ./.build/.kubernetes_venv/``KIND_CLUSTER_NAME``_host_python_``HOST_PYTHON_VERSION``
+
+Where ``KIND_CLUSTER_NAME`` is the name of the cluster and ``HOST_PYTHON_VERSION`` is the version of Python
+on the host.
+
 You can enter the shell via those scripts
    ./scripts/ci/kubernetes/ci_run_kubernetes_tests.sh [-i|--interactive]   - Activates virtual environment ready to run tests and drops you in
@@ -881,10 +956,6 @@ run Google Cloud system tests.

     RANDOM_POSTFIX=$(cat "${RANDOM_FILE}")
-
-    # install any packages from dist folder if they are available
-    if [[ ${RUN_AIRFLOW_1_10:=} == "true" ]]; then
-        pip install /dist/apache_airflow_backport_providers_{google,postgres,mysql}*.whl || true
-    fi

 To execute system tests, specify the ``--system SYSTEM``
 flag where ``SYSTEM`` is a system to run the system tests for. It can be repeated.
@@ -959,75 +1030,19 @@ example, the below command will build google postgres and mysql wheel packages:

 Those packages will be prepared in ./dist folder. This folder is mapped to /dist folder
 when you enter Breeze, so it is easy to automate installing those packages for testing.
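For instance, inside Breeze you might then install the freshly built wheels from ``/dist`` (the
wheel file name pattern below is an assumption following the provider package naming described
earlier in this document):

.. code-block:: bash

    # install the google provider wheel built into the mapped /dist folder
    pip install /dist/apache_airflow_providers_google-*.whl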
- -Installing backported for Airflow 1.10.* series ------------------------------------------------ - -The tests can be executed against the master version of Airflow, but they also work -with older versions. This is especially useful to test back-ported operators -from Airflow 2.0 to 1.10.* versions. - -To run the tests for Airflow 1.10.* series, you need to run Breeze with -``--install-airflow-version=`` to install a different version of Airflow. -If ``current`` is specified (default), then the current version of Airflow is used. -Otherwise, the released version of Airflow is installed. - -The ``-install-airflow-version=`` command make sure that the current (from sources) version of -Airflow is removed and the released version of Airflow from ``PyPI`` is installed. Note that tests sources -are not removed and they can be used to run tests (unit tests and system tests) against the -freshly installed version. - -You should automate installing of the provider packages in your own -``./files/airflow-breeze-config/variables.env`` file. You should make it depend on -``RUN_AIRFLOW_1_10`` variable value equals to "true" so that -the installation of provider packages is only performed when you install airflow 1.10.*. -The provider packages are available in ``/dist`` directory if they were prepared as described -in the previous chapter. - -Typically the command in you variables.env file will be similar to: - -.. code-block:: bash - - # install any packages from dist folder if they are available - if [[ ${RUN_AIRFLOW_1_10:=} == "true" ]]; then - pip install /dist/apache_airflow_backport_providers_{google,postgres,mysql}*.whl || true - fi - -The command above will automatically install backported google, postgres, and mysql packages if they -were prepared before entering the breeze. - - -Running system tests for backported packages in Airflow 1.10.* series ---------------------------------------------------------------------- - -Once you installed 1.10.* Airflow version with ``--install-airflow-version`` and prepared and -installed the required packages via ``variables.env`` it should be as easy as running -``pytest --system= TEST_NAME``. Note that we have default timeout for running -system tests set to 8 minutes and some system tests might take much longer to run and you might -want to add ``-o faulthandler_timeout=2400`` (2400s = 40 minutes for example) to your -pytest command. - The typical system test session ------------------------------- Here is the typical session that you need to do to run system tests: -1. Prepare provider packages +1. Enter breeze .. code-block:: bash - ./breeze prepare-provider-packages -- google postgres mysql - -2. Enter breeze with installing Airflow 1.10.*, forwarding credentials and installing - backported packages (you need an appropriate line in ``./files/airflow-breeze-config/variables.env``) - -.. code-block:: bash - - ./breeze --install-airflow-version 1.10.9 --python 3.6 --db-reset --forward-credentials restart + ./breeze --python 3.6 --db-reset --forward-credentials restart This will: -* install Airflow 1.10.9 * restarts the whole environment (i.e. recreates metadata database from the scratch) * run Breeze with python 3.6 version * reset the Airflow database @@ -1075,61 +1090,26 @@ Breeze session. They are usually expensive to run. !!!!!!!!!!!!!!!!!!!!!!!!!!!!!! Important !!!!!!!!!!!!!!!!!!!!!!!!!!!! 
-Note that in case you have to update your backported operators or system tests (they are part of -the provider packageS) you need to rebuild the packages outside of breeze and -``pip remove/pip install`` those packages to get them installed. This is not needed -if you run system tests with ``current`` Airflow version, so it is better to iterate with the -system tests with the ``current`` version and fix all problems there and only afterwards run -the tests with Airflow 1.10.* - -The typical session then looks as follows: - -1. Prepare provider packages +1. Enter breeze .. code-block:: bash - ./breeze prepare-provider-packages -- google postgres mysql + ./breeze --python 3.6 --db-reset --forward-credentials restart -2. Enter breeze with installing Airflow 1.10.*, forwarding credentials and installing - backported packages (you need an appropriate line in ``./files/airflow-breeze-config/variables.env``) - -.. code-block:: bash - - ./breeze --install-airflow-version 1.10.9 --python 3.6 --db-reset --forward-credentials restart - -3. Run create action in helper (to create slowly created resources): +2. Run create action in helper (to create slowly created resources): .. code-block:: bash python tests/providers/google/cloud/operators/test_cloud_sql_system_helper.py --action create -4. Run the tests: +3. Run the tests: .. code-block:: bash pytest -o faulthandler_timeout=2400 \ --system=google tests/providers/google/cloud/operators/test_compute_system.py -5. In case you are running provider packages tests you need to rebuild and reinstall a package - every time you change the operators/hooks or example_dags. The example below shows reinstallation - of the google package: - -In the host: - -.. code-block:: bash - - ./breeze prepare-provider-packages -- google - -In the container: - -.. code-block:: bash - - pip uninstall apache-airflow-backport-providers-google - pip install /dist/apache_airflow_backport_providers_google-*.whl - -The points 4. and 5. can be repeated multiple times without leaving the container - -6. Run delete action in helper: +4. Run delete action in helper: .. code-block:: bash diff --git a/UPDATING.md b/UPDATING.md index 60dc211ac8fa6..8735b520029d7 100644 --- a/UPDATING.md +++ b/UPDATING.md @@ -26,6 +26,7 @@ assists users migrating to a new version. **Table of contents** +- [Airflow 2.0.2](#airflow-202) - [Airflow 2.0.1](#airflow-201) - [Airflow 2.0.0](#airflow-200) - [Airflow 1.10.14](#airflow-11014) @@ -50,8 +51,6 @@ assists users migrating to a new version. -## Airflow 2.0.1 - +## Airflow 2.0.2 + +### Default `[kubernetes] enable_tcp_keepalive` is changed to `True` + +This allows Airflow to work more reliably with some environments (like Azure) by default. + +## Airflow 2.0.1 + ### Permission to view Airflow Configurations has been removed from `User` and `Viewer` role Previously, Users with `User` or `Viewer` role were able to get/view configurations using @@ -204,7 +211,7 @@ from my_plugin import MyOperator The name under `airflow.operators.` was the plugin name, where as in the second example it is the python module name where the operator is defined. -See https://airflow.apache.org/docs/stable/howto/custom-operator.html for more info. +See https://airflow.apache.org/docs/apache-airflow/stable/howto/custom-operator.html for more info. ### Importing Hooks via plugins is no longer supported @@ -222,7 +229,7 @@ from my_plugin import MyHook It is still possible (but not required) to "register" hooks in plugins. 
This is to allow future support for dynamically populating the Connections form in the UI.
 
-See https://airflow.apache.org/docs/stable/howto/custom-operator.html for more info.
+See https://airflow.apache.org/docs/apache-airflow/stable/howto/custom-operator.html for more info.
 
 ### Adding Operators and Sensors via plugins is no longer supported
 
@@ -268,7 +275,7 @@ def execution_date_fn(execution_date, ds_nodash, dag):
 
 ### The default value for `[webserver] cookie_samesite` has been changed to `Lax`
 
 As [recommended](https://flask.palletsprojects.com/en/1.1.x/config/#SESSION_COOKIE_SAMESITE) by Flask, the
-`[webserver] cookie_samesite` has been changed to `Lax` from `None`.
+`[webserver] cookie_samesite` has been changed to `Lax` from `''` (empty string).
 
 #### Changes to import paths
 
diff --git a/airflow/__init__.py b/airflow/__init__.py
index 3e84cc63807b7..7ecc4877c5935 100644
--- a/airflow/__init__.py
+++ b/airflow/__init__.py
@@ -47,6 +47,7 @@
 
 login: Optional[Callable] = None
 
+PY36 = sys.version_info >= (3, 6)
 PY37 = sys.version_info >= (3, 7)
 PY38 = sys.version_info >= (3, 8)
 
diff --git a/airflow/api_connexion/openapi/v1.yaml b/airflow/api_connexion/openapi/v1.yaml
index 0da59250e1df1..2a9f7cc08cbfc 100644
--- a/airflow/api_connexion/openapi/v1.yaml
+++ b/airflow/api_connexion/openapi/v1.yaml
@@ -1445,7 +1445,7 @@ components:
           type: string
           readOnly: true
           nullable: true
-          description: If the DAG is SubDAG then it is the top level DAG identifier. Otherwise, nulll.
+          description: If the DAG is a SubDAG, then it is the top-level DAG identifier. Otherwise, null.
         is_paused:
           type: boolean
           nullable: true
@@ -2498,6 +2498,7 @@ components:
       - queued
       - none
      - scheduled
+      - removed
 
   DagState:
     description: DAG State.
 
diff --git a/airflow/api_connexion/schemas/dag_schema.py b/airflow/api_connexion/schemas/dag_schema.py
index b15fbd6ba27b0..aabd215a76ecf 100644
--- a/airflow/api_connexion/schemas/dag_schema.py
+++ b/airflow/api_connexion/schemas/dag_schema.py
@@ -21,6 +21,7 @@
 from marshmallow import Schema, fields
 from marshmallow_sqlalchemy import SQLAlchemySchema, auto_field
 
+from airflow import DAG
 from airflow.api_connexion.schemas.common_schema import ScheduleIntervalSchema, TimeDeltaSchema, TimezoneField
 from airflow.configuration import conf
 from airflow.models.dag import DagModel, DagTag
@@ -73,15 +74,32 @@ def get_token(obj: DagModel):
 class DAGDetailSchema(DAGSchema):
     """DAG details"""
 
-    timezone = TimezoneField(dump_only=True)
-    catchup = fields.Boolean(dump_only=True)
-    orientation = fields.String(dump_only=True)
-    concurrency = fields.Integer(dump_only=True)
-    start_date = fields.DateTime(dump_only=True)
-    dag_run_timeout = fields.Nested(TimeDeltaSchema, dump_only=True, attribute="dagrun_timeout")
-    doc_md = fields.String(dump_only=True)
-    default_view = fields.String(dump_only=True)
-    params = fields.Dict(dump_only=True)
+    owners = fields.Method("get_owners", dump_only=True)
+    timezone = TimezoneField()
+    catchup = fields.Boolean()
+    orientation = fields.String()
+    concurrency = fields.Integer()
+    start_date = fields.DateTime()
+    dag_run_timeout = fields.Nested(TimeDeltaSchema, attribute="dagrun_timeout")
+    doc_md = fields.String()
+    default_view = fields.String()
+    params = fields.Dict()
+    tags = fields.Method("get_tags", dump_only=True)
+
+    @staticmethod
+    def get_tags(obj: DAG):
+        """Dumps tags as objects"""
+        tags = obj.tags
+        if tags:
+            return [DagTagSchema().dump(dict(name=tag)) for tag in tags]
+        return []
+
+    @staticmethod
+    def get_owners(obj: DAG):
+        """Convert owners attribute to
DAG representation""" + if not getattr(obj, 'owner', None): + return [] + return obj.owner.split(",") class DAGCollection(NamedTuple): diff --git a/airflow/cli/cli_parser.py b/airflow/cli/cli_parser.py index b198234c6b622..b5384b486a5c8 100644 --- a/airflow/cli/cli_parser.py +++ b/airflow/cli/cli_parser.py @@ -24,7 +24,7 @@ import textwrap from argparse import Action, ArgumentError, RawTextHelpFormatter from functools import lru_cache -from typing import Callable, Dict, Iterable, List, NamedTuple, Optional, Set, Union +from typing import Callable, Dict, Iterable, List, NamedTuple, Optional, Union from airflow import settings from airflow.cli.commands.legacy_commands import check_legacy_command @@ -178,9 +178,9 @@ def positive_int(value): "-o", "--output", ), - help="Output format. Allowed values: json, yaml, table (default: table)", - metavar="(table, json, yaml)", - choices=("table", "json", "yaml"), + help="Output format. Allowed values: json, yaml, plain, table (default: table)", + metavar="(table, json, yaml, plain)", + choices=("table", "json", "yaml", "plain"), default="table", ) ARG_COLOR = Arg( @@ -678,8 +678,8 @@ def positive_int(value): # kubernetes cleanup-pods ARG_NAMESPACE = Arg( ("--namespace",), - default='default', - help="Kubernetes Namespace", + default=conf.get('kubernetes', 'namespace'), + help="Kubernetes Namespace. Default value is `[kubernetes] namespace` in configuration.", ) ALTERNATIVE_CONN_SPECS_ARGS = [ @@ -1341,7 +1341,11 @@ class GroupCommand(NamedTuple): KUBERNETES_COMMANDS = ( ActionCommand( name='cleanup-pods', - help="Clean up Kubernetes pods in evicted/failed/succeeded states", + help=( + "Clean up Kubernetes pods " + "(created by KubernetesExecutor/KubernetesPodOperator) " + "in evicted/failed/succeeded states" + ), func=lazy_load_command('airflow.cli.commands.kubernetes_command.cleanup_pods'), args=(ARG_NAMESPACE,), ), @@ -1511,7 +1515,31 @@ class GroupCommand(NamedTuple): ), ] ALL_COMMANDS_DICT: Dict[str, CLICommand] = {sp.name: sp for sp in airflow_commands} -DAG_CLI_COMMANDS: Set[str] = {'list_tasks', 'backfill', 'test', 'run', 'pause', 'unpause', 'list_dag_runs'} + + +def _remove_dag_id_opt(command: ActionCommand): + cmd = command._asdict() + cmd['args'] = (arg for arg in command.args if arg is not ARG_DAG_ID) + return ActionCommand(**cmd) + + +dag_cli_commands: List[CLICommand] = [ + GroupCommand( + name='dags', + help='Manage DAGs', + subcommands=[ + _remove_dag_id_opt(sp) + for sp in DAGS_COMMANDS + if sp.name in ['backfill', 'list-runs', 'pause', 'unpause'] + ], + ), + GroupCommand( + name='tasks', + help='Manage tasks', + subcommands=[_remove_dag_id_opt(sp) for sp in TASKS_COMMANDS if sp.name in ['list', 'test', 'run']], + ), +] +DAG_CLI_DICT: Dict[str, CLICommand] = {sp.name: sp for sp in dag_cli_commands} class AirflowHelpFormatter(argparse.HelpFormatter): @@ -1563,10 +1591,11 @@ def get_parser(dag_parser: bool = False) -> argparse.ArgumentParser: subparsers = parser.add_subparsers(dest='subcommand', metavar="GROUP_OR_COMMAND") subparsers.required = True - subparser_list = DAG_CLI_COMMANDS if dag_parser else ALL_COMMANDS_DICT.keys() + command_dict = DAG_CLI_DICT if dag_parser else ALL_COMMANDS_DICT + subparser_list = command_dict.keys() sub_name: str for sub_name in sorted(subparser_list): - sub: CLICommand = ALL_COMMANDS_DICT[sub_name] + sub: CLICommand = command_dict[sub_name] _add_command(subparsers, sub) return parser diff --git a/airflow/cli/commands/connection_command.py b/airflow/cli/commands/connection_command.py index 
202a2521b69ab..435395b1b77d4 100644
--- a/airflow/cli/commands/connection_command.py
+++ b/airflow/cli/commands/connection_command.py
@@ -22,9 +22,9 @@
 from typing import Any, Dict, List
 from urllib.parse import urlparse, urlunparse
 
-import yaml
 from sqlalchemy.orm import exc
 
+import airflow.utils.yaml as yaml
 from airflow.cli.simple_table import AirflowConsole
 from airflow.exceptions import AirflowNotFoundException
 from airflow.hooks.base import BaseHook
diff --git a/airflow/cli/commands/kubernetes_command.py b/airflow/cli/commands/kubernetes_command.py
index 23864aedd8b7c..daf11a3e7097f 100644
--- a/airflow/cli/commands/kubernetes_command.py
+++ b/airflow/cli/commands/kubernetes_command.py
@@ -18,11 +18,11 @@
 import os
 import sys
 
-import yaml
 from kubernetes import client
 from kubernetes.client.api_client import ApiClient
 from kubernetes.client.rest import ApiException
 
+import airflow.utils.yaml as yaml
 from airflow.executors.kubernetes_executor import KubeConfig, create_pod_id
 from airflow.kubernetes import pod_generator
 from airflow.kubernetes.kube_client import get_kube_client
@@ -90,7 +90,23 @@ def cleanup_pods(args):
     print('Loading Kubernetes configuration')
     kube_client = get_kube_client()
     print(f'Listing pods in namespace {namespace}')
-    list_kwargs = {"namespace": namespace, "limit": 500}
+    airflow_pod_labels = [
+        'dag_id',
+        'task_id',
+        'execution_date',
+        'try_number',
+        'airflow_version',
+    ]
+    list_kwargs = {
+        "namespace": namespace,
+        "limit": 500,
+        "label_selector": client.V1LabelSelector(
+            match_expressions=[
+                client.V1LabelSelectorRequirement(key=label, operator="Exists")
+                for label in airflow_pod_labels
+            ]
+        ),
+    }
     while True:  # pylint: disable=too-many-nested-blocks
         pod_list = kube_client.list_namespaced_pod(**list_kwargs)
         for pod in pod_list.items:
diff --git a/airflow/cli/simple_table.py b/airflow/cli/simple_table.py
index 696b9bfcf3302..2aa470775564d 100644
--- a/airflow/cli/simple_table.py
+++ b/airflow/cli/simple_table.py
@@ -18,12 +18,13 @@
 import json
 from typing import Any, Callable, Dict, List, Optional, Union
 
-import yaml
 from rich.box import ASCII_DOUBLE_HEAD
 from rich.console import Console
 from rich.syntax import Syntax
 from rich.table import Table
+from tabulate import tabulate
 
+import airflow.utils.yaml as yaml
 from airflow.plugins_manager import PluginsDirectorySource
 
@@ -56,6 +57,15 @@ def print_as_table(self, data: List[Dict]):
             table.add_row(*[str(d) for d in row.values()])
         self.print(table)
 
+    def print_as_plain_table(self, data: List[Dict]):
+        """Renders a list of dictionaries as a simple table that can be easily piped"""
+        if not data:
+            self.print("No data found")
+            return
+        rows = [d.values() for d in data]
+        output = tabulate(rows, tablefmt="plain", headers=data[0].keys())
+        print(output)
+
     # pylint: disable=too-many-return-statements
     def _normalize_data(self, value: Any, output: str) -> Optional[Union[list, str, dict]]:
         if isinstance(value, (tuple, list)):
@@ -76,6 +86,7 @@ def print_as(self, data: List[Union[Dict, Any]], output: str, mapper: Optional[C
             "json": self.print_as_json,
             "yaml": self.print_as_yaml,
             "table": self.print_as_table,
+            "plain": self.print_as_plain_table,
         }
         renderer = output_to_renderer.get(output)
         if not renderer:
diff --git a/airflow/config_templates/config.yml b/airflow/config_templates/config.yml
index a350a079fdbd3..c92acf8d25cb6 100644
--- a/airflow/config_templates/config.yml
+++ b/airflow/config_templates/config.yml
@@ -718,7 +718,7 @@
     - name: auth_backend
       description: |
         How to authenticate users of the API.
See - https://airflow.apache.org/docs/stable/security.html for possible values. + https://airflow.apache.org/docs/apache-airflow/stable/security.html for possible values. ("airflow.api.auth.backend.default" allows all requests for historic reasons) version_added: ~ type: string @@ -1987,6 +1987,8 @@ description: | If False (and delete_worker_pods is True), failed worker pods will not be deleted so users can investigate them. + This only prevents removal of worker pods where the worker itself failed, + not when the task it ran failed. version_added: 1.10.11 type: string example: ~ @@ -2063,7 +2065,7 @@ version_added: ~ type: boolean example: ~ - default: "False" + default: "True" - name: tcp_keep_idle description: | When the `enable_tcp_keepalive` option is enabled, TCP probes a connection that has diff --git a/airflow/config_templates/default_airflow.cfg b/airflow/config_templates/default_airflow.cfg index 72e2e43f51402..bc4d54a0b8d3a 100644 --- a/airflow/config_templates/default_airflow.cfg +++ b/airflow/config_templates/default_airflow.cfg @@ -384,7 +384,7 @@ fail_fast = False enable_experimental_api = False # How to authenticate users of the API. See -# https://airflow.apache.org/docs/stable/security.html for possible values. +# https://airflow.apache.org/docs/apache-airflow/stable/security.html for possible values. # ("airflow.api.auth.backend.default" allows all requests for historic reasons) auth_backend = airflow.api.auth.backend.deny_all @@ -970,6 +970,8 @@ delete_worker_pods = True # If False (and delete_worker_pods is True), # failed worker pods will not be deleted so users can investigate them. +# This only prevents removal of worker pods where the worker itself failed, +# not when the task it ran failed. delete_worker_pods_on_failure = False # Number of Kubernetes Worker Pod creation calls per scheduler loop. @@ -1012,7 +1014,7 @@ delete_option_kwargs = # Enables TCP keepalive mechanism. This prevents Kubernetes API requests to hang indefinitely # when idle connection is time-outed on services like cloud load balancers or firewalls. -enable_tcp_keepalive = False +enable_tcp_keepalive = True # When the `enable_tcp_keepalive` option is enabled, TCP probes a connection that has # been idle for `tcp_keep_idle` seconds. diff --git a/airflow/configuration.py b/airflow/configuration.py index ecd2bc6847bf4..dc01d4a8dff10 100644 --- a/airflow/configuration.py +++ b/airflow/configuration.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. 
+import functools import json import logging import multiprocessing @@ -30,12 +31,8 @@ # Ignored Mypy on configparser because it thinks the configparser module has no _UNSET attribute from configparser import _UNSET, ConfigParser, NoOptionError, NoSectionError # type: ignore -from distutils.version import StrictVersion from json.decoder import JSONDecodeError -from typing import Dict, List, Optional, Tuple, Union - -import yaml -from cryptography.fernet import Fernet +from typing import Dict, List, Optional, Union from airflow.exceptions import AirflowConfigException from airflow.secrets import DEFAULT_SECRETS_SEARCH_PATH, BaseSecretsBackend @@ -89,15 +86,9 @@ def _get_config_value_from_secret_backend(config_key): return secrets_client.get_config(config_key) -def _read_default_config_file(file_name: str) -> Tuple[str, str]: +def _default_config_file_path(file_name: str): templates_dir = os.path.join(os.path.dirname(__file__), 'config_templates') - file_path = os.path.join(templates_dir, file_name) - with open(file_path, encoding='utf-8') as config_file: - return config_file.read(), file_path - - -DEFAULT_CONFIG, DEFAULT_CONFIG_FILE_PATH = _read_default_config_file('default_airflow.cfg') -TEST_CONFIG, TEST_CONFIG_FILE_PATH = _read_default_config_file('default_test.cfg') + return os.path.join(templates_dir, file_name) def default_config_yaml() -> dict: @@ -106,10 +97,9 @@ def default_config_yaml() -> dict: :return: Python dictionary containing configs & their info """ - templates_dir = os.path.join(os.path.dirname(__file__), 'config_templates') - file_path = os.path.join(templates_dir, "config.yml") + import airflow.utils.yaml as yaml - with open(file_path) as config_file: + with open(_default_config_file_path('config.yml')) as config_file: return yaml.safe_load(config_file) @@ -239,11 +229,17 @@ def _validate_config_dependencies(self): raise AirflowConfigException(f"error: cannot use sqlite with the {self.get('core', 'executor')}") if is_sqlite: import sqlite3 + from distutils.version import StrictVersion + + from airflow.utils.docs import get_docs_url # Some of the features in storing rendered fields require sqlite version >= 3.15.0 min_sqlite_version = '3.15.0' if StrictVersion(sqlite3.sqlite_version) < StrictVersion(min_sqlite_version): - raise AirflowConfigException(f"error: cannot use sqlite version < {min_sqlite_version}") + raise AirflowConfigException( + f"error: sqlite C library version too old (< {min_sqlite_version}). " + f"See {get_docs_url('howto/set-up-database.rst#setting-up-a-sqlite-database')}" + ) if self.has_option('core', 'mp_start_method'): mp_start_method = self.get('core', 'mp_start_method') @@ -679,12 +675,15 @@ def load_test_config(self): Note: this is not reversible. 
""" - # override any custom settings with defaults - log.info("Overriding settings with defaults from %s", DEFAULT_CONFIG_FILE_PATH) - self.read_string(parameterized_config(DEFAULT_CONFIG)) + # remove all sections, falling back to defaults + for section in self.sections(): + self.remove_section(section) + # then read test config - log.info("Reading default test configuration from %s", TEST_CONFIG_FILE_PATH) - self.read_string(parameterized_config(TEST_CONFIG)) + + path = _default_config_file_path('default_test.cfg') + log.info("Reading default test configuration from %s", path) + self.read_string(_parameterized_config_from_template('default_test.cfg')) # then read any "custom" test settings log.info("Reading test configuration from %s", TEST_CONFIG_FILE) self.read(TEST_CONFIG_FILE) @@ -715,6 +714,22 @@ def _warn_deprecate(section, key, deprecated_section, deprecated_name): stacklevel=3, ) + def __getstate__(self): + return { + name: getattr(self, name) + for name in [ + '_sections', + 'is_validated', + 'airflow_defaults', + ] + } + + def __setstate__(self, state): + self.__init__() + config = state.pop('_sections') + self.read_dict(config) + self.__dict__.update(state) + def get_airflow_home(): """Get path to Airflow Home""" @@ -728,32 +743,16 @@ def get_airflow_config(airflow_home): return expand_env_var(os.environ['AIRFLOW_CONFIG']) -# Setting AIRFLOW_HOME and AIRFLOW_CONFIG from environment variables, using -# "~/airflow" and "$AIRFLOW_HOME/airflow.cfg" respectively as defaults. - -AIRFLOW_HOME = get_airflow_home() -AIRFLOW_CONFIG = get_airflow_config(AIRFLOW_HOME) -pathlib.Path(AIRFLOW_HOME).mkdir(parents=True, exist_ok=True) - - -# Set up dags folder for unit tests -# this directory won't exist if users install via pip -_TEST_DAGS_FOLDER = os.path.join( - os.path.dirname(os.path.dirname(os.path.realpath(__file__))), 'tests', 'dags' -) -if os.path.exists(_TEST_DAGS_FOLDER): - TEST_DAGS_FOLDER = _TEST_DAGS_FOLDER -else: - TEST_DAGS_FOLDER = os.path.join(AIRFLOW_HOME, 'dags') +def _parameterized_config_from_template(filename) -> str: + TEMPLATE_START = '# ----------------------- TEMPLATE BEGINS HERE -----------------------\n' -# Set up plugins folder for unit tests -_TEST_PLUGINS_FOLDER = os.path.join( - os.path.dirname(os.path.dirname(os.path.realpath(__file__))), 'tests', 'plugins' -) -if os.path.exists(_TEST_PLUGINS_FOLDER): - TEST_PLUGINS_FOLDER = _TEST_PLUGINS_FOLDER -else: - TEST_PLUGINS_FOLDER = os.path.join(AIRFLOW_HOME, 'plugins') + path = _default_config_file_path(filename) + with open(path) as fh: + for line in fh: + if line != TEMPLATE_START: + continue + return parameterized_config(fh.read().strip()) + raise RuntimeError(f"Template marker not found in {path!r}") def parameterized_config(template): @@ -774,65 +773,93 @@ def get_airflow_test_config(airflow_home): return expand_env_var(os.environ['AIRFLOW_TEST_CONFIG']) -TEST_CONFIG_FILE = get_airflow_test_config(AIRFLOW_HOME) +def _generate_fernet_key(): + from cryptography.fernet import Fernet -# only generate a Fernet key if we need to create a new config file -if not os.path.isfile(TEST_CONFIG_FILE) or not os.path.isfile(AIRFLOW_CONFIG): - FERNET_KEY = Fernet.generate_key().decode() -else: - FERNET_KEY = '' + return Fernet.generate_key().decode() -SECRET_KEY = b64encode(os.urandom(16)).decode('utf-8') -TEMPLATE_START = '# ----------------------- TEMPLATE BEGINS HERE -----------------------' -if not os.path.isfile(TEST_CONFIG_FILE): - log.info('Creating new Airflow config file for unit tests in: %s', TEST_CONFIG_FILE) - with 
open(TEST_CONFIG_FILE, 'w') as file: - cfg = parameterized_config(TEST_CONFIG) - file.write(cfg.split(TEMPLATE_START)[-1].strip()) -if not os.path.isfile(AIRFLOW_CONFIG): - log.info('Creating new Airflow config file in: %s', AIRFLOW_CONFIG) - with open(AIRFLOW_CONFIG, 'w') as file: - cfg = parameterized_config(DEFAULT_CONFIG) - cfg = cfg.split(TEMPLATE_START)[-1].strip() - file.write(cfg) - -log.info("Reading the config from %s", AIRFLOW_CONFIG) - -conf = AirflowConfigParser(default_config=parameterized_config(DEFAULT_CONFIG)) - -conf.read(AIRFLOW_CONFIG) - -if conf.has_option('core', 'AIRFLOW_HOME'): - msg = ( - 'Specifying both AIRFLOW_HOME environment variable and airflow_home ' - 'in the config file is deprecated. Please use only the AIRFLOW_HOME ' - 'environment variable and remove the config file entry.' - ) - if 'AIRFLOW_HOME' in os.environ: - warnings.warn(msg, category=DeprecationWarning) - elif conf.get('core', 'airflow_home') == AIRFLOW_HOME: - warnings.warn( - 'Specifying airflow_home in the config file is deprecated. As you ' - 'have left it at the default value you should remove the setting ' - 'from your airflow.cfg and suffer no change in behaviour.', - category=DeprecationWarning, - ) +def initialize_config(): + """ + Load the Airflow config files. + + Called for you automatically as part of the Airflow boot process. + """ + global FERNET_KEY, AIRFLOW_HOME + + default_config = _parameterized_config_from_template('default_airflow.cfg') + + conf = AirflowConfigParser(default_config=default_config) + + if conf.getboolean('core', 'unit_test_mode'): + # Load test config only + if not os.path.isfile(TEST_CONFIG_FILE): + from cryptography.fernet import Fernet + + log.info('Creating new Airflow config file for unit tests in: %s', TEST_CONFIG_FILE) + pathlib.Path(AIRFLOW_HOME).mkdir(parents=True, exist_ok=True) + + FERNET_KEY = Fernet.generate_key().decode() + + with open(TEST_CONFIG_FILE, 'w') as file: + cfg = _parameterized_config_from_template('default_test.cfg') + file.write(cfg) + + conf.load_test_config() else: - AIRFLOW_HOME = conf.get('core', 'airflow_home') - warnings.warn(msg, category=DeprecationWarning) + # Load normal config + if not os.path.isfile(AIRFLOW_CONFIG): + from cryptography.fernet import Fernet + log.info('Creating new Airflow config file in: %s', AIRFLOW_CONFIG) + pathlib.Path(AIRFLOW_HOME).mkdir(parents=True, exist_ok=True) -WEBSERVER_CONFIG = AIRFLOW_HOME + '/webserver_config.py' + FERNET_KEY = Fernet.generate_key().decode() -if not os.path.isfile(WEBSERVER_CONFIG): - log.info('Creating new FAB webserver config file in: %s', WEBSERVER_CONFIG) - DEFAULT_WEBSERVER_CONFIG, _ = _read_default_config_file('default_webserver_config.py') - with open(WEBSERVER_CONFIG, 'w') as file: - file.write(DEFAULT_WEBSERVER_CONFIG) + with open(AIRFLOW_CONFIG, 'w') as file: + file.write(default_config) -if conf.getboolean('core', 'unit_test_mode'): - conf.load_test_config() + log.info("Reading the config from %s", AIRFLOW_CONFIG) + + conf.read(AIRFLOW_CONFIG) + + if conf.has_option('core', 'AIRFLOW_HOME'): + msg = ( + 'Specifying both AIRFLOW_HOME environment variable and airflow_home ' + 'in the config file is deprecated. Please use only the AIRFLOW_HOME ' + 'environment variable and remove the config file entry.' + ) + if 'AIRFLOW_HOME' in os.environ: + warnings.warn(msg, category=DeprecationWarning) + elif conf.get('core', 'airflow_home') == AIRFLOW_HOME: + warnings.warn( + 'Specifying airflow_home in the config file is deprecated. 
As you ' + 'have left it at the default value you should remove the setting ' + 'from your airflow.cfg and suffer no change in behaviour.', + category=DeprecationWarning, + ) + else: + AIRFLOW_HOME = conf.get('core', 'airflow_home') + warnings.warn(msg, category=DeprecationWarning) + + # They _might_ have set unit_test_mode in the airflow.cfg, we still + # want to respect that and then load the unittests.cfg + if conf.getboolean('core', 'unit_test_mode'): + conf.load_test_config() + + # Make it no longer a proxy variable, just set it to an actual string + global WEBSERVER_CONFIG + WEBSERVER_CONFIG = AIRFLOW_HOME + '/webserver_config.py' + + if not os.path.isfile(WEBSERVER_CONFIG): + import shutil + + log.info('Creating new FAB webserver config file in: %s', WEBSERVER_CONFIG) + shutil.copy(_default_config_file_path('default_webserver_config.py'), WEBSERVER_CONFIG) + + conf.validate() + + return conf # Historical convenience functions to access config entries @@ -1002,6 +1029,78 @@ def initialize_secrets_backends() -> List[BaseSecretsBackend]: return backend_list +@functools.lru_cache(maxsize=None) +def _DEFAULT_CONFIG(): + path = _default_config_file_path('default_airflow.cfg') + with open(path) as fh: + return fh.read() + + +@functools.lru_cache(maxsize=None) +def _TEST_CONFIG(): + path = _default_config_file_path('default_test.cfg') + with open(path) as fh: + return fh.read() + + +_deprecated = { + 'DEFAULT_CONFIG': _DEFAULT_CONFIG, + 'TEST_CONFIG': _TEST_CONFIG, + 'TEST_CONFIG_FILE_PATH': functools.partial(_default_config_file_path, ('default_test.cfg')), + 'DEFAULT_CONFIG_FILE_PATH': functools.partial(_default_config_file_path, ('default_airflow.cfg')), +} + + +def __getattr__(name): + if name in _deprecated: + warnings.warn( + f"{__name__}.{name} is deprecated and will be removed in future", + DeprecationWarning, + stacklevel=2, + ) + return _deprecated[name]() + raise AttributeError(f"module {__name__} has no attribute {name}") + + +# Setting AIRFLOW_HOME and AIRFLOW_CONFIG from environment variables, using +# "~/airflow" and "$AIRFLOW_HOME/airflow.cfg" respectively as defaults. 
+ +AIRFLOW_HOME = get_airflow_home() +AIRFLOW_CONFIG = get_airflow_config(AIRFLOW_HOME) + + +# Set up dags folder for unit tests +# this directory won't exist if users install via pip +_TEST_DAGS_FOLDER = os.path.join( + os.path.dirname(os.path.dirname(os.path.realpath(__file__))), 'tests', 'dags' +) +if os.path.exists(_TEST_DAGS_FOLDER): + TEST_DAGS_FOLDER = _TEST_DAGS_FOLDER +else: + TEST_DAGS_FOLDER = os.path.join(AIRFLOW_HOME, 'dags') + +# Set up plugins folder for unit tests +_TEST_PLUGINS_FOLDER = os.path.join( + os.path.dirname(os.path.dirname(os.path.realpath(__file__))), 'tests', 'plugins' +) +if os.path.exists(_TEST_PLUGINS_FOLDER): + TEST_PLUGINS_FOLDER = _TEST_PLUGINS_FOLDER +else: + TEST_PLUGINS_FOLDER = os.path.join(AIRFLOW_HOME, 'plugins') + + +TEST_CONFIG_FILE = get_airflow_test_config(AIRFLOW_HOME) + +SECRET_KEY = b64encode(os.urandom(16)).decode('utf-8') +FERNET_KEY = '' # Set only if needed when generating a new file +WEBSERVER_CONFIG = '' # Set by initialize_config + +conf = initialize_config() secrets_backend_list = initialize_secrets_backends() -conf.validate() + +PY37 = sys.version_info >= (3, 7) +if not PY37: + from pep562 import Pep562 + + Pep562(__name__) diff --git a/airflow/contrib/__init__.py b/airflow/contrib/__init__.py index 3a89862127ebc..37bd67f0e7a73 100644 --- a/airflow/contrib/__init__.py +++ b/airflow/contrib/__init__.py @@ -16,7 +16,3 @@ # specific language governing permissions and limitations # under the License. """This package is deprecated.""" - -import warnings - -warnings.warn("This module is deprecated.", DeprecationWarning, stacklevel=2) diff --git a/airflow/contrib/operators/__init__.py b/airflow/contrib/operators/__init__.py index ad3fa4b90dc76..2041adb9ea24a 100644 --- a/airflow/contrib/operators/__init__.py +++ b/airflow/contrib/operators/__init__.py @@ -17,11 +17,3 @@ # under the License. # """This package is deprecated. Please use `airflow.operators` or `airflow.providers.*.operators`.""" - -import warnings - -warnings.warn( - "This package is deprecated. 
Please use `airflow.operators` or `airflow.providers.*.operators`.", - DeprecationWarning, - stacklevel=2, -) diff --git a/airflow/example_dags/example_bash_operator.py b/airflow/example_dags/example_bash_operator.py index 1c22fff546451..0665971db2847 100644 --- a/airflow/example_dags/example_bash_operator.py +++ b/airflow/example_dags/example_bash_operator.py @@ -29,7 +29,7 @@ 'owner': 'airflow', } -dag = DAG( +with DAG( dag_id='example_bash_operator', default_args=args, schedule_interval='0 0 * * *', @@ -37,39 +37,35 @@ dagrun_timeout=timedelta(minutes=60), tags=['example', 'example2'], params={"example_key": "example_value"}, -) +) as dag: -run_this_last = DummyOperator( - task_id='run_this_last', - dag=dag, -) + run_this_last = DummyOperator( + task_id='run_this_last', + ) + + # [START howto_operator_bash] + run_this = BashOperator( + task_id='run_after_loop', + bash_command='echo 1', + ) + # [END howto_operator_bash] -# [START howto_operator_bash] -run_this = BashOperator( - task_id='run_after_loop', - bash_command='echo 1', - dag=dag, -) -# [END howto_operator_bash] + run_this >> run_this_last -run_this >> run_this_last + for i in range(3): + task = BashOperator( + task_id='runme_' + str(i), + bash_command='echo "{{ task_instance_key_str }}" && sleep 1', + ) + task >> run_this -for i in range(3): - task = BashOperator( - task_id='runme_' + str(i), - bash_command='echo "{{ task_instance_key_str }}" && sleep 1', - dag=dag, + # [START howto_operator_bash_template] + also_run_this = BashOperator( + task_id='also_run_this', + bash_command='echo "run_id={{ run_id }} | dag_run={{ dag_run }}"', ) - task >> run_this - -# [START howto_operator_bash_template] -also_run_this = BashOperator( - task_id='also_run_this', - bash_command='echo "run_id={{ run_id }} | dag_run={{ dag_run }}"', - dag=dag, -) -# [END howto_operator_bash_template] -also_run_this >> run_this_last + # [END howto_operator_bash_template] + also_run_this >> run_this_last if __name__ == "__main__": dag.cli() diff --git a/airflow/example_dags/example_branch_operator.py b/airflow/example_dags/example_branch_operator.py index 50eb3281f5883..7c5e166b1a02d 100644 --- a/airflow/example_dags/example_branch_operator.py +++ b/airflow/example_dags/example_branch_operator.py @@ -29,43 +29,38 @@ 'owner': 'airflow', } -dag = DAG( +with DAG( dag_id='example_branch_operator', default_args=args, start_date=days_ago(2), schedule_interval="@daily", tags=['example', 'example2'], -) +) as dag: -run_this_first = DummyOperator( - task_id='run_this_first', - dag=dag, -) - -options = ['branch_a', 'branch_b', 'branch_c', 'branch_d'] - -branching = BranchPythonOperator( - task_id='branching', - python_callable=lambda: random.choice(options), - dag=dag, -) -run_this_first >> branching + run_this_first = DummyOperator( + task_id='run_this_first', + ) -join = DummyOperator( - task_id='join', - trigger_rule='none_failed_or_skipped', - dag=dag, -) + options = ['branch_a', 'branch_b', 'branch_c', 'branch_d'] -for option in options: - t = DummyOperator( - task_id=option, - dag=dag, + branching = BranchPythonOperator( + task_id='branching', + python_callable=lambda: random.choice(options), ) + run_this_first >> branching - dummy_follow = DummyOperator( - task_id='follow_' + option, - dag=dag, + join = DummyOperator( + task_id='join', + trigger_rule='none_failed_or_skipped', ) - branching >> t >> dummy_follow >> join + for option in options: + t = DummyOperator( + task_id=option, + ) + + dummy_follow = DummyOperator( + task_id='follow_' + option, + ) + + branching 
>> t >> dummy_follow >> join diff --git a/airflow/example_dags/example_branch_python_dop_operator_3.py b/airflow/example_dags/example_branch_python_dop_operator_3.py index f01fc5067726b..badad5a2e93f8 100644 --- a/airflow/example_dags/example_branch_python_dop_operator_3.py +++ b/airflow/example_dags/example_branch_python_dop_operator_3.py @@ -31,14 +31,6 @@ 'depends_on_past': True, } -dag = DAG( - dag_id='example_branch_dop_operator_v3', - schedule_interval='*/1 * * * *', - start_date=days_ago(2), - default_args=args, - tags=['example'], -) - def should_run(**kwargs): """ @@ -59,12 +51,19 @@ def should_run(**kwargs): return "dummy_task_2" -cond = BranchPythonOperator( - task_id='condition', - python_callable=should_run, - dag=dag, -) +with DAG( + dag_id='example_branch_dop_operator_v3', + schedule_interval='*/1 * * * *', + start_date=days_ago(2), + default_args=args, + tags=['example'], +) as dag: + + cond = BranchPythonOperator( + task_id='condition', + python_callable=should_run, + ) -dummy_task_1 = DummyOperator(task_id='dummy_task_1', dag=dag) -dummy_task_2 = DummyOperator(task_id='dummy_task_2', dag=dag) -cond >> [dummy_task_1, dummy_task_2] + dummy_task_1 = DummyOperator(task_id='dummy_task_1') + dummy_task_2 = DummyOperator(task_id='dummy_task_2') + cond >> [dummy_task_1, dummy_task_2] diff --git a/airflow/example_dags/example_kubernetes_executor_config.py b/airflow/example_dags/example_kubernetes_executor_config.py index cbd69cb6bba98..5290dd8e1eb8e 100644 --- a/airflow/example_dags/example_kubernetes_executor_config.py +++ b/airflow/example_dags/example_kubernetes_executor_config.py @@ -24,6 +24,7 @@ from airflow import DAG from airflow.example_dags.libs.helper import print_stuff from airflow.operators.python import PythonOperator +from airflow.settings import AIRFLOW_HOME from airflow.utils.dates import days_ago default_args = { @@ -110,7 +111,7 @@ def test_volume_mount(): task_id="task_with_template", python_callable=print_stuff, executor_config={ - "pod_template_file": "/usr/local/airflow/pod_templates/basic_template.yaml", + "pod_template_file": os.path.join(AIRFLOW_HOME, "pod_templates/basic_template.yaml"), "pod_override": k8s.V1Pod(metadata=k8s.V1ObjectMeta(labels={"release": "stable"})), }, ) diff --git a/airflow/example_dags/example_latest_only.py b/airflow/example_dags/example_latest_only.py index 272a0548e6390..d0d5db07996a0 100644 --- a/airflow/example_dags/example_latest_only.py +++ b/airflow/example_dags/example_latest_only.py @@ -25,14 +25,14 @@ from airflow.operators.latest_only import LatestOnlyOperator from airflow.utils.dates import days_ago -dag = DAG( +with DAG( dag_id='latest_only', schedule_interval=dt.timedelta(hours=4), start_date=days_ago(2), tags=['example2', 'example3'], -) +) as dag: -latest_only = LatestOnlyOperator(task_id='latest_only', dag=dag) -task1 = DummyOperator(task_id='task1', dag=dag) + latest_only = LatestOnlyOperator(task_id='latest_only') + task1 = DummyOperator(task_id='task1') -latest_only >> task1 + latest_only >> task1 diff --git a/airflow/example_dags/example_latest_only_with_trigger.py b/airflow/example_dags/example_latest_only_with_trigger.py index 917827868b8ce..a8e96e7ac6aa7 100644 --- a/airflow/example_dags/example_latest_only_with_trigger.py +++ b/airflow/example_dags/example_latest_only_with_trigger.py @@ -28,19 +28,19 @@ from airflow.utils.dates import days_ago from airflow.utils.trigger_rule import TriggerRule -dag = DAG( +with DAG( dag_id='latest_only_with_trigger', schedule_interval=dt.timedelta(hours=4), 
start_date=days_ago(2), tags=['example3'], -) +) as dag: -latest_only = LatestOnlyOperator(task_id='latest_only', dag=dag) -task1 = DummyOperator(task_id='task1', dag=dag) -task2 = DummyOperator(task_id='task2', dag=dag) -task3 = DummyOperator(task_id='task3', dag=dag) -task4 = DummyOperator(task_id='task4', dag=dag, trigger_rule=TriggerRule.ALL_DONE) + latest_only = LatestOnlyOperator(task_id='latest_only') + task1 = DummyOperator(task_id='task1') + task2 = DummyOperator(task_id='task2') + task3 = DummyOperator(task_id='task3') + task4 = DummyOperator(task_id='task4', trigger_rule=TriggerRule.ALL_DONE) -latest_only >> task1 >> [task3, task4] -task2 >> [task3, task4] + latest_only >> task1 >> [task3, task4] + task2 >> [task3, task4] # [END example] diff --git a/airflow/example_dags/example_passing_params_via_test_command.py b/airflow/example_dags/example_passing_params_via_test_command.py index 8eaadd7eb245f..2c930cc5bdedd 100644 --- a/airflow/example_dags/example_passing_params_via_test_command.py +++ b/airflow/example_dags/example_passing_params_via_test_command.py @@ -20,23 +20,13 @@ import os from datetime import timedelta +from textwrap import dedent from airflow import DAG from airflow.operators.bash import BashOperator from airflow.operators.python import PythonOperator from airflow.utils.dates import days_ago -dag = DAG( - "example_passing_params_via_test_command", - default_args={ - "owner": "airflow", - }, - schedule_interval='*/1 * * * *', - start_date=days_ago(1), - dagrun_timeout=timedelta(minutes=4), - tags=['example'], -) - def my_py_command(test_mode, params): """ @@ -56,26 +46,6 @@ def my_py_command(test_mode, params): return 1 -my_templated_command = """ - echo " 'foo was passed in via Airflow CLI Test command with value {{ params.foo }} " - echo " 'miff was passed in via BashOperator with value {{ params.miff }} " -""" - -run_this = PythonOperator( - task_id='run_this', - python_callable=my_py_command, - params={"miff": "agg"}, - dag=dag, -) - -also_run_this = BashOperator( - task_id='also_run_this', - bash_command=my_templated_command, - params={"miff": "agg"}, - dag=dag, -) - - def print_env_vars(test_mode): """ Print out the "foo" param passed in via @@ -87,6 +57,36 @@ def print_env_vars(test_mode): print("AIRFLOW_TEST_MODE={}".format(os.environ.get('AIRFLOW_TEST_MODE'))) -env_var_test_task = PythonOperator(task_id='env_var_test_task', python_callable=print_env_vars, dag=dag) +with DAG( + "example_passing_params_via_test_command", + default_args={ + "owner": "airflow", + }, + schedule_interval='*/1 * * * *', + start_date=days_ago(1), + dagrun_timeout=timedelta(minutes=4), + tags=['example'], +) as dag: + + my_templated_command = dedent( + """ + echo " 'foo was passed in via Airflow CLI Test command with value {{ params.foo }} " + echo " 'miff was passed in via BashOperator with value {{ params.miff }} " + """ + ) + + run_this = PythonOperator( + task_id='run_this', + python_callable=my_py_command, + params={"miff": "agg"}, + ) + + also_run_this = BashOperator( + task_id='also_run_this', + bash_command=my_templated_command, + params={"miff": "agg"}, + ) + + env_var_test_task = PythonOperator(task_id='env_var_test_task', python_callable=print_env_vars) -run_this >> also_run_this + run_this >> also_run_this diff --git a/airflow/example_dags/example_python_operator.py b/airflow/example_dags/example_python_operator.py index d5e16a55e95b8..a9db34254ebcc 100644 --- a/airflow/example_dags/example_python_operator.py +++ b/airflow/example_dags/example_python_operator.py @@ 
-28,77 +28,68 @@ 'owner': 'airflow', } -dag = DAG( +with DAG( dag_id='example_python_operator', default_args=args, schedule_interval=None, start_date=days_ago(2), tags=['example'], -) - - -# [START howto_operator_python] -def print_context(ds, **kwargs): - """Print the Airflow context and ds variable from the context.""" - pprint(kwargs) - print(ds) - return 'Whatever you return gets printed in the logs' - - -run_this = PythonOperator( - task_id='print_the_context', - python_callable=print_context, - dag=dag, -) -# [END howto_operator_python] - - -# [START howto_operator_python_kwargs] -def my_sleeping_function(random_base): - """This is a function that will run within the DAG execution""" - time.sleep(random_base) - - -# Generate 5 sleeping tasks, sleeping from 0.0 to 0.4 seconds respectively -for i in range(5): - task = PythonOperator( - task_id='sleep_for_' + str(i), - python_callable=my_sleeping_function, - op_kwargs={'random_base': float(i) / 10}, - dag=dag, +) as dag: + + # [START howto_operator_python] + def print_context(ds, **kwargs): + """Print the Airflow context and ds variable from the context.""" + pprint(kwargs) + print(ds) + return 'Whatever you return gets printed in the logs' + + run_this = PythonOperator( + task_id='print_the_context', + python_callable=print_context, ) - - run_this >> task -# [END howto_operator_python_kwargs] - - -# [START howto_operator_python_venv] -def callable_virtualenv(): - """ - Example function that will be performed in a virtual environment. - - Importing at the module level ensures that it will not attempt to import the - library before it is installed. - """ - from time import sleep - - from colorama import Back, Fore, Style - - print(Fore.RED + 'some red text') - print(Back.GREEN + 'and with a green background') - print(Style.DIM + 'and in dim text') - print(Style.RESET_ALL) - for _ in range(10): - print(Style.DIM + 'Please wait...', flush=True) - sleep(10) - print('Finished') - - -virtualenv_task = PythonVirtualenvOperator( - task_id="virtualenv_python", - python_callable=callable_virtualenv, - requirements=["colorama==0.4.0"], - system_site_packages=False, - dag=dag, -) -# [END howto_operator_python_venv] + # [END howto_operator_python] + + # [START howto_operator_python_kwargs] + def my_sleeping_function(random_base): + """This is a function that will run within the DAG execution""" + time.sleep(random_base) + + # Generate 5 sleeping tasks, sleeping from 0.0 to 0.4 seconds respectively + for i in range(5): + task = PythonOperator( + task_id='sleep_for_' + str(i), + python_callable=my_sleeping_function, + op_kwargs={'random_base': float(i) / 10}, + ) + + run_this >> task + # [END howto_operator_python_kwargs] + + # [START howto_operator_python_venv] + def callable_virtualenv(): + """ + Example function that will be performed in a virtual environment. + + Importing at the module level ensures that it will not attempt to import the + library before it is installed. 
+ """ + from time import sleep + + from colorama import Back, Fore, Style + + print(Fore.RED + 'some red text') + print(Back.GREEN + 'and with a green background') + print(Style.DIM + 'and in dim text') + print(Style.RESET_ALL) + for _ in range(10): + print(Style.DIM + 'Please wait...', flush=True) + sleep(10) + print('Finished') + + virtualenv_task = PythonVirtualenvOperator( + task_id="virtualenv_python", + python_callable=callable_virtualenv, + requirements=["colorama==0.4.0"], + system_site_packages=False, + ) + # [END howto_operator_python_venv] diff --git a/airflow/example_dags/example_short_circuit_operator.py b/airflow/example_dags/example_short_circuit_operator.py index 38163a07acf3c..3836ef9490257 100644 --- a/airflow/example_dags/example_short_circuit_operator.py +++ b/airflow/example_dags/example_short_circuit_operator.py @@ -27,27 +27,25 @@ 'owner': 'airflow', } -dag = DAG( +with DAG( dag_id='example_short_circuit_operator', default_args=args, start_date=dates.days_ago(2), tags=['example'], -) +) as dag: -cond_true = ShortCircuitOperator( - task_id='condition_is_True', - python_callable=lambda: True, - dag=dag, -) + cond_true = ShortCircuitOperator( + task_id='condition_is_True', + python_callable=lambda: True, + ) -cond_false = ShortCircuitOperator( - task_id='condition_is_False', - python_callable=lambda: False, - dag=dag, -) + cond_false = ShortCircuitOperator( + task_id='condition_is_False', + python_callable=lambda: False, + ) -ds_true = [DummyOperator(task_id='true_' + str(i), dag=dag) for i in [1, 2]] -ds_false = [DummyOperator(task_id='false_' + str(i), dag=dag) for i in [1, 2]] + ds_true = [DummyOperator(task_id='true_' + str(i)) for i in [1, 2]] + ds_false = [DummyOperator(task_id='false_' + str(i)) for i in [1, 2]] -chain(cond_true, *ds_true) -chain(cond_false, *ds_false) + chain(cond_true, *ds_true) + chain(cond_false, *ds_false) diff --git a/airflow/example_dags/example_skip_dag.py b/airflow/example_dags/example_skip_dag.py index 633dc5e5a85d6..77fbf4a6fcbc4 100644 --- a/airflow/example_dags/example_skip_dag.py +++ b/airflow/example_dags/example_skip_dag.py @@ -56,6 +56,6 @@ def create_test_pipeline(suffix, trigger_rule, dag_): join >> final -dag = DAG(dag_id='example_skip_dag', default_args=args, start_date=days_ago(2), tags=['example']) -create_test_pipeline('1', 'all_success', dag) -create_test_pipeline('2', 'one_success', dag) +with DAG(dag_id='example_skip_dag', default_args=args, start_date=days_ago(2), tags=['example']) as dag: + create_test_pipeline('1', 'all_success', dag) + create_test_pipeline('2', 'one_success', dag) diff --git a/airflow/example_dags/example_subdag_operator.py b/airflow/example_dags/example_subdag_operator.py index be882814d38e0..f27aec7db07b7 100644 --- a/airflow/example_dags/example_subdag_operator.py +++ b/airflow/example_dags/example_subdag_operator.py @@ -31,36 +31,31 @@ 'owner': 'airflow', } -dag = DAG( +with DAG( dag_id=DAG_NAME, default_args=args, start_date=days_ago(2), schedule_interval="@once", tags=['example'] -) +) as dag: -start = DummyOperator( - task_id='start', - dag=dag, -) + start = DummyOperator( + task_id='start', + ) -section_1 = SubDagOperator( - task_id='section-1', - subdag=subdag(DAG_NAME, 'section-1', args), - dag=dag, -) + section_1 = SubDagOperator( + task_id='section-1', + subdag=subdag(DAG_NAME, 'section-1', args), + ) -some_other_task = DummyOperator( - task_id='some-other-task', - dag=dag, -) + some_other_task = DummyOperator( + task_id='some-other-task', + ) -section_2 = SubDagOperator( - 
task_id='section-2', - subdag=subdag(DAG_NAME, 'section-2', args), - dag=dag, -) + section_2 = SubDagOperator( + task_id='section-2', + subdag=subdag(DAG_NAME, 'section-2', args), + ) -end = DummyOperator( - task_id='end', - dag=dag, -) + end = DummyOperator( + task_id='end', + ) -start >> section_1 >> some_other_task >> section_2 >> end + start >> section_1 >> some_other_task >> section_2 >> end # [END example_subdag_operator] diff --git a/airflow/example_dags/example_trigger_controller_dag.py b/airflow/example_dags/example_trigger_controller_dag.py index 0f706c70350b1..9d0239919b43b 100644 --- a/airflow/example_dags/example_trigger_controller_dag.py +++ b/airflow/example_dags/example_trigger_controller_dag.py @@ -25,17 +25,16 @@ from airflow.operators.trigger_dagrun import TriggerDagRunOperator from airflow.utils.dates import days_ago -dag = DAG( +with DAG( dag_id="example_trigger_controller_dag", default_args={"owner": "airflow"}, start_date=days_ago(2), schedule_interval="@once", tags=['example'], -) +) as dag: -trigger = TriggerDagRunOperator( - task_id="test_trigger_dagrun", - trigger_dag_id="example_trigger_target_dag", # Ensure this equals the dag_id of the DAG to trigger - conf={"message": "Hello World"}, - dag=dag, -) + trigger = TriggerDagRunOperator( + task_id="test_trigger_dagrun", + trigger_dag_id="example_trigger_target_dag", # Ensure this equals the dag_id of the DAG to trigger + conf={"message": "Hello World"}, + ) diff --git a/airflow/example_dags/example_trigger_target_dag.py b/airflow/example_dags/example_trigger_target_dag.py index 035527546d289..39ecefc2622ce 100644 --- a/airflow/example_dags/example_trigger_target_dag.py +++ b/airflow/example_dags/example_trigger_target_dag.py @@ -27,14 +27,6 @@ from airflow.operators.python import PythonOperator from airflow.utils.dates import days_ago -dag = DAG( - dag_id="example_trigger_target_dag", - default_args={"owner": "airflow"}, - start_date=days_ago(2), - schedule_interval=None, - tags=['example'], -) - def run_this_func(**context): """ @@ -46,11 +38,18 @@ def run_this_func(**context): print("Remotely received value of {} for key=message".format(context["dag_run"].conf["message"])) -run_this = PythonOperator(task_id="run_this", python_callable=run_this_func, dag=dag) +with DAG( + dag_id="example_trigger_target_dag", + default_args={"owner": "airflow"}, + start_date=days_ago(2), + schedule_interval=None, + tags=['example'], +) as dag: + + run_this = PythonOperator(task_id="run_this", python_callable=run_this_func) -bash_task = BashOperator( - task_id="bash_task", - bash_command='echo "Here is the message: $message"', - env={'message': '{{ dag_run.conf["message"] if dag_run else "" }}'}, - dag=dag, -) + bash_task = BashOperator( + task_id="bash_task", + bash_command='echo "Here is the message: $message"', + env={'message': '{{ dag_run.conf["message"] if dag_run else "" }}'}, + ) diff --git a/airflow/example_dags/example_xcom.py b/airflow/example_dags/example_xcom.py index 779e392c70083..03f85d910ef61 100644 --- a/airflow/example_dags/example_xcom.py +++ b/airflow/example_dags/example_xcom.py @@ -21,14 +21,6 @@ from airflow.operators.python import PythonOperator from airflow.utils.dates import days_ago -dag = DAG( - 'example_xcom', - schedule_interval="@once", - start_date=days_ago(2), - default_args={'owner': 'airflow'}, - tags=['example'], -) - value_1 = [1, 2, 3] value_2 = {'a': 'b'} @@ -65,22 +57,27 @@ def puller(**kwargs): raise ValueError(f'The two values differ {pulled_value_2} and {value_2}') -push1 = PythonOperator( 
- task_id='push', - dag=dag, - python_callable=push, -) +with DAG( + 'example_xcom', + schedule_interval="@once", + start_date=days_ago(2), + default_args={'owner': 'airflow'}, + tags=['example'], +) as dag: + + push1 = PythonOperator( + task_id='push', + python_callable=push, + ) -push2 = PythonOperator( - task_id='push_by_returning', - dag=dag, - python_callable=push_by_returning, -) + push2 = PythonOperator( + task_id='push_by_returning', + python_callable=push_by_returning, + ) -pull = PythonOperator( - task_id='puller', - dag=dag, - python_callable=puller, -) + pull = PythonOperator( + task_id='puller', + python_callable=puller, + ) -pull << [push1, push2] + pull << [push1, push2] diff --git a/airflow/example_dags/test_utils.py b/airflow/example_dags/test_utils.py index a1a2ed0c0a66b..0211dfb2a0d02 100644 --- a/airflow/example_dags/test_utils.py +++ b/airflow/example_dags/test_utils.py @@ -20,12 +20,11 @@ from airflow.operators.bash import BashOperator from airflow.utils.dates import days_ago -dag = DAG(dag_id='test_utils', schedule_interval=None, tags=['example']) +with DAG(dag_id='test_utils', schedule_interval=None, tags=['example']) as dag: -task = BashOperator( - task_id='sleeps_forever', - dag=dag, - bash_command="sleep 10000000000", - start_date=days_ago(2), - owner='airflow', -) + task = BashOperator( + task_id='sleeps_forever', + bash_command="sleep 10000000000", + start_date=days_ago(2), + owner='airflow', + ) diff --git a/airflow/example_dags/tutorial.py b/airflow/example_dags/tutorial.py index a00051c43abe2..09d6ca3f1a658 100644 --- a/airflow/example_dags/tutorial.py +++ b/airflow/example_dags/tutorial.py @@ -24,6 +24,7 @@ # [START tutorial] # [START import_module] from datetime import timedelta +from textwrap import dedent # The DAG object; we'll need this to instantiate a DAG from airflow import DAG @@ -62,62 +63,64 @@ # [END default_args] # [START instantiate_dag] -dag = DAG( +with DAG( 'tutorial', default_args=default_args, description='A simple tutorial DAG', schedule_interval=timedelta(days=1), start_date=days_ago(2), tags=['example'], -) -# [END instantiate_dag] +) as dag: + # [END instantiate_dag] -# t1, t2 and t3 are examples of tasks created by instantiating operators -# [START basic_task] -t1 = BashOperator( - task_id='print_date', - bash_command='date', - dag=dag, -) + # t1, t2 and t3 are examples of tasks created by instantiating operators + # [START basic_task] + t1 = BashOperator( + task_id='print_date', + bash_command='date', + ) -t2 = BashOperator( - task_id='sleep', - depends_on_past=False, - bash_command='sleep 5', - retries=3, - dag=dag, -) -# [END basic_task] + t2 = BashOperator( + task_id='sleep', + depends_on_past=False, + bash_command='sleep 5', + retries=3, + ) + # [END basic_task] -# [START documentation] -dag.doc_md = __doc__ + # [START documentation] + dag.doc_md = __doc__ -t1.doc_md = """\ -#### Task Documentation -You can document your task using the attributes `doc_md` (markdown), -`doc` (plain text), `doc_rst`, `doc_json`, `doc_yaml` which gets -rendered in the UI's Task Instance Details page. -![img](http://montcs.bloomu.edu/~bobmon/Semesters/2012-01/491/import%20soul.png) -""" -# [END documentation] + t1.doc_md = dedent( + """\ + #### Task Documentation + You can document your task using the attributes `doc_md` (markdown), + `doc` (plain text), `doc_rst`, `doc_json`, `doc_yaml` which gets + rendered in the UI's Task Instance Details page. 
-# [START jinja_template] -templated_command = """ -{% for i in range(5) %} - echo "{{ ds }}" - echo "{{ macros.ds_add(ds, 7)}}" - echo "{{ params.my_param }}" -{% endfor %} -""" + ![img](http://montcs.bloomu.edu/~bobmon/Semesters/2012-01/491/import%20soul.png) + """ + ) + # [END documentation] + + # [START jinja_template] + templated_command = dedent( + """ + {% for i in range(5) %} + echo "{{ ds }}" + echo "{{ macros.ds_add(ds, 7)}}" + echo "{{ params.my_param }}" + {% endfor %} + """ + ) -t3 = BashOperator( - task_id='templated', - depends_on_past=False, - bash_command=templated_command, - params={'my_param': 'Parameter I passed in'}, - dag=dag, -) -# [END jinja_template] + t3 = BashOperator( + task_id='templated', + depends_on_past=False, + bash_command=templated_command, + params={'my_param': 'Parameter I passed in'}, + ) + # [END jinja_template] -t1 >> [t2, t3] + t1 >> [t2, t3] # [END tutorial] diff --git a/airflow/example_dags/tutorial_etl_dag.py b/airflow/example_dags/tutorial_etl_dag.py index 48b519b5e59eb..8b45600a677b7 100644 --- a/airflow/example_dags/tutorial_etl_dag.py +++ b/airflow/example_dags/tutorial_etl_dag.py @@ -27,6 +27,7 @@ # [START tutorial] # [START import_module] import json +from textwrap import dedent # The DAG object; we'll need this to instantiate a DAG from airflow import DAG @@ -98,33 +99,39 @@ def load(**kwargs): task_id='extract', python_callable=extract, ) - extract_task.doc_md = """\ -#### Extract task -A simple Extract task to get data ready for the rest of the data pipeline. -In this case, getting data is simulated by reading from a hardcoded JSON string. -This data is then put into xcom, so that it can be processed by the next task. -""" + extract_task.doc_md = dedent( + """\ + #### Extract task + A simple Extract task to get data ready for the rest of the data pipeline. + In this case, getting data is simulated by reading from a hardcoded JSON string. + This data is then put into xcom, so that it can be processed by the next task. + """ + ) transform_task = PythonOperator( task_id='transform', python_callable=transform, ) - transform_task.doc_md = """\ -#### Transform task -A simple Transform task which takes in the collection of order data from xcom -and computes the total order value. -This computed value is then put into xcom, so that it can be processed by the next task. -""" + transform_task.doc_md = dedent( + """\ + #### Transform task + A simple Transform task which takes in the collection of order data from xcom + and computes the total order value. + This computed value is then put into xcom, so that it can be processed by the next task. + """ + ) load_task = PythonOperator( task_id='load', python_callable=load, ) - load_task.doc_md = """\ -#### Load task -A simple Load task which takes in the result of the Transform task, by reading it -from xcom and instead of saving it to end user review, just prints it out. -""" + load_task.doc_md = dedent( + """\ + #### Load task + A simple Load task which takes in the result of the Transform task, by reading it + from xcom and instead of saving it to end user review, just prints it out. 
+ """ + ) extract_task >> transform_task >> load_task diff --git a/airflow/executors/base_executor.py b/airflow/executors/base_executor.py index fc753055194ea..8e87901115338 100644 --- a/airflow/executors/base_executor.py +++ b/airflow/executors/base_executor.py @@ -182,10 +182,10 @@ def trigger_tasks(self, open_slots: int) -> None: sorted_queue = self.order_queued_tasks_by_priority() for _ in range(min((open_slots, len(self.queued_tasks)))): - key, (command, _, _, ti) = sorted_queue.pop(0) + key, (command, _, queue, ti) = sorted_queue.pop(0) self.queued_tasks.pop(key) self.running.add(key) - self.execute_async(key=key, command=command, queue=None, executor_config=ti.executor_config) + self.execute_async(key=key, command=command, queue=queue, executor_config=ti.executor_config) def change_state(self, key: TaskInstanceKey, state: str, info=None) -> None: """ diff --git a/airflow/executors/celery_executor.py b/airflow/executors/celery_executor.py index 8bbaed15aec34..2d0e915c5013b 100644 --- a/airflow/executors/celery_executor.py +++ b/airflow/executors/celery_executor.py @@ -35,7 +35,7 @@ from celery import Celery, Task, states as celery_states from celery.backends.base import BaseKeyValueStoreBackend -from celery.backends.database import DatabaseBackend, session_cleanup +from celery.backends.database import DatabaseBackend, Task as TaskDb, session_cleanup from celery.result import AsyncResult from celery.signals import import_modules as celery_import_modules from setproctitle import setproctitle # pylint: disable=no-name-in-module @@ -476,7 +476,7 @@ def try_adopt_task_instances(self, tis: List[TaskInstance]) -> List[TaskInstance return tis states_by_celery_task_id = self.bulk_state_fetcher.get_many( - map(operator.itemgetter(0), celery_tasks.values()) + list(map(operator.itemgetter(0), celery_tasks.values())) ) adopted = [] @@ -526,10 +526,6 @@ def fetch_celery_task_state(async_result: AsyncResult) -> Tuple[str, Union[str, return async_result.task_id, ExceptionWithTraceback(e, exception_traceback), None -def _tasks_list_to_task_ids(async_tasks) -> Set[str]: - return {a.task_id for a in async_tasks} - - class BulkStateFetcher(LoggingMixin): """ Gets status for many Celery tasks using the best method available @@ -543,20 +539,22 @@ def __init__(self, sync_parralelism=None): super().__init__() self._sync_parallelism = sync_parralelism + def _tasks_list_to_task_ids(self, async_tasks) -> Set[str]: + return {a.task_id for a in async_tasks} + def get_many(self, async_results) -> Mapping[str, EventBufferValueType]: """Gets status for many Celery tasks using the best method available.""" if isinstance(app.backend, BaseKeyValueStoreBackend): result = self._get_many_from_kv_backend(async_results) - return result - if isinstance(app.backend, DatabaseBackend): + elif isinstance(app.backend, DatabaseBackend): result = self._get_many_from_db_backend(async_results) - return result - result = self._get_many_using_multiprocessing(async_results) - self.log.debug("Fetched %d states for %d task", len(result), len(async_results)) + else: + result = self._get_many_using_multiprocessing(async_results) + self.log.debug("Fetched %d state(s) for %d task(s)", len(result), len(async_results)) return result def _get_many_from_kv_backend(self, async_tasks) -> Mapping[str, EventBufferValueType]: - task_ids = _tasks_list_to_task_ids(async_tasks) + task_ids = self._tasks_list_to_task_ids(async_tasks) keys = [app.backend.get_key_for_task(k) for k in task_ids] values = app.backend.mget(keys) task_results = 
[app.backend.decode_result(v) for v in values if v] @@ -565,9 +563,9 @@ def _get_many_from_kv_backend(self, async_tasks) -> Mapping[str, EventBufferValu return self._prepare_state_and_info_by_task_dict(task_ids, task_results_by_task_id) def _get_many_from_db_backend(self, async_tasks) -> Mapping[str, EventBufferValueType]: - task_ids = _tasks_list_to_task_ids(async_tasks) + task_ids = self._tasks_list_to_task_ids(async_tasks) session = app.backend.ResultSession() - task_cls = app.backend.task_cls + task_cls = getattr(app.backend, "task_cls", TaskDb) with session_cleanup(session): tasks = session.query(task_cls).filter(task_cls.task_id.in_(task_ids)).all() diff --git a/airflow/executors/kubernetes_executor.py b/airflow/executors/kubernetes_executor.py index 88e26be885932..ec7cbf7fd9418 100644 --- a/airflow/executors/kubernetes_executor.py +++ b/airflow/executors/kubernetes_executor.py @@ -194,10 +194,8 @@ def process_status( """Process status response""" if status == 'Pending': if event['type'] == 'DELETED': - self.log.info('Event: Failed to start pod %s, will reschedule', pod_id) - self.watcher_queue.put( - (pod_id, namespace, State.UP_FOR_RESCHEDULE, annotations, resource_version) - ) + self.log.info('Event: Failed to start pod %s', pod_id) + self.watcher_queue.put((pod_id, namespace, State.FAILED, annotations, resource_version)) else: self.log.info('Event: %s Pending', pod_id) elif status == 'Failed': @@ -490,9 +488,15 @@ def execute_async( ) -> None: """Executes task asynchronously""" self.log.info('Add task %s with command %s with executor_config %s', key, command, executor_config) - kube_executor_config = PodGenerator.from_obj(executor_config) + try: + kube_executor_config = PodGenerator.from_obj(executor_config) + except Exception: # pylint: disable=broad-except + self.log.error("Invalid executor_config for %s", key) + self.fail(key=key, info="Invalid executor_config passed") + return + if executor_config: - pod_template_file = executor_config.get("pod_template_override", None) + pod_template_file = executor_config.get("pod_template_file", None) else: pod_template_file = None if not self.task_queue: @@ -572,7 +576,7 @@ def _change_state(self, key: TaskInstanceKey, state: Optional[str], pod_id: str, if self.kube_config.delete_worker_pods: if not self.kube_scheduler: raise AirflowException(NOT_STARTED_MESSAGE) - if state is not State.FAILED or self.kube_config.delete_worker_pods_on_failure: + if state != State.FAILED or self.kube_config.delete_worker_pods_on_failure: self.kube_scheduler.delete_pod(pod_id, namespace) self.log.info('Deleted pod: %s in namespace %s', str(key), str(namespace)) try: diff --git a/airflow/hooks/base.py b/airflow/hooks/base.py index b3c0c11520a00..dee76dc70a90f 100644 --- a/airflow/hooks/base.py +++ b/airflow/hooks/base.py @@ -18,12 +18,14 @@ """Base class for all hooks""" import logging import warnings -from typing import Any, Dict, List +from typing import TYPE_CHECKING, Any, Dict, List -from airflow.models.connection import Connection from airflow.typing_compat import Protocol from airflow.utils.log.logging_mixin import LoggingMixin +if TYPE_CHECKING: + from airflow.models.connection import Connection # Avoid circular imports. + log = logging.getLogger(__name__) @@ -37,7 +39,7 @@ class BaseHook(LoggingMixin): """ @classmethod - def get_connections(cls, conn_id: str) -> List[Connection]: + def get_connections(cls, conn_id: str) -> List["Connection"]: """ Get all connections as an iterable, given the connection id. 
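The hooks/base.py change above (continued in the next hunk, where the import moves into the method body) is the standard `TYPE_CHECKING` recipe for breaking a circular import: the name is imported only for static analysis, the annotation refers to it as the string `"Connection"`, and the runtime import is deferred until the method actually runs. A minimal sketch of the same pattern, using hypothetical module names that are not part of this change:

```python
# a_module.py -- hypothetical modules, sketching the deferred-import pattern only
from typing import TYPE_CHECKING, List

if TYPE_CHECKING:
    # Only evaluated by type checkers (mypy, IDEs); never executed at runtime,
    # so importing a_module cannot trigger the import cycle.
    from b_module import Thing


def get_things(count: int) -> List["Thing"]:  # string annotation, resolved lazily
    from b_module import Thing  # deferred to call time, after both modules are initialized

    return [Thing() for _ in range(count)]
```

The string annotation keeps the signature checkable without paying the import at module load, which is exactly what the `List["Connection"]` change in the previous hunk does.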
@@ -53,13 +55,15 @@ def get_connections(cls, conn_id: str) -> List[Connection]:
         return [cls.get_connection(conn_id)]
 
     @classmethod
-    def get_connection(cls, conn_id: str) -> Connection:
+    def get_connection(cls, conn_id: str) -> "Connection":
         """
         Get connection, given connection id.
 
         :param conn_id: connection id
         :return: connection
         """
+        from airflow.models.connection import Connection
+
         conn = Connection.get_connection_from_secrets(conn_id)
         if conn.host:
             log.info(
diff --git a/airflow/jobs/backfill_job.py b/airflow/jobs/backfill_job.py
index 0d3d057bba0c6..a16f26170da58 100644
--- a/airflow/jobs/backfill_job.py
+++ b/airflow/jobs/backfill_job.py
@@ -785,6 +785,7 @@ def _execute(self, session=None):
             pickle_id = pickle.id
 
         executor = self.executor
+        executor.job_id = "backfill"
         executor.start()
 
         ti_status.total_runs = len(run_dates)  # total dag runs in backfill
diff --git a/airflow/jobs/scheduler_job.py b/airflow/jobs/scheduler_job.py
index f25ae02431ddf..1076cb6670411 100644
--- a/airflow/jobs/scheduler_job.py
+++ b/airflow/jobs/scheduler_job.py
@@ -920,8 +920,12 @@ def _executable_task_instances_to_queued(self, max_tis: int, session: Session =
             .filter(not_(DM.is_paused))
             .filter(TI.state == State.SCHEDULED)
             .options(selectinload('dag_model'))
-            .limit(max_tis)
         )
+        starved_pools = [pool_name for pool_name, stats in pools.items() if stats['open'] <= 0]
+        if starved_pools:
+            query = query.filter(not_(TI.pool.in_(starved_pools)))
+
+        query = query.limit(max_tis)
 
         task_instances_to_examine: List[TI] = with_row_locks(
             query,
@@ -1459,7 +1463,7 @@ def _do_scheduling(self, session) -> int:
         By "next oldest", we mean hasn't been examined/scheduled in the most time.
 
         The reason we don't select all dagruns at once because the rows are selected with row locks, meaning
-        that only one scheduler can "process them", even it it is waiting behind other dags. Increasing this
+        that only one scheduler can "process them", even if it is waiting behind other dags. Increasing this
         limit will allow more throughput for smaller DAGs but will likely slow down throughput for larger
         (>500 tasks.) DAGs
 
@@ -1633,7 +1637,7 @@ def _create_dag_runs(self, dag_models: Iterable[DagModel], session: Session) ->
             # create a new one. This is so that in the next Scheduling loop we try to create new runs
             # instead of falling in a loop of Integrity Error.
if (dag.dag_id, dag_model.next_dagrun) not in active_dagruns: - dag.create_dagrun( + run = dag.create_dagrun( run_type=DagRunType.SCHEDULED, execution_date=dag_model.next_dagrun, start_date=timezone.utcnow(), @@ -1644,6 +1648,14 @@ def _create_dag_runs(self, dag_models: Iterable[DagModel], session: Session) -> creating_job_id=self.id, ) + expected_start_date = dag.following_schedule(run.execution_date) + if expected_start_date: + schedule_delay = run.start_date - expected_start_date + Stats.timing( + f'dagrun.schedule_delay.{dag.dag_id}', + schedule_delay, + ) + self._update_dag_next_dagruns(dag_models, session) # TODO[HA]: Should we do a session.flush() so we don't have to keep lots of state/object in @@ -1717,10 +1729,18 @@ def _schedule_dag_run( and dag.dagrun_timeout and dag_run.start_date < timezone.utcnow() - dag.dagrun_timeout ): - dag_run.state = State.FAILED - dag_run.end_date = timezone.utcnow() - self.log.info("Run %s of %s has timed-out", dag_run.run_id, dag_run.dag_id) + dag_run.set_state(State.FAILED) + unfinished_task_instances = ( + session.query(TI) + .filter(TI.dag_id == dag_run.dag_id) + .filter(TI.execution_date == dag_run.execution_date) + .filter(TI.state.in_(State.unfinished)) + ) + for task_instance in unfinished_task_instances: + task_instance.state = State.SKIPPED + session.merge(task_instance) session.flush() + self.log.info("Run %s of %s has timed-out", dag_run.run_id, dag_run.dag_id) # Work out if we should allow creating a new DagRun now? self._update_dag_next_dagruns([session.query(DagModel).get(dag_run.dag_id)], session) diff --git a/airflow/kubernetes/kube_client.py b/airflow/kubernetes/kube_client.py index 7e8c5e83d7f3c..1e65ae5fb6a6f 100644 --- a/airflow/kubernetes/kube_client.py +++ b/airflow/kubernetes/kube_client.py @@ -80,9 +80,9 @@ def _enable_tcp_keepalive() -> None: from urllib3.connection import HTTPConnection, HTTPSConnection - tcp_keep_idle = conf.getint('kubernetes', 'tcp_keep_idle', fallback=120) - tcp_keep_intvl = conf.getint('kubernetes', 'tcp_keep_intvl', fallback=30) - tcp_keep_cnt = conf.getint('kubernetes', 'tcp_keep_cnt', fallback=6) + tcp_keep_idle = conf.getint('kubernetes', 'tcp_keep_idle') + tcp_keep_intvl = conf.getint('kubernetes', 'tcp_keep_intvl') + tcp_keep_cnt = conf.getint('kubernetes', 'tcp_keep_cnt') socket_options = [ (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1), @@ -120,7 +120,7 @@ def get_kube_client( if config_file is None: config_file = conf.get('kubernetes', 'config_file', fallback=None) - if conf.getboolean('kubernetes', 'enable_tcp_keepalive', fallback=False): + if conf.getboolean('kubernetes', 'enable_tcp_keepalive'): _enable_tcp_keepalive() client_conf = _get_kube_config(in_cluster, cluster_context, config_file) diff --git a/airflow/kubernetes/pod_generator.py b/airflow/kubernetes/pod_generator.py index 0782f1af9d069..42394bae14141 100644 --- a/airflow/kubernetes/pod_generator.py +++ b/airflow/kubernetes/pod_generator.py @@ -30,11 +30,11 @@ from functools import reduce from typing import List, Optional, Union -import yaml from dateutil import parser from kubernetes.client import models as k8s from kubernetes.client.api_client import ApiClient +import airflow.utils.yaml as yaml from airflow.exceptions import AirflowConfigException from airflow.kubernetes.pod_generator_deprecated import PodGenerator as PodGeneratorDeprecated from airflow.version import version as airflow_version @@ -368,10 +368,6 @@ def construct_pod( # pylint: disable=too-many-arguments except Exception: # pylint: disable=W0703 image = kube_image - 
task_id = make_safe_label_value(task_id) - dag_id = make_safe_label_value(dag_id) - scheduler_job_id = make_safe_label_value(str(scheduler_job_id)) - dynamic_pod = k8s.V1Pod( metadata=k8s.V1ObjectMeta( namespace=namespace, @@ -383,9 +379,9 @@ def construct_pod( # pylint: disable=too-many-arguments }, name=PodGenerator.make_unique_pod_id(pod_id), labels={ - 'airflow-worker': scheduler_job_id, - 'dag_id': dag_id, - 'task_id': task_id, + 'airflow-worker': make_safe_label_value(str(scheduler_job_id)), + 'dag_id': make_safe_label_value(dag_id), + 'task_id': make_safe_label_value(task_id), 'execution_date': datetime_to_label_safe_datestring(date), 'try_number': str(try_number), 'airflow_version': airflow_version.replace('+', '-'), diff --git a/airflow/kubernetes/pod_launcher.py b/airflow/kubernetes/pod_launcher.py index 02194d72b8b20..3d663d287716f 100644 --- a/airflow/kubernetes/pod_launcher.py +++ b/airflow/kubernetes/pod_launcher.py @@ -140,9 +140,10 @@ def monitor_pod(self, pod: V1Pod, get_logs: bool) -> Tuple[State, Optional[str]] break self.log.warning('Pod %s log read interrupted', pod.metadata.name) - delta = pendulum.now() - last_log_time - # Prefer logs duplication rather than loss - read_logs_since_sec = math.ceil(delta.total_seconds()) + if last_log_time: + delta = pendulum.now() - last_log_time + # Prefer logs duplication rather than loss + read_logs_since_sec = math.ceil(delta.total_seconds()) result = None if self.extract_xcom: while self.base_container_is_running(pod): diff --git a/airflow/kubernetes/refresh_config.py b/airflow/kubernetes/refresh_config.py index 9067cb15da71b..37b3becf80eae 100644 --- a/airflow/kubernetes/refresh_config.py +++ b/airflow/kubernetes/refresh_config.py @@ -27,11 +27,12 @@ from typing import Optional, cast import pendulum -import yaml from kubernetes.client import Configuration from kubernetes.config.exec_provider import ExecProvider from kubernetes.config.kube_config import KUBE_CONFIG_DEFAULT_LOCATION, KubeConfigLoader +import airflow.utils.yaml as yaml + def _parse_timestamp(ts_str: str) -> int: parsed_dt = cast(pendulum.DateTime, pendulum.parse(ts_str)) @@ -68,6 +69,7 @@ def _load_from_exec_plugin(self): return True except Exception as e: # pylint: disable=W0703 logging.error(str(e)) + return None def refresh_api_key(self, client_configuration): """Refresh API key if expired""" diff --git a/airflow/kubernetes_executor_templates/basic_template.yaml b/airflow/kubernetes_executor_templates/basic_template.yaml index a953867582dbf..a6eb83f8ad8eb 100644 --- a/airflow/kubernetes_executor_templates/basic_template.yaml +++ b/airflow/kubernetes_executor_templates/basic_template.yaml @@ -69,8 +69,8 @@ spec: defaultMode: 420 restartPolicy: Never terminationGracePeriodSeconds: 30 - serviceAccountName: airflow-worker-serviceaccount - serviceAccount: airflow-worker-serviceaccount + serviceAccountName: airflow-worker + serviceAccount: airflow-worker securityContext: runAsUser: 50000 fsGroup: 50000 diff --git a/airflow/lineage/__init__.py b/airflow/lineage/__init__.py index 65f19ef5ca7c9..905eb0059a2f9 100644 --- a/airflow/lineage/__init__.py +++ b/airflow/lineage/__init__.py @@ -25,6 +25,8 @@ import jinja2 from cattr import structure, unstructure +from airflow.configuration import conf +from airflow.lineage.backend import LineageBackend from airflow.utils.module_loading import import_string ENV = jinja2.Environment() @@ -45,6 +47,22 @@ class Metadata: data: Dict = attr.ib() +def get_backend() -> Optional[LineageBackend]: + """Gets the lineage backend if defined 
in the configs""" + clazz = conf.getimport("lineage", "backend", fallback=None) + + if clazz: + if not issubclass(clazz, LineageBackend): + raise TypeError( + f"Your custom Lineage class `{clazz.__name__}` " + f"is not a subclass of `{LineageBackend.__name__}`." + ) + else: + return clazz() + + return None + + def _get_instance(meta: Metadata): """Instantiate an object from Metadata""" cls = import_string(meta.type_name) @@ -82,6 +100,7 @@ def apply_lineage(func: T) -> T: Saves the lineage to XCom and if configured to do so sends it to the backend. """ + _backend = get_backend() @wraps(func) def wrapper(self, context, *args, **kwargs): @@ -101,6 +120,9 @@ def wrapper(self, context, *args, **kwargs): context, key=PIPELINE_INLETS, value=inlets, execution_date=context['ti'].execution_date ) + if _backend: + _backend.send_lineage(operator=self, inlets=self.inlets, outlets=self.outlets, context=context) + return ret_val return cast(T, wrapper) diff --git a/airflow/lineage/backend.py b/airflow/lineage/backend.py new file mode 100644 index 0000000000000..edfbe0e2b8924 --- /dev/null +++ b/airflow/lineage/backend.py @@ -0,0 +1,47 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +"""Sends lineage metadata to a backend""" +from typing import TYPE_CHECKING, Optional + +if TYPE_CHECKING: + from airflow.models.baseoperator import BaseOperator # pylint: disable=cyclic-import + + +class LineageBackend: + """Sends lineage metadata to a backend""" + + def send_lineage( + self, + operator: 'BaseOperator', + inlets: Optional[list] = None, + outlets: Optional[list] = None, + context: Optional[dict] = None, + ): + """ + Sends lineage metadata to a backend + + :param operator: the operator executing a transformation on the inlets and outlets + :type operator: airflow.models.baseoperator.BaseOperator + :param inlets: the inlets to this operator + :type inlets: list + :param outlets: the outlets from this operator + :type outlets: list + :param context: the current context of the task instance + :type context: dict + """ + raise NotImplementedError() diff --git a/airflow/migrations/versions/2e42bb497a22_rename_last_scheduler_run_column.py b/airflow/migrations/versions/2e42bb497a22_rename_last_scheduler_run_column.py new file mode 100644 index 0000000000000..97d8ff6211afc --- /dev/null +++ b/airflow/migrations/versions/2e42bb497a22_rename_last_scheduler_run_column.py @@ -0,0 +1,65 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +"""rename last_scheduler_run column + +Revision ID: 2e42bb497a22 +Revises: 8646922c8a04 +Create Date: 2021-03-04 19:50:38.880942 + +""" + +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects import mssql + +# revision identifiers, used by Alembic. +revision = '2e42bb497a22' +down_revision = '8646922c8a04' +branch_labels = None +depends_on = None + + +def upgrade(): + """Apply rename last_scheduler_run column""" + conn = op.get_bind() + if conn.dialect.name == "mssql": + with op.batch_alter_table('dag') as batch_op: + batch_op.alter_column( + 'last_scheduler_run', new_column_name='last_parsed_time', type_=mssql.DATETIME2(precision=6) + ) + else: + with op.batch_alter_table('dag') as batch_op: + batch_op.alter_column( + 'last_scheduler_run', new_column_name='last_parsed_time', type_=sa.TIMESTAMP(timezone=True) + ) + + +def downgrade(): + """Unapply rename last_scheduler_run column""" + conn = op.get_bind() + if conn.dialect.name == "mssql": + with op.batch_alter_table('dag') as batch_op: + batch_op.alter_column( + 'last_parsed_time', new_column_name='last_scheduler_run', type_=mssql.DATETIME2(precision=6) + ) + else: + with op.batch_alter_table('dag') as batch_op: + batch_op.alter_column( + 'last_parsed_time', new_column_name='last_scheduler_run', type_=sa.TIMESTAMP(timezone=True) + ) diff --git a/airflow/migrations/versions/449b4072c2da_increase_size_of_connection_extra_field_.py b/airflow/migrations/versions/449b4072c2da_increase_size_of_connection_extra_field_.py new file mode 100644 index 0000000000000..808d435870dd6 --- /dev/null +++ b/airflow/migrations/versions/449b4072c2da_increase_size_of_connection_extra_field_.py @@ -0,0 +1,56 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +"""Increase size of connection.extra field to handle multiple RSA keys + +Revision ID: 449b4072c2da +Revises: 82b7c48c147f +Create Date: 2020-03-16 19:02:55.337710 + +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. 
+revision = '449b4072c2da'
+down_revision = '82b7c48c147f'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    """Apply increase size of connection.extra field to handle multiple RSA keys"""
+    with op.batch_alter_table('connection', schema=None) as batch_op:
+        batch_op.alter_column(
+            'extra',
+            existing_type=sa.VARCHAR(length=5000),
+            type_=sa.TEXT(),
+            existing_nullable=True,
+        )
+
+
+def downgrade():
+    """Unapply increase size of connection.extra field to handle multiple RSA keys"""
+    with op.batch_alter_table('connection', schema=None) as batch_op:
+        batch_op.alter_column(
+            'extra',
+            existing_type=sa.TEXT(),
+            type_=sa.VARCHAR(length=5000),
+            existing_nullable=True,
+        )
diff --git a/airflow/migrations/versions/82b7c48c147f_remove_can_read_permission_on_config_.py b/airflow/migrations/versions/82b7c48c147f_remove_can_read_permission_on_config_.py
index 5e85ee4a6ba82..85d0872704570 100644
--- a/airflow/migrations/versions/82b7c48c147f_remove_can_read_permission_on_config_.py
+++ b/airflow/migrations/versions/82b7c48c147f_remove_can_read_permission_on_config_.py
@@ -23,6 +23,7 @@
 Create Date: 2021-02-04 12:45:58.138224
 
 """
+import logging
 
 from airflow.security import permissions
 from airflow.www.app import create_app
@@ -36,6 +37,9 @@
 
 def upgrade():
     """Remove can_read permission on config resource for User and Viewer role"""
+    log = logging.getLogger()
+    handlers = log.handlers[:]
+
     appbuilder = create_app(config={'FAB_UPDATE_PERMS': False}).appbuilder
     roles_to_modify = [role for role in appbuilder.sm.get_all_roles() if role.name in ["User", "Viewer"]]
     can_read_on_config_perm = appbuilder.sm.find_permission_view_menu(
@@ -48,6 +52,8 @@
     ):
         appbuilder.sm.del_permission_role(role, can_read_on_config_perm)
 
+    log.handlers = handlers
+
 
 def downgrade():
     """Add can_read permission on config resource for User and Viewer role"""
diff --git a/airflow/migrations/versions/8646922c8a04_change_default_pool_slots_to_1.py b/airflow/migrations/versions/8646922c8a04_change_default_pool_slots_to_1.py
new file mode 100644
index 0000000000000..bf498735b392d
--- /dev/null
+++ b/airflow/migrations/versions/8646922c8a04_change_default_pool_slots_to_1.py
@@ -0,0 +1,93 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+"""Change default pool_slots to 1
+
+Revision ID: 8646922c8a04
+Revises: 449b4072c2da
+Create Date: 2021-02-23 23:19:22.409973
+
+"""
+
+import dill
+import sqlalchemy as sa
+from alembic import op
+from sqlalchemy import Column, Float, Integer, PickleType, String
+
+# revision identifiers, used by Alembic.
+from sqlalchemy.ext.declarative import declarative_base + +from airflow.models.base import COLLATION_ARGS +from airflow.utils.sqlalchemy import UtcDateTime + +revision = '8646922c8a04' +down_revision = '449b4072c2da' +branch_labels = None +depends_on = None + +Base = declarative_base() +BATCH_SIZE = 5000 +ID_LEN = 250 + + +class TaskInstance(Base): # noqa: D101 # type: ignore + __tablename__ = "task_instance" + + task_id = Column(String(ID_LEN, **COLLATION_ARGS), primary_key=True) + dag_id = Column(String(ID_LEN, **COLLATION_ARGS), primary_key=True) + execution_date = Column(UtcDateTime, primary_key=True) + start_date = Column(UtcDateTime) + end_date = Column(UtcDateTime) + duration = Column(Float) + state = Column(String(20)) + _try_number = Column('try_number', Integer, default=0) + max_tries = Column(Integer) + hostname = Column(String(1000)) + unixname = Column(String(1000)) + job_id = Column(Integer) + pool = Column(String(50), nullable=False) + pool_slots = Column(Integer, default=1) + queue = Column(String(256)) + priority_weight = Column(Integer) + operator = Column(String(1000)) + queued_dttm = Column(UtcDateTime) + queued_by_job_id = Column(Integer) + pid = Column(Integer) + executor_config = Column(PickleType(pickler=dill)) + external_executor_id = Column(String(ID_LEN, **COLLATION_ARGS)) + + +def upgrade(): + """Change default pool_slots to 1 and make pool_slots not nullable""" + connection = op.get_bind() + sessionmaker = sa.orm.sessionmaker() + session = sessionmaker(bind=connection) + + session.query(TaskInstance).filter(TaskInstance.pool_slots.is_(None)).update( + {TaskInstance.pool_slots: 1}, synchronize_session=False + ) + session.commit() + + with op.batch_alter_table("task_instance", schema=None) as batch_op: + batch_op.alter_column("pool_slots", existing_type=sa.Integer, nullable=False) + + +def downgrade(): + """Unapply Change default pool_slots to 1""" + with op.batch_alter_table("task_instance", schema=None) as batch_op: + batch_op.alter_column("pool_slots", existing_type=sa.Integer, nullable=True) diff --git a/airflow/models/baseoperator.py b/airflow/models/baseoperator.py index 64ed4c59b5918..eacea646c8231 100644 --- a/airflow/models/baseoperator.py +++ b/airflow/models/baseoperator.py @@ -278,6 +278,21 @@ class derived from this one results in the creation of a task object, :param do_xcom_push: if True, an XCom is pushed containing the Operator's result :type do_xcom_push: bool + :param doc: Add documentation or notes to your Task objects that is visible in + Task Instance details View in the Webserver + :type doc: str + :param doc_md: Add documentation (in Markdown format) or notes to your Task objects + that is visible in Task Instance details View in the Webserver + :type doc_md: str + :param doc_rst: Add documentation (in RST format) or notes to your Task objects + that is visible in Task Instance details View in the Webserver + :type doc_rst: str + :param doc_json: Add documentation (in JSON format) or notes to your Task objects + that is visible in Task Instance details View in the Webserver + :type doc_json: str + :param doc_yaml: Add documentation (in YAML format) or notes to your Task objects + that is visible in Task Instance details View in the Webserver + :type doc_yaml: str """ # For derived classes to define which fields will get jinjaified @@ -353,7 +368,7 @@ def __init__( retries: Optional[int] = conf.getint('core', 'default_task_retries', fallback=0), retry_delay: timedelta = timedelta(seconds=300), retry_exponential_backoff: bool = False, - 
max_retry_delay: Optional[datetime] = None, + max_retry_delay: Optional[timedelta] = None, start_date: Optional[datetime] = None, end_date: Optional[datetime] = None, depends_on_past: bool = False, @@ -381,6 +396,11 @@ def __init__( inlets: Optional[Any] = None, outlets: Optional[Any] = None, task_group: Optional["TaskGroup"] = None, + doc: Optional[str] = None, + doc_md: Optional[str] = None, + doc_json: Optional[str] = None, + doc_yaml: Optional[str] = None, + doc_rst: Optional[str] = None, **kwargs, ): from airflow.models.dag import DagContext @@ -460,6 +480,13 @@ def __init__( self.retry_delay = timedelta(seconds=retry_delay) # noqa self.retry_exponential_backoff = retry_exponential_backoff self.max_retry_delay = max_retry_delay + if max_retry_delay: + if isinstance(max_retry_delay, timedelta): + self.max_retry_delay = max_retry_delay + else: + self.log.debug("Max_retry_delay isn't timedelta object, assuming secs") + self.max_retry_delay = timedelta(seconds=max_retry_delay) # noqa + self.params = params or {} # Available in templates! self.priority_weight = priority_weight if not WeightRule.is_valid(weight_rule): @@ -479,6 +506,12 @@ def __init__( self.executor_config = executor_config or {} self.do_xcom_push = do_xcom_push + self.doc_md = doc_md + self.doc_json = doc_json + self.doc_yaml = doc_yaml + self.doc_rst = doc_rst + self.doc = doc + # Private attributes self._upstream_task_ids: Set[str] = set() self._downstream_task_ids: Set[str] = set() @@ -1486,7 +1519,7 @@ def cross_downstream( class BaseOperatorLink(metaclass=ABCMeta): """Abstract base class that defines how we get an operator link.""" - operators: ClassVar[List[Type[BaseOperator]]] = [] + operators: ClassVar[List[Type[BaseOperator]]] = [] # pylint: disable=invalid-name """ This property will be used by Airflow Plugins to find the Operators to which you want to assign this Operator Link diff --git a/airflow/models/connection.py b/airflow/models/connection.py index 1159a44a90648..c030571fbd4ef 100644 --- a/airflow/models/connection.py +++ b/airflow/models/connection.py @@ -102,7 +102,7 @@ class Connection(Base, LoggingMixin): # pylint: disable=too-many-instance-attri port = Column(Integer()) is_encrypted = Column(Boolean, unique=False, default=False) is_extra_encrypted = Column(Boolean, unique=False, default=False) - _extra = Column('extra', String(5000)) + _extra = Column('extra', Text()) def __init__( # pylint: disable=too-many-arguments self, diff --git a/airflow/models/dag.py b/airflow/models/dag.py index 8bb32db11a681..0db56097c693c 100644 --- a/airflow/models/dag.py +++ b/airflow/models/dag.py @@ -1087,7 +1087,7 @@ def topological_sort(self, include_subdag_tasks: bool = False): # using the items() method for iterating, a copy of the # unsorted graph is used, allowing us to modify the unsorted # graph as we move through it. We also keep a flag for - # checking that that graph is acyclic, which is true if any + # checking that graph is acyclic, which is true if any # nodes are resolved during each pass through the graph. If # not, we need to exit as the graph therefore can't be # sorted. 
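The `doc`/`doc_md`/`doc_rst`/`doc_json`/`doc_yaml` parameters added to `BaseOperator` above make per-task notes part of the constructor instead of an attribute assignment after the fact, matching the `dedent(...)` style the tutorial DAGs in this diff switch to. A minimal sketch of how a task author might use the new keyword (the task id and command are illustrative only):

```python
from textwrap import dedent

from airflow.operators.bash import BashOperator

# doc_md is rendered on the Task Instance details page in the webserver;
# doc, doc_rst, doc_json and doc_yaml work the same way for other formats.
notes_task = BashOperator(
    task_id='with_notes',       # illustrative task id
    bash_command='echo hello',  # illustrative command
    doc_md=dedent(
        """\
        #### With notes
        Passed at construction time, equivalent to assigning
        ``notes_task.doc_md = ...`` after creating the task.
        """
    ),
)
```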
@@ -1116,13 +1116,15 @@ def set_dag_runs_state( session: Session = None, start_date: Optional[datetime] = None, end_date: Optional[datetime] = None, + dag_ids: List[str] = None, ) -> None: - query = session.query(DagRun).filter_by(dag_id=self.dag_id) + dag_ids = dag_ids or [self.dag_id] + query = session.query(DagRun).filter(DagRun.dag_id.in_(dag_ids)) if start_date: query = query.filter(DagRun.execution_date >= start_date) if end_date: query = query.filter(DagRun.execution_date <= end_date) - query.update({DagRun.state: state}) + query.update({DagRun.state: state}, synchronize_session='fetch') @provide_session def clear( @@ -1183,11 +1185,13 @@ def clear( """ TI = TaskInstance tis = session.query(TI) + dag_ids = [] if include_subdags: # Crafting the right filter for dag_id and task_ids combo conditions = [] for dag in self.subdags + [self]: conditions.append((TI.dag_id == dag.dag_id) & TI.task_id.in_(dag.task_ids)) + dag_ids.append(dag.dag_id) tis = tis.filter(or_(*conditions)) else: tis = session.query(TI).filter(TI.dag_id == self.dag_id) @@ -1327,11 +1331,13 @@ def clear( dag=self, activate_dag_runs=False, # We will set DagRun state later. ) + self.set_dag_runs_state( session=session, start_date=start_date, end_date=end_date, state=dag_run_state, + dag_ids=dag_ids, ) else: count = 0 @@ -1876,6 +1882,7 @@ def bulk_write_to_db(cls, dags: Collection["DAG"], session=None): orm_dag.fileloc = dag.fileloc orm_dag.owners = dag.owner orm_dag.is_active = True + orm_dag.last_parsed_time = timezone.utcnow() orm_dag.default_view = dag.default_view orm_dag.description = dag.description orm_dag.schedule_interval = dag.schedule_interval @@ -1960,13 +1967,13 @@ def deactivate_stale_dags(expiration_date, session=None): """ for dag in ( session.query(DagModel) - .filter(DagModel.last_scheduler_run < expiration_date, DagModel.is_active) + .filter(DagModel.last_parsed_time < expiration_date, DagModel.is_active) .all() ): log.info( "Deactivating DAG ID %s since it was last touched by the scheduler at %s", dag.dag_id, - dag.last_scheduler_run.isoformat(), + dag.last_parsed_time.isoformat(), ) dag.is_active = False session.merge(dag) @@ -2069,7 +2076,7 @@ class DagModel(Base): # Whether that DAG was seen on the last DagBag load is_active = Column(Boolean, default=False) # Last time the scheduler started - last_scheduler_run = Column(UtcDateTime) + last_parsed_time = Column(UtcDateTime) # Last time this DAG was pickled last_pickled = Column(UtcDateTime) # Time when the DAG last received a refresh signal diff --git a/airflow/models/dagbag.py b/airflow/models/dagbag.py index 4a96f4fdaf0af..8228659b0c87a 100644 --- a/airflow/models/dagbag.py +++ b/airflow/models/dagbag.py @@ -27,7 +27,7 @@ import warnings import zipfile from datetime import datetime, timedelta -from typing import Dict, List, NamedTuple, Optional +from typing import TYPE_CHECKING, Dict, List, NamedTuple, Optional, Union from croniter import CroniterBadCronError, CroniterBadDateError, CroniterNotAlphaError, croniter from sqlalchemy.exc import OperationalError @@ -46,6 +46,9 @@ from airflow.utils.session import provide_session from airflow.utils.timeout import timeout +if TYPE_CHECKING: + import pathlib + class FileLoadStat(NamedTuple): """Information about single file""" @@ -89,7 +92,7 @@ class DagBag(LoggingMixin): def __init__( self, - dag_folder: Optional[str] = None, + dag_folder: Union[str, "pathlib.Path", None] = None, include_examples: bool = conf.getboolean('core', 'LOAD_EXAMPLES'), include_smart_sensor: bool = 
conf.getboolean('smart_sensor', 'USE_SMART_SENSOR'), safe_mode: bool = conf.getboolean('core', 'DAG_DISCOVERY_SAFE_MODE'), @@ -330,7 +333,7 @@ def _load_modules_from_zip(self, filepath, safe_mode): if not might_contain_dag(zip_info.filename, safe_mode, current_zip_file): # todo: create ignore list # Don't want to spam user with skip messages - if not self.has_logged or True: + if not self.has_logged: self.has_logged = True self.log.info( "File %s:%s assumed to contain no DAGs. Skipping.", filepath, zip_info.filename @@ -424,11 +427,11 @@ def bag_dag(self, dag, root_dag): def collect_dags( self, - dag_folder=None, - only_if_updated=True, - include_examples=conf.getboolean('core', 'LOAD_EXAMPLES'), - include_smart_sensor=conf.getboolean('smart_sensor', 'USE_SMART_SENSOR'), - safe_mode=conf.getboolean('core', 'DAG_DISCOVERY_SAFE_MODE'), + dag_folder: Union[str, "pathlib.Path", None] = None, + only_if_updated: bool = True, + include_examples: bool = conf.getboolean('core', 'LOAD_EXAMPLES'), + include_smart_sensor: bool = conf.getboolean('smart_sensor', 'USE_SMART_SENSOR'), + safe_mode: bool = conf.getboolean('core', 'DAG_DISCOVERY_SAFE_MODE'), ): """ Given a file path or a folder, this method looks for python modules, @@ -450,7 +453,8 @@ def collect_dags( # Used to store stats around DagBag processing stats = [] - dag_folder = correct_maybe_zipped(dag_folder) + # Ensure dag_folder is a str -- it may have been a pathlib.Path + dag_folder = correct_maybe_zipped(str(dag_folder)) for filepath in list_py_file_paths( dag_folder, safe_mode=safe_mode, diff --git a/airflow/models/dagrun.py b/airflow/models/dagrun.py index fe7b29cb62e50..f1c32b1e80aaa 100644 --- a/airflow/models/dagrun.py +++ b/airflow/models/dagrun.py @@ -576,7 +576,7 @@ def _emit_true_scheduling_delay_stats_for_finished_state(self, finished_tis): started task within the DAG and calculate the expected DagRun start time (based on dag.execution_date & dag.schedule_interval), and minus these two values to get the delay. The emitted data may contains outlier (e.g. when the first task was cleared, so - the second task's start_date will be used), but we can get rid of the the outliers + the second task's start_date will be used), but we can get rid of the outliers on the stats side through the dashboards tooling built. Note, the stat will only be emitted if the DagRun is a scheduler triggered one (i.e. external_trigger is False). @@ -611,9 +611,15 @@ def _emit_true_scheduling_delay_stats_for_finished_state(self, finished_tis): def _emit_duration_stats_for_finished_state(self): if self.state == State.RUNNING: return + if self.start_date is None: + self.log.warning('Failed to record duration of %s: start_date is not set.', self) + return + if self.end_date is None: + self.log.warning('Failed to record duration of %s: end_date is not set.', self) + return duration = self.end_date - self.start_date - if self.state is State.SUCCESS: + if self.state == State.SUCCESS: Stats.timing(f'dagrun.duration.success.{self.dag_id}', duration) elif self.state == State.FAILED: Stats.timing(f'dagrun.duration.failed.{self.dag_id}', duration) @@ -641,7 +647,7 @@ def verify_integrity(self, session: Session = None): except AirflowException: if ti.state == State.REMOVED: pass # ti has already been removed, just ignore it - elif self.state is not State.RUNNING and not dag.partial: + elif self.state != State.RUNNING and not dag.partial: self.log.warning("Failed to get task '%s' for dag '%s'. 
Marking it as removed.", ti, dag) Stats.incr(f"task_removed_from_dag.{dag.dag_id}", 1, 1) ti.state = State.REMOVED diff --git a/airflow/models/taskinstance.py b/airflow/models/taskinstance.py index d671a01065030..119116eb766ee 100644 --- a/airflow/models/taskinstance.py +++ b/airflow/models/taskinstance.py @@ -24,6 +24,7 @@ import pickle import signal import warnings +from collections import defaultdict from datetime import datetime, timedelta from tempfile import NamedTemporaryFile from typing import IO, Any, Dict, Iterable, List, NamedTuple, Optional, Tuple, Union @@ -146,6 +147,7 @@ def clear_task_instances( :param dag: DAG object """ job_ids = [] + task_id_by_key = defaultdict(lambda: defaultdict(lambda: defaultdict(set))) for ti in tis: if ti.state == State.RUNNING: if ti.job_id: @@ -166,13 +168,36 @@ def clear_task_instances( ti.max_tries = max(ti.max_tries, ti.prev_attempted_tries) ti.state = State.NONE session.merge(ti) + + task_id_by_key[ti.dag_id][ti.execution_date][ti.try_number].add(ti.task_id) + + if task_id_by_key: # Clear all reschedules related to the ti to clear - session.query(TR).filter( - TR.dag_id == ti.dag_id, - TR.task_id == ti.task_id, - TR.execution_date == ti.execution_date, - TR.try_number == ti.try_number, - ).delete() + + # This is an optimization for the common case where all tis are for a small number + # of dag_id, execution_date and try_number. Use a nested dict of dag_id, + # execution_date, try_number and task_id to construct the where clause in a + # hierarchical manner. This speeds up the delete statement by more than 40x for + # large number of tis (50k+). + conditions = or_( + and_( + TR.dag_id == dag_id, + or_( + and_( + TR.execution_date == execution_date, + or_( + and_(TR.try_number == try_number, TR.task_id.in_(task_ids)) + for try_number, task_ids in task_tries.items() + ), + ) + for execution_date, task_tries in dates.items() + ), + ) + for dag_id, dates in task_id_by_key.items() + ) + + delete_qry = TR.__table__.delete().where(conditions) + session.execute(delete_qry) if job_ids: from airflow.jobs.base_job import BaseJob @@ -248,7 +273,7 @@ class TaskInstance(Base, LoggingMixin): # pylint: disable=R0902,R0904 unixname = Column(String(1000)) job_id = Column(Integer) pool = Column(String(50), nullable=False) - pool_slots = Column(Integer, default=1) + pool_slots = Column(Integer, default=1, nullable=False) queue = Column(String(256)) priority_weight = Column(Integer) operator = Column(String(1000)) @@ -798,7 +823,8 @@ def get_previous_start_date( """ self.log.debug("previous_start_date was called") prev_ti = self.get_previous_ti(state=state, session=session) - return prev_ti and pendulum.instance(prev_ti.start_date) + # prev_ti may not exist and prev_ti.start_date may be None. 
+        return prev_ti and prev_ti.start_date and pendulum.instance(prev_ti.start_date)
 
     @property
     def previous_start_date_success(self) -> Optional[pendulum.DateTime]:
@@ -1452,7 +1478,10 @@ def handle_failure(
         test_mode = self.test_mode
 
         if error:
-            self.log.exception(error)
+            if isinstance(error, Exception):
+                self.log.exception("Task failed with exception")
+            else:
+                self.log.error("%s", error)
             # external monitoring process provides pickle file so _run_raw_task
             # can send its runtime errors for access by failure callback
             if error_file:
diff --git a/airflow/plugins_manager.py b/airflow/plugins_manager.py
index b68dbb93e0e6d..cf957fff63266 100644
--- a/airflow/plugins_manager.py
+++ b/airflow/plugins_manager.py
@@ -173,13 +173,23 @@ def is_valid_plugin(plugin_obj):
         return False
 
 
+def register_plugin(plugin_instance):
+    """
+    Start plugin load and register it after successful initialization
+
+    :param plugin_instance: subclass of AirflowPlugin
+    """
+    global plugins  # pylint: disable=global-statement
+    plugin_instance.on_load()
+    plugins.append(plugin_instance)
+
+
 def load_entrypoint_plugins():
     """
     Load and register plugins AirflowPlugin subclasses from the entrypoints.
     The entry_point group should be 'airflow.plugins'.
     """
     global import_errors  # pylint: disable=global-statement
-    global plugins  # pylint: disable=global-statement
 
     log.debug("Loading plugins from entrypoints")
 
@@ -191,10 +201,8 @@ def load_entrypoint_plugins():
                 continue
 
             plugin_instance = plugin_class()
-            if callable(getattr(plugin_instance, 'on_load', None)):
-                plugin_instance.on_load()
-                plugin_instance.source = EntryPointSource(entry_point, dist)
-                plugins.append(plugin_instance)
+            plugin_instance.source = EntryPointSource(entry_point, dist)
+            register_plugin(plugin_instance)
         except Exception as e:  # pylint: disable=broad-except
             log.exception("Failed to import plugin %s", entry_point.name)
             import_errors[entry_point.module] = str(e)
@@ -203,11 +211,9 @@ def load_entrypoint_plugins():
 def load_plugins_from_plugin_directory():
     """Load and register Airflow Plugins from plugins directory"""
     global import_errors  # pylint: disable=global-statement
-    global plugins  # pylint: disable=global-statement
     log.debug("Loading plugins from directory: %s", settings.PLUGINS_FOLDER)
 
     for file_path in find_path_from_directory(settings.PLUGINS_FOLDER, ".airflowignore"):
-
         if not os.path.isfile(file_path):
             continue
 
         mod_name, file_ext = os.path.splitext(os.path.split(file_path)[-1])
@@ -225,8 +231,7 @@ def load_plugins_from_plugin_directory():
             for mod_attr_value in (m for m in mod.__dict__.values() if is_valid_plugin(m)):
                 plugin_instance = mod_attr_value()
                 plugin_instance.source = PluginsDirectorySource(file_path)
-                plugins.append(plugin_instance)
-
+                register_plugin(plugin_instance)
         except Exception as e:  # pylint: disable=broad-except
             log.exception(e)
             log.error('Failed to import plugin %s', file_path)
diff --git a/airflow/provider.yaml.schema.json b/airflow/provider.yaml.schema.json
index fcdfdb1a27dce..bdec41dc3cc0b 100644
--- a/airflow/provider.yaml.schema.json
+++ b/airflow/provider.yaml.schema.json
@@ -42,6 +42,10 @@
                     "type": "string"
                 }
             },
+            "logo": {
+                "description": "Path to the logo for the integration. The path must start with '/integration-logos/'",
+                "type": "string"
+            },
             "tags": {
                 "description": "List of tags describing the integration.
While we're using RST, only one tag is supported per integration.", "type": "array", diff --git a/airflow/providers/.gitignore b/airflow/providers/.gitignore new file mode 100644 index 0000000000000..9b4a1a9d8f3ed --- /dev/null +++ b/airflow/providers/.gitignore @@ -0,0 +1 @@ +get_provider_info.py diff --git a/airflow/providers/CHANGELOG.rst b/airflow/providers/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. diff --git a/airflow/providers/README.md b/airflow/providers/README.md deleted file mode 100644 index 3955f29e0d91d..0000000000000 --- a/airflow/providers/README.md +++ /dev/null @@ -1,28 +0,0 @@ - - -# Airflow Providers - -Providers are logical abstractions of submodules that can be used to interface with various tools and endpoints from your Airflow DAGs. Each provider is grouped by the relevant top-level service that a user might need to interact with and submodules for specific forms of interaction, including hooks, operators, sensors, and transfers, exist within each provider directory. - -## Using Providers - -As of Airflow 2.0, the provider packages contained in this subdirectory will be versioned and released independently of the core Airflow codebase. That means that, in order to use the submodules contained within these provider directories, a user will need to install the relevant provider python package into their Airflow environment. The relevant pip commands to install these providers and their submodules are documented in READMEs within each provider subdirectory. - -Note that this does not mean that **all** Airflow operators will be abstracted away into python packages- core Airflow hooks and operators that exist in `airflow/operators` and `airflow/hooks` will continue to be included in core Airflow releases and directly accessible within any Airflow environment. diff --git a/airflow/providers/airbyte/CHANGELOG.rst b/airflow/providers/airbyte/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/airbyte/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. 
Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. diff --git a/airflow/providers/airbyte/__init__.py b/airflow/providers/airbyte/__init__.py new file mode 100644 index 0000000000000..217e5db960782 --- /dev/null +++ b/airflow/providers/airbyte/__init__.py @@ -0,0 +1,17 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/airflow_pylint/__init__.py b/airflow/providers/airbyte/example_dags/__init__.py similarity index 100% rename from tests/airflow_pylint/__init__.py rename to airflow/providers/airbyte/example_dags/__init__.py diff --git a/airflow/providers/airbyte/example_dags/example_airbyte_trigger_job.py b/airflow/providers/airbyte/example_dags/example_airbyte_trigger_job.py new file mode 100644 index 0000000000000..1ac62a88e3641 --- /dev/null +++ b/airflow/providers/airbyte/example_dags/example_airbyte_trigger_job.py @@ -0,0 +1,64 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +"""Example DAG demonstrating the usage of the AirbyteTriggerSyncOperator.""" + +from datetime import timedelta + +from airflow import DAG +from airflow.providers.airbyte.operators.airbyte import AirbyteTriggerSyncOperator +from airflow.providers.airbyte.sensors.airbyte import AirbyteJobSensor +from airflow.utils.dates import days_ago + +args = { + 'owner': 'airflow', +} + +with DAG( + dag_id='example_airbyte_operator', + default_args=args, + schedule_interval=None, + start_date=days_ago(1), + dagrun_timeout=timedelta(minutes=60), + tags=['example'], +) as dag: + + # [START howto_operator_airbyte_synchronous] + sync_source_destination = AirbyteTriggerSyncOperator( + task_id='airbyte_sync_source_dest_example', + airbyte_conn_id='airbyte_default', + connection_id='15bc3800-82e4-48c3-a32d-620661273f28', + ) + # [END howto_operator_airbyte_synchronous] + + # [START howto_operator_airbyte_asynchronous] + async_source_destination = AirbyteTriggerSyncOperator( + task_id='airbyte_async_source_dest_example', + airbyte_conn_id='airbyte_default', + connection_id='15bc3800-82e4-48c3-a32d-620661273f28', + asynchronous=True, + ) + + airbyte_sensor = AirbyteJobSensor( + task_id='airbyte_sensor_source_dest_example', + airbyte_job_id="{{task_instance.xcom_pull(task_ids='airbyte_async_source_dest_example')}}", + airbyte_conn_id='airbyte_default', + ) + # [END howto_operator_airbyte_asynchronous] + + async_source_destination >> airbyte_sensor diff --git a/airflow/providers/airbyte/hooks/__init__.py b/airflow/providers/airbyte/hooks/__init__.py new file mode 100644 index 0000000000000..217e5db960782 --- /dev/null +++ b/airflow/providers/airbyte/hooks/__init__.py @@ -0,0 +1,17 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/airflow/providers/airbyte/hooks/airbyte.py b/airflow/providers/airbyte/hooks/airbyte.py new file mode 100644 index 0000000000000..0aeb4f887f321 --- /dev/null +++ b/airflow/providers/airbyte/hooks/airbyte.py @@ -0,0 +1,109 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+import time
+from typing import Any, Optional
+
+from airflow.exceptions import AirflowException
+from airflow.providers.http.hooks.http import HttpHook
+
+
+class AirbyteHook(HttpHook):
+    """
+    Hook for Airbyte API
+
+    :param airbyte_conn_id: Required. The name of the Airflow connection to get
+        connection information for Airbyte.
+    :type airbyte_conn_id: str
+    :param api_version: Optional. Airbyte API version.
+    :type api_version: str
+    """
+
+    RUNNING = "running"
+    SUCCEEDED = "succeeded"
+    CANCELLED = "cancelled"
+    PENDING = "pending"
+    FAILED = "failed"
+    ERROR = "error"
+
+    def __init__(self, airbyte_conn_id: str = "airbyte_default", api_version: Optional[str] = "v1") -> None:
+        super().__init__(http_conn_id=airbyte_conn_id)
+        self.api_version: str = api_version
+
+    def wait_for_job(
+        self, job_id: str, wait_seconds: Optional[float] = 3, timeout: Optional[float] = 3600
+    ) -> None:
+        """
+        Helper method which polls a job to check if it finishes.
+
+        :param job_id: Required. Id of the Airbyte job
+        :type job_id: str
+        :param wait_seconds: Optional. Number of seconds between checks.
+        :type wait_seconds: float
+        :param timeout: Optional. How many seconds to wait for the job to be ready.
+            Used only if ``asynchronous`` is False.
+        :type timeout: float
+        """
+        state = None
+        start = time.monotonic()
+        while True:
+            if timeout and start + timeout < time.monotonic():
+                raise AirflowException(f"Timeout: Airbyte job {job_id} is not ready after {timeout}s")
+            time.sleep(wait_seconds)
+            try:
+                job = self.get_job(job_id=job_id)
+                state = job.json()["job"]["status"]
+            except AirflowException as err:
+                self.log.info("Retrying. Airbyte API returned server error when waiting for job: %s", err)
+                continue
+
+            if state in (self.RUNNING, self.PENDING):
+                continue
+            if state == self.SUCCEEDED:
+                break
+            if state == self.ERROR:
+                raise AirflowException(f"Job failed:\n{job}")
+            elif state == self.CANCELLED:
+                raise AirflowException(f"Job was cancelled:\n{job}")
+            else:
+                raise Exception(f"Encountered unexpected state `{state}` for job_id `{job_id}`")
+
+    def submit_sync_connection(self, connection_id: str) -> Any:
+        """
+        Submits a job to an Airbyte server.
+
+        :param connection_id: Required. The ConnectionId of the Airbyte Connection.
+        :type connection_id: str
+        """
+        return self.run(
+            endpoint=f"api/{self.api_version}/connections/sync",
+            json={"connectionId": connection_id},
+            headers={"accept": "application/json"},
+        )
+
+    def get_job(self, job_id: int) -> Any:
+        """
+        Gets the resource representation for a job in Airbyte.
+
+        :param job_id: Required. Id of the Airbyte job
+        :type job_id: int
+        """
+        return self.run(
+            endpoint=f"api/{self.api_version}/jobs/get",
+            json={"id": job_id},
+            headers={"accept": "application/json"},
+        )
diff --git a/airflow/providers/airbyte/operators/__init__.py b/airflow/providers/airbyte/operators/__init__.py
new file mode 100644
index 0000000000000..217e5db960782
--- /dev/null
+++ b/airflow/providers/airbyte/operators/__init__.py
@@ -0,0 +1,17 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/airflow/providers/airbyte/operators/airbyte.py b/airflow/providers/airbyte/operators/airbyte.py
new file mode 100644
index 0000000000000..6932fa31a2a9d
--- /dev/null
+++ b/airflow/providers/airbyte/operators/airbyte.py
@@ -0,0 +1,85 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+from typing import Optional
+
+from airflow.models import BaseOperator
+from airflow.providers.airbyte.hooks.airbyte import AirbyteHook
+from airflow.utils.decorators import apply_defaults
+
+
+class AirbyteTriggerSyncOperator(BaseOperator):
+    """
+    This operator allows you to submit a job to an Airbyte server to run an integration
+    process between your source and destination.
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:AirbyteTriggerSyncOperator`
+
+    :param airbyte_conn_id: Required. The name of the Airflow connection to get connection
+        information for Airbyte.
+    :type airbyte_conn_id: str
+    :param connection_id: Required. The Airbyte ConnectionId UUID between a source and destination.
+    :type connection_id: str
+    :param asynchronous: Optional. Flag to get job_id after submitting the job to the Airbyte API.
+        This is useful for submitting long running jobs and
+        waiting on them asynchronously using the AirbyteJobSensor.
+    :type asynchronous: bool
+    :param api_version: Optional. Airbyte API version.
+    :type api_version: str
+    :param wait_seconds: Optional. Number of seconds between checks. Only used when ``asynchronous`` is False.
+    :type wait_seconds: float
+    :param timeout: Optional. The amount of time, in seconds, to wait for the request to complete.
+        Only used when ``asynchronous`` is False.
+    :type timeout: float
+    """
+
+    template_fields = ('connection_id',)
+
+    @apply_defaults
+    def __init__(
+        self,
+        connection_id: str,
+        airbyte_conn_id: str = "airbyte_default",
+        asynchronous: Optional[bool] = False,
+        api_version: Optional[str] = "v1",
+        wait_seconds: Optional[float] = 3,
+        timeout: Optional[float] = 3600,
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+        self.airbyte_conn_id = airbyte_conn_id
+        self.connection_id = connection_id
+        self.timeout = timeout
+        self.api_version = api_version
+        self.wait_seconds = wait_seconds
+        self.asynchronous = asynchronous
+
+    def execute(self, context):
+        """Create an Airbyte job, wait for it to finish unless asynchronous, and return the job id."""
+        hook = AirbyteHook(airbyte_conn_id=self.airbyte_conn_id, api_version=self.api_version)
+        job_object = hook.submit_sync_connection(connection_id=self.connection_id)
+        job_id = job_object.json()['job']['id']
+
+        self.log.info("Job %s was submitted to Airbyte Server", job_id)
+        if not self.asynchronous:
+            self.log.info('Waiting for job %s to complete', job_id)
+            hook.wait_for_job(job_id=job_id, wait_seconds=self.wait_seconds, timeout=self.timeout)
+            self.log.info('Job %s completed successfully', job_id)
+
+        return job_id
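For illustration, a minimal DAG sketch using the new operator in its default blocking mode (the DAG id, conn id, and connection UUID are hypothetical placeholders; an Airbyte server must be reachable via the ``airbyte_default`` connection):

```python
from datetime import datetime

from airflow import DAG
from airflow.providers.airbyte.operators.airbyte import AirbyteTriggerSyncOperator

# Hypothetical DAG: trigger one Airbyte connection sync and block until it finishes.
with DAG(dag_id="example_airbyte_sync", start_date=datetime(2021, 1, 1), schedule_interval=None) as dag:
    sync_source_dest = AirbyteTriggerSyncOperator(
        task_id="airbyte_sync_source_dest",
        airbyte_conn_id="airbyte_default",
        connection_id="15bc3800-82e4-48c3-a32d-620661273f28",  # hypothetical Airbyte ConnectionId
        asynchronous=False,  # wait_for_job is called inside execute()
        wait_seconds=3,
        timeout=3600,
    )
```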
diff --git a/airflow/providers/airbyte/provider.yaml b/airflow/providers/airbyte/provider.yaml
new file mode 100644
index 0000000000000..77b109f45058d
--- /dev/null
+++ b/airflow/providers/airbyte/provider.yaml
@@ -0,0 +1,51 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+---
+package-name: apache-airflow-providers-airbyte
+name: Airbyte
+description: |
+    `Airbyte <https://www.airbyte.io/>`__
+
+versions:
+  - 1.0.0
+
+integrations:
+  - integration-name: Airbyte
+    external-doc-url: https://www.airbyte.io/
+    logo: /integration-logos/airbyte/Airbyte.png
+    how-to-guide:
+      - /docs/apache-airflow-providers-airbyte/operators/airbyte.rst
+    tags: [service]
+
+operators:
+  - integration-name: Airbyte
+    python-modules:
+      - airflow.providers.airbyte.operators.airbyte
+
+hooks:
+  - integration-name: Airbyte
+    python-modules:
+      - airflow.providers.airbyte.hooks.airbyte
+
+sensors:
+  - integration-name: Airbyte
+    python-modules:
+      - airflow.providers.airbyte.sensors.airbyte
+
+hook-class-names:
+  - airflow.providers.airbyte.hooks.airbyte.AirbyteHook
diff --git a/airflow/providers/airbyte/sensors/__init__.py b/airflow/providers/airbyte/sensors/__init__.py
new file mode 100644
index 0000000000000..13a83393a9124
--- /dev/null
+++ b/airflow/providers/airbyte/sensors/__init__.py
@@ -0,0 +1,16 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/airflow/providers/airbyte/sensors/airbyte.py b/airflow/providers/airbyte/sensors/airbyte.py
new file mode 100644
index 0000000000000..9799ade881f2e
--- /dev/null
+++ b/airflow/providers/airbyte/sensors/airbyte.py
@@ -0,0 +1,73 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+"""This module contains an Airbyte Job sensor."""
+from typing import Optional
+
+from airflow.exceptions import AirflowException
+from airflow.providers.airbyte.hooks.airbyte import AirbyteHook
+from airflow.sensors.base import BaseSensorOperator
+from airflow.utils.decorators import apply_defaults
+
+
+class AirbyteJobSensor(BaseSensorOperator):
+    """
+    Check for the state of a previously submitted Airbyte job.
+
+    :param airbyte_job_id: Required. Id of the Airbyte job
+    :type airbyte_job_id: str
+    :param airbyte_conn_id: Required. The name of the Airflow connection to get
+        connection information for Airbyte.
+    :type airbyte_conn_id: str
+    :param api_version: Optional. Airbyte API version.
+    :type api_version: str
+    """
+
+    template_fields = ('airbyte_job_id',)
+    ui_color = '#6C51FD'
+
+    @apply_defaults
+    def __init__(
+        self,
+        *,
+        airbyte_job_id: str,
+        airbyte_conn_id: str = 'airbyte_default',
+        api_version: Optional[str] = "v1",
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+        self.airbyte_conn_id = airbyte_conn_id
+        self.airbyte_job_id = airbyte_job_id
+        self.api_version = api_version
+
+    def poke(self, context: dict) -> bool:
+        hook = AirbyteHook(airbyte_conn_id=self.airbyte_conn_id, api_version=self.api_version)
+        job = hook.get_job(job_id=self.airbyte_job_id)
+        status = job.json()['job']['status']
+
+        if status == hook.FAILED:
+            raise AirflowException(f"Job failed:\n{job}")
+        elif status == hook.CANCELLED:
+            raise AirflowException(f"Job was cancelled:\n{job}")
+        elif status == hook.SUCCEEDED:
+            self.log.info("Job %s completed successfully.", self.airbyte_job_id)
+            return True
+        elif status == hook.ERROR:
+            self.log.info("Job %s attempt has failed.", self.airbyte_job_id)
+
+        self.log.info("Waiting for job %s to complete.", self.airbyte_job_id)
+        return False
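For illustration, a sketch of the asynchronous pattern the operator docstring refers to: submit with ``asynchronous=True`` so ``execute()`` returns the job id immediately (pushed to XCom), then let the sensor do the polling. The DAG id, task ids, and UUID are again hypothetical placeholders:

```python
from datetime import datetime

from airflow import DAG
from airflow.providers.airbyte.operators.airbyte import AirbyteTriggerSyncOperator
from airflow.providers.airbyte.sensors.airbyte import AirbyteJobSensor

with DAG(dag_id="example_airbyte_async", start_date=datetime(2021, 1, 1), schedule_interval=None) as dag:
    async_sync = AirbyteTriggerSyncOperator(
        task_id="airbyte_async_sync",
        connection_id="15bc3800-82e4-48c3-a32d-620661273f28",  # hypothetical Airbyte ConnectionId
        asynchronous=True,  # returns the job id without waiting for completion
    )
    # airbyte_job_id is a templated field, so the job id can be pulled from XCom.
    wait_for_sync = AirbyteJobSensor(
        task_id="airbyte_sensor",
        airbyte_job_id="{{ task_instance.xcom_pull(task_ids='airbyte_async_sync') }}",
    )
    async_sync >> wait_for_sync
```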
diff --git a/airflow/providers/amazon/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/amazon/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index 546eb8d26fe46..0000000000000 --- a/airflow/providers/amazon/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,106 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [992a18c84](https://github.com/apache/airflow/commit/992a18c84a355d13e821c703e7364f12233c37dc) | 2020-06-19 | Move MySqlToS3Operator to transfers (#9400) | -| [a60f589aa](https://github.com/apache/airflow/commit/a60f589aa251cc3df6bec5b306ad4a7f736f539f) | 2020-06-19 | Add MySqlToS3Operator (#9054) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [40bf8f28f](https://github.com/apache/airflow/commit/40bf8f28f97f17f40d993d207ea740eba54593ee) | 2020-06-18 | Detect automatically the lack of reference to the guide in the operator descriptions (#9290) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [58a8ec0e4](https://github.com/apache/airflow/commit/58a8ec0e46f624ee0369dd156dd8fb4f81884a21) | 2020-06-16 | AWSBatchOperator <> ClientHook relation changed to composition (#9306) | -| [a80cd25e8](https://github.com/apache/airflow/commit/a80cd25e8eb7f8b5d89af26cdcd62a5bbe44d65c) | 2020-06-15 | Close/Flush byte stream in s3 hook load_string and load_bytes (#9211) | -| [ffb857403](https://github.com/apache/airflow/commit/ffb85740373f7adb70d28ec7d5a8886380170e5e) | 2020-06-14 | Decrypt secrets from SystemsManagerParameterStoreBackend (#9214) | -| [a69b031f2](https://github.com/apache/airflow/commit/a69b031f20c5a1cd032f9873394374f661811e8f) | 2020-06-10 | Add S3ToRedshift example dag and system test (#8877) | -| [17adcea83](https://github.com/apache/airflow/commit/17adcea835cb7b0cf2d8da0ac7dda5549cfa3e45) | 2020-06-02 | Fix handling of subprocess error handling in s3_file_transform and gcs (#9106) | -| [357e11e0c](https://github.com/apache/airflow/commit/357e11e0cfb4c02833018e073bc4f5e5b52fae4f) | 2020-05-29 | Add Delete/Create S3 bucket operators (#8895) | -| [1ed171bfb](https://github.com/apache/airflow/commit/1ed171bfb265ded8674058bdc425640d25f1f4fc) | 2020-05-28 | Add script_args for S3FileTransformOperator (#9019) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [f946f96da](https://github.com/apache/airflow/commit/f946f96da45d8e6101805450d8cab7ccb2774ad0) | 2020-05-23 | Old json boto compat removed from dynamodb_to_s3 operator (#8987) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [f4edd90a9](https://github.com/apache/airflow/commit/f4edd90a94b8f91bbefbbbfba367372399559596) | 2020-05-16 | Speed up TestAwsLambdaHook by not actually running a function (#8882) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [85bbab27d](https://github.com/apache/airflow/commit/85bbab27dbb4f55f6f322b894fe3d54797076c15) | 2020-05-15 | Add EMR operators howto docs (#8863) | -| [e61b9bb9b](https://github.com/apache/airflow/commit/e61b9bb9bbe6d8a0621310f3583483b9135c6770) | 2020-05-13 | Add AWS EMR System tests (#8618) | -| [ed3f5131a](https://github.com/apache/airflow/commit/ed3f5131a27e2ef0422f2495a4532630a6204f82) | 2020-05-13 | Correctly pass sleep time from AWSAthenaOperator down to the hook.
(#8845) | -| [7236862a1](https://github.com/apache/airflow/commit/7236862a1f5361b5e99c03dd63dae9b966efcd24) | 2020-05-12 | [AIRFLOW-2310] Enable AWS Glue Job Integration (#6007) | -| [d590e5e76](https://github.com/apache/airflow/commit/d590e5e7679322bebb1472fa8c7ec6d183e4154a) | 2020-05-11 | Add option to propagate tags in ECSOperator (#8811) | -| [0c3db84c3](https://github.com/apache/airflow/commit/0c3db84c3ce5107f53ed5ecc48edfdfe1b97feff) | 2020-05-11 | [AIRFLOW-7068] Create EC2 Hook, Operator and Sensor (#7731) | -| [cbebed2b4](https://github.com/apache/airflow/commit/cbebed2b4d0bd1e0984c331c0270e83bf8df8540) | 2020-05-10 | Allow passing backend_kwargs to AWS SSM client (#8802) | -| [c7788a689](https://github.com/apache/airflow/commit/c7788a6894cb79c22153434dd9b977393b8236be) | 2020-05-10 | Add imap_attachment_to_s3 example dag and system test (#8669) | -| [ff5b70149](https://github.com/apache/airflow/commit/ff5b70149bf51012156378c8fc8b072c7c280d9d) | 2020-05-07 | Add google_api_to_s3_transfer example dags and system tests (#8581) | -| [4421f011e](https://github.com/apache/airflow/commit/4421f011eeec2d1022a39933e27f530fb9f9c1b1) | 2020-05-01 | Improve template capabilities of EMR job and step operators (#8572) | -| [379a884d6](https://github.com/apache/airflow/commit/379a884d645a4d73db1c81e3450adc82571989ea) | 2020-04-28 | fix: aws hook should work without conn id (#8534) | -| [74bc316c5](https://github.com/apache/airflow/commit/74bc316c56192f14677e9406d3878887a836062b) | 2020-04-27 | [AIRFLOW-4438] Add Gzip compression to S3_hook (#8571) | -| [7ea66a1a9](https://github.com/apache/airflow/commit/7ea66a1a9594704869e82513d3a06fe35b6109b2) | 2020-04-26 | Add example DAG for ECSOperator (#8452) | -| [b6434dedf](https://github.com/apache/airflow/commit/b6434dedf974085e5f8891446fa63104836c8fdf) | 2020-04-24 | [AIRFLOW-7111] Add generate_presigned_url method to S3Hook (#8441) | -| [becedd5af](https://github.com/apache/airflow/commit/becedd5af8df01a0210e0a3fa78e619785f39908) | 2020-04-19 | Remove unrelated EC2 references in ECSOperator (#8451) | -| [ab1290cb0](https://github.com/apache/airflow/commit/ab1290cb0c5856fa85c8596bfdf780fcdfd99c31) | 2020-04-13 | Make launch_type parameter optional (#8248) | -| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09 | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170) | -| [b46d6c060](https://github.com/apache/airflow/commit/b46d6c060280da59193a28cf67e791eb825cb51c) | 2020-04-08 | Add support for AWS Secrets Manager as Secrets Backend (#8186) | -| [68d1714f2](https://github.com/apache/airflow/commit/68d1714f296989b7aad1a04b75dc033e76afb747) | 2020-04-04 | [AIRFLOW-6822] AWS hooks should cache boto3 client (#7541) | -| [8a0240257](https://github.com/apache/airflow/commit/8a02402576f83869d5134b4bddef5d73c15a8320) | 2020-03-31 | Rename CloudBaseHook to GoogleBaseHook and move it to google.common (#8011) | -| [7239d9a82](https://github.com/apache/airflow/commit/7239d9a82dbb3b9bdf27b531daa70338af9dd796) | 2020-03-28 | Get Airflow Variables from AWS Systems Manager Parameter Store (#7945) | -| [eb4af4f94](https://github.com/apache/airflow/commit/eb4af4f944c77e67e167bbb6b0a2aaf075a95b50) | 2020-03-28 | Make BaseSecretsBackend.build_path generic (#7948) | -| [438da7241](https://github.com/apache/airflow/commit/438da7241eb537e3ef5ae711629446155bf738a3) | 2020-03-28 | [AIRFLOW-5825] SageMakerEndpointOperator is not idempotent (#7891) | -| 
[686d7d50b](https://github.com/apache/airflow/commit/686d7d50bd21622724d6818021355bc6885fd3de) | 2020-03-25 | Standardize SecretBackend class names (#7846) | -| [eef87b995](https://github.com/apache/airflow/commit/eef87b9953347a65421f315a07dbef37ded9df66) | 2020-03-23 | [AIRFLOW-7105] Unify Secrets Backend method interfaces (#7830) | -| [5648dfbc3](https://github.com/apache/airflow/commit/5648dfbc300337b10567ef4e07045ea29d33ec06) | 2020-03-23 | Add missing call to Super class in 'amazon', 'cloudant & 'databricks' providers (#7827) | -| [a36002412](https://github.com/apache/airflow/commit/a36002412334c445e4eab41fdbb85ef31b6fd384) | 2020-03-19 | [AIRFLOW-5705] Make AwsSsmSecretsBackend consistent with VaultBackend (#7753) | -| [2a54512d7](https://github.com/apache/airflow/commit/2a54512d785ba603ba71381dc3dfa049e9f74063) | 2020-03-17 | [AIRFLOW-5705] Fix bugs in AWS SSM Secrets Backend (#7745) | -| [a8b5fc74d](https://github.com/apache/airflow/commit/a8b5fc74d07e50c91bb64cb66ca1a450aa5ce6e1) | 2020-03-16 | [AIRFLOW-4175] S3Hook load_file should support ACL policy paramete (#7733) | -| [e31e9ddd2](https://github.com/apache/airflow/commit/e31e9ddd2332e5d92422baf668acee441646ad68) | 2020-03-14 | [AIRFLOW-5705] Add secrets backend and support for AWS SSM (#6376) | -| [3bb60afc7](https://github.com/apache/airflow/commit/3bb60afc7b8319996385d681faac342afe2b3bd2) | 2020-03-13 | [AIRFLOW-6975] Base AWSHook AssumeRoleWithSAML (#7619) | -| [c0c5f11ad](https://github.com/apache/airflow/commit/c0c5f11ad11a5a38e0553c1a36aa75eb83efae51) | 2020-03-12 | [AIRFLOW-6884] Make SageMakerTrainingOperator idempotent (#7598) | -| [b7cdda1c6](https://github.com/apache/airflow/commit/b7cdda1c64595bc7f85519337029de259e573fce) | 2020-03-10 | [AIRFLOW-4438] Add Gzip compression to S3_hook (#7680) | -| [42eef3821](https://github.com/apache/airflow/commit/42eef38217e709bc7a7f71bf0286e9e61293a43e) | 2020-03-07 | [AIRFLOW-6877] Add cross-provider dependencies as extras (#7506) | -| [9a94ab246](https://github.com/apache/airflow/commit/9a94ab246db8c09aa83bb6a6d245b1ca9563bcd9) | 2020-03-01 | [AIRFLOW-6962] Fix compeleted to completed (#7600) | -| [1b38f6d9b](https://github.com/apache/airflow/commit/1b38f6d9b6710bd5e25fc16883599f1842ab7cb9) | 2020-02-29 | [AIRFLOW-5908] Add download_file to S3 Hook (#6577) | -| [3ea3e1a2b](https://github.com/apache/airflow/commit/3ea3e1a2b580b7ed10efe668de0cc37b03673500) | 2020-02-26 | [AIRFLOW-6824] EMRAddStepsOperator problem with multi-step XCom (#7443) | -| [6eaa7e3b1](https://github.com/apache/airflow/commit/6eaa7e3b1845644d5ec65a00a997f4029bec9628) | 2020-02-25 | [AIRFLOW-5924] Automatically unify bucket name and key in S3Hook (#6574) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [7d0e7122d](https://github.com/apache/airflow/commit/7d0e7122dd14576d834c6f66fe919a72b100b7f8) | 2020-02-24 | [AIRFLOW-6830] Add Subject/MessageAttributes to SNS hook and operator (#7451) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [47a922b86](https://github.com/apache/airflow/commit/47a922b86426968bfa07cc7892d2eeeca761d884) | 2020-02-21 | [AIRFLOW-6854] Fix missing typing_extensions on python 3.8 (#7474) | -| 
[9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18 | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) | -| [58c3542ed](https://github.com/apache/airflow/commit/58c3542ed25061320ce61dbe0adf451a44c738dd) | 2020-02-12 | [AIRFLOW-5231] Fix S3Hook.delete_objects method (#7375) | -| [b7aa778b3](https://github.com/apache/airflow/commit/b7aa778b38df2f116a1c20031e72fea8b97315bf) | 2020-02-10 | [AIRFLOW-6767] Correct name for default Athena workgroup (#7394) | -| [9282185e6](https://github.com/apache/airflow/commit/9282185e6624e64bb7f17447f81c1b2d1bb4d56d) | 2020-02-09 | [AIRFLOW-6761] Fix WorkGroup param in AWSAthenaHook (#7386) | -| [94fccca97](https://github.com/apache/airflow/commit/94fccca97030ee59d89f302a98137b17e7b01a33) | 2020-02-04 | [AIRFLOW-XXXX] Add pre-commit check for utf-8 file encoding (#7347) | -| [f3ad5cf61](https://github.com/apache/airflow/commit/f3ad5cf6185b9d406d0fb0a4ecc0b5536f79217a) | 2020-02-03 | [AIRFLOW-4681] Make sensors module pylint compatible (#7309) | -| [88e40c714](https://github.com/apache/airflow/commit/88e40c714d2853aa8966796945b2907c263fed08) | 2020-02-03 | [AIRFLOW-6716] Fix AWS Datasync Example DAG (#7339) | -| [a311d3d82](https://github.com/apache/airflow/commit/a311d3d82e0c2e32bcb56e29f33c95ed0a2a2ddc) | 2020-02-03 | [AIRFLOW-6718] Fix more occurrences of utils.dates.days_ago (#7341) | -| [cb766b05b](https://github.com/apache/airflow/commit/cb766b05b17b80fd54a5ce6ac3ee35a631115000) | 2020-02-03 | [AIRFLOW-XXXX] Fix Static Checks on CI (#7342) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [7527eddc5](https://github.com/apache/airflow/commit/7527eddc5e9729aa7e732209a07d57985f6c73e4) | 2020-02-02 | [AIRFLOW-4364] Make all code in airflow/providers/amazon pylint compatible (#7336) | -| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02 | [AIRFLOW-6708] Set unique logger names (#7330) | -| [63aa3db88](https://github.com/apache/airflow/commit/63aa3db88f8824efe79622301efd9f8ba75b991c) | 2020-02-02 | [AIRFLOW-6258] Add CloudFormation operators to AWS providers (#6824) | -| [af4157fde](https://github.com/apache/airflow/commit/af4157fdeffc0c18492b518708c0db44815067ab) | 2020-02-02 | [AIRFLOW-6672] AWS DataSync - better logging of error message (#7288) | -| [373c6aa4a](https://github.com/apache/airflow/commit/373c6aa4a208284b5ff72987e4bd8f4e2ada1a1b) | 2020-01-30 | [AIRFLOW-6682] Move GCP classes to providers package (#7295) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [1988a97e8](https://github.com/apache/airflow/commit/1988a97e8f687e28a5a39b29677fb514e097753c) | 2020-01-28 | [AIRFLOW-6659] Move AWS Transfer operators to providers package (#7274) | -| [ab10443e9](https://github.com/apache/airflow/commit/ab10443e965269efe9c1efaf5fa33bcdbe609f13) | 2020-01-28 | [AIRFLOW-6424] Added a operator to modify EMR cluster (#7213) | -| [40246132a](https://github.com/apache/airflow/commit/40246132a7ef3b07fe3173c6e7646ed6b53aad6e) | 2020-01-28 | [AIRFLOW-6654] AWS DataSync - bugfix when creating locations (#7270) | -| [82c0e5aff](https://github.com/apache/airflow/commit/82c0e5aff6004f636b98e207c3caec40b403fbbe) | 2020-01-28 | [AIRFLOW-6655] Move AWS classes to 
providers (#7271) | -| [599e4791c](https://github.com/apache/airflow/commit/599e4791c91cff411b1bf1c45555db5094c2b420) | 2020-01-18 | [AIRFLOW-6541] Use EmrJobFlowSensor for other states (#7146) | -| [c319e81ca](https://github.com/apache/airflow/commit/c319e81cae1de31ad1373903252d8608ffce1fba) | 2020-01-17 | [AIRFLOW-6572] Move AWS classes to providers.amazon.aws package (#7178) | -| [941a07057](https://github.com/apache/airflow/commit/941a070578bc7d9410715b89658548167352cc4d) | 2020-01-15 | [AIRFLOW-6570] Add dag tag for all example dag (#7176) | -| [78d8fe694](https://github.com/apache/airflow/commit/78d8fe6944b689b9b0af99255286e34e06eedec3) | 2020-01-08 | [AIRFLOW-6245] Add custom waiters for AWS batch jobs (#6811) | -| [e0b022725](https://github.com/apache/airflow/commit/e0b022725749181bd4e30933e4a0ffefb993eede) | 2019-12-28 | [AIRFLOW-6319] Add support for AWS Athena workgroups (#6871) | -| [57da45685](https://github.com/apache/airflow/commit/57da45685457520d51a0967e2aeb5e5ff162dfa7) | 2019-12-24 | [AIRFLOW-6333] Bump Pylint to 2.4.4 & fix/disable new checks (#6888) | -| [cf647c27e](https://github.com/apache/airflow/commit/cf647c27e0f35bbd1183bfcf87a106cbdb69d3fa) | 2019-12-18 | [AIRFLOW-6038] AWS DataSync reworked (#6773) | -| [7502cad28](https://github.com/apache/airflow/commit/7502cad2844139d57e4276d971c0706a361d9dbe) | 2019-12-17 | [AIRFLOW-6206] Move and rename AWS batch operator [AIP-21] (#6764) | -| [c4c635df6](https://github.com/apache/airflow/commit/c4c635df6906f56e01724573923e19763bb0da62) | 2019-12-17 | [AIRFLOW-6083] Adding ability to pass custom configuration to lambda client. (#6678) | -| [4fb498f87](https://github.com/apache/airflow/commit/4fb498f87ef89acc30f2576ebc5090ab0653159e) | 2019-12-09 | [AIRFLOW-6072] aws_hook: Outbound http proxy setting and other enhancements (#6686) | -| [a1e2f8635](https://github.com/apache/airflow/commit/a1e2f863526973b17892ec31caf09eded95c1cd2) | 2019-11-20 | [AIRFLOW-6021] Replace list literal with list constructor (#6617) | -| [baae14084](https://github.com/apache/airflow/commit/baae140847cdf9d84e905fb6d1f119d6950eecf9) | 2019-11-19 | [AIRFLOW-5781] AIP-21 Migrate AWS Kinesis to /providers/amazon/aws (#6588) | -| [504cfbac1](https://github.com/apache/airflow/commit/504cfbac1a4ec2e2fd169523ed357808f63881bb) | 2019-11-18 | [AIRFLOW-5783] AIP-21 Move aws redshift into providers structure (#6539) | -| [992f0e3ac](https://github.com/apache/airflow/commit/992f0e3acf11163294508858515a5f79116e3ad8) | 2019-11-12 | AIRFLOW-5824: AWS DataSync Hook and Operators added (#6512) | -| [c015eb2f6](https://github.com/apache/airflow/commit/c015eb2f6496b9721afda9e85d5d4af3bbe0696b) | 2019-11-10 | [AIRFLOW-5786] Migrate AWS SNS to /providers/amazon/aws (#6502) | -| [3d76fb4bf](https://github.com/apache/airflow/commit/3d76fb4bf25e5b7d3d30e0d64867b5999b77f0b0) | 2019-11-09 | [AIRFLOW-5782] Migrate AWS Lambda to /providers/amazon/aws [AIP-21] (#6518) | diff --git a/airflow/providers/amazon/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/amazon/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 5b2ef6f7b1c54..0000000000000 --- a/airflow/providers/amazon/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,65 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------------| -| 
[ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [00ffedb8c](https://github.com/apache/airflow/commit/00ffedb8c402eb5638782628eb706a5f28215eac) | 2020-09-30 | Add amazon glacier to GCS transfer operator (#10947) | -| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24 | Fix incorrect Usage of Optional[bool] (#11138) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [b61225a88](https://github.com/apache/airflow/commit/b61225a8850b20be17842c2428b91d873584c4da) | 2020-09-21 | Add D204 pydocstyle check (#11031) | -| [2410f592a](https://github.com/apache/airflow/commit/2410f592a4ab160b377f1a9e5de3b7262b9851cc) | 2020-09-19 | Get Airflow configs with sensitive data from AWS Systems Manager (#11023) | -| [2bf7b7cac](https://github.com/apache/airflow/commit/2bf7b7cac7858f5a6a495f1a9eb4780ec84f95b4) | 2020-09-19 | Add typing to amazon provider EMR (#10910) | -| [9edfcb7ac](https://github.com/apache/airflow/commit/9edfcb7ac46917836ec956264da8876e58d92392) | 2020-09-19 | Support extra_args in S3Hook and GCSToS3Operator (#11001) | -| [4e1f3a69d](https://github.com/apache/airflow/commit/4e1f3a69db8614c302e4916332555034053b935c) | 2020-09-14 | [AIRFLOW-10645] Add AWS Secrets Manager Hook (#10655) | -| [e9add7916](https://github.com/apache/airflow/commit/e9add79160e3a16bb348e30f4e83386a371dbc1e) | 2020-09-14 | Fix Failing static tests on Master (#10927) | -| [383a118d2](https://github.com/apache/airflow/commit/383a118d2df618e46d81c520cd2c4a31d81b33dd) | 2020-09-14 | Add more type annotations to AWS hooks (#10671) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [2934220dc](https://github.com/apache/airflow/commit/2934220dc98e295764f7791d33e121629ed2fbbb) | 2020-09-08 | Always return a list from S3Hook list methods (#10774) | -| [f40ac9b15](https://github.com/apache/airflow/commit/f40ac9b151124dbcd87197d6ae38c85191d41f38) | 2020-09-01 | Add placement_strategy option (#9444) | -| [e4878e677](https://github.com/apache/airflow/commit/e4878e6775bbe5cb2a1d786e57e009271b78bba0) | 2020-08-31 | fix type hints for s3 hook read_key method (#10653) | -| [2ca615cff](https://github.com/apache/airflow/commit/2ca615cffefe97dfa38e1b7f60d9ed33c6628992) | 2020-08-29 | Update Google Cloud branding (#10642) | -| [8969b7185](https://github.com/apache/airflow/commit/8969b7185ebc3c90168ce9a2fb97dfbc74d2bed9) | 2020-08-28 | Removed bad characters from AWS operator (#10590) | -| [8349061f9](https://github.com/apache/airflow/commit/8349061f9cb01a92c87edd349cc844c4053851e8) | 2020-08-26 | Improve Docstring for AWS Athena Hook/Operator (#10580) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [3734876d9](https://github.com/apache/airflow/commit/3734876d9898067ee933b84af522d53df6160d7f) | 2020-08-24 | Implement impersonation in google 
operators (#10052) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [c6358045f](https://github.com/apache/airflow/commit/c6358045f9d61af63c96833cb6682d6f382a6408) | 2020-08-22 | Fixes S3ToRedshift COPY query (#10436) | -| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22 | Replace assigment with Augmented assignment (#10468) | -| [27d08b76a](https://github.com/apache/airflow/commit/27d08b76a2d171d716a1599157a8a60a121dbec6) | 2020-08-21 | Amazon SES Hook (#10391) | -| [dea345b05](https://github.com/apache/airflow/commit/dea345b05c2cd226e70f97a3934d7456aa1cc754) | 2020-08-17 | Fix AwsGlueJobSensor to stop running after the Glue job finished (#9022) | -| [f6734b3b8](https://github.com/apache/airflow/commit/f6734b3b850d33d3712763f93c114e80f5af9ffb) | 2020-08-12 | Enable Sphinx spellcheck for doc generation (#10280) | -| [82f744b87](https://github.com/apache/airflow/commit/82f744b871bb2c5e9a2d628e1c45ae16c1244240) | 2020-08-11 | Add type annotations to AwsGlueJobHook, RedshiftHook modules (#10286) | -| [19bc97d0c](https://github.com/apache/airflow/commit/19bc97d0ce436a6ec9d8e9a5adcd48c0a769d01f) | 2020-08-10 | Revert "Add Amazon SES hook (#10004)" (#10276) | -| [f06fe616e](https://github.com/apache/airflow/commit/f06fe616e66256bdc53710de505c2c6b1bd21528) | 2020-08-10 | Add Amazon SES hook (#10004) | -| [0c77ea8a3](https://github.com/apache/airflow/commit/0c77ea8a3c417805f66d10f0c757ca218bf8dee0) | 2020-08-06 | Add type annotations to S3 hook module (#10164) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [9667314b2](https://github.com/apache/airflow/commit/9667314b2fb879edc451793a8350123507e1cfd6) | 2020-08-05 | Add correct signatures for operators in amazon provider package (#10167) | -| [000287753](https://github.com/apache/airflow/commit/000287753b478f29e6c25442ac253e3a6c8e8c87) | 2020-08-03 | Improve Typing coverage of amazon/aws/athena (#10025) | -| [53ada6e79](https://github.com/apache/airflow/commit/53ada6e7911f411e80ebb00be9f07a7cc0788d01) | 2020-08-03 | Add S3KeysUnchangedSensor (#9817) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [2b8dea64e](https://github.com/apache/airflow/commit/2b8dea64e9e8716fba8c38a1b439f7835bbd2918) | 2020-08-01 | Fix typo in Athena sensor retries (#10079) | -| [1508c43ec](https://github.com/apache/airflow/commit/1508c43ec9594e801b415dd82472fa017791b759) | 2020-07-29 | Adding new SageMaker operator for ProcessingJobs (#9594) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [8b10a4b35](https://github.com/apache/airflow/commit/8b10a4b35e45d536a6475bfe1491ee75fad50186) | 2020-07-25 | Stop using start_date in default_args in example_dags (#9982) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [e7c87fe45](https://github.com/apache/airflow/commit/e7c87fe453c6a70ed087c7ffbccaacbf0d2831b9) | 2020-07-20 | Refactor AwsBaseHook._get_credentials 
(#9878) | -| [2577f9334](https://github.com/apache/airflow/commit/2577f9334a5cb71cccd97e62b0ae2d097cb99e1a) | 2020-07-16 | Fix S3FileTransformOperator to support S3 Select transformation only (#8936) | -| [52b6efe1e](https://github.com/apache/airflow/commit/52b6efe1ecaae74b9c2497f565e116305d575a76) | 2020-07-15 | Add option to delete by prefix to S3DeleteObjectsOperator (#9350) | -| [553bb7af7](https://github.com/apache/airflow/commit/553bb7af7cb7a50f7141b5b89297713cee6d19f6) | 2020-07-13 | Keep functions signatures in decorators (#9786) | -| [2f31b3060](https://github.com/apache/airflow/commit/2f31b3060ed8274d5d1b1db7349ce607640b9199) | 2020-07-08 | Get Airflow configs with sensitive data from Secret Backends (#9645) | -| [07b81029e](https://github.com/apache/airflow/commit/07b81029ebc2a296fb54181f2cec11fcc7704d9d) | 2020-07-08 | Allow AWSAthenaHook to get more than 1000/first page of results (#6075) | -| [564192c16](https://github.com/apache/airflow/commit/564192c1625a552456cebb3751978c08eebdb2a1) | 2020-07-08 | Add AWS StepFunctions integrations to the aws provider (#8749) | -| [ecce1ace7](https://github.com/apache/airflow/commit/ecce1ace7a277c948c61d7d4cbfc8632cc216559) | 2020-07-08 | [AIRFLOW-XXXX] Remove unnecessary docstring in AWSAthenaOperator | -| [a79e2d4c4](https://github.com/apache/airflow/commit/a79e2d4c4aa105f3fac5ae6a28e29af9cd572407) | 2020-07-06 | Move provider's log task handlers to the provider package (#9604) | -| [ee20086b8](https://github.com/apache/airflow/commit/ee20086b8c499fa40dcaac71652f21b466e7f80f) | 2020-07-02 | Move S3TaskHandler to the AWS provider package (#9602) | -| [40add26d4](https://github.com/apache/airflow/commit/40add26d459c2511a6d9d305ae7300f0d6104211) | 2020-06-29 | Remove almost all references to airflow.contrib (#9559) | -| [c858babdd](https://github.com/apache/airflow/commit/c858babddf8b18b417993b5bfefec1c5635510da) | 2020-06-26 | Remove kwargs from Super calls in AWS Secrets Backends (#9523) | -| [87fdbd070](https://github.com/apache/airflow/commit/87fdbd0708d942af98d35604fe5962962e25d246) | 2020-06-25 | Use literal syntax instead of function calls to create data structure (#9516) | -| [c7a454aa3](https://github.com/apache/airflow/commit/c7a454aa32bf33133d042e8438ac259b32144b21) | 2020-06-22 | Add AWS ECS system test (#8888) | -| [df8efd04f](https://github.com/apache/airflow/commit/df8efd04f394afc4b5affb677bc78d8b7bd5275a) | 2020-06-21 | Enable & Fix "Docstring Content Issues" PyDocStyle Check (#9460) | -| [e13a14c87](https://github.com/apache/airflow/commit/e13a14c8730f4f633d996dd7d3468fe827136a84) | 2020-06-21 | Enable & Fix Whitespace related PyDocStyle Checks (#9458) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/amazon/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/amazon/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 3159cebd8a5a7..0000000000000 --- a/airflow/providers/amazon/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,26 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers 
readmes/setup for 2020.10.29 | -| [6ce855af1](https://github.com/apache/airflow/commit/6ce855af118daeaa4c249669079ab9d9aad23945) | 2020-10-24 | Fix spelling (#11821) | -| [3934ef224](https://github.com/apache/airflow/commit/3934ef22494db6d9613c229aaa82ea6a366b7c2f) | 2020-10-24 | Remove redundant builtins imports (#11809) | -| [4c8e033c0](https://github.com/apache/airflow/commit/4c8e033c0ee7d28963d504a9216205155f20f58f) | 2020-10-24 | Fix spelling and grammar (#11814) | -| [483068745](https://github.com/apache/airflow/commit/48306874538eea7cfd42358d5ebb59705204bfc4) | 2020-10-24 | Use Python 3 style super classes (#11806) | -| [0df60b773](https://github.com/apache/airflow/commit/0df60b773671ecf8d4e5f582ac2be200cf2a2edd) | 2020-10-23 | Add reattach flag to ECSOperator (#10643) | -| [b9d677cdd](https://github.com/apache/airflow/commit/b9d677cdd660e0be8278a64658e73359276a9682) | 2020-10-22 | Add type hints to aws provider (#11531) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [674368f66](https://github.com/apache/airflow/commit/674368f66cf61b2a105f326f23868ac3aee08807) | 2020-10-19 | Fixes MySQLToS3 float to int conversion (#10437) | -| [0823d46a7](https://github.com/apache/airflow/commit/0823d46a7f267f2e45195a175021825367938add) | 2020-10-16 | Add type annotations for AWS operators and hooks (#11434) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [d38a0a781](https://github.com/apache/airflow/commit/d38a0a781e123c8c50313efdb23f767d6678afe0) | 2020-10-12 | added type hints for aws cloud formation (#11470) | -| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12 | Remove redundant None provided as default to dict.get() (#11448) | -| [c3e340584](https://github.com/apache/airflow/commit/c3e340584bf1892c4f73aa9e7495b5823dab0c40) | 2020-10-11 | Change prefix of AwsDynamoDB hook module (#11209) | -| [42a23d16f](https://github.com/apache/airflow/commit/42a23d16fe9b2f165b0805fb767ecbb825c93657) | 2020-10-11 | Update MySQLToS3Operator's s3_bucket to template_fields (#10778) | -| [422b61a9d](https://github.com/apache/airflow/commit/422b61a9dd95ab9d00b239daa14d87d7cae5ae73) | 2020-10-09 | Adding ElastiCache Hook for creating, describing and deleting replication groups (#8701) | -| [dd98b2149](https://github.com/apache/airflow/commit/dd98b21494ff6036242b63268140abe1294b3657) | 2020-10-06 | Add acl_policy parameter to GCSToS3Operator (#10804) (#10829) | -| [32b3cfbcf](https://github.com/apache/airflow/commit/32b3cfbcf0209cb062dd641c1232ab25d02d4d6d) | 2020-10-06 | Strict type check for all hooks in amazon (#11250) | -| [6d573e8ab](https://github.com/apache/airflow/commit/6d573e8abbf87e3c7281347e03d428a6e5baccd4) | 2020-10-03 | Add s3 key to template fields for s3/redshift transfer operators (#10890) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/amazon/BACKPORT_PROVIDER_CHANGES_2020.11.23.md b/airflow/providers/amazon/BACKPORT_PROVIDER_CHANGES_2020.11.23.md deleted file mode 100644 index dc4f8023abe6d..0000000000000 --- a/airflow/providers/amazon/BACKPORT_PROVIDER_CHANGES_2020.11.23.md +++ /dev/null @@ -1,26 +0,0 @@ - - -### Release 2020.11.23 - -| Commit 
| Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [19b7e4565](https://github.com/apache/airflow/commit/19b7e4565e6372d50ef0fbb5678a484a7afbdbee) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [c94b1241a](https://github.com/apache/airflow/commit/c94b1241a144294f5f1c5f461d5e3b92e4a8fc38) | 2020-11-13 | Add extra error handling to S3 remote logging (#9908) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [250436d96](https://github.com/apache/airflow/commit/250436d962c8c950d38c1eb5e54a998891648cc9) | 2020-11-10 | Fix spelling in Python files (#12230) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [fcb6b00ef](https://github.com/apache/airflow/commit/fcb6b00efef80c81272a30cfc618202a29e0c6a9) | 2020-11-08 | Add authentication to AWS with Google credentials (#12079) | -| [fb6bddba0](https://github.com/apache/airflow/commit/fb6bddba0c9e3e7ef2610b4fb3f73622e48d7ea0) | 2020-11-07 | In AWS Secrets backend, a lookup is optional (#12143) | -| [cf9437d79](https://github.com/apache/airflow/commit/cf9437d79f9658d1309e4bfe847fe63d52ec7b99) | 2020-11-06 | Simplify string expressions (#12123) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5e77a6154](https://github.com/apache/airflow/commit/5e77a61543d26e5466d885d639247aa5189c011d) | 2020-11-02 | Docstring fix for S3DeleteBucketOperator (#12049) | -| [822285134](https://github.com/apache/airflow/commit/8222851348aa81424c9bdcea994e25e0d6692709) | 2020-10-29 | Add Template Fields to RedshiftToS3Operator & S3ToRedshiftOperator (#11844) | -| [db121f726](https://github.com/apache/airflow/commit/db121f726b3c7a37aca1ea05eb4714f884456005) | 2020-10-28 | Add truncate table (before copy) option to S3ToRedshiftOperator (#9246) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [8afdb6ac6](https://github.com/apache/airflow/commit/8afdb6ac6a7997cb14806bc2734c81c00ed8da97) | 2020-10-26 | 
Fix spellings (#11825) | diff --git a/airflow/providers/amazon/BACKPORT_PROVIDER_README.md b/airflow/providers/amazon/BACKPORT_PROVIDER_README.md deleted file mode 100644 index 1d0f3096f7b96..0000000000000 --- a/airflow/providers/amazon/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,488 +0,0 @@ - - - -# Package apache-airflow-backport-providers-amazon - -Release: 2020.11.23 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Cross provider package dependencies](#cross-provider-package-dependencies) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [New operators](#new-operators) - - [Moved operators](#moved-operators) - - [Transfer operators](#transfer-operators) - - [New transfer operators](#new-transfer-operators) - - [Moved transfer operators](#moved-transfer-operators) - - [Sensors](#sensors) - - [New sensors](#new-sensors) - - [Moved sensors](#moved-sensors) - - [Hooks](#hooks) - - [New hooks](#new-hooks) - - [Moved hooks](#moved-hooks) - - [Secrets](#secrets) - - [Moved secrets](#moved-secrets) -- [Releases](#releases) - - [Release 2020.11.23](#release-20201123) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for `amazon` provider. All classes for this provider package -are in `airflow.providers.amazon` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-amazon` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| boto3 | >=1.12.0,<2.0.0 | -| watchtower | ~=0.7.3 | - -## Cross provider package dependencies - -Those are dependencies that might be needed in order to use all the features of the package. -You need to install the specified backport providers package in order to use them. - -You can install such cross-provider dependencies when installing from PyPI. 
For example: - -```bash -pip install apache-airflow-backport-providers-amazon[apache.hive] -``` - -| Dependent package | Extra | -|:-----------------------------------------------------------------------------------------------------------------------------|:------------| -| [apache-airflow-backport-providers-apache-hive](https://github.com/apache/airflow/tree/master/airflow/providers/apache/hive) | apache.hive | -| [apache-airflow-backport-providers-google](https://github.com/apache/airflow/tree/master/airflow/providers/google) | google | -| [apache-airflow-backport-providers-imap](https://github.com/apache/airflow/tree/master/airflow/providers/imap) | imap | -| [apache-airflow-backport-providers-mongo](https://github.com/apache/airflow/tree/master/airflow/providers/mongo) | mongo | -| [apache-airflow-backport-providers-mysql](https://github.com/apache/airflow/tree/master/airflow/providers/mysql) | mysql | -| [apache-airflow-backport-providers-postgres](https://github.com/apache/airflow/tree/master/airflow/providers/postgres) | postgres | -| [apache-airflow-backport-providers-ssh](https://github.com/apache/airflow/tree/master/airflow/providers/ssh) | ssh | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `amazon` provider -are in the `airflow.providers.amazon` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - -### New operators - -| New Airflow 2.0 operators: `airflow.providers.amazon` package | -|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [aws.operators.cloud_formation.CloudFormationCreateStackOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/cloud_formation.py) | -| [aws.operators.cloud_formation.CloudFormationDeleteStackOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/cloud_formation.py) | -| [aws.operators.datasync.AWSDataSyncOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/datasync.py) | -| [aws.operators.ec2_start_instance.EC2StartInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/ec2_start_instance.py) | -| [aws.operators.ec2_stop_instance.EC2StopInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/ec2_stop_instance.py) | -| [aws.operators.emr_modify_cluster.EmrModifyClusterOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/emr_modify_cluster.py) | -| [aws.operators.glacier.GlacierCreateJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/glacier.py) | -| [aws.operators.glue.AwsGlueJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/glue.py) | -| [aws.operators.s3_bucket.S3CreateBucketOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/s3_bucket.py) | -| [aws.operators.s3_bucket.S3DeleteBucketOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/s3_bucket.py) | -| 
[aws.operators.s3_file_transform.S3FileTransformOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/s3_file_transform.py) | -| [aws.operators.sagemaker_processing.SageMakerProcessingOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sagemaker_processing.py) | -| [aws.operators.step_function_get_execution_output.StepFunctionGetExecutionOutputOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/step_function_get_execution_output.py) | -| [aws.operators.step_function_start_execution.StepFunctionStartExecutionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/step_function_start_execution.py) | - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.amazon` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [aws.operators.athena.AWSAthenaOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/athena.py) | [contrib.operators.aws_athena_operator.AWSAthenaOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/aws_athena_operator.py) | -| [aws.operators.batch.AwsBatchOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/batch.py) | [contrib.operators.awsbatch_operator.AWSBatchOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/awsbatch_operator.py) | -| [aws.operators.ecs.ECSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/ecs.py) | [contrib.operators.ecs_operator.ECSOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/ecs_operator.py) | -| [aws.operators.emr_add_steps.EmrAddStepsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/emr_add_steps.py) | [contrib.operators.emr_add_steps_operator.EmrAddStepsOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/emr_add_steps_operator.py) | -| [aws.operators.emr_create_job_flow.EmrCreateJobFlowOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/emr_create_job_flow.py) | [contrib.operators.emr_create_job_flow_operator.EmrCreateJobFlowOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/emr_create_job_flow_operator.py) | -| [aws.operators.emr_terminate_job_flow.EmrTerminateJobFlowOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/emr_terminate_job_flow.py) | [contrib.operators.emr_terminate_job_flow_operator.EmrTerminateJobFlowOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/emr_terminate_job_flow_operator.py) | -| [aws.operators.s3_copy_object.S3CopyObjectOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/s3_copy_object.py) | 
[contrib.operators.s3_copy_object_operator.S3CopyObjectOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/s3_copy_object_operator.py) | -| [aws.operators.s3_delete_objects.S3DeleteObjectsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/s3_delete_objects.py) | [contrib.operators.s3_delete_objects_operator.S3DeleteObjectsOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/s3_delete_objects_operator.py) | -| [aws.operators.s3_list.S3ListOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/s3_list.py) | [contrib.operators.s3_list_operator.S3ListOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/s3_list_operator.py) | -| [aws.operators.sagemaker_base.SageMakerBaseOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sagemaker_base.py) | [contrib.operators.sagemaker_base_operator.SageMakerBaseOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sagemaker_base_operator.py) | -| [aws.operators.sagemaker_endpoint.SageMakerEndpointOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sagemaker_endpoint.py) | [contrib.operators.sagemaker_endpoint_operator.SageMakerEndpointOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sagemaker_endpoint_operator.py) | -| [aws.operators.sagemaker_endpoint_config.SageMakerEndpointConfigOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sagemaker_endpoint_config.py) | [contrib.operators.sagemaker_endpoint_config_operator.SageMakerEndpointConfigOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sagemaker_endpoint_config_operator.py) | -| [aws.operators.sagemaker_model.SageMakerModelOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sagemaker_model.py) | [contrib.operators.sagemaker_model_operator.SageMakerModelOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sagemaker_model_operator.py) | -| [aws.operators.sagemaker_training.SageMakerTrainingOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sagemaker_training.py) | [contrib.operators.sagemaker_training_operator.SageMakerTrainingOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sagemaker_training_operator.py) | -| [aws.operators.sagemaker_transform.SageMakerTransformOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sagemaker_transform.py) | [contrib.operators.sagemaker_transform_operator.SageMakerTransformOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sagemaker_transform_operator.py) | -| [aws.operators.sagemaker_tuning.SageMakerTuningOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sagemaker_tuning.py) | [contrib.operators.sagemaker_tuning_operator.SageMakerTuningOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sagemaker_tuning_operator.py) | -| [aws.operators.sns.SnsPublishOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sns.py) | 
[contrib.operators.sns_publish_operator.SnsPublishOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sns_publish_operator.py) | -| [aws.operators.sqs.SQSPublishOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sqs.py) | [contrib.operators.aws_sqs_publish_operator.SQSPublishOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/aws_sqs_publish_operator.py) | - - -## Transfer operators - - -### New transfer operators - -| New Airflow 2.0 transfers: `airflow.providers.amazon` package | -|:------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [aws.transfers.glacier_to_gcs.GlacierToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/glacier_to_gcs.py) | -| [aws.transfers.mysql_to_s3.MySQLToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/mysql_to_s3.py) | - - -### Moved transfer operators - -| Airflow 2.0 transfers: `airflow.providers.amazon` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [aws.transfers.dynamodb_to_s3.DynamoDBToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py) | [contrib.operators.dynamodb_to_s3.DynamoDBToS3Operator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dynamodb_to_s3.py) | -| [aws.transfers.gcs_to_s3.GCSToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/gcs_to_s3.py) | [operators.gcs_to_s3.GCSToS3Operator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/gcs_to_s3.py) | -| [aws.transfers.google_api_to_s3.GoogleApiToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/google_api_to_s3.py) | [operators.google_api_to_s3_transfer.GoogleApiToS3Transfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/google_api_to_s3_transfer.py) | -| [aws.transfers.hive_to_dynamodb.HiveToDynamoDBOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py) | [contrib.operators.hive_to_dynamodb.HiveToDynamoDBOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/hive_to_dynamodb.py) | -| [aws.transfers.imap_attachment_to_s3.ImapAttachmentToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py) | [contrib.operators.imap_attachment_to_s3_operator.ImapAttachmentToS3Operator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/imap_attachment_to_s3_operator.py) | -| [aws.transfers.mongo_to_s3.MongoToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/mongo_to_s3.py) | [contrib.operators.mongo_to_s3.MongoToS3Operator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/mongo_to_s3.py) | -| 
[aws.transfers.redshift_to_s3.RedshiftToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/redshift_to_s3.py) | [operators.redshift_to_s3_operator.RedshiftToS3Transfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/redshift_to_s3_operator.py) | -| [aws.transfers.s3_to_redshift.S3ToRedshiftOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/s3_to_redshift.py) | [operators.s3_to_redshift_operator.S3ToRedshiftTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/s3_to_redshift_operator.py) | -| [aws.transfers.s3_to_sftp.S3ToSFTPOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/s3_to_sftp.py) | [contrib.operators.s3_to_sftp_operator.S3ToSFTPOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/s3_to_sftp_operator.py) | -| [aws.transfers.sftp_to_s3.SFTPToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/sftp_to_s3.py) | [contrib.operators.sftp_to_s3_operator.SFTPToS3Operator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sftp_to_s3_operator.py) | - - -## Sensors - - -### New sensors - -| New Airflow 2.0 sensors: `airflow.providers.amazon` package | -|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [aws.sensors.cloud_formation.CloudFormationCreateStackSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/cloud_formation.py) | -| [aws.sensors.cloud_formation.CloudFormationDeleteStackSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/cloud_formation.py) | -| [aws.sensors.ec2_instance_state.EC2InstanceStateSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/ec2_instance_state.py) | -| [aws.sensors.glacier.GlacierJobOperationSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/glacier.py) | -| [aws.sensors.glue.AwsGlueJobSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/glue.py) | -| [aws.sensors.redshift.AwsRedshiftClusterSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/redshift.py) | -| [aws.sensors.s3_keys_unchanged.S3KeysUnchangedSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/s3_keys_unchanged.py) | -| [aws.sensors.sagemaker_training.SageMakerTrainingSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/sagemaker_training.py) | -| [aws.sensors.step_function_execution.StepFunctionExecutionSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/step_function_execution.py) | - - -### Moved sensors - -| Airflow 2.0 sensors: `airflow.providers.amazon` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| 
[aws.sensors.athena.AthenaSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/athena.py) | [contrib.sensors.aws_athena_sensor.AthenaSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/aws_athena_sensor.py) | -| [aws.sensors.emr_base.EmrBaseSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/emr_base.py) | [contrib.sensors.emr_base_sensor.EmrBaseSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/emr_base_sensor.py) | -| [aws.sensors.emr_job_flow.EmrJobFlowSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/emr_job_flow.py) | [contrib.sensors.emr_job_flow_sensor.EmrJobFlowSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/emr_job_flow_sensor.py) | -| [aws.sensors.emr_step.EmrStepSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/emr_step.py) | [contrib.sensors.emr_step_sensor.EmrStepSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/emr_step_sensor.py) | -| [aws.sensors.glue_catalog_partition.AwsGlueCatalogPartitionSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/glue_catalog_partition.py) | [contrib.sensors.aws_glue_catalog_partition_sensor.AwsGlueCatalogPartitionSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/aws_glue_catalog_partition_sensor.py) | -| [aws.sensors.s3_key.S3KeySensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/s3_key.py) | [sensors.s3_key_sensor.S3KeySensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/sensors/s3_key_sensor.py) | -| [aws.sensors.s3_prefix.S3PrefixSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/s3_prefix.py) | [sensors.s3_prefix_sensor.S3PrefixSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/sensors/s3_prefix_sensor.py) | -| [aws.sensors.sagemaker_base.SageMakerBaseSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/sagemaker_base.py) | [contrib.sensors.sagemaker_base_sensor.SageMakerBaseSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/sagemaker_base_sensor.py) | -| [aws.sensors.sagemaker_endpoint.SageMakerEndpointSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/sagemaker_endpoint.py) | [contrib.sensors.sagemaker_endpoint_sensor.SageMakerEndpointSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/sagemaker_endpoint_sensor.py) | -| [aws.sensors.sagemaker_transform.SageMakerTransformSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/sagemaker_transform.py) | [contrib.sensors.sagemaker_transform_sensor.SageMakerTransformSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/sagemaker_transform_sensor.py) | -| [aws.sensors.sagemaker_tuning.SageMakerTuningSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/sagemaker_tuning.py) | [contrib.sensors.sagemaker_tuning_sensor.SageMakerTuningSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/sagemaker_tuning_sensor.py) | -| [aws.sensors.sqs.SQSSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/sqs.py) | 
[contrib.sensors.aws_sqs_sensor.SQSSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/aws_sqs_sensor.py) | - - -## Hooks - - -### New hooks - -| New Airflow 2.0 hooks: `airflow.providers.amazon` package | -|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [aws.hooks.batch_client.AwsBatchClientHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/batch_client.py) | -| [aws.hooks.batch_waiters.AwsBatchWaitersHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/batch_waiters.py) | -| [aws.hooks.cloud_formation.AWSCloudFormationHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/cloud_formation.py) | -| [aws.hooks.ec2.EC2Hook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/ec2.py) | -| [aws.hooks.elasticache_replication_group.ElastiCacheReplicationGroupHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/elasticache_replication_group.py) | -| [aws.hooks.glacier.GlacierHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/glacier.py) | -| [aws.hooks.glue.AwsGlueJobHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/glue.py) | -| [aws.hooks.kinesis.AwsFirehoseHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/kinesis.py) | -| [aws.hooks.redshift.RedshiftHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/redshift.py) | -| [aws.hooks.secrets_manager.SecretsManagerHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/secrets_manager.py) | -| [aws.hooks.ses.SESHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/ses.py) | -| [aws.hooks.step_function.StepFunctionHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/step_function.py) | - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.amazon` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:-----------------------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [aws.hooks.athena.AWSAthenaHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/athena.py) | [contrib.hooks.aws_athena_hook.AWSAthenaHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/aws_athena_hook.py) | -| [aws.hooks.base_aws.AwsBaseHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/base_aws.py) | [contrib.hooks.aws_hook.AwsHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/aws_hook.py) | -| [aws.hooks.datasync.AWSDataSyncHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/datasync.py) | [contrib.hooks.aws_datasync_hook.AWSDataSyncHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/aws_datasync_hook.py) | -| [aws.hooks.dynamodb.AwsDynamoDBHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/dynamodb.py) | 
[contrib.hooks.aws_dynamodb_hook.AwsDynamoDBHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/aws_dynamodb_hook.py) | -| [aws.hooks.emr.EmrHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/emr.py) | [contrib.hooks.emr_hook.EmrHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/emr_hook.py) | -| [aws.hooks.glue_catalog.AwsGlueCatalogHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/glue_catalog.py) | [contrib.hooks.aws_glue_catalog_hook.AwsGlueCatalogHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/aws_glue_catalog_hook.py) | -| [aws.hooks.lambda_function.AwsLambdaHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/lambda_function.py) | [contrib.hooks.aws_lambda_hook.AwsLambdaHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/aws_lambda_hook.py) | -| [aws.hooks.logs.AwsLogsHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/logs.py) | [contrib.hooks.aws_logs_hook.AwsLogsHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/aws_logs_hook.py) | -| [aws.hooks.s3.S3Hook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/s3.py) | [hooks.S3_hook.S3Hook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/S3_hook.py) | -| [aws.hooks.sagemaker.SageMakerHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/sagemaker.py) | [contrib.hooks.sagemaker_hook.SageMakerHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/sagemaker_hook.py) | -| [aws.hooks.sns.AwsSnsHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/sns.py) | [contrib.hooks.aws_sns_hook.AwsSnsHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/aws_sns_hook.py) | -| [aws.hooks.sqs.SQSHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/sqs.py) | [contrib.hooks.aws_sqs_hook.SQSHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/aws_sqs_hook.py) | - - -## Secrets - - - -### Moved secrets - -| Airflow 2.0 secrets: `airflow.providers.amazon` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [aws.secrets.secrets_manager.SecretsManagerBackend](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/secrets/secrets_manager.py) | [contrib.secrets.aws_secrets_manager.SecretsManagerBackend](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/secrets/aws_secrets_manager.py) | -| [aws.secrets.systems_manager.SystemsManagerParameterStoreBackend](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/secrets/systems_manager.py) | [contrib.secrets.aws_systems_manager.SystemsManagerParameterStoreBackend](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/secrets/aws_systems_manager.py) | - - - -## Releases - -### Release 2020.11.23 - -| Commit | Committed | Subject | 
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [19b7e4565](https://github.com/apache/airflow/commit/19b7e4565e6372d50ef0fbb5678a484a7afbdbee) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [c94b1241a](https://github.com/apache/airflow/commit/c94b1241a144294f5f1c5f461d5e3b92e4a8fc38) | 2020-11-13 | Add extra error handling to S3 remote logging (#9908) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [250436d96](https://github.com/apache/airflow/commit/250436d962c8c950d38c1eb5e54a998891648cc9) | 2020-11-10 | Fix spelling in Python files (#12230) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [fcb6b00ef](https://github.com/apache/airflow/commit/fcb6b00efef80c81272a30cfc618202a29e0c6a9) | 2020-11-08 | Add authentication to AWS with Google credentials (#12079) | -| [fb6bddba0](https://github.com/apache/airflow/commit/fb6bddba0c9e3e7ef2610b4fb3f73622e48d7ea0) | 2020-11-07 | In AWS Secrets backend, a lookup is optional (#12143) | -| [cf9437d79](https://github.com/apache/airflow/commit/cf9437d79f9658d1309e4bfe847fe63d52ec7b99) | 2020-11-06 | Simplify string expressions (#12123) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5e77a6154](https://github.com/apache/airflow/commit/5e77a61543d26e5466d885d639247aa5189c011d) | 2020-11-02 | Docstring fix for S3DeleteBucketOperator (#12049) | -| [822285134](https://github.com/apache/airflow/commit/8222851348aa81424c9bdcea994e25e0d6692709) | 2020-10-29 | Add Template Fields to RedshiftToS3Operator & S3ToRedshiftOperator (#11844) | -| [db121f726](https://github.com/apache/airflow/commit/db121f726b3c7a37aca1ea05eb4714f884456005) | 2020-10-28 | Add truncate table (before copy) option to S3ToRedshiftOperator (#9246) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [8afdb6ac6](https://github.com/apache/airflow/commit/8afdb6ac6a7997cb14806bc2734c81c00ed8da97) | 2020-10-26 | Fix spellings (#11825) | - 
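> Several commits in the table above (e.g. #12143 and #11023) touch the Secrets Manager and Systems Manager backends listed under "Moved secrets". For orientation, enabling one of these backends is a configuration-only change; a minimal `airflow.cfg` sketch, with illustrative prefix values:

```ini
[secrets]
backend = airflow.providers.amazon.aws.secrets.secrets_manager.SecretsManagerBackend
backend_kwargs = {"connections_prefix": "airflow/connections", "variables_prefix": "airflow/variables"}
```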
- -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------------------------------| -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [6ce855af1](https://github.com/apache/airflow/commit/6ce855af118daeaa4c249669079ab9d9aad23945) | 2020-10-24 | Fix spelling (#11821) | -| [3934ef224](https://github.com/apache/airflow/commit/3934ef22494db6d9613c229aaa82ea6a366b7c2f) | 2020-10-24 | Remove redundant builtins imports (#11809) | -| [4c8e033c0](https://github.com/apache/airflow/commit/4c8e033c0ee7d28963d504a9216205155f20f58f) | 2020-10-24 | Fix spelling and grammar (#11814) | -| [483068745](https://github.com/apache/airflow/commit/48306874538eea7cfd42358d5ebb59705204bfc4) | 2020-10-24 | Use Python 3 style super classes (#11806) | -| [0df60b773](https://github.com/apache/airflow/commit/0df60b773671ecf8d4e5f582ac2be200cf2a2edd) | 2020-10-23 | Add reattach flag to ECSOperator (#10643) | -| [b9d677cdd](https://github.com/apache/airflow/commit/b9d677cdd660e0be8278a64658e73359276a9682) | 2020-10-22 | Add type hints to aws provider (#11531) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [674368f66](https://github.com/apache/airflow/commit/674368f66cf61b2a105f326f23868ac3aee08807) | 2020-10-19 | Fixes MySQLToS3 float to int conversion (#10437) | -| [0823d46a7](https://github.com/apache/airflow/commit/0823d46a7f267f2e45195a175021825367938add) | 2020-10-16 | Add type annotations for AWS operators and hooks (#11434) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [d38a0a781](https://github.com/apache/airflow/commit/d38a0a781e123c8c50313efdb23f767d6678afe0) | 2020-10-12 | added type hints for aws cloud formation (#11470) | -| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12 | Remove redundant None provided as default to dict.get() (#11448) | -| [c3e340584](https://github.com/apache/airflow/commit/c3e340584bf1892c4f73aa9e7495b5823dab0c40) | 2020-10-11 | Change prefix of AwsDynamoDB hook module (#11209) | -| [42a23d16f](https://github.com/apache/airflow/commit/42a23d16fe9b2f165b0805fb767ecbb825c93657) | 2020-10-11 | Update MySQLToS3Operator's s3_bucket to template_fields (#10778) | -| [422b61a9d](https://github.com/apache/airflow/commit/422b61a9dd95ab9d00b239daa14d87d7cae5ae73) | 2020-10-09 | Adding ElastiCache Hook for creating, describing and deleting replication groups (#8701) | -| [dd98b2149](https://github.com/apache/airflow/commit/dd98b21494ff6036242b63268140abe1294b3657) | 2020-10-06 | Add acl_policy parameter to GCSToS3Operator (#10804) (#10829) | -| [32b3cfbcf](https://github.com/apache/airflow/commit/32b3cfbcf0209cb062dd641c1232ab25d02d4d6d) | 2020-10-06 | Strict type check for all hooks in amazon (#11250) | -| [6d573e8ab](https://github.com/apache/airflow/commit/6d573e8abbf87e3c7281347e03d428a6e5baccd4) | 2020-10-03 | Add s3 key to template fields for s3/redshift transfer operators (#10890) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | 
Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [00ffedb8c](https://github.com/apache/airflow/commit/00ffedb8c402eb5638782628eb706a5f28215eac) | 2020-09-30 | Add amazon glacier to GCS transfer operator (#10947) | -| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24 | Fix incorrect Usage of Optional[bool] (#11138) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [b61225a88](https://github.com/apache/airflow/commit/b61225a8850b20be17842c2428b91d873584c4da) | 2020-09-21 | Add D204 pydocstyle check (#11031) | -| [2410f592a](https://github.com/apache/airflow/commit/2410f592a4ab160b377f1a9e5de3b7262b9851cc) | 2020-09-19 | Get Airflow configs with sensitive data from AWS Systems Manager (#11023) | -| [2bf7b7cac](https://github.com/apache/airflow/commit/2bf7b7cac7858f5a6a495f1a9eb4780ec84f95b4) | 2020-09-19 | Add typing to amazon provider EMR (#10910) | -| [9edfcb7ac](https://github.com/apache/airflow/commit/9edfcb7ac46917836ec956264da8876e58d92392) | 2020-09-19 | Support extra_args in S3Hook and GCSToS3Operator (#11001) | -| [4e1f3a69d](https://github.com/apache/airflow/commit/4e1f3a69db8614c302e4916332555034053b935c) | 2020-09-14 | [AIRFLOW-10645] Add AWS Secrets Manager Hook (#10655) | -| [e9add7916](https://github.com/apache/airflow/commit/e9add79160e3a16bb348e30f4e83386a371dbc1e) | 2020-09-14 | Fix Failing static tests on Master (#10927) | -| [383a118d2](https://github.com/apache/airflow/commit/383a118d2df618e46d81c520cd2c4a31d81b33dd) | 2020-09-14 | Add more type annotations to AWS hooks (#10671) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [2934220dc](https://github.com/apache/airflow/commit/2934220dc98e295764f7791d33e121629ed2fbbb) | 2020-09-08 | Always return a list from S3Hook list methods (#10774) | -| [f40ac9b15](https://github.com/apache/airflow/commit/f40ac9b151124dbcd87197d6ae38c85191d41f38) | 2020-09-01 | Add placement_strategy option (#9444) | -| [e4878e677](https://github.com/apache/airflow/commit/e4878e6775bbe5cb2a1d786e57e009271b78bba0) | 2020-08-31 | fix type hints for s3 hook read_key method (#10653) | -| [2ca615cff](https://github.com/apache/airflow/commit/2ca615cffefe97dfa38e1b7f60d9ed33c6628992) | 2020-08-29 | Update Google Cloud branding (#10642) | -| [8969b7185](https://github.com/apache/airflow/commit/8969b7185ebc3c90168ce9a2fb97dfbc74d2bed9) | 2020-08-28 | Removed bad characters from AWS operator (#10590) | -| [8349061f9](https://github.com/apache/airflow/commit/8349061f9cb01a92c87edd349cc844c4053851e8) | 2020-08-26 | Improve Docstring for AWS Athena Hook/Operator (#10580) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers 
Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [3734876d9](https://github.com/apache/airflow/commit/3734876d9898067ee933b84af522d53df6160d7f) | 2020-08-24 | Implement impersonation in google operators (#10052) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [c6358045f](https://github.com/apache/airflow/commit/c6358045f9d61af63c96833cb6682d6f382a6408) | 2020-08-22 | Fixes S3ToRedshift COPY query (#10436) | -| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22 | Replace assigment with Augmented assignment (#10468) | -| [27d08b76a](https://github.com/apache/airflow/commit/27d08b76a2d171d716a1599157a8a60a121dbec6) | 2020-08-21 | Amazon SES Hook (#10391) | -| [dea345b05](https://github.com/apache/airflow/commit/dea345b05c2cd226e70f97a3934d7456aa1cc754) | 2020-08-17 | Fix AwsGlueJobSensor to stop running after the Glue job finished (#9022) | -| [f6734b3b8](https://github.com/apache/airflow/commit/f6734b3b850d33d3712763f93c114e80f5af9ffb) | 2020-08-12 | Enable Sphinx spellcheck for doc generation (#10280) | -| [82f744b87](https://github.com/apache/airflow/commit/82f744b871bb2c5e9a2d628e1c45ae16c1244240) | 2020-08-11 | Add type annotations to AwsGlueJobHook, RedshiftHook modules (#10286) | -| [19bc97d0c](https://github.com/apache/airflow/commit/19bc97d0ce436a6ec9d8e9a5adcd48c0a769d01f) | 2020-08-10 | Revert "Add Amazon SES hook (#10004)" (#10276) | -| [f06fe616e](https://github.com/apache/airflow/commit/f06fe616e66256bdc53710de505c2c6b1bd21528) | 2020-08-10 | Add Amazon SES hook (#10004) | -| [0c77ea8a3](https://github.com/apache/airflow/commit/0c77ea8a3c417805f66d10f0c757ca218bf8dee0) | 2020-08-06 | Add type annotations to S3 hook module (#10164) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [9667314b2](https://github.com/apache/airflow/commit/9667314b2fb879edc451793a8350123507e1cfd6) | 2020-08-05 | Add correct signatures for operators in amazon provider package (#10167) | -| [000287753](https://github.com/apache/airflow/commit/000287753b478f29e6c25442ac253e3a6c8e8c87) | 2020-08-03 | Improve Typing coverage of amazon/aws/athena (#10025) | -| [53ada6e79](https://github.com/apache/airflow/commit/53ada6e7911f411e80ebb00be9f07a7cc0788d01) | 2020-08-03 | Add S3KeysUnchangedSensor (#9817) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [2b8dea64e](https://github.com/apache/airflow/commit/2b8dea64e9e8716fba8c38a1b439f7835bbd2918) | 2020-08-01 | Fix typo in Athena sensor retries (#10079) | -| [1508c43ec](https://github.com/apache/airflow/commit/1508c43ec9594e801b415dd82472fa017791b759) | 2020-07-29 | Adding new SageMaker operator for ProcessingJobs (#9594) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [8b10a4b35](https://github.com/apache/airflow/commit/8b10a4b35e45d536a6475bfe1491ee75fad50186) | 2020-07-25 | Stop using start_date in default_args in example_dags (#9982) | -| 
[33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [e7c87fe45](https://github.com/apache/airflow/commit/e7c87fe453c6a70ed087c7ffbccaacbf0d2831b9) | 2020-07-20 | Refactor AwsBaseHook._get_credentials (#9878) | -| [2577f9334](https://github.com/apache/airflow/commit/2577f9334a5cb71cccd97e62b0ae2d097cb99e1a) | 2020-07-16 | Fix S3FileTransformOperator to support S3 Select transformation only (#8936) | -| [52b6efe1e](https://github.com/apache/airflow/commit/52b6efe1ecaae74b9c2497f565e116305d575a76) | 2020-07-15 | Add option to delete by prefix to S3DeleteObjectsOperator (#9350) | -| [553bb7af7](https://github.com/apache/airflow/commit/553bb7af7cb7a50f7141b5b89297713cee6d19f6) | 2020-07-13 | Keep functions signatures in decorators (#9786) | -| [2f31b3060](https://github.com/apache/airflow/commit/2f31b3060ed8274d5d1b1db7349ce607640b9199) | 2020-07-08 | Get Airflow configs with sensitive data from Secret Backends (#9645) | -| [07b81029e](https://github.com/apache/airflow/commit/07b81029ebc2a296fb54181f2cec11fcc7704d9d) | 2020-07-08 | Allow AWSAthenaHook to get more than 1000/first page of results (#6075) | -| [564192c16](https://github.com/apache/airflow/commit/564192c1625a552456cebb3751978c08eebdb2a1) | 2020-07-08 | Add AWS StepFunctions integrations to the aws provider (#8749) | -| [ecce1ace7](https://github.com/apache/airflow/commit/ecce1ace7a277c948c61d7d4cbfc8632cc216559) | 2020-07-08 | [AIRFLOW-XXXX] Remove unnecessary docstring in AWSAthenaOperator | -| [a79e2d4c4](https://github.com/apache/airflow/commit/a79e2d4c4aa105f3fac5ae6a28e29af9cd572407) | 2020-07-06 | Move provider's log task handlers to the provider package (#9604) | -| [ee20086b8](https://github.com/apache/airflow/commit/ee20086b8c499fa40dcaac71652f21b466e7f80f) | 2020-07-02 | Move S3TaskHandler to the AWS provider package (#9602) | -| [40add26d4](https://github.com/apache/airflow/commit/40add26d459c2511a6d9d305ae7300f0d6104211) | 2020-06-29 | Remove almost all references to airflow.contrib (#9559) | -| [c858babdd](https://github.com/apache/airflow/commit/c858babddf8b18b417993b5bfefec1c5635510da) | 2020-06-26 | Remove kwargs from Super calls in AWS Secrets Backends (#9523) | -| [87fdbd070](https://github.com/apache/airflow/commit/87fdbd0708d942af98d35604fe5962962e25d246) | 2020-06-25 | Use literal syntax instead of function calls to create data structure (#9516) | -| [c7a454aa3](https://github.com/apache/airflow/commit/c7a454aa32bf33133d042e8438ac259b32144b21) | 2020-06-22 | Add AWS ECS system test (#8888) | -| [df8efd04f](https://github.com/apache/airflow/commit/df8efd04f394afc4b5affb677bc78d8b7bd5275a) | 2020-06-21 | Enable & Fix "Docstring Content Issues" PyDocStyle Check (#9460) | -| [e13a14c87](https://github.com/apache/airflow/commit/e13a14c8730f4f633d996dd7d3468fe827136a84) | 2020-06-21 | Enable & Fix Whitespace related PyDocStyle Checks (#9458) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| 
[12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [992a18c84](https://github.com/apache/airflow/commit/992a18c84a355d13e821c703e7364f12233c37dc) | 2020-06-19 | Move MySqlToS3Operator to transfers (#9400) | -| [a60f589aa](https://github.com/apache/airflow/commit/a60f589aa251cc3df6bec5b306ad4a7f736f539f) | 2020-06-19 | Add MySqlToS3Operator (#9054) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [40bf8f28f](https://github.com/apache/airflow/commit/40bf8f28f97f17f40d993d207ea740eba54593ee) | 2020-06-18 | Detect automatically the lack of reference to the guide in the operator descriptions (#9290) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [58a8ec0e4](https://github.com/apache/airflow/commit/58a8ec0e46f624ee0369dd156dd8fb4f81884a21) | 2020-06-16 | AWSBatchOperator <> ClientHook relation changed to composition (#9306) | -| [a80cd25e8](https://github.com/apache/airflow/commit/a80cd25e8eb7f8b5d89af26cdcd62a5bbe44d65c) | 2020-06-15 | Close/Flush byte stream in s3 hook load_string and load_bytes (#9211) | -| [ffb857403](https://github.com/apache/airflow/commit/ffb85740373f7adb70d28ec7d5a8886380170e5e) | 2020-06-14 | Decrypt secrets from SystemsManagerParameterStoreBackend (#9214) | -| [a69b031f2](https://github.com/apache/airflow/commit/a69b031f20c5a1cd032f9873394374f661811e8f) | 2020-06-10 | Add S3ToRedshift example dag and system test (#8877) | -| [17adcea83](https://github.com/apache/airflow/commit/17adcea835cb7b0cf2d8da0ac7dda5549cfa3e45) | 2020-06-02 | Fix handling of subprocess error handling in s3_file_transform and gcs (#9106) | -| [357e11e0c](https://github.com/apache/airflow/commit/357e11e0cfb4c02833018e073bc4f5e5b52fae4f) | 2020-05-29 | Add Delete/Create S3 bucket operators (#8895) | -| [1ed171bfb](https://github.com/apache/airflow/commit/1ed171bfb265ded8674058bdc425640d25f1f4fc) | 2020-05-28 | Add script_args for S3FileTransformOperator (#9019) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [f946f96da](https://github.com/apache/airflow/commit/f946f96da45d8e6101805450d8cab7ccb2774ad0) | 2020-05-23 | Old json boto compat removed from dynamodb_to_s3 operator (#8987) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [f4edd90a9](https://github.com/apache/airflow/commit/f4edd90a94b8f91bbefbbbfba367372399559596) | 2020-05-16 | Speed up TestAwsLambdaHook by not actually running a function (#8882) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [85bbab27d](https://github.com/apache/airflow/commit/85bbab27dbb4f55f6f322b894fe3d54797076c15) | 2020-05-15 | Add EMR operators howto docs (#8863) | -| [e61b9bb9b](https://github.com/apache/airflow/commit/e61b9bb9bbe6d8a0621310f3583483b9135c6770) | 2020-05-13 | Add AWS EMR System tests (#8618) | -| [ed3f5131a](https://github.com/apache/airflow/commit/ed3f5131a27e2ef0422f2495a4532630a6204f82) | 2020-05-13 | Correctly pass sleep time from AWSAthenaOperator down to the hook. (#8845) | -| [7236862a1](https://github.com/apache/airflow/commit/7236862a1f5361b5e99c03dd63dae9b966efcd24) | 2020-05-12 | [AIRFLOW-2310] Enable AWS Glue Job Integration (#6007) | -| [d590e5e76](https://github.com/apache/airflow/commit/d590e5e7679322bebb1472fa8c7ec6d183e4154a) | 2020-05-11 | Add option to propagate tags in ECSOperator (#8811) | -| [0c3db84c3](https://github.com/apache/airflow/commit/0c3db84c3ce5107f53ed5ecc48edfdfe1b97feff) | 2020-05-11 | [AIRFLOW-7068] Create EC2 Hook, Operator and Sensor (#7731) | -| [cbebed2b4](https://github.com/apache/airflow/commit/cbebed2b4d0bd1e0984c331c0270e83bf8df8540) | 2020-05-10 | Allow passing backend_kwargs to AWS SSM client (#8802) | -| [c7788a689](https://github.com/apache/airflow/commit/c7788a6894cb79c22153434dd9b977393b8236be) | 2020-05-10 | Add imap_attachment_to_s3 example dag and system test (#8669) | -| [ff5b70149](https://github.com/apache/airflow/commit/ff5b70149bf51012156378c8fc8b072c7c280d9d) | 2020-05-07 | Add google_api_to_s3_transfer example dags and system tests (#8581) | -| [4421f011e](https://github.com/apache/airflow/commit/4421f011eeec2d1022a39933e27f530fb9f9c1b1) | 2020-05-01 | Improve template capabilities of EMR job and step operators (#8572) | -| [379a884d6](https://github.com/apache/airflow/commit/379a884d645a4d73db1c81e3450adc82571989ea) | 2020-04-28 | fix: aws hook should work without conn id (#8534) | -| [74bc316c5](https://github.com/apache/airflow/commit/74bc316c56192f14677e9406d3878887a836062b) | 2020-04-27 | [AIRFLOW-4438] Add Gzip compression to S3_hook (#8571) | -| [7ea66a1a9](https://github.com/apache/airflow/commit/7ea66a1a9594704869e82513d3a06fe35b6109b2) | 2020-04-26 | Add example DAG for ECSOperator (#8452) | -| [b6434dedf](https://github.com/apache/airflow/commit/b6434dedf974085e5f8891446fa63104836c8fdf) | 2020-04-24 | 
[AIRFLOW-7111] Add generate_presigned_url method to S3Hook (#8441) | -| [becedd5af](https://github.com/apache/airflow/commit/becedd5af8df01a0210e0a3fa78e619785f39908) | 2020-04-19 | Remove unrelated EC2 references in ECSOperator (#8451) | -| [ab1290cb0](https://github.com/apache/airflow/commit/ab1290cb0c5856fa85c8596bfdf780fcdfd99c31) | 2020-04-13 | Make launch_type parameter optional (#8248) | -| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09 | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170) | -| [b46d6c060](https://github.com/apache/airflow/commit/b46d6c060280da59193a28cf67e791eb825cb51c) | 2020-04-08 | Add support for AWS Secrets Manager as Secrets Backend (#8186) | -| [68d1714f2](https://github.com/apache/airflow/commit/68d1714f296989b7aad1a04b75dc033e76afb747) | 2020-04-04 | [AIRFLOW-6822] AWS hooks should cache boto3 client (#7541) | -| [8a0240257](https://github.com/apache/airflow/commit/8a02402576f83869d5134b4bddef5d73c15a8320) | 2020-03-31 | Rename CloudBaseHook to GoogleBaseHook and move it to google.common (#8011) | -| [7239d9a82](https://github.com/apache/airflow/commit/7239d9a82dbb3b9bdf27b531daa70338af9dd796) | 2020-03-28 | Get Airflow Variables from AWS Systems Manager Parameter Store (#7945) | -| [eb4af4f94](https://github.com/apache/airflow/commit/eb4af4f944c77e67e167bbb6b0a2aaf075a95b50) | 2020-03-28 | Make BaseSecretsBackend.build_path generic (#7948) | -| [438da7241](https://github.com/apache/airflow/commit/438da7241eb537e3ef5ae711629446155bf738a3) | 2020-03-28 | [AIRFLOW-5825] SageMakerEndpointOperator is not idempotent (#7891) | -| [686d7d50b](https://github.com/apache/airflow/commit/686d7d50bd21622724d6818021355bc6885fd3de) | 2020-03-25 | Standardize SecretBackend class names (#7846) | -| [eef87b995](https://github.com/apache/airflow/commit/eef87b9953347a65421f315a07dbef37ded9df66) | 2020-03-23 | [AIRFLOW-7105] Unify Secrets Backend method interfaces (#7830) | -| [5648dfbc3](https://github.com/apache/airflow/commit/5648dfbc300337b10567ef4e07045ea29d33ec06) | 2020-03-23 | Add missing call to Super class in 'amazon', 'cloudant & 'databricks' providers (#7827) | -| [a36002412](https://github.com/apache/airflow/commit/a36002412334c445e4eab41fdbb85ef31b6fd384) | 2020-03-19 | [AIRFLOW-5705] Make AwsSsmSecretsBackend consistent with VaultBackend (#7753) | -| [2a54512d7](https://github.com/apache/airflow/commit/2a54512d785ba603ba71381dc3dfa049e9f74063) | 2020-03-17 | [AIRFLOW-5705] Fix bugs in AWS SSM Secrets Backend (#7745) | -| [a8b5fc74d](https://github.com/apache/airflow/commit/a8b5fc74d07e50c91bb64cb66ca1a450aa5ce6e1) | 2020-03-16 | [AIRFLOW-4175] S3Hook load_file should support ACL policy paramete (#7733) | -| [e31e9ddd2](https://github.com/apache/airflow/commit/e31e9ddd2332e5d92422baf668acee441646ad68) | 2020-03-14 | [AIRFLOW-5705] Add secrets backend and support for AWS SSM (#6376) | -| [3bb60afc7](https://github.com/apache/airflow/commit/3bb60afc7b8319996385d681faac342afe2b3bd2) | 2020-03-13 | [AIRFLOW-6975] Base AWSHook AssumeRoleWithSAML (#7619) | -| [c0c5f11ad](https://github.com/apache/airflow/commit/c0c5f11ad11a5a38e0553c1a36aa75eb83efae51) | 2020-03-12 | [AIRFLOW-6884] Make SageMakerTrainingOperator idempotent (#7598) | -| [b7cdda1c6](https://github.com/apache/airflow/commit/b7cdda1c64595bc7f85519337029de259e573fce) | 2020-03-10 | [AIRFLOW-4438] Add Gzip compression to S3_hook (#7680) | -| [42eef3821](https://github.com/apache/airflow/commit/42eef38217e709bc7a7f71bf0286e9e61293a43e) 
| 2020-03-07 | [AIRFLOW-6877] Add cross-provider dependencies as extras (#7506) | -| [9a94ab246](https://github.com/apache/airflow/commit/9a94ab246db8c09aa83bb6a6d245b1ca9563bcd9) | 2020-03-01 | [AIRFLOW-6962] Fix compeleted to completed (#7600) | -| [1b38f6d9b](https://github.com/apache/airflow/commit/1b38f6d9b6710bd5e25fc16883599f1842ab7cb9) | 2020-02-29 | [AIRFLOW-5908] Add download_file to S3 Hook (#6577) | -| [3ea3e1a2b](https://github.com/apache/airflow/commit/3ea3e1a2b580b7ed10efe668de0cc37b03673500) | 2020-02-26 | [AIRFLOW-6824] EMRAddStepsOperator problem with multi-step XCom (#7443) | -| [6eaa7e3b1](https://github.com/apache/airflow/commit/6eaa7e3b1845644d5ec65a00a997f4029bec9628) | 2020-02-25 | [AIRFLOW-5924] Automatically unify bucket name and key in S3Hook (#6574) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [7d0e7122d](https://github.com/apache/airflow/commit/7d0e7122dd14576d834c6f66fe919a72b100b7f8) | 2020-02-24 | [AIRFLOW-6830] Add Subject/MessageAttributes to SNS hook and operator (#7451) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [47a922b86](https://github.com/apache/airflow/commit/47a922b86426968bfa07cc7892d2eeeca761d884) | 2020-02-21 | [AIRFLOW-6854] Fix missing typing_extensions on python 3.8 (#7474) | -| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18 | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) | -| [58c3542ed](https://github.com/apache/airflow/commit/58c3542ed25061320ce61dbe0adf451a44c738dd) | 2020-02-12 | [AIRFLOW-5231] Fix S3Hook.delete_objects method (#7375) | -| [b7aa778b3](https://github.com/apache/airflow/commit/b7aa778b38df2f116a1c20031e72fea8b97315bf) | 2020-02-10 | [AIRFLOW-6767] Correct name for default Athena workgroup (#7394) | -| [9282185e6](https://github.com/apache/airflow/commit/9282185e6624e64bb7f17447f81c1b2d1bb4d56d) | 2020-02-09 | [AIRFLOW-6761] Fix WorkGroup param in AWSAthenaHook (#7386) | -| [94fccca97](https://github.com/apache/airflow/commit/94fccca97030ee59d89f302a98137b17e7b01a33) | 2020-02-04 | [AIRFLOW-XXXX] Add pre-commit check for utf-8 file encoding (#7347) | -| [f3ad5cf61](https://github.com/apache/airflow/commit/f3ad5cf6185b9d406d0fb0a4ecc0b5536f79217a) | 2020-02-03 | [AIRFLOW-4681] Make sensors module pylint compatible (#7309) | -| [88e40c714](https://github.com/apache/airflow/commit/88e40c714d2853aa8966796945b2907c263fed08) | 2020-02-03 | [AIRFLOW-6716] Fix AWS Datasync Example DAG (#7339) | -| [a311d3d82](https://github.com/apache/airflow/commit/a311d3d82e0c2e32bcb56e29f33c95ed0a2a2ddc) | 2020-02-03 | [AIRFLOW-6718] Fix more occurrences of utils.dates.days_ago (#7341) | -| [cb766b05b](https://github.com/apache/airflow/commit/cb766b05b17b80fd54a5ce6ac3ee35a631115000) | 2020-02-03 | [AIRFLOW-XXXX] Fix Static Checks on CI (#7342) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [7527eddc5](https://github.com/apache/airflow/commit/7527eddc5e9729aa7e732209a07d57985f6c73e4) | 2020-02-02 | [AIRFLOW-4364] Make all code 
in airflow/providers/amazon pylint compatible (#7336) | -| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02 | [AIRFLOW-6708] Set unique logger names (#7330) | -| [63aa3db88](https://github.com/apache/airflow/commit/63aa3db88f8824efe79622301efd9f8ba75b991c) | 2020-02-02 | [AIRFLOW-6258] Add CloudFormation operators to AWS providers (#6824) | -| [af4157fde](https://github.com/apache/airflow/commit/af4157fdeffc0c18492b518708c0db44815067ab) | 2020-02-02 | [AIRFLOW-6672] AWS DataSync - better logging of error message (#7288) | -| [373c6aa4a](https://github.com/apache/airflow/commit/373c6aa4a208284b5ff72987e4bd8f4e2ada1a1b) | 2020-01-30 | [AIRFLOW-6682] Move GCP classes to providers package (#7295) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [1988a97e8](https://github.com/apache/airflow/commit/1988a97e8f687e28a5a39b29677fb514e097753c) | 2020-01-28 | [AIRFLOW-6659] Move AWS Transfer operators to providers package (#7274) | -| [ab10443e9](https://github.com/apache/airflow/commit/ab10443e965269efe9c1efaf5fa33bcdbe609f13) | 2020-01-28 | [AIRFLOW-6424] Added a operator to modify EMR cluster (#7213) | -| [40246132a](https://github.com/apache/airflow/commit/40246132a7ef3b07fe3173c6e7646ed6b53aad6e) | 2020-01-28 | [AIRFLOW-6654] AWS DataSync - bugfix when creating locations (#7270) | -| [82c0e5aff](https://github.com/apache/airflow/commit/82c0e5aff6004f636b98e207c3caec40b403fbbe) | 2020-01-28 | [AIRFLOW-6655] Move AWS classes to providers (#7271) | -| [599e4791c](https://github.com/apache/airflow/commit/599e4791c91cff411b1bf1c45555db5094c2b420) | 2020-01-18 | [AIRFLOW-6541] Use EmrJobFlowSensor for other states (#7146) | -| [c319e81ca](https://github.com/apache/airflow/commit/c319e81cae1de31ad1373903252d8608ffce1fba) | 2020-01-17 | [AIRFLOW-6572] Move AWS classes to providers.amazon.aws package (#7178) | -| [941a07057](https://github.com/apache/airflow/commit/941a070578bc7d9410715b89658548167352cc4d) | 2020-01-15 | [AIRFLOW-6570] Add dag tag for all example dag (#7176) | -| [78d8fe694](https://github.com/apache/airflow/commit/78d8fe6944b689b9b0af99255286e34e06eedec3) | 2020-01-08 | [AIRFLOW-6245] Add custom waiters for AWS batch jobs (#6811) | -| [e0b022725](https://github.com/apache/airflow/commit/e0b022725749181bd4e30933e4a0ffefb993eede) | 2019-12-28 | [AIRFLOW-6319] Add support for AWS Athena workgroups (#6871) | -| [57da45685](https://github.com/apache/airflow/commit/57da45685457520d51a0967e2aeb5e5ff162dfa7) | 2019-12-24 | [AIRFLOW-6333] Bump Pylint to 2.4.4 & fix/disable new checks (#6888) | -| [cf647c27e](https://github.com/apache/airflow/commit/cf647c27e0f35bbd1183bfcf87a106cbdb69d3fa) | 2019-12-18 | [AIRFLOW-6038] AWS DataSync reworked (#6773) | -| [7502cad28](https://github.com/apache/airflow/commit/7502cad2844139d57e4276d971c0706a361d9dbe) | 2019-12-17 | [AIRFLOW-6206] Move and rename AWS batch operator [AIP-21] (#6764) | -| [c4c635df6](https://github.com/apache/airflow/commit/c4c635df6906f56e01724573923e19763bb0da62) | 2019-12-17 | [AIRFLOW-6083] Adding ability to pass custom configuration to lambda client. 
(#6678) | -| [4fb498f87](https://github.com/apache/airflow/commit/4fb498f87ef89acc30f2576ebc5090ab0653159e) | 2019-12-09 | [AIRFLOW-6072] aws_hook: Outbound http proxy setting and other enhancements (#6686) | -| [a1e2f8635](https://github.com/apache/airflow/commit/a1e2f863526973b17892ec31caf09eded95c1cd2) | 2019-11-20 | [AIRFLOW-6021] Replace list literal with list constructor (#6617) | -| [baae14084](https://github.com/apache/airflow/commit/baae140847cdf9d84e905fb6d1f119d6950eecf9) | 2019-11-19 | [AIRFLOW-5781] AIP-21 Migrate AWS Kinesis to /providers/amazon/aws (#6588) | -| [504cfbac1](https://github.com/apache/airflow/commit/504cfbac1a4ec2e2fd169523ed357808f63881bb) | 2019-11-18 | [AIRFLOW-5783] AIP-21 Move aws redshift into providers structure (#6539) | -| [992f0e3ac](https://github.com/apache/airflow/commit/992f0e3acf11163294508858515a5f79116e3ad8) | 2019-11-12 | AIRFLOW-5824: AWS DataSync Hook and Operators added (#6512) | -| [c015eb2f6](https://github.com/apache/airflow/commit/c015eb2f6496b9721afda9e85d5d4af3bbe0696b) | 2019-11-10 | [AIRFLOW-5786] Migrate AWS SNS to /providers/amazon/aws (#6502) | -| [3d76fb4bf](https://github.com/apache/airflow/commit/3d76fb4bf25e5b7d3d30e0d64867b5999b77f0b0) | 2019-11-09 | [AIRFLOW-5782] Migrate AWS Lambda to /providers/amazon/aws [AIP-21] (#6518) | diff --git a/airflow/providers/amazon/CHANGELOG.rst b/airflow/providers/amazon/CHANGELOG.rst new file mode 100644 index 0000000000000..09a64e2ef69ad --- /dev/null +++ b/airflow/providers/amazon/CHANGELOG.rst @@ -0,0 +1,26 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + + +Initial version of the provider. 
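> The 1.0.0 changelog entry above is deliberately terse: for DAG authors, the practical change is the import move documented in the "Moved ..." tables earlier in this diff. A minimal before/after sketch (module paths taken from those tables; the surrounding DAG code is omitted):

```python
# Airflow 1.10.* (deprecated contrib locations):
# from airflow.contrib.hooks.aws_hook import AwsHook
# from airflow.contrib.operators.s3_to_sftp_operator import S3ToSFTPOperator

# Airflow 2.0, after `pip install apache-airflow-providers-amazon`:
from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook
from airflow.providers.amazon.aws.transfers.s3_to_sftp import S3ToSFTPOperator
```

> Note that some classes were renamed as well as moved (e.g. `AwsHook` became `AwsBaseHook`, and transfer classes such as `RedshiftToS3Transfer` became `RedshiftToS3Operator`), so a plain find-and-replace on module paths is not always sufficient.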
diff --git a/airflow/providers/amazon/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/amazon/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index 9716b5a8f3d9e..0000000000000 --- a/airflow/providers/amazon/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,217 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [d5589673a](https://github.com/apache/airflow/commit/d5589673a95aaced0b851ea0a4061a010a924a82) | 2020-12-08 | Move dummy_operator.py to dummy.py (#11178) (#11293) | -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [bd90136aa](https://github.com/apache/airflow/commit/bd90136aaf5035e3234fe545b79a3e4aad21efe2) | 2020-11-30 | Move operator guides to provider documentation packages (#12681) | -| [02d94349b](https://github.com/apache/airflow/commit/02d94349be3d201ce9d37d7358573c937fd010df) | 2020-11-29 | Don't use time.time() or timezone.utcnow() for duration calculations (#12353) | -| [de3b1e687](https://github.com/apache/airflow/commit/de3b1e687b26c524c6909b7b4dfbb60d25019751) | 2020-11-28 | Move connection guides to provider documentation packages (#12653) | -| [663259d4b](https://github.com/apache/airflow/commit/663259d4b541ab10ce55fec4d2460e23917062c2) | 2020-11-25 | Fix AWS DataSync tests failing (#11020) | -| [3fa51f94d](https://github.com/apache/airflow/commit/3fa51f94d7a17f170ddc31908d36c91f4456a20b) | 2020-11-24 | Add check for duplicates in provider.yaml files (#12578) | -| [ed09915a0](https://github.com/apache/airflow/commit/ed09915a02b9b99e60689e647452addaab1688fc) | 2020-11-23 | [AIRFLOW-5115] Bugfix for S3KeySensor failing to accept template_fields (#12389) | -| [370e7d07d](https://github.com/apache/airflow/commit/370e7d07d1ed1a53b73fe878425fdcd4c71a7ed1) | 2020-11-21 | Fix Python Docstring parameters (#12513) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [7ca0b6f12](https://github.com/apache/airflow/commit/7ca0b6f121c9cec6e25de130f86a56d7c7fbe38c) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) (#12438) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [c94b1241a](https://github.com/apache/airflow/commit/c94b1241a144294f5f1c5f461d5e3b92e4a8fc38) | 2020-11-13 | Add extra error handling to S3 remote logging (#9908) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements 
(#12304) | -| [250436d96](https://github.com/apache/airflow/commit/250436d962c8c950d38c1eb5e54a998891648cc9) | 2020-11-10 | Fix spelling in Python files (#12230) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [fcb6b00ef](https://github.com/apache/airflow/commit/fcb6b00efef80c81272a30cfc618202a29e0c6a9) | 2020-11-08 | Add authentication to AWS with Google credentials (#12079) | -| [fb6bddba0](https://github.com/apache/airflow/commit/fb6bddba0c9e3e7ef2610b4fb3f73622e48d7ea0) | 2020-11-07 | In AWS Secrets backend, a lookup is optional (#12143) | -| [cf9437d79](https://github.com/apache/airflow/commit/cf9437d79f9658d1309e4bfe847fe63d52ec7b99) | 2020-11-06 | Simplify string expressions (#12123) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5e77a6154](https://github.com/apache/airflow/commit/5e77a61543d26e5466d885d639247aa5189c011d) | 2020-11-02 | Docstring fix for S3DeleteBucketOperator (#12049) | -| [822285134](https://github.com/apache/airflow/commit/8222851348aa81424c9bdcea994e25e0d6692709) | 2020-10-29 | Add Template Fields to RedshiftToS3Operator & S3ToRedshiftOperator (#11844) | -| [db121f726](https://github.com/apache/airflow/commit/db121f726b3c7a37aca1ea05eb4714f884456005) | 2020-10-28 | Add truncate table (before copy) option to S3ToRedshiftOperator (#9246) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [8afdb6ac6](https://github.com/apache/airflow/commit/8afdb6ac6a7997cb14806bc2734c81c00ed8da97) | 2020-10-26 | Fix spellings (#11825) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [6ce855af1](https://github.com/apache/airflow/commit/6ce855af118daeaa4c249669079ab9d9aad23945) | 2020-10-24 | Fix spelling (#11821) | -| [3934ef224](https://github.com/apache/airflow/commit/3934ef22494db6d9613c229aaa82ea6a366b7c2f) | 2020-10-24 | Remove redundant builtins imports (#11809) | -| [4c8e033c0](https://github.com/apache/airflow/commit/4c8e033c0ee7d28963d504a9216205155f20f58f) | 2020-10-24 | Fix spelling and grammar (#11814) | -| [483068745](https://github.com/apache/airflow/commit/48306874538eea7cfd42358d5ebb59705204bfc4) | 2020-10-24 | Use Python 3 style super classes (#11806) | -| [0df60b773](https://github.com/apache/airflow/commit/0df60b773671ecf8d4e5f582ac2be200cf2a2edd) | 2020-10-23 | Add reattach flag to ECSOperator (#10643) | -| [b9d677cdd](https://github.com/apache/airflow/commit/b9d677cdd660e0be8278a64658e73359276a9682) | 
2020-10-22 | Add type hints to aws provider (#11531) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [674368f66](https://github.com/apache/airflow/commit/674368f66cf61b2a105f326f23868ac3aee08807) | 2020-10-19 | Fixes MySQLToS3 float to int conversion (#10437) | -| [0823d46a7](https://github.com/apache/airflow/commit/0823d46a7f267f2e45195a175021825367938add) | 2020-10-16 | Add type annotations for AWS operators and hooks (#11434) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [d38a0a781](https://github.com/apache/airflow/commit/d38a0a781e123c8c50313efdb23f767d6678afe0) | 2020-10-12 | added type hints for aws cloud formation (#11470) | -| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12 | Remove redundant None provided as default to dict.get() (#11448) | -| [c3e340584](https://github.com/apache/airflow/commit/c3e340584bf1892c4f73aa9e7495b5823dab0c40) | 2020-10-11 | Change prefix of AwsDynamoDB hook module (#11209) | -| [42a23d16f](https://github.com/apache/airflow/commit/42a23d16fe9b2f165b0805fb767ecbb825c93657) | 2020-10-11 | Update MySQLToS3Operator's s3_bucket to template_fields (#10778) | -| [422b61a9d](https://github.com/apache/airflow/commit/422b61a9dd95ab9d00b239daa14d87d7cae5ae73) | 2020-10-09 | Adding ElastiCache Hook for creating, describing and deleting replication groups (#8701) | -| [dd98b2149](https://github.com/apache/airflow/commit/dd98b21494ff6036242b63268140abe1294b3657) | 2020-10-06 | Add acl_policy parameter to GCSToS3Operator (#10804) (#10829) | -| [32b3cfbcf](https://github.com/apache/airflow/commit/32b3cfbcf0209cb062dd641c1232ab25d02d4d6d) | 2020-10-06 | Strict type check for all hooks in amazon (#11250) | -| [6d573e8ab](https://github.com/apache/airflow/commit/6d573e8abbf87e3c7281347e03d428a6e5baccd4) | 2020-10-03 | Add s3 key to template fields for s3/redshift transfer operators (#10890) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [00ffedb8c](https://github.com/apache/airflow/commit/00ffedb8c402eb5638782628eb706a5f28215eac) | 2020-09-30 | Add amazon glacier to GCS transfer operator (#10947) | -| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24 | Fix incorrect Usage of Optional[bool] (#11138) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [b61225a88](https://github.com/apache/airflow/commit/b61225a8850b20be17842c2428b91d873584c4da) | 2020-09-21 | Add D204 pydocstyle check (#11031) | -| [2410f592a](https://github.com/apache/airflow/commit/2410f592a4ab160b377f1a9e5de3b7262b9851cc) | 2020-09-19 | Get Airflow configs with sensitive data from AWS Systems Manager (#11023) | -| 
[2bf7b7cac](https://github.com/apache/airflow/commit/2bf7b7cac7858f5a6a495f1a9eb4780ec84f95b4) | 2020-09-19 | Add typing to amazon provider EMR (#10910) | -| [9edfcb7ac](https://github.com/apache/airflow/commit/9edfcb7ac46917836ec956264da8876e58d92392) | 2020-09-19 | Support extra_args in S3Hook and GCSToS3Operator (#11001) | -| [4e1f3a69d](https://github.com/apache/airflow/commit/4e1f3a69db8614c302e4916332555034053b935c) | 2020-09-14 | [AIRFLOW-10645] Add AWS Secrets Manager Hook (#10655) | -| [e9add7916](https://github.com/apache/airflow/commit/e9add79160e3a16bb348e30f4e83386a371dbc1e) | 2020-09-14 | Fix Failing static tests on Master (#10927) | -| [383a118d2](https://github.com/apache/airflow/commit/383a118d2df618e46d81c520cd2c4a31d81b33dd) | 2020-09-14 | Add more type annotations to AWS hooks (#10671) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [2934220dc](https://github.com/apache/airflow/commit/2934220dc98e295764f7791d33e121629ed2fbbb) | 2020-09-08 | Always return a list from S3Hook list methods (#10774) | -| [f40ac9b15](https://github.com/apache/airflow/commit/f40ac9b151124dbcd87197d6ae38c85191d41f38) | 2020-09-01 | Add placement_strategy option (#9444) | -| [e4878e677](https://github.com/apache/airflow/commit/e4878e6775bbe5cb2a1d786e57e009271b78bba0) | 2020-08-31 | fix type hints for s3 hook read_key method (#10653) | -| [2ca615cff](https://github.com/apache/airflow/commit/2ca615cffefe97dfa38e1b7f60d9ed33c6628992) | 2020-08-29 | Update Google Cloud branding (#10642) | -| [8969b7185](https://github.com/apache/airflow/commit/8969b7185ebc3c90168ce9a2fb97dfbc74d2bed9) | 2020-08-28 | Removed bad characters from AWS operator (#10590) | -| [8349061f9](https://github.com/apache/airflow/commit/8349061f9cb01a92c87edd349cc844c4053851e8) | 2020-08-26 | Improve Docstring for AWS Athena Hook/Operator (#10580) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [3734876d9](https://github.com/apache/airflow/commit/3734876d9898067ee933b84af522d53df6160d7f) | 2020-08-24 | Implement impersonation in google operators (#10052) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [c6358045f](https://github.com/apache/airflow/commit/c6358045f9d61af63c96833cb6682d6f382a6408) | 2020-08-22 | Fixes S3ToRedshift COPY query (#10436) | -| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22 | Replace assigment with Augmented assignment (#10468) | -| [27d08b76a](https://github.com/apache/airflow/commit/27d08b76a2d171d716a1599157a8a60a121dbec6) | 2020-08-21 | Amazon SES Hook (#10391) | -| [dea345b05](https://github.com/apache/airflow/commit/dea345b05c2cd226e70f97a3934d7456aa1cc754) | 2020-08-17 | Fix AwsGlueJobSensor to stop running after the Glue job finished (#9022) | -| [f6734b3b8](https://github.com/apache/airflow/commit/f6734b3b850d33d3712763f93c114e80f5af9ffb) | 2020-08-12 | Enable Sphinx spellcheck for doc generation (#10280) | -| [82f744b87](https://github.com/apache/airflow/commit/82f744b871bb2c5e9a2d628e1c45ae16c1244240) | 2020-08-11 | Add type annotations 
to AwsGlueJobHook, RedshiftHook modules (#10286) | -| [19bc97d0c](https://github.com/apache/airflow/commit/19bc97d0ce436a6ec9d8e9a5adcd48c0a769d01f) | 2020-08-10 | Revert "Add Amazon SES hook (#10004)" (#10276) | -| [f06fe616e](https://github.com/apache/airflow/commit/f06fe616e66256bdc53710de505c2c6b1bd21528) | 2020-08-10 | Add Amazon SES hook (#10004) | -| [0c77ea8a3](https://github.com/apache/airflow/commit/0c77ea8a3c417805f66d10f0c757ca218bf8dee0) | 2020-08-06 | Add type annotations to S3 hook module (#10164) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [9667314b2](https://github.com/apache/airflow/commit/9667314b2fb879edc451793a8350123507e1cfd6) | 2020-08-05 | Add correct signatures for operators in amazon provider package (#10167) | -| [000287753](https://github.com/apache/airflow/commit/000287753b478f29e6c25442ac253e3a6c8e8c87) | 2020-08-03 | Improve Typing coverage of amazon/aws/athena (#10025) | -| [53ada6e79](https://github.com/apache/airflow/commit/53ada6e7911f411e80ebb00be9f07a7cc0788d01) | 2020-08-03 | Add S3KeysUnchangedSensor (#9817) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [2b8dea64e](https://github.com/apache/airflow/commit/2b8dea64e9e8716fba8c38a1b439f7835bbd2918) | 2020-08-01 | Fix typo in Athena sensor retries (#10079) | -| [1508c43ec](https://github.com/apache/airflow/commit/1508c43ec9594e801b415dd82472fa017791b759) | 2020-07-29 | Adding new SageMaker operator for ProcessingJobs (#9594) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [8b10a4b35](https://github.com/apache/airflow/commit/8b10a4b35e45d536a6475bfe1491ee75fad50186) | 2020-07-25 | Stop using start_date in default_args in example_dags (#9982) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [e7c87fe45](https://github.com/apache/airflow/commit/e7c87fe453c6a70ed087c7ffbccaacbf0d2831b9) | 2020-07-20 | Refactor AwsBaseHook._get_credentials (#9878) | -| [2577f9334](https://github.com/apache/airflow/commit/2577f9334a5cb71cccd97e62b0ae2d097cb99e1a) | 2020-07-16 | Fix S3FileTransformOperator to support S3 Select transformation only (#8936) | -| [52b6efe1e](https://github.com/apache/airflow/commit/52b6efe1ecaae74b9c2497f565e116305d575a76) | 2020-07-15 | Add option to delete by prefix to S3DeleteObjectsOperator (#9350) | -| [553bb7af7](https://github.com/apache/airflow/commit/553bb7af7cb7a50f7141b5b89297713cee6d19f6) | 2020-07-13 | Keep functions signatures in decorators (#9786) | -| [2f31b3060](https://github.com/apache/airflow/commit/2f31b3060ed8274d5d1b1db7349ce607640b9199) | 2020-07-08 | Get Airflow configs with sensitive data from Secret Backends (#9645) | -| [07b81029e](https://github.com/apache/airflow/commit/07b81029ebc2a296fb54181f2cec11fcc7704d9d) | 2020-07-08 | Allow AWSAthenaHook to get more than 1000/first page of results (#6075) | -| [564192c16](https://github.com/apache/airflow/commit/564192c1625a552456cebb3751978c08eebdb2a1) | 2020-07-08 | Add AWS StepFunctions integrations to the aws provider (#8749) | -| 
[ecce1ace7](https://github.com/apache/airflow/commit/ecce1ace7a277c948c61d7d4cbfc8632cc216559) | 2020-07-08 | [AIRFLOW-XXXX] Remove unnecessary docstring in AWSAthenaOperator | -| [a79e2d4c4](https://github.com/apache/airflow/commit/a79e2d4c4aa105f3fac5ae6a28e29af9cd572407) | 2020-07-06 | Move provider's log task handlers to the provider package (#9604) | -| [ee20086b8](https://github.com/apache/airflow/commit/ee20086b8c499fa40dcaac71652f21b466e7f80f) | 2020-07-02 | Move S3TaskHandler to the AWS provider package (#9602) | -| [40add26d4](https://github.com/apache/airflow/commit/40add26d459c2511a6d9d305ae7300f0d6104211) | 2020-06-29 | Remove almost all references to airflow.contrib (#9559) | -| [c858babdd](https://github.com/apache/airflow/commit/c858babddf8b18b417993b5bfefec1c5635510da) | 2020-06-26 | Remove kwargs from Super calls in AWS Secrets Backends (#9523) | -| [87fdbd070](https://github.com/apache/airflow/commit/87fdbd0708d942af98d35604fe5962962e25d246) | 2020-06-25 | Use literal syntax instead of function calls to create data structure (#9516) | -| [c7a454aa3](https://github.com/apache/airflow/commit/c7a454aa32bf33133d042e8438ac259b32144b21) | 2020-06-22 | Add AWS ECS system test (#8888) | -| [df8efd04f](https://github.com/apache/airflow/commit/df8efd04f394afc4b5affb677bc78d8b7bd5275a) | 2020-06-21 | Enable & Fix "Docstring Content Issues" PyDocStyle Check (#9460) | -| [e13a14c87](https://github.com/apache/airflow/commit/e13a14c8730f4f633d996dd7d3468fe827136a84) | 2020-06-21 | Enable & Fix Whitespace related PyDocStyle Checks (#9458) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [992a18c84](https://github.com/apache/airflow/commit/992a18c84a355d13e821c703e7364f12233c37dc) | 2020-06-19 | Move MySqlToS3Operator to transfers (#9400) | -| [a60f589aa](https://github.com/apache/airflow/commit/a60f589aa251cc3df6bec5b306ad4a7f736f539f) | 2020-06-19 | Add MySqlToS3Operator (#9054) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [40bf8f28f](https://github.com/apache/airflow/commit/40bf8f28f97f17f40d993d207ea740eba54593ee) | 2020-06-18 | Detect automatically the lack of reference to the guide in the operator descriptions (#9290) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [58a8ec0e4](https://github.com/apache/airflow/commit/58a8ec0e46f624ee0369dd156dd8fb4f81884a21) | 2020-06-16 | AWSBatchOperator <> ClientHook relation changed to composition (#9306) | -| [a80cd25e8](https://github.com/apache/airflow/commit/a80cd25e8eb7f8b5d89af26cdcd62a5bbe44d65c) | 2020-06-15 | Close/Flush byte stream in s3 hook load_string and load_bytes (#9211) | -| [ffb857403](https://github.com/apache/airflow/commit/ffb85740373f7adb70d28ec7d5a8886380170e5e) | 2020-06-14 | Decrypt secrets from SystemsManagerParameterStoreBackend (#9214) | -| [a69b031f2](https://github.com/apache/airflow/commit/a69b031f20c5a1cd032f9873394374f661811e8f) | 2020-06-10 | Add S3ToRedshift example dag and system test (#8877) | -| 
[17adcea83](https://github.com/apache/airflow/commit/17adcea835cb7b0cf2d8da0ac7dda5549cfa3e45) | 2020-06-02 | Fix handling of subprocess error handling in s3_file_transform and gcs (#9106) | -| [357e11e0c](https://github.com/apache/airflow/commit/357e11e0cfb4c02833018e073bc4f5e5b52fae4f) | 2020-05-29 | Add Delete/Create S3 bucket operators (#8895) | -| [1ed171bfb](https://github.com/apache/airflow/commit/1ed171bfb265ded8674058bdc425640d25f1f4fc) | 2020-05-28 | Add script_args for S3FileTransformOperator (#9019) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [f946f96da](https://github.com/apache/airflow/commit/f946f96da45d8e6101805450d8cab7ccb2774ad0) | 2020-05-23 | Old json boto compat removed from dynamodb_to_s3 operator (#8987) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [f4edd90a9](https://github.com/apache/airflow/commit/f4edd90a94b8f91bbefbbbfba367372399559596) | 2020-05-16 | Speed up TestAwsLambdaHook by not actually running a function (#8882) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [85bbab27d](https://github.com/apache/airflow/commit/85bbab27dbb4f55f6f322b894fe3d54797076c15) | 2020-05-15 | Add EMR operators howto docs (#8863) | -| [e61b9bb9b](https://github.com/apache/airflow/commit/e61b9bb9bbe6d8a0621310f3583483b9135c6770) | 2020-05-13 | Add AWS EMR System tests (#8618) | -| [ed3f5131a](https://github.com/apache/airflow/commit/ed3f5131a27e2ef0422f2495a4532630a6204f82) | 2020-05-13 | Correctly pass sleep time from AWSAthenaOperator down to the hook. 
(#8845) | -| [7236862a1](https://github.com/apache/airflow/commit/7236862a1f5361b5e99c03dd63dae9b966efcd24) | 2020-05-12 | [AIRFLOW-2310] Enable AWS Glue Job Integration (#6007) | -| [d590e5e76](https://github.com/apache/airflow/commit/d590e5e7679322bebb1472fa8c7ec6d183e4154a) | 2020-05-11 | Add option to propagate tags in ECSOperator (#8811) | -| [0c3db84c3](https://github.com/apache/airflow/commit/0c3db84c3ce5107f53ed5ecc48edfdfe1b97feff) | 2020-05-11 | [AIRFLOW-7068] Create EC2 Hook, Operator and Sensor (#7731) | -| [cbebed2b4](https://github.com/apache/airflow/commit/cbebed2b4d0bd1e0984c331c0270e83bf8df8540) | 2020-05-10 | Allow passing backend_kwargs to AWS SSM client (#8802) | -| [c7788a689](https://github.com/apache/airflow/commit/c7788a6894cb79c22153434dd9b977393b8236be) | 2020-05-10 | Add imap_attachment_to_s3 example dag and system test (#8669) | -| [ff5b70149](https://github.com/apache/airflow/commit/ff5b70149bf51012156378c8fc8b072c7c280d9d) | 2020-05-07 | Add google_api_to_s3_transfer example dags and system tests (#8581) | -| [4421f011e](https://github.com/apache/airflow/commit/4421f011eeec2d1022a39933e27f530fb9f9c1b1) | 2020-05-01 | Improve template capabilities of EMR job and step operators (#8572) | -| [379a884d6](https://github.com/apache/airflow/commit/379a884d645a4d73db1c81e3450adc82571989ea) | 2020-04-28 | fix: aws hook should work without conn id (#8534) | -| [74bc316c5](https://github.com/apache/airflow/commit/74bc316c56192f14677e9406d3878887a836062b) | 2020-04-27 | [AIRFLOW-4438] Add Gzip compression to S3_hook (#8571) | -| [7ea66a1a9](https://github.com/apache/airflow/commit/7ea66a1a9594704869e82513d3a06fe35b6109b2) | 2020-04-26 | Add example DAG for ECSOperator (#8452) | -| [b6434dedf](https://github.com/apache/airflow/commit/b6434dedf974085e5f8891446fa63104836c8fdf) | 2020-04-24 | [AIRFLOW-7111] Add generate_presigned_url method to S3Hook (#8441) | -| [becedd5af](https://github.com/apache/airflow/commit/becedd5af8df01a0210e0a3fa78e619785f39908) | 2020-04-19 | Remove unrelated EC2 references in ECSOperator (#8451) | -| [ab1290cb0](https://github.com/apache/airflow/commit/ab1290cb0c5856fa85c8596bfdf780fcdfd99c31) | 2020-04-13 | Make launch_type parameter optional (#8248) | -| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09 | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170) | -| [b46d6c060](https://github.com/apache/airflow/commit/b46d6c060280da59193a28cf67e791eb825cb51c) | 2020-04-08 | Add support for AWS Secrets Manager as Secrets Backend (#8186) | -| [68d1714f2](https://github.com/apache/airflow/commit/68d1714f296989b7aad1a04b75dc033e76afb747) | 2020-04-04 | [AIRFLOW-6822] AWS hooks should cache boto3 client (#7541) | -| [8a0240257](https://github.com/apache/airflow/commit/8a02402576f83869d5134b4bddef5d73c15a8320) | 2020-03-31 | Rename CloudBaseHook to GoogleBaseHook and move it to google.common (#8011) | -| [7239d9a82](https://github.com/apache/airflow/commit/7239d9a82dbb3b9bdf27b531daa70338af9dd796) | 2020-03-28 | Get Airflow Variables from AWS Systems Manager Parameter Store (#7945) | -| [eb4af4f94](https://github.com/apache/airflow/commit/eb4af4f944c77e67e167bbb6b0a2aaf075a95b50) | 2020-03-28 | Make BaseSecretsBackend.build_path generic (#7948) | -| [438da7241](https://github.com/apache/airflow/commit/438da7241eb537e3ef5ae711629446155bf738a3) | 2020-03-28 | [AIRFLOW-5825] SageMakerEndpointOperator is not idempotent (#7891) | -| 
[686d7d50b](https://github.com/apache/airflow/commit/686d7d50bd21622724d6818021355bc6885fd3de) | 2020-03-25 | Standardize SecretBackend class names (#7846) | -| [eef87b995](https://github.com/apache/airflow/commit/eef87b9953347a65421f315a07dbef37ded9df66) | 2020-03-23 | [AIRFLOW-7105] Unify Secrets Backend method interfaces (#7830) | -| [5648dfbc3](https://github.com/apache/airflow/commit/5648dfbc300337b10567ef4e07045ea29d33ec06) | 2020-03-23 | Add missing call to Super class in 'amazon', 'cloudant & 'databricks' providers (#7827) | -| [a36002412](https://github.com/apache/airflow/commit/a36002412334c445e4eab41fdbb85ef31b6fd384) | 2020-03-19 | [AIRFLOW-5705] Make AwsSsmSecretsBackend consistent with VaultBackend (#7753) | -| [2a54512d7](https://github.com/apache/airflow/commit/2a54512d785ba603ba71381dc3dfa049e9f74063) | 2020-03-17 | [AIRFLOW-5705] Fix bugs in AWS SSM Secrets Backend (#7745) | -| [a8b5fc74d](https://github.com/apache/airflow/commit/a8b5fc74d07e50c91bb64cb66ca1a450aa5ce6e1) | 2020-03-16 | [AIRFLOW-4175] S3Hook load_file should support ACL policy paramete (#7733) | -| [e31e9ddd2](https://github.com/apache/airflow/commit/e31e9ddd2332e5d92422baf668acee441646ad68) | 2020-03-14 | [AIRFLOW-5705] Add secrets backend and support for AWS SSM (#6376) | -| [3bb60afc7](https://github.com/apache/airflow/commit/3bb60afc7b8319996385d681faac342afe2b3bd2) | 2020-03-13 | [AIRFLOW-6975] Base AWSHook AssumeRoleWithSAML (#7619) | -| [c0c5f11ad](https://github.com/apache/airflow/commit/c0c5f11ad11a5a38e0553c1a36aa75eb83efae51) | 2020-03-12 | [AIRFLOW-6884] Make SageMakerTrainingOperator idempotent (#7598) | -| [b7cdda1c6](https://github.com/apache/airflow/commit/b7cdda1c64595bc7f85519337029de259e573fce) | 2020-03-10 | [AIRFLOW-4438] Add Gzip compression to S3_hook (#7680) | -| [42eef3821](https://github.com/apache/airflow/commit/42eef38217e709bc7a7f71bf0286e9e61293a43e) | 2020-03-07 | [AIRFLOW-6877] Add cross-provider dependencies as extras (#7506) | -| [9a94ab246](https://github.com/apache/airflow/commit/9a94ab246db8c09aa83bb6a6d245b1ca9563bcd9) | 2020-03-01 | [AIRFLOW-6962] Fix compeleted to completed (#7600) | -| [1b38f6d9b](https://github.com/apache/airflow/commit/1b38f6d9b6710bd5e25fc16883599f1842ab7cb9) | 2020-02-29 | [AIRFLOW-5908] Add download_file to S3 Hook (#6577) | -| [3ea3e1a2b](https://github.com/apache/airflow/commit/3ea3e1a2b580b7ed10efe668de0cc37b03673500) | 2020-02-26 | [AIRFLOW-6824] EMRAddStepsOperator problem with multi-step XCom (#7443) | -| [6eaa7e3b1](https://github.com/apache/airflow/commit/6eaa7e3b1845644d5ec65a00a997f4029bec9628) | 2020-02-25 | [AIRFLOW-5924] Automatically unify bucket name and key in S3Hook (#6574) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [7d0e7122d](https://github.com/apache/airflow/commit/7d0e7122dd14576d834c6f66fe919a72b100b7f8) | 2020-02-24 | [AIRFLOW-6830] Add Subject/MessageAttributes to SNS hook and operator (#7451) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [47a922b86](https://github.com/apache/airflow/commit/47a922b86426968bfa07cc7892d2eeeca761d884) | 2020-02-21 | [AIRFLOW-6854] Fix missing typing_extensions on python 3.8 (#7474) | -| 
[9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18 | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) | -| [58c3542ed](https://github.com/apache/airflow/commit/58c3542ed25061320ce61dbe0adf451a44c738dd) | 2020-02-12 | [AIRFLOW-5231] Fix S3Hook.delete_objects method (#7375) | -| [b7aa778b3](https://github.com/apache/airflow/commit/b7aa778b38df2f116a1c20031e72fea8b97315bf) | 2020-02-10 | [AIRFLOW-6767] Correct name for default Athena workgroup (#7394) | -| [9282185e6](https://github.com/apache/airflow/commit/9282185e6624e64bb7f17447f81c1b2d1bb4d56d) | 2020-02-09 | [AIRFLOW-6761] Fix WorkGroup param in AWSAthenaHook (#7386) | -| [94fccca97](https://github.com/apache/airflow/commit/94fccca97030ee59d89f302a98137b17e7b01a33) | 2020-02-04 | [AIRFLOW-XXXX] Add pre-commit check for utf-8 file encoding (#7347) | -| [f3ad5cf61](https://github.com/apache/airflow/commit/f3ad5cf6185b9d406d0fb0a4ecc0b5536f79217a) | 2020-02-03 | [AIRFLOW-4681] Make sensors module pylint compatible (#7309) | -| [88e40c714](https://github.com/apache/airflow/commit/88e40c714d2853aa8966796945b2907c263fed08) | 2020-02-03 | [AIRFLOW-6716] Fix AWS Datasync Example DAG (#7339) | -| [a311d3d82](https://github.com/apache/airflow/commit/a311d3d82e0c2e32bcb56e29f33c95ed0a2a2ddc) | 2020-02-03 | [AIRFLOW-6718] Fix more occurrences of utils.dates.days_ago (#7341) | -| [cb766b05b](https://github.com/apache/airflow/commit/cb766b05b17b80fd54a5ce6ac3ee35a631115000) | 2020-02-03 | [AIRFLOW-XXXX] Fix Static Checks on CI (#7342) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [7527eddc5](https://github.com/apache/airflow/commit/7527eddc5e9729aa7e732209a07d57985f6c73e4) | 2020-02-02 | [AIRFLOW-4364] Make all code in airflow/providers/amazon pylint compatible (#7336) | -| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02 | [AIRFLOW-6708] Set unique logger names (#7330) | -| [63aa3db88](https://github.com/apache/airflow/commit/63aa3db88f8824efe79622301efd9f8ba75b991c) | 2020-02-02 | [AIRFLOW-6258] Add CloudFormation operators to AWS providers (#6824) | -| [af4157fde](https://github.com/apache/airflow/commit/af4157fdeffc0c18492b518708c0db44815067ab) | 2020-02-02 | [AIRFLOW-6672] AWS DataSync - better logging of error message (#7288) | -| [373c6aa4a](https://github.com/apache/airflow/commit/373c6aa4a208284b5ff72987e4bd8f4e2ada1a1b) | 2020-01-30 | [AIRFLOW-6682] Move GCP classes to providers package (#7295) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [1988a97e8](https://github.com/apache/airflow/commit/1988a97e8f687e28a5a39b29677fb514e097753c) | 2020-01-28 | [AIRFLOW-6659] Move AWS Transfer operators to providers package (#7274) | -| [ab10443e9](https://github.com/apache/airflow/commit/ab10443e965269efe9c1efaf5fa33bcdbe609f13) | 2020-01-28 | [AIRFLOW-6424] Added a operator to modify EMR cluster (#7213) | -| [40246132a](https://github.com/apache/airflow/commit/40246132a7ef3b07fe3173c6e7646ed6b53aad6e) | 2020-01-28 | [AIRFLOW-6654] AWS DataSync - bugfix when creating locations (#7270) | -| [82c0e5aff](https://github.com/apache/airflow/commit/82c0e5aff6004f636b98e207c3caec40b403fbbe) | 2020-01-28 | [AIRFLOW-6655] Move AWS classes to 
providers (#7271) | -| [599e4791c](https://github.com/apache/airflow/commit/599e4791c91cff411b1bf1c45555db5094c2b420) | 2020-01-18 | [AIRFLOW-6541] Use EmrJobFlowSensor for other states (#7146) | -| [c319e81ca](https://github.com/apache/airflow/commit/c319e81cae1de31ad1373903252d8608ffce1fba) | 2020-01-17 | [AIRFLOW-6572] Move AWS classes to providers.amazon.aws package (#7178) | -| [941a07057](https://github.com/apache/airflow/commit/941a070578bc7d9410715b89658548167352cc4d) | 2020-01-15 | [AIRFLOW-6570] Add dag tag for all example dag (#7176) | -| [78d8fe694](https://github.com/apache/airflow/commit/78d8fe6944b689b9b0af99255286e34e06eedec3) | 2020-01-08 | [AIRFLOW-6245] Add custom waiters for AWS batch jobs (#6811) | -| [e0b022725](https://github.com/apache/airflow/commit/e0b022725749181bd4e30933e4a0ffefb993eede) | 2019-12-28 | [AIRFLOW-6319] Add support for AWS Athena workgroups (#6871) | -| [57da45685](https://github.com/apache/airflow/commit/57da45685457520d51a0967e2aeb5e5ff162dfa7) | 2019-12-24 | [AIRFLOW-6333] Bump Pylint to 2.4.4 & fix/disable new checks (#6888) | -| [cf647c27e](https://github.com/apache/airflow/commit/cf647c27e0f35bbd1183bfcf87a106cbdb69d3fa) | 2019-12-18 | [AIRFLOW-6038] AWS DataSync reworked (#6773) | -| [7502cad28](https://github.com/apache/airflow/commit/7502cad2844139d57e4276d971c0706a361d9dbe) | 2019-12-17 | [AIRFLOW-6206] Move and rename AWS batch operator [AIP-21] (#6764) | -| [c4c635df6](https://github.com/apache/airflow/commit/c4c635df6906f56e01724573923e19763bb0da62) | 2019-12-17 | [AIRFLOW-6083] Adding ability to pass custom configuration to lambda client. (#6678) | -| [4fb498f87](https://github.com/apache/airflow/commit/4fb498f87ef89acc30f2576ebc5090ab0653159e) | 2019-12-09 | [AIRFLOW-6072] aws_hook: Outbound http proxy setting and other enhancements (#6686) | -| [a1e2f8635](https://github.com/apache/airflow/commit/a1e2f863526973b17892ec31caf09eded95c1cd2) | 2019-11-20 | [AIRFLOW-6021] Replace list literal with list constructor (#6617) | -| [baae14084](https://github.com/apache/airflow/commit/baae140847cdf9d84e905fb6d1f119d6950eecf9) | 2019-11-19 | [AIRFLOW-5781] AIP-21 Migrate AWS Kinesis to /providers/amazon/aws (#6588) | -| [504cfbac1](https://github.com/apache/airflow/commit/504cfbac1a4ec2e2fd169523ed357808f63881bb) | 2019-11-18 | [AIRFLOW-5783] AIP-21 Move aws redshift into providers structure (#6539) | -| [992f0e3ac](https://github.com/apache/airflow/commit/992f0e3acf11163294508858515a5f79116e3ad8) | 2019-11-12 | AIRFLOW-5824: AWS DataSync Hook and Operators added (#6512) | -| [c015eb2f6](https://github.com/apache/airflow/commit/c015eb2f6496b9721afda9e85d5d4af3bbe0696b) | 2019-11-10 | [AIRFLOW-5786] Migrate AWS SNS to /providers/amazon/aws (#6502) | -| [3d76fb4bf](https://github.com/apache/airflow/commit/3d76fb4bf25e5b7d3d30e0d64867b5999b77f0b0) | 2019-11-09 | [AIRFLOW-5782] Migrate AWS Lambda to /providers/amazon/aws [AIP-21] (#6518) | diff --git a/airflow/providers/amazon/README.md b/airflow/providers/amazon/README.md deleted file mode 100644 index f361c163ae5c5..0000000000000 --- a/airflow/providers/amazon/README.md +++ /dev/null @@ -1,483 +0,0 @@ - - - -# Package apache-airflow-providers-amazon - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Cross provider package dependencies](#cross-provider-package-dependencies) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [New 
operators](#new-operators)
-        - [Moved operators](#moved-operators)
-    - [Transfer operators](#transfer-operators)
-        - [New transfer operators](#new-transfer-operators)
-        - [Moved transfer operators](#moved-transfer-operators)
-    - [Sensors](#sensors)
-        - [New sensors](#new-sensors)
-        - [Moved sensors](#moved-sensors)
-    - [Hooks](#hooks)
-        - [New hooks](#new-hooks)
-        - [Moved hooks](#moved-hooks)
-    - [Secrets](#secrets)
-        - [Moved secrets](#moved-secrets)
-- [Releases](#releases)
-    - [Release 1.0.0](#release-100)
-
-## Provider package
-
-This is a provider package for the `amazon` provider. All classes for this provider package
-are in the `airflow.providers.amazon` Python package.
-
-## Installation
-
-NOTE!
-
-In November 2020, a new version of pip (20.3) was released with a new, 2020 resolver. This resolver
-does not yet work with Apache Airflow and might lead to installation errors, depending on your choice
-of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4
-(`pip install --upgrade pip==20.2.4`) or, if you use pip 20.3, add the option
-`--use-deprecated legacy-resolver` to your pip install command.
-
-You can install this package on top of an existing Airflow 2.* installation via
-`pip install apache-airflow-providers-amazon`.
-
-## PIP requirements
-
-| PIP package | Version required |
-|:------------|:-----------------|
-| boto3       | >=1.15.0,<1.16.0 |
-| botocore    | >=1.18.0,<1.19.0 |
-| watchtower  | ~=0.7.3          |
-
-## Cross provider package dependencies
-
-These are dependencies that might be needed in order to use all the features of the package.
-You need to install the specified provider packages in order to use them.
-
-You can install such cross-provider dependencies when installing from PyPI. For example:
-
-```bash
-pip install apache-airflow-providers-amazon[apache.hive]
-```
-
-| Dependent package | Extra |
-|:------------------------------------------------------------------------------------------------------|:------------|
-| [apache-airflow-providers-apache-hive](https://pypi.org/project/apache-airflow-providers-apache-hive) | apache.hive |
-| [apache-airflow-providers-google](https://pypi.org/project/apache-airflow-providers-google) | google |
-| [apache-airflow-providers-imap](https://pypi.org/project/apache-airflow-providers-imap) | imap |
-| [apache-airflow-providers-mongo](https://pypi.org/project/apache-airflow-providers-mongo) | mongo |
-| [apache-airflow-providers-mysql](https://pypi.org/project/apache-airflow-providers-mysql) | mysql |
-| [apache-airflow-providers-postgres](https://pypi.org/project/apache-airflow-providers-postgres) | postgres |
-| [apache-airflow-providers-ssh](https://pypi.org/project/apache-airflow-providers-ssh) | ssh |
-
-# Provider classes summary
-
-In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `amazon` provider
-are in the `airflow.providers.amazon` package.
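To make the move concrete, here is a minimal, hypothetical migration sketch. The import paths are taken verbatim from the "Moved operators" and "Moved hooks" tables below; the hook instantiation and the `aws_default` connection id are illustrative assumptions, not something this README prescribes.

```python
# Airflow 1.10.* locations (see the "Moved" tables below):
#   from airflow.contrib.operators.ecs_operator import ECSOperator
#   from airflow.hooks.S3_hook import S3Hook

# Airflow 2.0 provider-package locations:
from airflow.providers.amazon.aws.operators.ecs import ECSOperator
from airflow.providers.amazon.aws.hooks.s3 import S3Hook

# Behaviour is unchanged by the move; only the import path differs.
# The connection id below is an illustrative assumption.
hook = S3Hook(aws_conn_id="aws_default")
```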
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - -### New operators - -| New Airflow 2.0 operators: `airflow.providers.amazon` package | -|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [aws.operators.cloud_formation.CloudFormationCreateStackOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/cloud_formation.py) | -| [aws.operators.cloud_formation.CloudFormationDeleteStackOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/cloud_formation.py) | -| [aws.operators.datasync.AWSDataSyncOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/datasync.py) | -| [aws.operators.ec2_start_instance.EC2StartInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/ec2_start_instance.py) | -| [aws.operators.ec2_stop_instance.EC2StopInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/ec2_stop_instance.py) | -| [aws.operators.emr_modify_cluster.EmrModifyClusterOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/emr_modify_cluster.py) | -| [aws.operators.glacier.GlacierCreateJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/glacier.py) | -| [aws.operators.glue.AwsGlueJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/glue.py) | -| [aws.operators.s3_bucket.S3CreateBucketOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/s3_bucket.py) | -| [aws.operators.s3_bucket.S3DeleteBucketOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/s3_bucket.py) | -| [aws.operators.s3_file_transform.S3FileTransformOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/s3_file_transform.py) | -| [aws.operators.sagemaker_processing.SageMakerProcessingOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sagemaker_processing.py) | -| [aws.operators.step_function_get_execution_output.StepFunctionGetExecutionOutputOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/step_function_get_execution_output.py) | -| [aws.operators.step_function_start_execution.StepFunctionStartExecutionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/step_function_start_execution.py) | - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.amazon` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| 
[aws.operators.athena.AWSAthenaOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/athena.py) | [contrib.operators.aws_athena_operator.AWSAthenaOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/aws_athena_operator.py) | -| [aws.operators.batch.AwsBatchOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/batch.py) | [contrib.operators.awsbatch_operator.AWSBatchOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/awsbatch_operator.py) | -| [aws.operators.ecs.ECSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/ecs.py) | [contrib.operators.ecs_operator.ECSOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/ecs_operator.py) | -| [aws.operators.emr_add_steps.EmrAddStepsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/emr_add_steps.py) | [contrib.operators.emr_add_steps_operator.EmrAddStepsOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/emr_add_steps_operator.py) | -| [aws.operators.emr_create_job_flow.EmrCreateJobFlowOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/emr_create_job_flow.py) | [contrib.operators.emr_create_job_flow_operator.EmrCreateJobFlowOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/emr_create_job_flow_operator.py) | -| [aws.operators.emr_terminate_job_flow.EmrTerminateJobFlowOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/emr_terminate_job_flow.py) | [contrib.operators.emr_terminate_job_flow_operator.EmrTerminateJobFlowOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/emr_terminate_job_flow_operator.py) | -| [aws.operators.s3_copy_object.S3CopyObjectOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/s3_copy_object.py) | [contrib.operators.s3_copy_object_operator.S3CopyObjectOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/s3_copy_object_operator.py) | -| [aws.operators.s3_delete_objects.S3DeleteObjectsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/s3_delete_objects.py) | [contrib.operators.s3_delete_objects_operator.S3DeleteObjectsOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/s3_delete_objects_operator.py) | -| [aws.operators.s3_list.S3ListOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/s3_list.py) | [contrib.operators.s3_list_operator.S3ListOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/s3_list_operator.py) | -| [aws.operators.sagemaker_base.SageMakerBaseOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sagemaker_base.py) | [contrib.operators.sagemaker_base_operator.SageMakerBaseOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sagemaker_base_operator.py) | -| [aws.operators.sagemaker_endpoint.SageMakerEndpointOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sagemaker_endpoint.py) | 
[contrib.operators.sagemaker_endpoint_operator.SageMakerEndpointOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sagemaker_endpoint_operator.py) | -| [aws.operators.sagemaker_endpoint_config.SageMakerEndpointConfigOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sagemaker_endpoint_config.py) | [contrib.operators.sagemaker_endpoint_config_operator.SageMakerEndpointConfigOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sagemaker_endpoint_config_operator.py) | -| [aws.operators.sagemaker_model.SageMakerModelOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sagemaker_model.py) | [contrib.operators.sagemaker_model_operator.SageMakerModelOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sagemaker_model_operator.py) | -| [aws.operators.sagemaker_training.SageMakerTrainingOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sagemaker_training.py) | [contrib.operators.sagemaker_training_operator.SageMakerTrainingOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sagemaker_training_operator.py) | -| [aws.operators.sagemaker_transform.SageMakerTransformOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sagemaker_transform.py) | [contrib.operators.sagemaker_transform_operator.SageMakerTransformOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sagemaker_transform_operator.py) | -| [aws.operators.sagemaker_tuning.SageMakerTuningOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sagemaker_tuning.py) | [contrib.operators.sagemaker_tuning_operator.SageMakerTuningOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sagemaker_tuning_operator.py) | -| [aws.operators.sns.SnsPublishOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sns.py) | [contrib.operators.sns_publish_operator.SnsPublishOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sns_publish_operator.py) | -| [aws.operators.sqs.SQSPublishOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/operators/sqs.py) | [contrib.operators.aws_sqs_publish_operator.SQSPublishOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/aws_sqs_publish_operator.py) | - - -## Transfer operators - - -### New transfer operators - -| New Airflow 2.0 transfers: `airflow.providers.amazon` package | -|:------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [aws.transfers.glacier_to_gcs.GlacierToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/glacier_to_gcs.py) | -| [aws.transfers.mysql_to_s3.MySQLToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/mysql_to_s3.py) | - - -### Moved transfer operators - -| Airflow 2.0 transfers: `airflow.providers.amazon` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | 
-|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [aws.transfers.dynamodb_to_s3.DynamoDBToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/dynamodb_to_s3.py) | [contrib.operators.dynamodb_to_s3.DynamoDBToS3Operator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dynamodb_to_s3.py) | -| [aws.transfers.gcs_to_s3.GCSToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/gcs_to_s3.py) | [operators.gcs_to_s3.GCSToS3Operator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/gcs_to_s3.py) | -| [aws.transfers.google_api_to_s3.GoogleApiToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/google_api_to_s3.py) | [operators.google_api_to_s3_transfer.GoogleApiToS3Transfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/google_api_to_s3_transfer.py) | -| [aws.transfers.hive_to_dynamodb.HiveToDynamoDBOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/hive_to_dynamodb.py) | [contrib.operators.hive_to_dynamodb.HiveToDynamoDBOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/hive_to_dynamodb.py) | -| [aws.transfers.imap_attachment_to_s3.ImapAttachmentToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/imap_attachment_to_s3.py) | [contrib.operators.imap_attachment_to_s3_operator.ImapAttachmentToS3Operator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/imap_attachment_to_s3_operator.py) | -| [aws.transfers.mongo_to_s3.MongoToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/mongo_to_s3.py) | [contrib.operators.mongo_to_s3.MongoToS3Operator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/mongo_to_s3.py) | -| [aws.transfers.redshift_to_s3.RedshiftToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/redshift_to_s3.py) | [operators.redshift_to_s3_operator.RedshiftToS3Transfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/redshift_to_s3_operator.py) | -| [aws.transfers.s3_to_redshift.S3ToRedshiftOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/s3_to_redshift.py) | [operators.s3_to_redshift_operator.S3ToRedshiftTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/s3_to_redshift_operator.py) | -| [aws.transfers.s3_to_sftp.S3ToSFTPOperator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/s3_to_sftp.py) | [contrib.operators.s3_to_sftp_operator.S3ToSFTPOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/s3_to_sftp_operator.py) | -| [aws.transfers.sftp_to_s3.SFTPToS3Operator](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/transfers/sftp_to_s3.py) | [contrib.operators.sftp_to_s3_operator.SFTPToS3Operator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sftp_to_s3_operator.py) | - - -## 
Sensors - - -### New sensors - -| New Airflow 2.0 sensors: `airflow.providers.amazon` package | -|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [aws.sensors.cloud_formation.CloudFormationCreateStackSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/cloud_formation.py) | -| [aws.sensors.cloud_formation.CloudFormationDeleteStackSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/cloud_formation.py) | -| [aws.sensors.ec2_instance_state.EC2InstanceStateSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/ec2_instance_state.py) | -| [aws.sensors.glacier.GlacierJobOperationSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/glacier.py) | -| [aws.sensors.glue.AwsGlueJobSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/glue.py) | -| [aws.sensors.redshift.AwsRedshiftClusterSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/redshift.py) | -| [aws.sensors.s3_keys_unchanged.S3KeysUnchangedSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/s3_keys_unchanged.py) | -| [aws.sensors.sagemaker_training.SageMakerTrainingSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/sagemaker_training.py) | -| [aws.sensors.step_function_execution.StepFunctionExecutionSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/step_function_execution.py) | - - -### Moved sensors - -| Airflow 2.0 sensors: `airflow.providers.amazon` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [aws.sensors.athena.AthenaSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/athena.py) | [contrib.sensors.aws_athena_sensor.AthenaSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/aws_athena_sensor.py) | -| [aws.sensors.emr_base.EmrBaseSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/emr_base.py) | [contrib.sensors.emr_base_sensor.EmrBaseSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/emr_base_sensor.py) | -| [aws.sensors.emr_job_flow.EmrJobFlowSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/emr_job_flow.py) | [contrib.sensors.emr_job_flow_sensor.EmrJobFlowSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/emr_job_flow_sensor.py) | -| [aws.sensors.emr_step.EmrStepSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/emr_step.py) | [contrib.sensors.emr_step_sensor.EmrStepSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/emr_step_sensor.py) | -| 
[aws.sensors.glue_catalog_partition.AwsGlueCatalogPartitionSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/glue_catalog_partition.py) | [contrib.sensors.aws_glue_catalog_partition_sensor.AwsGlueCatalogPartitionSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/aws_glue_catalog_partition_sensor.py) | -| [aws.sensors.s3_key.S3KeySensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/s3_key.py) | [sensors.s3_key_sensor.S3KeySensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/sensors/s3_key_sensor.py) | -| [aws.sensors.s3_prefix.S3PrefixSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/s3_prefix.py) | [sensors.s3_prefix_sensor.S3PrefixSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/sensors/s3_prefix_sensor.py) | -| [aws.sensors.sagemaker_base.SageMakerBaseSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/sagemaker_base.py) | [contrib.sensors.sagemaker_base_sensor.SageMakerBaseSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/sagemaker_base_sensor.py) | -| [aws.sensors.sagemaker_endpoint.SageMakerEndpointSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/sagemaker_endpoint.py) | [contrib.sensors.sagemaker_endpoint_sensor.SageMakerEndpointSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/sagemaker_endpoint_sensor.py) | -| [aws.sensors.sagemaker_transform.SageMakerTransformSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/sagemaker_transform.py) | [contrib.sensors.sagemaker_transform_sensor.SageMakerTransformSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/sagemaker_transform_sensor.py) | -| [aws.sensors.sagemaker_tuning.SageMakerTuningSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/sagemaker_tuning.py) | [contrib.sensors.sagemaker_tuning_sensor.SageMakerTuningSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/sagemaker_tuning_sensor.py) | -| [aws.sensors.sqs.SQSSensor](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/sensors/sqs.py) | [contrib.sensors.aws_sqs_sensor.SQSSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/aws_sqs_sensor.py) | - - -## Hooks - - -### New hooks - -| New Airflow 2.0 hooks: `airflow.providers.amazon` package | -|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [aws.hooks.batch_client.AwsBatchClientHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/batch_client.py) | -| [aws.hooks.batch_waiters.AwsBatchWaitersHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/batch_waiters.py) | -| [aws.hooks.cloud_formation.AWSCloudFormationHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/cloud_formation.py) | -| [aws.hooks.ec2.EC2Hook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/ec2.py) | -| 
[aws.hooks.elasticache_replication_group.ElastiCacheReplicationGroupHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/elasticache_replication_group.py) | -| [aws.hooks.glacier.GlacierHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/glacier.py) | -| [aws.hooks.glue.AwsGlueJobHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/glue.py) | -| [aws.hooks.kinesis.AwsFirehoseHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/kinesis.py) | -| [aws.hooks.redshift.RedshiftHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/redshift.py) | -| [aws.hooks.secrets_manager.SecretsManagerHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/secrets_manager.py) | -| [aws.hooks.ses.SESHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/ses.py) | -| [aws.hooks.step_function.StepFunctionHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/step_function.py) | - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.amazon` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:-----------------------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [aws.hooks.athena.AWSAthenaHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/athena.py) | [contrib.hooks.aws_athena_hook.AWSAthenaHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/aws_athena_hook.py) | -| [aws.hooks.base_aws.AwsBaseHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/base_aws.py) | [contrib.hooks.aws_hook.AwsHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/aws_hook.py) | -| [aws.hooks.datasync.AWSDataSyncHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/datasync.py) | [contrib.hooks.aws_datasync_hook.AWSDataSyncHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/aws_datasync_hook.py) | -| [aws.hooks.dynamodb.AwsDynamoDBHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/dynamodb.py) | [contrib.hooks.aws_dynamodb_hook.AwsDynamoDBHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/aws_dynamodb_hook.py) | -| [aws.hooks.emr.EmrHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/emr.py) | [contrib.hooks.emr_hook.EmrHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/emr_hook.py) | -| [aws.hooks.glue_catalog.AwsGlueCatalogHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/glue_catalog.py) | [contrib.hooks.aws_glue_catalog_hook.AwsGlueCatalogHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/aws_glue_catalog_hook.py) | -| [aws.hooks.lambda_function.AwsLambdaHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/lambda_function.py) | [contrib.hooks.aws_lambda_hook.AwsLambdaHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/aws_lambda_hook.py) | -| 
[aws.hooks.logs.AwsLogsHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/logs.py) | [contrib.hooks.aws_logs_hook.AwsLogsHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/aws_logs_hook.py) | -| [aws.hooks.s3.S3Hook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/s3.py) | [hooks.S3_hook.S3Hook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/S3_hook.py) | -| [aws.hooks.sagemaker.SageMakerHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/sagemaker.py) | [contrib.hooks.sagemaker_hook.SageMakerHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/sagemaker_hook.py) | -| [aws.hooks.sns.AwsSnsHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/sns.py) | [contrib.hooks.aws_sns_hook.AwsSnsHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/aws_sns_hook.py) | -| [aws.hooks.sqs.SQSHook](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/hooks/sqs.py) | [contrib.hooks.aws_sqs_hook.SQSHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/aws_sqs_hook.py) | - - -## Secrets - - - -### Moved secrets - -| Airflow 2.0 secrets: `airflow.providers.amazon` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [aws.secrets.secrets_manager.SecretsManagerBackend](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/secrets/secrets_manager.py) | [contrib.secrets.aws_secrets_manager.SecretsManagerBackend](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/secrets/aws_secrets_manager.py) | -| [aws.secrets.systems_manager.SystemsManagerParameterStoreBackend](https://github.com/apache/airflow/blob/master/airflow/providers/amazon/aws/secrets/systems_manager.py) | [contrib.secrets.aws_systems_manager.SystemsManagerParameterStoreBackend](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/secrets/aws_systems_manager.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [d5589673a](https://github.com/apache/airflow/commit/d5589673a95aaced0b851ea0a4061a010a924a82) | 2020-12-08 | Move dummy_operator.py to dummy.py (#11178) (#11293) | -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [bd90136aa](https://github.com/apache/airflow/commit/bd90136aaf5035e3234fe545b79a3e4aad21efe2) | 2020-11-30 | Move operator guides to provider documentation packages (#12681) | -| 
[02d94349b](https://github.com/apache/airflow/commit/02d94349be3d201ce9d37d7358573c937fd010df) | 2020-11-29 | Don't use time.time() or timezone.utcnow() for duration calculations (#12353) | -| [de3b1e687](https://github.com/apache/airflow/commit/de3b1e687b26c524c6909b7b4dfbb60d25019751) | 2020-11-28 | Move connection guides to provider documentation packages (#12653) | -| [663259d4b](https://github.com/apache/airflow/commit/663259d4b541ab10ce55fec4d2460e23917062c2) | 2020-11-25 | Fix AWS DataSync tests failing (#11020) | -| [3fa51f94d](https://github.com/apache/airflow/commit/3fa51f94d7a17f170ddc31908d36c91f4456a20b) | 2020-11-24 | Add check for duplicates in provider.yaml files (#12578) | -| [ed09915a0](https://github.com/apache/airflow/commit/ed09915a02b9b99e60689e647452addaab1688fc) | 2020-11-23 | [AIRFLOW-5115] Bugfix for S3KeySensor failing to accept template_fields (#12389) | -| [370e7d07d](https://github.com/apache/airflow/commit/370e7d07d1ed1a53b73fe878425fdcd4c71a7ed1) | 2020-11-21 | Fix Python Docstring parameters (#12513) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [7ca0b6f12](https://github.com/apache/airflow/commit/7ca0b6f121c9cec6e25de130f86a56d7c7fbe38c) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) (#12438) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [c94b1241a](https://github.com/apache/airflow/commit/c94b1241a144294f5f1c5f461d5e3b92e4a8fc38) | 2020-11-13 | Add extra error handling to S3 remote logging (#9908) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [250436d96](https://github.com/apache/airflow/commit/250436d962c8c950d38c1eb5e54a998891648cc9) | 2020-11-10 | Fix spelling in Python files (#12230) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [fcb6b00ef](https://github.com/apache/airflow/commit/fcb6b00efef80c81272a30cfc618202a29e0c6a9) | 2020-11-08 | Add authentication to AWS with Google credentials (#12079) | -| [fb6bddba0](https://github.com/apache/airflow/commit/fb6bddba0c9e3e7ef2610b4fb3f73622e48d7ea0) | 2020-11-07 | In AWS Secrets backend, a lookup is optional (#12143) | -| [cf9437d79](https://github.com/apache/airflow/commit/cf9437d79f9658d1309e4bfe847fe63d52ec7b99) | 2020-11-06 | Simplify string expressions (#12123) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 
2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5e77a6154](https://github.com/apache/airflow/commit/5e77a61543d26e5466d885d639247aa5189c011d) | 2020-11-02 | Docstring fix for S3DeleteBucketOperator (#12049) | -| [822285134](https://github.com/apache/airflow/commit/8222851348aa81424c9bdcea994e25e0d6692709) | 2020-10-29 | Add Template Fields to RedshiftToS3Operator & S3ToRedshiftOperator (#11844) | -| [db121f726](https://github.com/apache/airflow/commit/db121f726b3c7a37aca1ea05eb4714f884456005) | 2020-10-28 | Add truncate table (before copy) option to S3ToRedshiftOperator (#9246) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [8afdb6ac6](https://github.com/apache/airflow/commit/8afdb6ac6a7997cb14806bc2734c81c00ed8da97) | 2020-10-26 | Fix spellings (#11825) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [6ce855af1](https://github.com/apache/airflow/commit/6ce855af118daeaa4c249669079ab9d9aad23945) | 2020-10-24 | Fix spelling (#11821) | -| [3934ef224](https://github.com/apache/airflow/commit/3934ef22494db6d9613c229aaa82ea6a366b7c2f) | 2020-10-24 | Remove redundant builtins imports (#11809) | -| [4c8e033c0](https://github.com/apache/airflow/commit/4c8e033c0ee7d28963d504a9216205155f20f58f) | 2020-10-24 | Fix spelling and grammar (#11814) | -| [483068745](https://github.com/apache/airflow/commit/48306874538eea7cfd42358d5ebb59705204bfc4) | 2020-10-24 | Use Python 3 style super classes (#11806) | -| [0df60b773](https://github.com/apache/airflow/commit/0df60b773671ecf8d4e5f582ac2be200cf2a2edd) | 2020-10-23 | Add reattach flag to ECSOperator (#10643) | -| [b9d677cdd](https://github.com/apache/airflow/commit/b9d677cdd660e0be8278a64658e73359276a9682) | 2020-10-22 | Add type hints to aws provider (#11531) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [674368f66](https://github.com/apache/airflow/commit/674368f66cf61b2a105f326f23868ac3aee08807) | 2020-10-19 | Fixes MySQLToS3 float to int conversion (#10437) | -| [0823d46a7](https://github.com/apache/airflow/commit/0823d46a7f267f2e45195a175021825367938add) | 2020-10-16 | Add type annotations for AWS operators and hooks (#11434) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [d38a0a781](https://github.com/apache/airflow/commit/d38a0a781e123c8c50313efdb23f767d6678afe0) | 2020-10-12 | added type hints for aws cloud formation (#11470) | -| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12 | Remove redundant None provided as default to dict.get() (#11448) | -| [c3e340584](https://github.com/apache/airflow/commit/c3e340584bf1892c4f73aa9e7495b5823dab0c40) | 2020-10-11 | Change prefix of AwsDynamoDB hook module (#11209) | -| 
[42a23d16f](https://github.com/apache/airflow/commit/42a23d16fe9b2f165b0805fb767ecbb825c93657) | 2020-10-11 | Update MySQLToS3Operator's s3_bucket to template_fields (#10778) | -| [422b61a9d](https://github.com/apache/airflow/commit/422b61a9dd95ab9d00b239daa14d87d7cae5ae73) | 2020-10-09 | Adding ElastiCache Hook for creating, describing and deleting replication groups (#8701) | -| [dd98b2149](https://github.com/apache/airflow/commit/dd98b21494ff6036242b63268140abe1294b3657) | 2020-10-06 | Add acl_policy parameter to GCSToS3Operator (#10804) (#10829) | -| [32b3cfbcf](https://github.com/apache/airflow/commit/32b3cfbcf0209cb062dd641c1232ab25d02d4d6d) | 2020-10-06 | Strict type check for all hooks in amazon (#11250) | -| [6d573e8ab](https://github.com/apache/airflow/commit/6d573e8abbf87e3c7281347e03d428a6e5baccd4) | 2020-10-03 | Add s3 key to template fields for s3/redshift transfer operators (#10890) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [00ffedb8c](https://github.com/apache/airflow/commit/00ffedb8c402eb5638782628eb706a5f28215eac) | 2020-09-30 | Add amazon glacier to GCS transfer operator (#10947) | -| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24 | Fix incorrect Usage of Optional[bool] (#11138) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [b61225a88](https://github.com/apache/airflow/commit/b61225a8850b20be17842c2428b91d873584c4da) | 2020-09-21 | Add D204 pydocstyle check (#11031) | -| [2410f592a](https://github.com/apache/airflow/commit/2410f592a4ab160b377f1a9e5de3b7262b9851cc) | 2020-09-19 | Get Airflow configs with sensitive data from AWS Systems Manager (#11023) | -| [2bf7b7cac](https://github.com/apache/airflow/commit/2bf7b7cac7858f5a6a495f1a9eb4780ec84f95b4) | 2020-09-19 | Add typing to amazon provider EMR (#10910) | -| [9edfcb7ac](https://github.com/apache/airflow/commit/9edfcb7ac46917836ec956264da8876e58d92392) | 2020-09-19 | Support extra_args in S3Hook and GCSToS3Operator (#11001) | -| [4e1f3a69d](https://github.com/apache/airflow/commit/4e1f3a69db8614c302e4916332555034053b935c) | 2020-09-14 | [AIRFLOW-10645] Add AWS Secrets Manager Hook (#10655) | -| [e9add7916](https://github.com/apache/airflow/commit/e9add79160e3a16bb348e30f4e83386a371dbc1e) | 2020-09-14 | Fix Failing static tests on Master (#10927) | -| [383a118d2](https://github.com/apache/airflow/commit/383a118d2df618e46d81c520cd2c4a31d81b33dd) | 2020-09-14 | Add more type annotations to AWS hooks (#10671) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [2934220dc](https://github.com/apache/airflow/commit/2934220dc98e295764f7791d33e121629ed2fbbb) | 2020-09-08 | Always return a list from S3Hook list methods (#10774) | -| [f40ac9b15](https://github.com/apache/airflow/commit/f40ac9b151124dbcd87197d6ae38c85191d41f38) | 2020-09-01 | Add placement_strategy option (#9444) | -| 
[e4878e677](https://github.com/apache/airflow/commit/e4878e6775bbe5cb2a1d786e57e009271b78bba0) | 2020-08-31 | fix type hints for s3 hook read_key method (#10653) | -| [2ca615cff](https://github.com/apache/airflow/commit/2ca615cffefe97dfa38e1b7f60d9ed33c6628992) | 2020-08-29 | Update Google Cloud branding (#10642) | -| [8969b7185](https://github.com/apache/airflow/commit/8969b7185ebc3c90168ce9a2fb97dfbc74d2bed9) | 2020-08-28 | Removed bad characters from AWS operator (#10590) | -| [8349061f9](https://github.com/apache/airflow/commit/8349061f9cb01a92c87edd349cc844c4053851e8) | 2020-08-26 | Improve Docstring for AWS Athena Hook/Operator (#10580) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [3734876d9](https://github.com/apache/airflow/commit/3734876d9898067ee933b84af522d53df6160d7f) | 2020-08-24 | Implement impersonation in google operators (#10052) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [c6358045f](https://github.com/apache/airflow/commit/c6358045f9d61af63c96833cb6682d6f382a6408) | 2020-08-22 | Fixes S3ToRedshift COPY query (#10436) | -| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22 | Replace assigment with Augmented assignment (#10468) | -| [27d08b76a](https://github.com/apache/airflow/commit/27d08b76a2d171d716a1599157a8a60a121dbec6) | 2020-08-21 | Amazon SES Hook (#10391) | -| [dea345b05](https://github.com/apache/airflow/commit/dea345b05c2cd226e70f97a3934d7456aa1cc754) | 2020-08-17 | Fix AwsGlueJobSensor to stop running after the Glue job finished (#9022) | -| [f6734b3b8](https://github.com/apache/airflow/commit/f6734b3b850d33d3712763f93c114e80f5af9ffb) | 2020-08-12 | Enable Sphinx spellcheck for doc generation (#10280) | -| [82f744b87](https://github.com/apache/airflow/commit/82f744b871bb2c5e9a2d628e1c45ae16c1244240) | 2020-08-11 | Add type annotations to AwsGlueJobHook, RedshiftHook modules (#10286) | -| [19bc97d0c](https://github.com/apache/airflow/commit/19bc97d0ce436a6ec9d8e9a5adcd48c0a769d01f) | 2020-08-10 | Revert "Add Amazon SES hook (#10004)" (#10276) | -| [f06fe616e](https://github.com/apache/airflow/commit/f06fe616e66256bdc53710de505c2c6b1bd21528) | 2020-08-10 | Add Amazon SES hook (#10004) | -| [0c77ea8a3](https://github.com/apache/airflow/commit/0c77ea8a3c417805f66d10f0c757ca218bf8dee0) | 2020-08-06 | Add type annotations to S3 hook module (#10164) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [9667314b2](https://github.com/apache/airflow/commit/9667314b2fb879edc451793a8350123507e1cfd6) | 2020-08-05 | Add correct signatures for operators in amazon provider package (#10167) | -| [000287753](https://github.com/apache/airflow/commit/000287753b478f29e6c25442ac253e3a6c8e8c87) | 2020-08-03 | Improve Typing coverage of amazon/aws/athena (#10025) | -| [53ada6e79](https://github.com/apache/airflow/commit/53ada6e7911f411e80ebb00be9f07a7cc0788d01) | 2020-08-03 | Add S3KeysUnchangedSensor (#9817) | -| 
[aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [2b8dea64e](https://github.com/apache/airflow/commit/2b8dea64e9e8716fba8c38a1b439f7835bbd2918) | 2020-08-01 | Fix typo in Athena sensor retries (#10079) | -| [1508c43ec](https://github.com/apache/airflow/commit/1508c43ec9594e801b415dd82472fa017791b759) | 2020-07-29 | Adding new SageMaker operator for ProcessingJobs (#9594) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [8b10a4b35](https://github.com/apache/airflow/commit/8b10a4b35e45d536a6475bfe1491ee75fad50186) | 2020-07-25 | Stop using start_date in default_args in example_dags (#9982) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [e7c87fe45](https://github.com/apache/airflow/commit/e7c87fe453c6a70ed087c7ffbccaacbf0d2831b9) | 2020-07-20 | Refactor AwsBaseHook._get_credentials (#9878) | -| [2577f9334](https://github.com/apache/airflow/commit/2577f9334a5cb71cccd97e62b0ae2d097cb99e1a) | 2020-07-16 | Fix S3FileTransformOperator to support S3 Select transformation only (#8936) | -| [52b6efe1e](https://github.com/apache/airflow/commit/52b6efe1ecaae74b9c2497f565e116305d575a76) | 2020-07-15 | Add option to delete by prefix to S3DeleteObjectsOperator (#9350) | -| [553bb7af7](https://github.com/apache/airflow/commit/553bb7af7cb7a50f7141b5b89297713cee6d19f6) | 2020-07-13 | Keep functions signatures in decorators (#9786) | -| [2f31b3060](https://github.com/apache/airflow/commit/2f31b3060ed8274d5d1b1db7349ce607640b9199) | 2020-07-08 | Get Airflow configs with sensitive data from Secret Backends (#9645) | -| [07b81029e](https://github.com/apache/airflow/commit/07b81029ebc2a296fb54181f2cec11fcc7704d9d) | 2020-07-08 | Allow AWSAthenaHook to get more than 1000/first page of results (#6075) | -| [564192c16](https://github.com/apache/airflow/commit/564192c1625a552456cebb3751978c08eebdb2a1) | 2020-07-08 | Add AWS StepFunctions integrations to the aws provider (#8749) | -| [ecce1ace7](https://github.com/apache/airflow/commit/ecce1ace7a277c948c61d7d4cbfc8632cc216559) | 2020-07-08 | [AIRFLOW-XXXX] Remove unnecessary docstring in AWSAthenaOperator | -| [a79e2d4c4](https://github.com/apache/airflow/commit/a79e2d4c4aa105f3fac5ae6a28e29af9cd572407) | 2020-07-06 | Move provider's log task handlers to the provider package (#9604) | -| [ee20086b8](https://github.com/apache/airflow/commit/ee20086b8c499fa40dcaac71652f21b466e7f80f) | 2020-07-02 | Move S3TaskHandler to the AWS provider package (#9602) | -| [40add26d4](https://github.com/apache/airflow/commit/40add26d459c2511a6d9d305ae7300f0d6104211) | 2020-06-29 | Remove almost all references to airflow.contrib (#9559) | -| [c858babdd](https://github.com/apache/airflow/commit/c858babddf8b18b417993b5bfefec1c5635510da) | 2020-06-26 | Remove kwargs from Super calls in AWS Secrets Backends (#9523) | -| [87fdbd070](https://github.com/apache/airflow/commit/87fdbd0708d942af98d35604fe5962962e25d246) | 2020-06-25 | Use literal syntax instead of function calls to create data structure (#9516) | -| [c7a454aa3](https://github.com/apache/airflow/commit/c7a454aa32bf33133d042e8438ac259b32144b21) | 2020-06-22 | Add AWS ECS system test (#8888) | -| 
[df8efd04f](https://github.com/apache/airflow/commit/df8efd04f394afc4b5affb677bc78d8b7bd5275a) | 2020-06-21 | Enable & Fix "Docstring Content Issues" PyDocStyle Check (#9460) | -| [e13a14c87](https://github.com/apache/airflow/commit/e13a14c8730f4f633d996dd7d3468fe827136a84) | 2020-06-21 | Enable & Fix Whitespace related PyDocStyle Checks (#9458) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [992a18c84](https://github.com/apache/airflow/commit/992a18c84a355d13e821c703e7364f12233c37dc) | 2020-06-19 | Move MySqlToS3Operator to transfers (#9400) | -| [a60f589aa](https://github.com/apache/airflow/commit/a60f589aa251cc3df6bec5b306ad4a7f736f539f) | 2020-06-19 | Add MySqlToS3Operator (#9054) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [40bf8f28f](https://github.com/apache/airflow/commit/40bf8f28f97f17f40d993d207ea740eba54593ee) | 2020-06-18 | Detect automatically the lack of reference to the guide in the operator descriptions (#9290) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [58a8ec0e4](https://github.com/apache/airflow/commit/58a8ec0e46f624ee0369dd156dd8fb4f81884a21) | 2020-06-16 | AWSBatchOperator <> ClientHook relation changed to composition (#9306) | -| [a80cd25e8](https://github.com/apache/airflow/commit/a80cd25e8eb7f8b5d89af26cdcd62a5bbe44d65c) | 2020-06-15 | Close/Flush byte stream in s3 hook load_string and load_bytes (#9211) | -| [ffb857403](https://github.com/apache/airflow/commit/ffb85740373f7adb70d28ec7d5a8886380170e5e) | 2020-06-14 | Decrypt secrets from SystemsManagerParameterStoreBackend (#9214) | -| [a69b031f2](https://github.com/apache/airflow/commit/a69b031f20c5a1cd032f9873394374f661811e8f) | 2020-06-10 | Add S3ToRedshift example dag and system test (#8877) | -| [17adcea83](https://github.com/apache/airflow/commit/17adcea835cb7b0cf2d8da0ac7dda5549cfa3e45) | 2020-06-02 | Fix handling of subprocess error handling in s3_file_transform and gcs (#9106) | -| [357e11e0c](https://github.com/apache/airflow/commit/357e11e0cfb4c02833018e073bc4f5e5b52fae4f) | 2020-05-29 | Add Delete/Create S3 bucket operators (#8895) | -| [1ed171bfb](https://github.com/apache/airflow/commit/1ed171bfb265ded8674058bdc425640d25f1f4fc) | 2020-05-28 | Add script_args for S3FileTransformOperator (#9019) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [f946f96da](https://github.com/apache/airflow/commit/f946f96da45d8e6101805450d8cab7ccb2774ad0) | 2020-05-23 | Old json boto compat removed from dynamodb_to_s3 operator (#8987) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [f4edd90a9](https://github.com/apache/airflow/commit/f4edd90a94b8f91bbefbbbfba367372399559596) | 2020-05-16 | Speed up TestAwsLambdaHook by not actually running a function (#8882) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [85bbab27d](https://github.com/apache/airflow/commit/85bbab27dbb4f55f6f322b894fe3d54797076c15) | 2020-05-15 | Add EMR operators howto docs (#8863) | -| [e61b9bb9b](https://github.com/apache/airflow/commit/e61b9bb9bbe6d8a0621310f3583483b9135c6770) | 2020-05-13 | Add AWS EMR System tests (#8618) | -| [ed3f5131a](https://github.com/apache/airflow/commit/ed3f5131a27e2ef0422f2495a4532630a6204f82) | 2020-05-13 | Correctly pass sleep time from AWSAthenaOperator down to the hook. (#8845) | -| [7236862a1](https://github.com/apache/airflow/commit/7236862a1f5361b5e99c03dd63dae9b966efcd24) | 2020-05-12 | [AIRFLOW-2310] Enable AWS Glue Job Integration (#6007) | -| [d590e5e76](https://github.com/apache/airflow/commit/d590e5e7679322bebb1472fa8c7ec6d183e4154a) | 2020-05-11 | Add option to propagate tags in ECSOperator (#8811) | -| [0c3db84c3](https://github.com/apache/airflow/commit/0c3db84c3ce5107f53ed5ecc48edfdfe1b97feff) | 2020-05-11 | [AIRFLOW-7068] Create EC2 Hook, Operator and Sensor (#7731) | -| [cbebed2b4](https://github.com/apache/airflow/commit/cbebed2b4d0bd1e0984c331c0270e83bf8df8540) | 2020-05-10 | Allow passing backend_kwargs to AWS SSM client (#8802) | -| [c7788a689](https://github.com/apache/airflow/commit/c7788a6894cb79c22153434dd9b977393b8236be) | 2020-05-10 | Add imap_attachment_to_s3 example dag and system test (#8669) | -| [ff5b70149](https://github.com/apache/airflow/commit/ff5b70149bf51012156378c8fc8b072c7c280d9d) | 2020-05-07 | Add google_api_to_s3_transfer example dags and system tests (#8581) | -| [4421f011e](https://github.com/apache/airflow/commit/4421f011eeec2d1022a39933e27f530fb9f9c1b1) | 2020-05-01 | Improve template capabilities of EMR job and step operators (#8572) | -| [379a884d6](https://github.com/apache/airflow/commit/379a884d645a4d73db1c81e3450adc82571989ea) | 2020-04-28 | fix: aws hook should work without conn id (#8534) | -| [74bc316c5](https://github.com/apache/airflow/commit/74bc316c56192f14677e9406d3878887a836062b) | 2020-04-27 | [AIRFLOW-4438] Add Gzip compression to S3_hook (#8571) | -| [7ea66a1a9](https://github.com/apache/airflow/commit/7ea66a1a9594704869e82513d3a06fe35b6109b2) | 2020-04-26 | Add example DAG for ECSOperator (#8452) | -| [b6434dedf](https://github.com/apache/airflow/commit/b6434dedf974085e5f8891446fa63104836c8fdf) | 2020-04-24 | 
[AIRFLOW-7111] Add generate_presigned_url method to S3Hook (#8441) | -| [becedd5af](https://github.com/apache/airflow/commit/becedd5af8df01a0210e0a3fa78e619785f39908) | 2020-04-19 | Remove unrelated EC2 references in ECSOperator (#8451) | -| [ab1290cb0](https://github.com/apache/airflow/commit/ab1290cb0c5856fa85c8596bfdf780fcdfd99c31) | 2020-04-13 | Make launch_type parameter optional (#8248) | -| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09 | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170) | -| [b46d6c060](https://github.com/apache/airflow/commit/b46d6c060280da59193a28cf67e791eb825cb51c) | 2020-04-08 | Add support for AWS Secrets Manager as Secrets Backend (#8186) | -| [68d1714f2](https://github.com/apache/airflow/commit/68d1714f296989b7aad1a04b75dc033e76afb747) | 2020-04-04 | [AIRFLOW-6822] AWS hooks should cache boto3 client (#7541) | -| [8a0240257](https://github.com/apache/airflow/commit/8a02402576f83869d5134b4bddef5d73c15a8320) | 2020-03-31 | Rename CloudBaseHook to GoogleBaseHook and move it to google.common (#8011) | -| [7239d9a82](https://github.com/apache/airflow/commit/7239d9a82dbb3b9bdf27b531daa70338af9dd796) | 2020-03-28 | Get Airflow Variables from AWS Systems Manager Parameter Store (#7945) | -| [eb4af4f94](https://github.com/apache/airflow/commit/eb4af4f944c77e67e167bbb6b0a2aaf075a95b50) | 2020-03-28 | Make BaseSecretsBackend.build_path generic (#7948) | -| [438da7241](https://github.com/apache/airflow/commit/438da7241eb537e3ef5ae711629446155bf738a3) | 2020-03-28 | [AIRFLOW-5825] SageMakerEndpointOperator is not idempotent (#7891) | -| [686d7d50b](https://github.com/apache/airflow/commit/686d7d50bd21622724d6818021355bc6885fd3de) | 2020-03-25 | Standardize SecretBackend class names (#7846) | -| [eef87b995](https://github.com/apache/airflow/commit/eef87b9953347a65421f315a07dbef37ded9df66) | 2020-03-23 | [AIRFLOW-7105] Unify Secrets Backend method interfaces (#7830) | -| [5648dfbc3](https://github.com/apache/airflow/commit/5648dfbc300337b10567ef4e07045ea29d33ec06) | 2020-03-23 | Add missing call to Super class in 'amazon', 'cloudant & 'databricks' providers (#7827) | -| [a36002412](https://github.com/apache/airflow/commit/a36002412334c445e4eab41fdbb85ef31b6fd384) | 2020-03-19 | [AIRFLOW-5705] Make AwsSsmSecretsBackend consistent with VaultBackend (#7753) | -| [2a54512d7](https://github.com/apache/airflow/commit/2a54512d785ba603ba71381dc3dfa049e9f74063) | 2020-03-17 | [AIRFLOW-5705] Fix bugs in AWS SSM Secrets Backend (#7745) | -| [a8b5fc74d](https://github.com/apache/airflow/commit/a8b5fc74d07e50c91bb64cb66ca1a450aa5ce6e1) | 2020-03-16 | [AIRFLOW-4175] S3Hook load_file should support ACL policy paramete (#7733) | -| [e31e9ddd2](https://github.com/apache/airflow/commit/e31e9ddd2332e5d92422baf668acee441646ad68) | 2020-03-14 | [AIRFLOW-5705] Add secrets backend and support for AWS SSM (#6376) | -| [3bb60afc7](https://github.com/apache/airflow/commit/3bb60afc7b8319996385d681faac342afe2b3bd2) | 2020-03-13 | [AIRFLOW-6975] Base AWSHook AssumeRoleWithSAML (#7619) | -| [c0c5f11ad](https://github.com/apache/airflow/commit/c0c5f11ad11a5a38e0553c1a36aa75eb83efae51) | 2020-03-12 | [AIRFLOW-6884] Make SageMakerTrainingOperator idempotent (#7598) | -| [b7cdda1c6](https://github.com/apache/airflow/commit/b7cdda1c64595bc7f85519337029de259e573fce) | 2020-03-10 | [AIRFLOW-4438] Add Gzip compression to S3_hook (#7680) | -| [42eef3821](https://github.com/apache/airflow/commit/42eef38217e709bc7a7f71bf0286e9e61293a43e) 
| 2020-03-07 | [AIRFLOW-6877] Add cross-provider dependencies as extras (#7506) | -| [9a94ab246](https://github.com/apache/airflow/commit/9a94ab246db8c09aa83bb6a6d245b1ca9563bcd9) | 2020-03-01 | [AIRFLOW-6962] Fix compeleted to completed (#7600) | -| [1b38f6d9b](https://github.com/apache/airflow/commit/1b38f6d9b6710bd5e25fc16883599f1842ab7cb9) | 2020-02-29 | [AIRFLOW-5908] Add download_file to S3 Hook (#6577) | -| [3ea3e1a2b](https://github.com/apache/airflow/commit/3ea3e1a2b580b7ed10efe668de0cc37b03673500) | 2020-02-26 | [AIRFLOW-6824] EMRAddStepsOperator problem with multi-step XCom (#7443) | -| [6eaa7e3b1](https://github.com/apache/airflow/commit/6eaa7e3b1845644d5ec65a00a997f4029bec9628) | 2020-02-25 | [AIRFLOW-5924] Automatically unify bucket name and key in S3Hook (#6574) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [7d0e7122d](https://github.com/apache/airflow/commit/7d0e7122dd14576d834c6f66fe919a72b100b7f8) | 2020-02-24 | [AIRFLOW-6830] Add Subject/MessageAttributes to SNS hook and operator (#7451) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [47a922b86](https://github.com/apache/airflow/commit/47a922b86426968bfa07cc7892d2eeeca761d884) | 2020-02-21 | [AIRFLOW-6854] Fix missing typing_extensions on python 3.8 (#7474) | -| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18 | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) | -| [58c3542ed](https://github.com/apache/airflow/commit/58c3542ed25061320ce61dbe0adf451a44c738dd) | 2020-02-12 | [AIRFLOW-5231] Fix S3Hook.delete_objects method (#7375) | -| [b7aa778b3](https://github.com/apache/airflow/commit/b7aa778b38df2f116a1c20031e72fea8b97315bf) | 2020-02-10 | [AIRFLOW-6767] Correct name for default Athena workgroup (#7394) | -| [9282185e6](https://github.com/apache/airflow/commit/9282185e6624e64bb7f17447f81c1b2d1bb4d56d) | 2020-02-09 | [AIRFLOW-6761] Fix WorkGroup param in AWSAthenaHook (#7386) | -| [94fccca97](https://github.com/apache/airflow/commit/94fccca97030ee59d89f302a98137b17e7b01a33) | 2020-02-04 | [AIRFLOW-XXXX] Add pre-commit check for utf-8 file encoding (#7347) | -| [f3ad5cf61](https://github.com/apache/airflow/commit/f3ad5cf6185b9d406d0fb0a4ecc0b5536f79217a) | 2020-02-03 | [AIRFLOW-4681] Make sensors module pylint compatible (#7309) | -| [88e40c714](https://github.com/apache/airflow/commit/88e40c714d2853aa8966796945b2907c263fed08) | 2020-02-03 | [AIRFLOW-6716] Fix AWS Datasync Example DAG (#7339) | -| [a311d3d82](https://github.com/apache/airflow/commit/a311d3d82e0c2e32bcb56e29f33c95ed0a2a2ddc) | 2020-02-03 | [AIRFLOW-6718] Fix more occurrences of utils.dates.days_ago (#7341) | -| [cb766b05b](https://github.com/apache/airflow/commit/cb766b05b17b80fd54a5ce6ac3ee35a631115000) | 2020-02-03 | [AIRFLOW-XXXX] Fix Static Checks on CI (#7342) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [7527eddc5](https://github.com/apache/airflow/commit/7527eddc5e9729aa7e732209a07d57985f6c73e4) | 2020-02-02 | [AIRFLOW-4364] Make all code 
in airflow/providers/amazon pylint compatible (#7336) | -| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02 | [AIRFLOW-6708] Set unique logger names (#7330) | -| [63aa3db88](https://github.com/apache/airflow/commit/63aa3db88f8824efe79622301efd9f8ba75b991c) | 2020-02-02 | [AIRFLOW-6258] Add CloudFormation operators to AWS providers (#6824) | -| [af4157fde](https://github.com/apache/airflow/commit/af4157fdeffc0c18492b518708c0db44815067ab) | 2020-02-02 | [AIRFLOW-6672] AWS DataSync - better logging of error message (#7288) | -| [373c6aa4a](https://github.com/apache/airflow/commit/373c6aa4a208284b5ff72987e4bd8f4e2ada1a1b) | 2020-01-30 | [AIRFLOW-6682] Move GCP classes to providers package (#7295) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [1988a97e8](https://github.com/apache/airflow/commit/1988a97e8f687e28a5a39b29677fb514e097753c) | 2020-01-28 | [AIRFLOW-6659] Move AWS Transfer operators to providers package (#7274) | -| [ab10443e9](https://github.com/apache/airflow/commit/ab10443e965269efe9c1efaf5fa33bcdbe609f13) | 2020-01-28 | [AIRFLOW-6424] Added a operator to modify EMR cluster (#7213) | -| [40246132a](https://github.com/apache/airflow/commit/40246132a7ef3b07fe3173c6e7646ed6b53aad6e) | 2020-01-28 | [AIRFLOW-6654] AWS DataSync - bugfix when creating locations (#7270) | -| [82c0e5aff](https://github.com/apache/airflow/commit/82c0e5aff6004f636b98e207c3caec40b403fbbe) | 2020-01-28 | [AIRFLOW-6655] Move AWS classes to providers (#7271) | -| [599e4791c](https://github.com/apache/airflow/commit/599e4791c91cff411b1bf1c45555db5094c2b420) | 2020-01-18 | [AIRFLOW-6541] Use EmrJobFlowSensor for other states (#7146) | -| [c319e81ca](https://github.com/apache/airflow/commit/c319e81cae1de31ad1373903252d8608ffce1fba) | 2020-01-17 | [AIRFLOW-6572] Move AWS classes to providers.amazon.aws package (#7178) | -| [941a07057](https://github.com/apache/airflow/commit/941a070578bc7d9410715b89658548167352cc4d) | 2020-01-15 | [AIRFLOW-6570] Add dag tag for all example dag (#7176) | -| [78d8fe694](https://github.com/apache/airflow/commit/78d8fe6944b689b9b0af99255286e34e06eedec3) | 2020-01-08 | [AIRFLOW-6245] Add custom waiters for AWS batch jobs (#6811) | -| [e0b022725](https://github.com/apache/airflow/commit/e0b022725749181bd4e30933e4a0ffefb993eede) | 2019-12-28 | [AIRFLOW-6319] Add support for AWS Athena workgroups (#6871) | -| [57da45685](https://github.com/apache/airflow/commit/57da45685457520d51a0967e2aeb5e5ff162dfa7) | 2019-12-24 | [AIRFLOW-6333] Bump Pylint to 2.4.4 & fix/disable new checks (#6888) | -| [cf647c27e](https://github.com/apache/airflow/commit/cf647c27e0f35bbd1183bfcf87a106cbdb69d3fa) | 2019-12-18 | [AIRFLOW-6038] AWS DataSync reworked (#6773) | -| [7502cad28](https://github.com/apache/airflow/commit/7502cad2844139d57e4276d971c0706a361d9dbe) | 2019-12-17 | [AIRFLOW-6206] Move and rename AWS batch operator [AIP-21] (#6764) | -| [c4c635df6](https://github.com/apache/airflow/commit/c4c635df6906f56e01724573923e19763bb0da62) | 2019-12-17 | [AIRFLOW-6083] Adding ability to pass custom configuration to lambda client. 
(#6678) | -| [4fb498f87](https://github.com/apache/airflow/commit/4fb498f87ef89acc30f2576ebc5090ab0653159e) | 2019-12-09 | [AIRFLOW-6072] aws_hook: Outbound http proxy setting and other enhancements (#6686) | -| [a1e2f8635](https://github.com/apache/airflow/commit/a1e2f863526973b17892ec31caf09eded95c1cd2) | 2019-11-20 | [AIRFLOW-6021] Replace list literal with list constructor (#6617) | -| [baae14084](https://github.com/apache/airflow/commit/baae140847cdf9d84e905fb6d1f119d6950eecf9) | 2019-11-19 | [AIRFLOW-5781] AIP-21 Migrate AWS Kinesis to /providers/amazon/aws (#6588) | -| [504cfbac1](https://github.com/apache/airflow/commit/504cfbac1a4ec2e2fd169523ed357808f63881bb) | 2019-11-18 | [AIRFLOW-5783] AIP-21 Move aws redshift into providers structure (#6539) | -| [992f0e3ac](https://github.com/apache/airflow/commit/992f0e3acf11163294508858515a5f79116e3ad8) | 2019-11-12 | AIRFLOW-5824: AWS DataSync Hook and Operators added (#6512) | -| [c015eb2f6](https://github.com/apache/airflow/commit/c015eb2f6496b9721afda9e85d5d4af3bbe0696b) | 2019-11-10 | [AIRFLOW-5786] Migrate AWS SNS to /providers/amazon/aws (#6502) | -| [3d76fb4bf](https://github.com/apache/airflow/commit/3d76fb4bf25e5b7d3d30e0d64867b5999b77f0b0) | 2019-11-09 | [AIRFLOW-5782] Migrate AWS Lambda to /providers/amazon/aws [AIP-21] (#6518) | diff --git a/airflow/providers/amazon/aws/ADDITIONAL_INFO.md b/airflow/providers/amazon/aws/ADDITIONAL_INFO.md deleted file mode 100644 index f0b0464da7b23..0000000000000 --- a/airflow/providers/amazon/aws/ADDITIONAL_INFO.md +++ /dev/null @@ -1,26 +0,0 @@ - - -## Change in import paths - -If you are upgrading from 2020.10.5 note the following changes in import paths - -| Old path | New path | -| --------------------------------------------------------------- | ----------------------------------------------------------- | -| airflow.providers.amazon.aws.hooks.aws_dynamodb.AwsDynamoDBHook | airflow.providers.amazon.aws.hooks.dynamodb.AwsDynamoDBHook | diff --git a/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py b/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py index c2330396aa989..49beca0e17b16 100644 --- a/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py +++ b/airflow/providers/amazon/aws/log/cloudwatch_task_handler.py @@ -63,6 +63,7 @@ def hook(self): 'the Cloudwatch logs connection exists.', remote_conn_id, ) + return None def _render_filename(self, ti, try_number): # Replace unsupported log group name characters diff --git a/airflow/providers/amazon/aws/log/s3_task_handler.py b/airflow/providers/amazon/aws/log/s3_task_handler.py index 58d5eca079890..5a5e01bd05496 100644 --- a/airflow/providers/amazon/aws/log/s3_task_handler.py +++ b/airflow/providers/amazon/aws/log/s3_task_handler.py @@ -54,6 +54,7 @@ def hook(self): 'the S3 connection exists.', remote_conn_id, ) + return None def set_context(self, ti): super().set_context(ti) diff --git a/airflow/providers/amazon/provider.yaml b/airflow/providers/amazon/provider.yaml index 9a8a81a151c5e..9db3dfd83e73e 100644 --- a/airflow/providers/amazon/provider.yaml +++ b/airflow/providers/amazon/provider.yaml @@ -27,12 +27,14 @@ versions: integrations: - integration-name: Amazon Athena external-doc-url: https://aws.amazon.com/athena/ + logo: /integration-logos/aws/Amazon-Athena_light-bg@4x.png tags: [aws] - integration-name: Amazon CloudFormation external-doc-url: https://aws.amazon.com/cloudformation/ tags: [aws] - integration-name: Amazon CloudWatch Logs external-doc-url: https://aws.amazon.com/cloudwatch/ + logo: 
/integration-logos/aws/Amazon-CloudWatch_light-bg@4x.png tags: [aws] - integration-name: Amazon DataSync external-doc-url: https://aws.amazon.com/datasync/ @@ -41,9 +43,11 @@ integrations: tags: [aws] - integration-name: Amazon DynamoDB external-doc-url: https://aws.amazon.com/dynamodb/ + logo: /integration-logos/aws/Amazon-DynamoDB_light-bg@4x.png tags: [aws] - integration-name: Amazon EC2 external-doc-url: https://aws.amazon.com/ec2/ + logo: /integration-logos/aws/Amazon-EC2_light-bg@4x.png tags: [aws] - integration-name: Amazon ECS external-doc-url: https://aws.amazon.com/ecs/ @@ -55,6 +59,7 @@ integrations: external-doc-url: https://aws.amazon.com/emr/ how-to-guide: - /docs/apache-airflow-providers-amazon/operators/emr.rst + logo: /integration-logos/aws/Amazon-EMR_light-bg@4x.png tags: [aws] - integration-name: Amazon Glacier external-doc-url: https://aws.amazon.com/glacier/ @@ -63,12 +68,15 @@ integrations: tags: [aws] - integration-name: Amazon Kinesis Data Firehose external-doc-url: https://aws.amazon.com/kinesis/data-firehose/ + logo: /integration-logos/aws/Amazon-Kinesis-Data-Firehose_light-bg@4x.png tags: [aws] - integration-name: Amazon Redshift external-doc-url: https://aws.amazon.com/redshift/ + logo: /integration-logos/aws/Amazon-Redshift_light-bg@4x.png tags: [aws] - integration-name: Amazon SageMaker external-doc-url: https://aws.amazon.com/sagemaker/ + logo: /integration-logos/aws/Amazon-SageMaker_light-bg@4x.png tags: [aws] - integration-name: Amazon SecretsManager external-doc-url: https://aws.amazon.com/secrets-manager/ @@ -80,27 +88,33 @@ integrations: tags: [aws] - integration-name: Amazon Simple Notification Service (SNS) external-doc-url: https://aws.amazon.com/sns/ + logo: /integration-logos/aws/Amazon-Simple-Notification-Service-SNS_light-bg@4x.png tags: [aws] - integration-name: Amazon Simple Queue Service (SQS) external-doc-url: https://aws.amazon.com/sqs/ + logo: /integration-logos/aws/Amazon-Simple-Queue-Service-SQS_light-bg@4x.png tags: [aws] - integration-name: Amazon Simple Storage Service (S3) external-doc-url: https://aws.amazon.com/s3/ + logo: /integration-logos/aws/Amazon-Simple-Storage-Service-S3_light-bg@4x.png tags: [aws] - integration-name: Amazon Web Services external-doc-url: https://aws.amazon.com/ tags: [aws] - integration-name: AWS Batch external-doc-url: https://aws.amazon.com/batch/ + logo: /integration-logos/aws/AWS-Batch_light-bg@4x.png tags: [aws] - integration-name: AWS DataSync external-doc-url: https://aws.amazon.com/datasync/ tags: [aws] - integration-name: AWS Glue external-doc-url: https://aws.amazon.com/glue/ + logo: /integration-logos/aws/AWS-Glue_light-bg@4x.png tags: [aws] - integration-name: AWS Lambda external-doc-url: https://aws.amazon.com/lambda/ + logo: /integration-logos/aws/AWS-Lambda_light-bg@4x.png tags: [aws] - integration-name: AWS Step Functions external-doc-url: https://aws.amazon.com/step-functions/ diff --git a/airflow/providers/apache/beam/CHANGELOG.rst b/airflow/providers/apache/beam/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/apache/beam/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. 
You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. diff --git a/airflow/providers/telegram/README.md b/airflow/providers/apache/beam/README.md similarity index 53% rename from airflow/providers/telegram/README.md rename to airflow/providers/apache/beam/README.md index 2575d502ccfe1..34f2863b55985 100644 --- a/airflow/providers/telegram/README.md +++ b/airflow/providers/apache/beam/README.md @@ -18,29 +18,26 @@ --> -# Package apache-airflow-providers-telegram +# Package apache-airflow-providers-apache-beam -Release: 1.0.0 +Release: 0.0.1 **Table of contents** - [Provider package](#provider-package) - [Installation](#installation) - [PIP requirements](#pip-requirements) +- [Cross provider package dependencies](#cross-provider-package-dependencies) - [Provider class summary](#provider-classes-summary) - [Operators](#operators) - - [New operators](#new-operators) + - [Transfer operators](#transfer-operators) - [Hooks](#hooks) - - [New hooks](#new-hooks) - [Releases](#releases) - - [Release 1.0.0](#release-100) ## Provider package -This is a provider package for `telegram` provider. All classes for this provider package -are in `airflow.providers.telegram` python package. - - +This is a provider package for `apache.beam` provider. All classes for this provider package +are in `airflow.providers.apache.beam` python package. ## Installation @@ -53,49 +50,48 @@ of extras. In order to install Airflow you need to either downgrade pip to versi `--use-deprecated legacy-resolver` to your pip install command. You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-telegram` +`pip install apache-airflow-providers-apache-beam` + +## Cross provider package dependencies + +Those are dependencies that might be needed in order to use all the features of the package. +You need to install the specified provider packages in order to use them. -## PIP requirements +You can install such cross-provider dependencies when installing from PyPI. For example: + +```bash +pip install apache-airflow-providers-apache-beam[google] +``` + +| Dependent package | Extra | +|:--------------------------------------------------------------------------------------------|:------------| +| [apache-airflow-providers-google](https://pypi.org/project/apache-airflow-providers-google) | google | -| PIP package | Version required | -|:--------------------|:-------------------| -| python-telegram-bot | ==13.0 | # Provider classes summary -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `telegram` provider -are in the `airflow.providers.telegram` package. You can read more about the naming conventions used +In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `apache.beam` provider +are in the `airflow.providers.apache.beam` package. 
You can read more about the naming conventions used in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) ## Operators - ### New operators -| New Airflow 2.0 operators: `airflow.providers.telegram` package | -|:--------------------------------------------------------------------------------------------------------------------------------------| -| [operators.telegram.TelegramOperator](https://github.com/apache/airflow/blob/master/airflow/providers/telegram/operators/telegram.py) | - +| New Airflow 2.0 operators: `airflow.providers.apache.beam` package | +|:-----------------------------------------------------------------------------------------------------------------------------------------------| +| [operators.beam.BeamRunJavaPipelineOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/beam/operators/beam.py) | +| [operators.beam.BeamRunPythonPipelineOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/beam/operators/beam.py) | ## Hooks - ### New hooks -| New Airflow 2.0 hooks: `airflow.providers.telegram` package | -|:--------------------------------------------------------------------------------------------------------------------------| -| [hooks.telegram.TelegramHook](https://github.com/apache/airflow/blob/master/airflow/providers/telegram/hooks/telegram.py) | - - +| New Airflow 2.0 hooks: `airflow.providers.apache.beam` package | +|:-----------------------------------------------------------------------------------------------------------------| +| [hooks.beam.BeamHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/beam/hooks/beam.py) | ## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [cd66450b4](https://github.com/apache/airflow/commit/cd66450b4ee2a219ddc847970255e420ed679700) | 2020-12-05 | Add Telegram hook and operator (#11850) | diff --git a/airflow/providers/apache/beam/__init__.py b/airflow/providers/apache/beam/__init__.py new file mode 100644 index 0000000000000..217e5db960782 --- /dev/null +++ b/airflow/providers/apache/beam/__init__.py @@ -0,0 +1,17 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
diff --git a/airflow/providers/apache/beam/example_dags/__init__.py b/airflow/providers/apache/beam/example_dags/__init__.py
new file mode 100644
index 0000000000000..217e5db960782
--- /dev/null
+++ b/airflow/providers/apache/beam/example_dags/__init__.py
@@ -0,0 +1,17 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
diff --git a/airflow/providers/apache/beam/example_dags/example_beam.py b/airflow/providers/apache/beam/example_dags/example_beam.py
new file mode 100644
index 0000000000000..d20c4cef0ce07
--- /dev/null
+++ b/airflow/providers/apache/beam/example_dags/example_beam.py
@@ -0,0 +1,315 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+ +""" +Example Airflow DAG for Apache Beam operators +""" +import os +from urllib.parse import urlparse + +from airflow import models +from airflow.providers.apache.beam.operators.beam import ( + BeamRunJavaPipelineOperator, + BeamRunPythonPipelineOperator, +) +from airflow.providers.google.cloud.hooks.dataflow import DataflowJobStatus +from airflow.providers.google.cloud.operators.dataflow import DataflowConfiguration +from airflow.providers.google.cloud.sensors.dataflow import DataflowJobStatusSensor +from airflow.providers.google.cloud.transfers.gcs_to_local import GCSToLocalFilesystemOperator +from airflow.utils.dates import days_ago + +GCP_PROJECT_ID = os.environ.get('GCP_PROJECT_ID', 'example-project') +GCS_INPUT = os.environ.get('APACHE_BEAM_PYTHON', 'gs://apache-beam-samples/shakespeare/kinglear.txt') +GCS_TMP = os.environ.get('APACHE_BEAM_GCS_TMP', 'gs://test-dataflow-example/temp/') +GCS_STAGING = os.environ.get('APACHE_BEAM_GCS_STAGING', 'gs://test-dataflow-example/staging/') +GCS_OUTPUT = os.environ.get('APACHE_BEAM_GCS_OUTPUT', 'gs://test-dataflow-example/output') +GCS_PYTHON = os.environ.get('APACHE_BEAM_PYTHON', 'gs://test-dataflow-example/wordcount_debugging.py') +GCS_PYTHON_DATAFLOW_ASYNC = os.environ.get( + 'APACHE_BEAM_PYTHON_DATAFLOW_ASYNC', 'gs://test-dataflow-example/wordcount_debugging.py' +) + +GCS_JAR_DIRECT_RUNNER = os.environ.get( + 'APACHE_BEAM_DIRECT_RUNNER_JAR', + 'gs://test-dataflow-example/tests/dataflow-templates-bundled-java=11-beam-v2.25.0-DirectRunner.jar', +) +GCS_JAR_DATAFLOW_RUNNER = os.environ.get( + 'APACHE_BEAM_DATAFLOW_RUNNER_JAR', 'gs://test-dataflow-example/word-count-beam-bundled-0.1.jar' +) +GCS_JAR_SPARK_RUNNER = os.environ.get( + 'APACHE_BEAM_SPARK_RUNNER_JAR', + 'gs://test-dataflow-example/tests/dataflow-templates-bundled-java=11-beam-v2.25.0-SparkRunner.jar', +) +GCS_JAR_FLINK_RUNNER = os.environ.get( + 'APACHE_BEAM_FLINK_RUNNER_JAR', + 'gs://test-dataflow-example/tests/dataflow-templates-bundled-java=11-beam-v2.25.0-FlinkRunner.jar', +) + +GCS_JAR_DIRECT_RUNNER_PARTS = urlparse(GCS_JAR_DIRECT_RUNNER) +GCS_JAR_DIRECT_RUNNER_BUCKET_NAME = GCS_JAR_DIRECT_RUNNER_PARTS.netloc +GCS_JAR_DIRECT_RUNNER_OBJECT_NAME = GCS_JAR_DIRECT_RUNNER_PARTS.path[1:] +GCS_JAR_DATAFLOW_RUNNER_PARTS = urlparse(GCS_JAR_DATAFLOW_RUNNER) +GCS_JAR_DATAFLOW_RUNNER_BUCKET_NAME = GCS_JAR_DATAFLOW_RUNNER_PARTS.netloc +GCS_JAR_DATAFLOW_RUNNER_OBJECT_NAME = GCS_JAR_DATAFLOW_RUNNER_PARTS.path[1:] +GCS_JAR_SPARK_RUNNER_PARTS = urlparse(GCS_JAR_SPARK_RUNNER) +GCS_JAR_SPARK_RUNNER_BUCKET_NAME = GCS_JAR_SPARK_RUNNER_PARTS.netloc +GCS_JAR_SPARK_RUNNER_OBJECT_NAME = GCS_JAR_SPARK_RUNNER_PARTS.path[1:] +GCS_JAR_FLINK_RUNNER_PARTS = urlparse(GCS_JAR_FLINK_RUNNER) +GCS_JAR_FLINK_RUNNER_BUCKET_NAME = GCS_JAR_FLINK_RUNNER_PARTS.netloc +GCS_JAR_FLINK_RUNNER_OBJECT_NAME = GCS_JAR_FLINK_RUNNER_PARTS.path[1:] + + +default_args = { + 'default_pipeline_options': { + 'output': '/tmp/example_beam', + }, + "trigger_rule": "all_done", +} + + +with models.DAG( + "example_beam_native_java_direct_runner", + schedule_interval=None, # Override to match your needs + start_date=days_ago(1), + tags=['example'], +) as dag_native_java_direct_runner: + + # [START howto_operator_start_java_direct_runner_pipeline] + jar_to_local_direct_runner = GCSToLocalFilesystemOperator( + task_id="jar_to_local_direct_runner", + bucket=GCS_JAR_DIRECT_RUNNER_BUCKET_NAME, + object_name=GCS_JAR_DIRECT_RUNNER_OBJECT_NAME, + filename="/tmp/beam_wordcount_direct_runner_{{ ds_nodash }}.jar", + ) + + 
start_java_pipeline_direct_runner = BeamRunJavaPipelineOperator( + task_id="start_java_pipeline_direct_runner", + jar="/tmp/beam_wordcount_direct_runner_{{ ds_nodash }}.jar", + pipeline_options={ + 'output': '/tmp/start_java_pipeline_direct_runner', + 'inputFile': GCS_INPUT, + }, + job_class='org.apache.beam.examples.WordCount', + ) + + jar_to_local_direct_runner >> start_java_pipeline_direct_runner + # [END howto_operator_start_java_direct_runner_pipeline] + +with models.DAG( + "example_beam_native_java_dataflow_runner", + schedule_interval=None, # Override to match your needs + start_date=days_ago(1), + tags=['example'], +) as dag_native_java_dataflow_runner: + # [START howto_operator_start_java_dataflow_runner_pipeline] + jar_to_local_dataflow_runner = GCSToLocalFilesystemOperator( + task_id="jar_to_local_dataflow_runner", + bucket=GCS_JAR_DATAFLOW_RUNNER_BUCKET_NAME, + object_name=GCS_JAR_DATAFLOW_RUNNER_OBJECT_NAME, + filename="/tmp/beam_wordcount_dataflow_runner_{{ ds_nodash }}.jar", + ) + + start_java_pipeline_dataflow = BeamRunJavaPipelineOperator( + task_id="start_java_pipeline_dataflow", + runner="DataflowRunner", + jar="/tmp/beam_wordcount_dataflow_runner_{{ ds_nodash }}.jar", + pipeline_options={ + 'tempLocation': GCS_TMP, + 'stagingLocation': GCS_STAGING, + 'output': GCS_OUTPUT, + }, + job_class='org.apache.beam.examples.WordCount', + dataflow_config={"job_name": "{{task.task_id}}", "location": "us-central1"}, + ) + + jar_to_local_dataflow_runner >> start_java_pipeline_dataflow + # [END howto_operator_start_java_dataflow_runner_pipeline] + +with models.DAG( + "example_beam_native_java_spark_runner", + schedule_interval=None, # Override to match your needs + start_date=days_ago(1), + tags=['example'], +) as dag_native_java_spark_runner: + + jar_to_local_spark_runner = GCSToLocalFilesystemOperator( + task_id="jar_to_local_spark_runner", + bucket=GCS_JAR_SPARK_RUNNER_BUCKET_NAME, + object_name=GCS_JAR_SPARK_RUNNER_OBJECT_NAME, + filename="/tmp/beam_wordcount_spark_runner_{{ ds_nodash }}.jar", + ) + + start_java_pipeline_spark_runner = BeamRunJavaPipelineOperator( + task_id="start_java_pipeline_spark_runner", + runner="SparkRunner", + jar="/tmp/beam_wordcount_spark_runner_{{ ds_nodash }}.jar", + pipeline_options={ + 'output': '/tmp/start_java_pipeline_spark_runner', + 'inputFile': GCS_INPUT, + }, + job_class='org.apache.beam.examples.WordCount', + ) + + jar_to_local_spark_runner >> start_java_pipeline_spark_runner + +with models.DAG( + "example_beam_native_java_flink_runner", + schedule_interval=None, # Override to match your needs + start_date=days_ago(1), + tags=['example'], +) as dag_native_java_flink_runner: + + jar_to_local_flink_runner = GCSToLocalFilesystemOperator( + task_id="jar_to_local_flink_runner", + bucket=GCS_JAR_FLINK_RUNNER_BUCKET_NAME, + object_name=GCS_JAR_FLINK_RUNNER_OBJECT_NAME, + filename="/tmp/beam_wordcount_flink_runner_{{ ds_nodash }}.jar", + ) + + start_java_pipeline_flink_runner = BeamRunJavaPipelineOperator( + task_id="start_java_pipeline_flink_runner", + runner="FlinkRunner", + jar="/tmp/beam_wordcount_flink_runner_{{ ds_nodash }}.jar", + pipeline_options={ + 'output': '/tmp/start_java_pipeline_flink_runner', + 'inputFile': GCS_INPUT, + }, + job_class='org.apache.beam.examples.WordCount', + ) + + jar_to_local_flink_runner >> start_java_pipeline_flink_runner + + +with models.DAG( + "example_beam_native_python", + default_args=default_args, + start_date=days_ago(1), + schedule_interval=None, # Override to match your needs + tags=['example'], +) as 
dag_native_python: + + # [START howto_operator_start_python_direct_runner_pipeline_local_file] + start_python_pipeline_local_direct_runner = BeamRunPythonPipelineOperator( + task_id="start_python_pipeline_local_direct_runner", + py_file='apache_beam.examples.wordcount', + py_options=['-m'], + py_requirements=['apache-beam[gcp]==2.26.0'], + py_interpreter='python3', + py_system_site_packages=False, + ) + # [END howto_operator_start_python_direct_runner_pipeline_local_file] + + # [START howto_operator_start_python_direct_runner_pipeline_gcs_file] + start_python_pipeline_direct_runner = BeamRunPythonPipelineOperator( + task_id="start_python_pipeline_direct_runner", + py_file=GCS_PYTHON, + py_options=[], + pipeline_options={"output": GCS_OUTPUT}, + py_requirements=['apache-beam[gcp]==2.26.0'], + py_interpreter='python3', + py_system_site_packages=False, + ) + # [END howto_operator_start_python_direct_runner_pipeline_gcs_file] + + # [START howto_operator_start_python_dataflow_runner_pipeline_gcs_file] + start_python_pipeline_dataflow_runner = BeamRunPythonPipelineOperator( + task_id="start_python_pipeline_dataflow_runner", + runner="DataflowRunner", + py_file=GCS_PYTHON, + pipeline_options={ + 'tempLocation': GCS_TMP, + 'stagingLocation': GCS_STAGING, + 'output': GCS_OUTPUT, + }, + py_options=[], + py_requirements=['apache-beam[gcp]==2.26.0'], + py_interpreter='python3', + py_system_site_packages=False, + dataflow_config=DataflowConfiguration( + job_name='{{task.task_id}}', project_id=GCP_PROJECT_ID, location="us-central1" + ), + ) + # [END howto_operator_start_python_dataflow_runner_pipeline_gcs_file] + + start_python_pipeline_local_spark_runner = BeamRunPythonPipelineOperator( + task_id="start_python_pipeline_local_spark_runner", + py_file='apache_beam.examples.wordcount', + runner="SparkRunner", + py_options=['-m'], + py_requirements=['apache-beam[gcp]==2.26.0'], + py_interpreter='python3', + py_system_site_packages=False, + ) + + start_python_pipeline_local_flink_runner = BeamRunPythonPipelineOperator( + task_id="start_python_pipeline_local_flink_runner", + py_file='apache_beam.examples.wordcount', + runner="FlinkRunner", + py_options=['-m'], + pipeline_options={ + 'output': '/tmp/start_python_pipeline_local_flink_runner', + }, + py_requirements=['apache-beam[gcp]==2.26.0'], + py_interpreter='python3', + py_system_site_packages=False, + ) + + [ + start_python_pipeline_local_direct_runner, + start_python_pipeline_direct_runner, + ] >> start_python_pipeline_local_flink_runner >> start_python_pipeline_local_spark_runner + + +with models.DAG( + "example_beam_native_python_dataflow_async", + default_args=default_args, + start_date=days_ago(1), + schedule_interval=None, # Override to match your needs + tags=['example'], +) as dag_native_python_dataflow_async: + # [START howto_operator_start_python_dataflow_runner_pipeline_async_gcs_file] + start_python_job_dataflow_runner_async = BeamRunPythonPipelineOperator( + task_id="start_python_job_dataflow_runner_async", + runner="DataflowRunner", + py_file=GCS_PYTHON_DATAFLOW_ASYNC, + pipeline_options={ + 'tempLocation': GCS_TMP, + 'stagingLocation': GCS_STAGING, + 'output': GCS_OUTPUT, + }, + py_options=[], + py_requirements=['apache-beam[gcp]==2.26.0'], + py_interpreter='python3', + py_system_site_packages=False, + dataflow_config=DataflowConfiguration( + job_name='{{task.task_id}}', + project_id=GCP_PROJECT_ID, + location="us-central1", + wait_until_finished=False, + ), + ) + + wait_for_python_job_dataflow_runner_async_done = DataflowJobStatusSensor( + 
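+        # The job id comes from the XCom pushed by the task above:
+        # BeamRunPythonPipelineOperator.execute() returns {"dataflow_job_id": ...}
+        # once the pipeline has been submitted.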
task_id="wait-for-python-job-async-done", + job_id="{{task_instance.xcom_pull('start_python_job_dataflow_runner_async')['dataflow_job_id']}}", + expected_statuses={DataflowJobStatus.JOB_STATE_DONE}, + project_id=GCP_PROJECT_ID, + location='us-central1', + ) + + start_python_job_dataflow_runner_async >> wait_for_python_job_dataflow_runner_async_done + # [END howto_operator_start_python_dataflow_runner_pipeline_async_gcs_file] diff --git a/airflow/providers/apache/beam/hooks/__init__.py b/airflow/providers/apache/beam/hooks/__init__.py new file mode 100644 index 0000000000000..217e5db960782 --- /dev/null +++ b/airflow/providers/apache/beam/hooks/__init__.py @@ -0,0 +1,17 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/airflow/providers/apache/beam/hooks/beam.py b/airflow/providers/apache/beam/hooks/beam.py new file mode 100644 index 0000000000000..8e188b0b33d4e --- /dev/null +++ b/airflow/providers/apache/beam/hooks/beam.py @@ -0,0 +1,289 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +"""This module contains a Apache Beam Hook.""" +import json +import select +import shlex +import subprocess +import textwrap +from tempfile import TemporaryDirectory +from typing import Callable, List, Optional + +from airflow.exceptions import AirflowException +from airflow.hooks.base_hook import BaseHook +from airflow.utils.log.logging_mixin import LoggingMixin +from airflow.utils.python_virtualenv import prepare_virtualenv + + +class BeamRunnerType: + """ + Helper class for listing runner types. 
+    For more information about runners see:
+    https://beam.apache.org/documentation/
+    """
+
+    DataflowRunner = "DataflowRunner"
+    DirectRunner = "DirectRunner"
+    SparkRunner = "SparkRunner"
+    FlinkRunner = "FlinkRunner"
+    SamzaRunner = "SamzaRunner"
+    NemoRunner = "NemoRunner"
+    JetRunner = "JetRunner"
+    Twister2Runner = "Twister2Runner"
+
+
+def beam_options_to_args(options: dict) -> List[str]:
+    """
+    Returns formatted pipeline options from a dictionary of arguments.
+
+    The logic of this method should be compatible with Apache Beam:
+    https://github.com/apache/beam/blob/b56740f0e8cd80c2873412847d0b336837429fb9/sdks/python/
+    apache_beam/options/pipeline_options.py#L230-L251
+
+    :param options: Dictionary with options
+    :type options: dict
+    :return: List of arguments
+    :rtype: List[str]
+    """
+    if not options:
+        return []
+
+    args: List[str] = []
+    for attr, value in options.items():
+        if value is None or (isinstance(value, bool) and value):
+            args.append(f"--{attr}")
+        elif isinstance(value, bool):
+            # A False boolean option is skipped entirely, matching the documented
+            # behavior of ``pipeline_options`` in the Beam operators.
+            continue
+        elif isinstance(value, list):
+            args.extend([f"--{attr}={v}" for v in value])
+        else:
+            args.append(f"--{attr}={value}")
+    return args
+
+
+class BeamCommandRunner(LoggingMixin):
+    """
+    Class responsible for running a pipeline command in a subprocess.
+
+    :param cmd: Parts of the command to be run in subprocess
+    :type cmd: List[str]
+    :param process_line_callback: Optional callback which can be used to process
+        stdout and stderr to detect job id
+    :type process_line_callback: Optional[Callable[[str], None]]
+    """
+
+    def __init__(
+        self,
+        cmd: List[str],
+        process_line_callback: Optional[Callable[[str], None]] = None,
+    ) -> None:
+        super().__init__()
+        self.log.info("Running command: %s", " ".join(shlex.quote(c) for c in cmd))
+        self.process_line_callback = process_line_callback
+        self.job_id: Optional[str] = None
+        self._proc = subprocess.Popen(
+            cmd,
+            shell=False,
+            stdout=subprocess.PIPE,
+            stderr=subprocess.PIPE,
+            close_fds=True,
+        )
+
+    def _process_fd(self, fd):
+        """
+        Prints output to logs.
+
+        :param fd: File descriptor.
+        """
+        if fd not in (self._proc.stdout, self._proc.stderr):
+            raise Exception("No data in stderr or in stdout.")
+
+        fd_to_log = {self._proc.stderr: self.log.warning, self._proc.stdout: self.log.info}
+        func_log = fd_to_log[fd]
+
+        while True:
+            line = fd.readline().decode()
+            if not line:
+                return
+            if self.process_line_callback:
+                self.process_line_callback(line)
+            func_log(line.rstrip("\n"))
+
+    def wait_for_done(self) -> None:
+        """Waits for Apache Beam pipeline to complete."""
+        self.log.info("Start waiting for Apache Beam process to complete.")
+        reads = [self._proc.stderr, self._proc.stdout]
+        while True:
+            # Wait for at least one available fd.
+            readable_fds, _, _ = select.select(reads, [], [], 5)
+            if not readable_fds:
+                # select() timed out; it returns empty lists on timeout, never None.
+                # Fall through so the process status is still checked below.
+                self.log.info("Waiting for Apache Beam process to complete.")
+
+            for readable_fd in readable_fds:
+                self._process_fd(readable_fd)
+
+            if self._proc.poll() is not None:
+                break
+
+        # Corner case: check if more output was created between the last read and the process termination
+        for readable_fd in reads:
+            self._process_fd(readable_fd)
+
+        self.log.info("Process exited with return code: %s", self._proc.returncode)
+
+        if self._proc.returncode != 0:
+            raise AirflowException(f"Apache Beam process failed with return code {self._proc.returncode}")
+
+
+class BeamHook(BaseHook):
+    """
+    Hook for Apache Beam.
+
+    :param runner: Runner type
+    :type runner: str
+    """
+
+    def __init__(
+        self,
+        runner: str,
+    ) -> None:
+        self.runner = runner
+        super().__init__()
+
+    def _start_pipeline(
+        self,
+        variables: dict,
+        command_prefix: List[str],
+        process_line_callback: Optional[Callable[[str], None]] = None,
+    ) -> None:
+        cmd = command_prefix + [
+            f"--runner={self.runner}",
+        ]
+        if variables:
+            cmd.extend(beam_options_to_args(variables))
+        cmd_runner = BeamCommandRunner(
+            cmd=cmd,
+            process_line_callback=process_line_callback,
+        )
+        cmd_runner.wait_for_done()
+
+    def start_python_pipeline(  # pylint: disable=too-many-arguments
+        self,
+        variables: dict,
+        py_file: str,
+        py_options: List[str],
+        py_interpreter: str = "python3",
+        py_requirements: Optional[List[str]] = None,
+        py_system_site_packages: bool = False,
+        process_line_callback: Optional[Callable[[str], None]] = None,
+    ):
+        """
+        Starts Apache Beam python pipeline.
+
+        :param variables: Variables passed to the pipeline.
+        :type variables: Dict
+        :param py_file: Path to the Apache Beam pipeline file, or a module name when
+            ``-m`` is passed in ``py_options``.
+        :type py_file: str
+        :param py_options: Additional options.
+        :type py_options: List[str]
+        :param py_interpreter: Python version of the Apache Beam pipeline.
+            Defaults to python3.
+            To track python versions supported by beam and related
+            issues check: https://issues.apache.org/jira/browse/BEAM-1251
+        :type py_interpreter: str
+        :param py_requirements: Additional python package(s) to install.
+            If a value is passed to this parameter, a new virtual environment will be
+            created with the additional packages installed.
+
+            You can also install the apache-beam package this way if it is not installed
+            on your system, or if you want to use a different version.
+        :type py_requirements: List[str]
+        :param py_system_site_packages: Whether to include system_site_packages in your virtualenv.
+            See virtualenv documentation for more information.
+
+            This option is only relevant if the ``py_requirements`` parameter is not None.
+        :type py_system_site_packages: bool
+        :param process_line_callback: (Optional) Callback applied to each line of the
+            subprocess output; can be used to detect the job id.
+        :type process_line_callback: Optional[Callable[[str], None]]
+        """
+        if "labels" in variables:
+            variables["labels"] = [f"{key}={value}" for key, value in variables["labels"].items()]
+
+        if py_requirements is not None:
+            if not py_requirements and not py_system_site_packages:
+                warning_invalid_environment = textwrap.dedent(
+                    """\
+                    Invalid method invocation. You have disabled inclusion of system packages and
+                    provided an empty list of packages to install, so it is not possible to create
+                    a valid virtual environment. The apache-beam package must be installed in the
+                    virtual environment for your job to be executed. To fix this problem:
+                    * install apache-beam on the system, then set parameter py_system_site_packages
+                      to True, or
+                    * add apache-beam to the list of required packages in parameter py_requirements.
+ """ + ) + raise AirflowException(warning_invalid_environment) + + with TemporaryDirectory(prefix="apache-beam-venv") as tmp_dir: + py_interpreter = prepare_virtualenv( + venv_directory=tmp_dir, + python_bin=py_interpreter, + system_site_packages=py_system_site_packages, + requirements=py_requirements, + ) + command_prefix = [py_interpreter] + py_options + [py_file] + + self._start_pipeline( + variables=variables, + command_prefix=command_prefix, + process_line_callback=process_line_callback, + ) + else: + command_prefix = [py_interpreter] + py_options + [py_file] + + self._start_pipeline( + variables=variables, + command_prefix=command_prefix, + process_line_callback=process_line_callback, + ) + + def start_java_pipeline( + self, + variables: dict, + jar: str, + job_class: Optional[str] = None, + process_line_callback: Optional[Callable[[str], None]] = None, + ) -> None: + """ + Starts Apache Beam Java pipeline. + + :param variables: Variables passed to the job. + :type variables: dict + :param jar: Name of the jar for the pipeline + :type job_class: str + :param job_class: Name of the java class for the pipeline. + :type job_class: str + """ + if "labels" in variables: + variables["labels"] = json.dumps(variables["labels"], separators=(",", ":")) + + command_prefix = ["java", "-cp", jar, job_class] if job_class else ["java", "-jar", jar] + self._start_pipeline( + variables=variables, + command_prefix=command_prefix, + process_line_callback=process_line_callback, + ) diff --git a/airflow/providers/apache/beam/operators/__init__.py b/airflow/providers/apache/beam/operators/__init__.py new file mode 100644 index 0000000000000..217e5db960782 --- /dev/null +++ b/airflow/providers/apache/beam/operators/__init__.py @@ -0,0 +1,17 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/airflow/providers/apache/beam/operators/beam.py b/airflow/providers/apache/beam/operators/beam.py new file mode 100644 index 0000000000000..849298e10d989 --- /dev/null +++ b/airflow/providers/apache/beam/operators/beam.py @@ -0,0 +1,446 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
+# specific language governing permissions and limitations
+# under the License.
+"""This module contains Apache Beam operators."""
+from contextlib import ExitStack
+from typing import Callable, List, Optional, Union
+
+from airflow.models import BaseOperator
+from airflow.providers.apache.beam.hooks.beam import BeamHook, BeamRunnerType
+from airflow.providers.google.cloud.hooks.dataflow import (
+    DataflowHook,
+    process_line_and_extract_dataflow_job_id_callback,
+)
+from airflow.providers.google.cloud.hooks.gcs import GCSHook
+from airflow.providers.google.cloud.operators.dataflow import CheckJobRunning, DataflowConfiguration
+from airflow.utils.decorators import apply_defaults
+from airflow.utils.helpers import convert_camel_to_snake
+from airflow.version import version
+
+
+class BeamRunPythonPipelineOperator(BaseOperator):
+    """
+    Launches Apache Beam pipelines written in Python. Note that both
+    ``default_pipeline_options`` and ``pipeline_options`` will be merged to specify pipeline
+    execution parameters, and ``default_pipeline_options`` is expected to hold
+    high-level options, for instance, project and zone information, which
+    apply to all beam operators in the DAG.
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:BeamRunPythonPipelineOperator`
+
+    .. seealso::
+        For more detail on Apache Beam have a look at the reference:
+        https://beam.apache.org/documentation/
+
+    :param py_file: Reference to the python Apache Beam pipeline file.py, e.g.,
+        /some/local/file/path/to/your/python/pipeline/file. (templated)
+    :type py_file: str
+    :param runner: Runner on which the pipeline will be run. By default "DirectRunner" is used.
+        Other possible options: DataflowRunner, SparkRunner, FlinkRunner.
+        See: :class:`~providers.apache.beam.hooks.beam.BeamRunnerType`
+        See: https://beam.apache.org/documentation/runners/capability-matrix/
+
+        If you use the Dataflow runner, check the dedicated operator:
+        :class:`~providers.google.cloud.operators.dataflow.DataflowCreatePythonJobOperator`
+    :type runner: str
+    :param py_options: Additional python options, e.g., ["-m", "-v"].
+    :type py_options: list[str]
+    :param default_pipeline_options: Map of default pipeline options.
+    :type default_pipeline_options: dict
+    :param pipeline_options: Map of pipeline options. The keys are option names and
+        the values can be of different types:
+
+        * If the value is None, the single option - ``--key`` (without value) will be added.
+        * If the value is False, this option will be skipped.
+        * If the value is True, the single option - ``--key`` (without value) will be added.
+        * If the value is a list, the option will be repeated for each element.
+          If the value is ``['A', 'B']`` and the key is ``key`` then the ``--key=A --key=B``
+          options will be passed.
+        * Other value types will be replaced with the Python textual representation.
+
+        When defining labels (``labels`` option), you can also provide a dictionary.
+    :type pipeline_options: dict
+    :param py_interpreter: Python version of the beam pipeline.
+        Defaults to python3.
+        To track python versions supported by beam and related
+        issues check: https://issues.apache.org/jira/browse/BEAM-1251
+    :type py_interpreter: str
+    :param py_requirements: Additional python package(s) to install.
+        If a value is passed to this parameter, a new virtual environment will be
+        created with the additional packages installed.
+
+        You can also install the apache-beam package this way if it is not installed
+        on your system, or if you want to use a different version.
+    :type py_requirements: List[str]
+    :param py_system_site_packages: Whether to include system_site_packages in your virtualenv.
+        See virtualenv documentation for more information.
+
+        This option is only relevant if the ``py_requirements`` parameter is not None.
+    :type py_system_site_packages: bool
+    :param gcp_conn_id: Optional.
+        The connection ID to use connecting to Google Cloud Storage if the python file is on GCS.
+    :type gcp_conn_id: str
+    :param delegate_to: Optional.
+        The account to impersonate using domain-wide delegation of authority,
+        if any. For this to work, the service account making the request must have
+        domain-wide delegation enabled.
+    :type delegate_to: str
+    :param dataflow_config: Dataflow configuration, used when runner type is set to DataflowRunner
+    :type dataflow_config: Union[dict, providers.google.cloud.operators.dataflow.DataflowConfiguration]
+    """
+
+    template_fields = ["py_file", "runner", "pipeline_options", "default_pipeline_options", "dataflow_config"]
+    template_fields_renderers = {'dataflow_config': 'json', 'pipeline_options': 'json'}
+
+    @apply_defaults
+    def __init__(
+        self,
+        *,
+        py_file: str,
+        runner: str = "DirectRunner",
+        default_pipeline_options: Optional[dict] = None,
+        pipeline_options: Optional[dict] = None,
+        py_interpreter: str = "python3",
+        py_options: Optional[List[str]] = None,
+        py_requirements: Optional[List[str]] = None,
+        py_system_site_packages: bool = False,
+        gcp_conn_id: str = "google_cloud_default",
+        delegate_to: Optional[str] = None,
+        dataflow_config: Optional[Union[DataflowConfiguration, dict]] = None,
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+
+        self.py_file = py_file
+        self.runner = runner
+        self.py_options = py_options or []
+        self.default_pipeline_options = default_pipeline_options or {}
+        self.pipeline_options = pipeline_options or {}
+        self.pipeline_options.setdefault("labels", {}).update(
+            {"airflow-version": "v" + version.replace(".", "-").replace("+", "-")}
+        )
+        self.py_interpreter = py_interpreter
+        self.py_requirements = py_requirements
+        self.py_system_site_packages = py_system_site_packages
+        self.gcp_conn_id = gcp_conn_id
+        self.delegate_to = delegate_to
+        self.dataflow_config = dataflow_config or {}
+        self.beam_hook: Optional[BeamHook] = None
+        self.dataflow_hook: Optional[DataflowHook] = None
+        self.dataflow_job_id: Optional[str] = None
+
+        if self.dataflow_config and self.runner.lower() != BeamRunnerType.DataflowRunner.lower():
+            self.log.warning(
+                "dataflow_config is defined but the runner is different from DataflowRunner (%s)", self.runner
+            )
+
+    def execute(self, context):
+        """Execute the Apache Beam Pipeline."""
+        self.beam_hook = BeamHook(runner=self.runner)
+        pipeline_options = self.default_pipeline_options.copy()
+        process_line_callback: Optional[Callable] = None
+        is_dataflow = self.runner.lower() == BeamRunnerType.DataflowRunner.lower()
+
+        if isinstance(self.dataflow_config, dict):
+            # Allow dataflow_config to be passed as a plain dict.
+            self.dataflow_config = DataflowConfiguration(**self.dataflow_config)
+
+        if is_dataflow:
+            self.dataflow_hook = DataflowHook(
+                gcp_conn_id=self.dataflow_config.gcp_conn_id or self.gcp_conn_id,
+                delegate_to=self.dataflow_config.delegate_to or self.delegate_to,
+                poll_sleep=self.dataflow_config.poll_sleep,
+                impersonation_chain=self.dataflow_config.impersonation_chain,
+                drain_pipeline=self.dataflow_config.drain_pipeline,
+                cancel_timeout=self.dataflow_config.cancel_timeout,
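+                # Forwarded from DataflowConfiguration: controls whether the hook keeps
+                # polling until the Dataflow job reaches a terminal state (see the
+                # DataflowConfiguration docs for the exact semantics).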
+                wait_until_finished=self.dataflow_config.wait_until_finished,
+            )
+            self.dataflow_config.project_id = self.dataflow_config.project_id or self.dataflow_hook.project_id
+
+            dataflow_job_name = DataflowHook.build_dataflow_job_name(
+                self.dataflow_config.job_name, self.dataflow_config.append_job_name
+            )
+            pipeline_options["job_name"] = dataflow_job_name
+            pipeline_options["project"] = self.dataflow_config.project_id
+            pipeline_options["region"] = self.dataflow_config.location
+            pipeline_options.setdefault("labels", {}).update(
+                {"airflow-version": "v" + version.replace(".", "-").replace("+", "-")}
+            )
+
+            def set_current_dataflow_job_id(job_id):
+                self.dataflow_job_id = job_id
+
+            process_line_callback = process_line_and_extract_dataflow_job_id_callback(
+                on_new_job_id_callback=set_current_dataflow_job_id
+            )
+
+        pipeline_options.update(self.pipeline_options)
+
+        # Convert argument names from lowerCamelCase to snake_case.
+        formatted_pipeline_options = {
+            convert_camel_to_snake(key): pipeline_options[key] for key in pipeline_options
+        }
+
+        with ExitStack() as exit_stack:
+            if self.py_file.lower().startswith("gs://"):
+                gcs_hook = GCSHook(self.gcp_conn_id, self.delegate_to)
+                tmp_gcs_file = exit_stack.enter_context(  # pylint: disable=no-member
+                    gcs_hook.provide_file(object_url=self.py_file)
+                )
+                self.py_file = tmp_gcs_file.name
+
+            self.beam_hook.start_python_pipeline(
+                variables=formatted_pipeline_options,
+                py_file=self.py_file,
+                py_options=self.py_options,
+                py_interpreter=self.py_interpreter,
+                py_requirements=self.py_requirements,
+                py_system_site_packages=self.py_system_site_packages,
+                process_line_callback=process_line_callback,
+            )
+
+            if is_dataflow:
+                self.dataflow_hook.wait_for_done(  # pylint: disable=no-value-for-parameter
+                    job_name=dataflow_job_name,
+                    location=self.dataflow_config.location,
+                    job_id=self.dataflow_job_id,
+                    multiple_jobs=False,
+                )
+
+        return {"dataflow_job_id": self.dataflow_job_id}
+
+    def on_kill(self) -> None:
+        if self.dataflow_hook and self.dataflow_job_id:
+            self.log.info('Dataflow job with id: `%s` was requested to be cancelled.', self.dataflow_job_id)
+            self.dataflow_hook.cancel_job(
+                job_id=self.dataflow_job_id,
+                project_id=self.dataflow_config.project_id,
+            )
+
+
+# pylint: disable=too-many-instance-attributes
+class BeamRunJavaPipelineOperator(BaseOperator):
+    """
+    Launches Apache Beam pipelines written in Java.
+
+    Note that both
+    ``default_pipeline_options`` and ``pipeline_options`` will be merged to specify pipeline
+    execution parameters, and ``default_pipeline_options`` is expected to hold
+    high-level pipeline_options, for instance, project and zone information, which
+    apply to all Apache Beam operators in the DAG.
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:BeamRunJavaPipelineOperator`
+
+    .. seealso::
+        For more detail on Apache Beam have a look at the reference:
+        https://beam.apache.org/documentation/
+
+    You need to pass the path to your jar file as a file reference with the ``jar``
+    parameter; the jar needs to be a self-executing jar (see documentation here:
+    https://beam.apache.org/documentation/runners/dataflow/#self-executing-jar).
+    Use ``pipeline_options`` to pass pipeline options to your job.
+
+    :param jar: The reference to a self-executing Apache Beam jar (templated).
+    :type jar: str
+    :param runner: Runner on which the pipeline will be run. By default "DirectRunner" is used.
+        See:
+        https://beam.apache.org/documentation/runners/capability-matrix/
+        If you use the Dataflow runner, check the dedicated operator:
+        :class:`~providers.google.cloud.operators.dataflow.DataflowCreateJavaJobOperator`
+    :type runner: str
+    :param job_class: The name of the Apache Beam pipeline class to be executed, it
+        is often not the main class configured in the pipeline jar file.
+    :type job_class: str
+    :param default_pipeline_options: Map of default job pipeline_options.
+    :type default_pipeline_options: dict
+    :param pipeline_options: Map of job-specific pipeline_options. The keys are option names
+        and the values can be of different types:
+
+        * If the value is None, the single option - ``--key`` (without value) will be added.
+        * If the value is False, this option will be skipped.
+        * If the value is True, the single option - ``--key`` (without value) will be added.
+        * If the value is a list, the option will be repeated for each element.
+          If the value is ``['A', 'B']`` and the key is ``key`` then the ``--key=A --key=B``
+          pipeline_options will be passed.
+        * Other value types will be replaced with the Python textual representation.
+
+        When defining labels (``labels`` option), you can also provide a dictionary.
+    :type pipeline_options: dict
+    :param gcp_conn_id: The connection ID to use connecting to Google Cloud Storage if jar is on GCS
+    :type gcp_conn_id: str
+    :param delegate_to: The account to impersonate using domain-wide delegation of authority,
+        if any. For this to work, the service account making the request must have
+        domain-wide delegation enabled.
+    :type delegate_to: str
+    :param dataflow_config: Dataflow configuration, used when runner type is set to DataflowRunner
+    :type dataflow_config: Union[dict, providers.google.cloud.operators.dataflow.DataflowConfiguration]
+    """
+
+    template_fields = [
+        "jar",
+        "runner",
+        "job_class",
+        "pipeline_options",
+        "default_pipeline_options",
+        "dataflow_config",
+    ]
+    template_fields_renderers = {'dataflow_config': 'json', 'pipeline_options': 'json'}
+    ui_color = "#0273d4"
+
+    @apply_defaults
+    def __init__(
+        self,
+        *,
+        jar: str,
+        runner: str = "DirectRunner",
+        job_class: Optional[str] = None,
+        default_pipeline_options: Optional[dict] = None,
+        pipeline_options: Optional[dict] = None,
+        gcp_conn_id: str = "google_cloud_default",
+        delegate_to: Optional[str] = None,
+        dataflow_config: Optional[Union[DataflowConfiguration, dict]] = None,
+        **kwargs,
+    ) -> None:
+        super().__init__(**kwargs)
+
+        self.jar = jar
+        self.runner = runner
+        self.default_pipeline_options = default_pipeline_options or {}
+        self.pipeline_options = pipeline_options or {}
+        self.job_class = job_class
+        self.dataflow_config = dataflow_config or {}
+        self.gcp_conn_id = gcp_conn_id
+        self.delegate_to = delegate_to
+        self.dataflow_job_id = None
+        self.dataflow_hook: Optional[DataflowHook] = None
+        self.beam_hook: Optional[BeamHook] = None
+        self._dataflow_job_name: Optional[str] = None
+
+        if self.dataflow_config and self.runner.lower() != BeamRunnerType.DataflowRunner.lower():
+            self.log.warning(
+                "dataflow_config is defined but the runner is different from DataflowRunner (%s)", self.runner
+            )
+
+    def execute(self, context):
+        """Execute the Apache Beam Pipeline."""
+        self.beam_hook = BeamHook(runner=self.runner)
+        pipeline_options = self.default_pipeline_options.copy()
+        process_line_callback: Optional[Callable] = None
+        is_dataflow = self.runner.lower() == BeamRunnerType.DataflowRunner.lower()
+
+        if isinstance(self.dataflow_config, dict):
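+            # Allow dataflow_config to be passed as a plain dict and normalize it
+            # to a DataflowConfiguration so attribute access below is uniform.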
+            self.dataflow_config = DataflowConfiguration(**self.dataflow_config)
+
+        if is_dataflow:
+            self.dataflow_hook = DataflowHook(
+                gcp_conn_id=self.dataflow_config.gcp_conn_id or self.gcp_conn_id,
+                delegate_to=self.dataflow_config.delegate_to or self.delegate_to,
+                poll_sleep=self.dataflow_config.poll_sleep,
+                impersonation_chain=self.dataflow_config.impersonation_chain,
+                drain_pipeline=self.dataflow_config.drain_pipeline,
+                cancel_timeout=self.dataflow_config.cancel_timeout,
+                wait_until_finished=self.dataflow_config.wait_until_finished,
+            )
+            self.dataflow_config.project_id = self.dataflow_config.project_id or self.dataflow_hook.project_id
+
+            self._dataflow_job_name = DataflowHook.build_dataflow_job_name(
+                self.dataflow_config.job_name, self.dataflow_config.append_job_name
+            )
+            pipeline_options["jobName"] = self.dataflow_config.job_name
+            pipeline_options["project"] = self.dataflow_config.project_id
+            pipeline_options["region"] = self.dataflow_config.location
+            pipeline_options.setdefault("labels", {}).update(
+                {"airflow-version": "v" + version.replace(".", "-").replace("+", "-")}
+            )
+
+            def set_current_dataflow_job_id(job_id):
+                self.dataflow_job_id = job_id
+
+            process_line_callback = process_line_and_extract_dataflow_job_id_callback(
+                on_new_job_id_callback=set_current_dataflow_job_id
+            )
+
+        pipeline_options.update(self.pipeline_options)
+
+        with ExitStack() as exit_stack:
+            if self.jar.lower().startswith("gs://"):
+                gcs_hook = GCSHook(self.gcp_conn_id, self.delegate_to)
+                tmp_gcs_file = exit_stack.enter_context(  # pylint: disable=no-member
+                    gcs_hook.provide_file(object_url=self.jar)
+                )
+                self.jar = tmp_gcs_file.name
+
+            if is_dataflow:
+                is_running = False
+                if self.dataflow_config.check_if_running != CheckJobRunning.IgnoreJob:
+                    is_running = (
+                        # The reason for disable=no-value-for-parameter is that the project_id
+                        # parameter is required but cannot be passed here: the method is wrapped
+                        # by the @_fallback_to_project_id_from_variables decorator, which takes
+                        # the project_id value from variables and raises an error if project_id
+                        # is defined both in variables and as a parameter (here it is already
+                        # defined in variables).
+                        self.dataflow_hook.is_job_dataflow_running(  # pylint: disable=no-value-for-parameter
+                            name=self.dataflow_config.job_name,
+                            variables=pipeline_options,
+                        )
+                    )
+                while is_running and self.dataflow_config.check_if_running == CheckJobRunning.WaitForRun:
+                    # Same as above: project_id is deliberately not passed because the
+                    # method is wrapped by @_fallback_to_project_id_from_variables and
+                    # takes project_id from variables, raising an error if it were also
+                    # passed as a parameter.
+                    # pylint: disable=no-value-for-parameter
+                    is_running = self.dataflow_hook.is_job_dataflow_running(
+                        name=self.dataflow_config.job_name,
+                        variables=pipeline_options,
+                    )
+                if not is_running:
+                    pipeline_options["jobName"] = self._dataflow_job_name
+                    self.beam_hook.start_java_pipeline(
+                        variables=pipeline_options,
+                        jar=self.jar,
+                        job_class=self.job_class,
+                        process_line_callback=process_line_callback,
+                    )
+                    self.dataflow_hook.wait_for_done(
+                        job_name=self._dataflow_job_name,
+                        location=self.dataflow_config.location,
+                        job_id=self.dataflow_job_id,
+                        multiple_jobs=self.dataflow_config.multiple_jobs,
+                        project_id=self.dataflow_config.project_id,
+                    )
+
+            else:
+                self.beam_hook.start_java_pipeline(
+                    variables=pipeline_options,
+                    jar=self.jar,
+                    job_class=self.job_class,
+                    process_line_callback=process_line_callback,
+                )
+
+        return {"dataflow_job_id": self.dataflow_job_id}
+
+    def on_kill(self) -> None:
+        if self.dataflow_hook and self.dataflow_job_id:
+            self.log.info('Dataflow job with id: `%s` was requested to be cancelled.', self.dataflow_job_id)
+            self.dataflow_hook.cancel_job(
+                job_id=self.dataflow_job_id,
+                project_id=self.dataflow_config.project_id,
+            )
diff --git a/tests/airflow_pylint/do_not_use_asserts.py b/airflow/providers/apache/beam/provider.yaml
similarity index 54%
rename from tests/airflow_pylint/do_not_use_asserts.py
rename to airflow/providers/apache/beam/provider.yaml
index 47a0e208b3862..4325265d16ab3 100644
--- a/tests/airflow_pylint/do_not_use_asserts.py
+++ b/airflow/providers/apache/beam/provider.yaml
@@ -15,30 +15,31 @@
 # specific language governing permissions and limitations
 # under the License.
-from pylint.checkers import BaseChecker
-from pylint.interfaces import IAstroidChecker
-from pylint.lint import PyLinter
+---
+package-name: apache-airflow-providers-apache-beam
+name: Apache Beam
+description: |
+    `Apache Beam <https://beam.apache.org/>`__.
+versions: + - 0.0.1 -class DoNotUseAssertsChecker(BaseChecker): - __implements__ = IAstroidChecker +integrations: + - integration-name: Apache Beam + external-doc-url: https://beam.apache.org/ + how-to-guide: + - /docs/apache-airflow-providers-apache-beam/operators.rst + tags: [apache] - name = 'do-not-use-asserts' - priority = -1 - msgs = { - 'E7401': ( - 'Do not use asserts.', - 'do-not-use-asserts', - 'Asserts should not be used in the main Airflow code.', - ), - } +operators: + - integration-name: Apache Beam + python-modules: + - airflow.providers.apache.beam.operators.beam - def visit_assert(self, node): - self.add_message( - self.name, - node=node, - ) +hooks: + - integration-name: Apache Beam + python-modules: + - airflow.providers.apache.beam.hooks.beam - -def register(linter: PyLinter): - linter.register_checker(DoNotUseAssertsChecker(linter)) +hook-class-names: + - airflow.providers.apache.beam.hooks.beam.BeamHook diff --git a/airflow/providers/apache/cassandra/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/apache/cassandra/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index 051aeb78776f8..0000000000000 --- a/airflow/providers/apache/cassandra/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,20 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23 | Add call to Super call in apache providers (#7820) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [2f2f89c14](https://github.com/apache/airflow/commit/2f2f89c148e2b694aee9402707f68065ee7320f8) | 2019-12-01 | [AIRFLOW-6139] Consistent spaces in pylint enable/disable (#6701) | -| [f88f06c86](https://github.com/apache/airflow/commit/f88f06c862b6096e974871decd14b86811cc4bc6) | 2019-11-30 | [AIRFLOW-6131] Make Cassandra hooks/sensors pylint compatible (#6693) | -| [f987646d7](https://github.com/apache/airflow/commit/f987646d7d85683cdc73ae9438a2a8c4a2992c7f) | 2019-11-22 | [AIRFLOW-5950] AIP-21 Change import paths for "apache/cassandra" modules (#6609) | diff --git a/airflow/providers/apache/cassandra/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/apache/cassandra/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index ee95935d4cf4c..0000000000000 --- a/airflow/providers/apache/cassandra/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,17 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05 | Enforce keyword only arguments on apache operators (#10170) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| 
[4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [750555f26](https://github.com/apache/airflow/commit/750555f261616d809d24b8550b9482a713ba3171) | 2020-07-19 | Add guide for Cassandra Operators (#9877) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/apache/cassandra/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/apache/cassandra/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 55db62f4f7f64..0000000000000 --- a/airflow/providers/apache/cassandra/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,11 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [0646849e3](https://github.com/apache/airflow/commit/0646849e3dacdc2bc62705ae136f3ad3b16232e9) | 2020-10-14 | Add protocol_version to conn_config for Cassandrahook (#11036) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/apache/cassandra/BACKPORT_PROVIDER_README.md b/airflow/providers/apache/cassandra/BACKPORT_PROVIDER_README.md deleted file mode 100644 index f42844b4f168d..0000000000000 --- a/airflow/providers/apache/cassandra/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,141 +0,0 @@ - - - -# Package apache-airflow-backport-providers-apache-cassandra - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Sensors](#sensors) - - [Moved sensors](#moved-sensors) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for `apache.cassandra` provider. All classes for this provider package -are in `airflow.providers.apache.cassandra` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. 
- - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-apache-cassandra` - -## PIP requirements - -| PIP package | Version required | -|:-----------------|:-------------------| -| cassandra-driver | >=3.13.0,<3.21.0 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `apache.cassandra` provider -are in the `airflow.providers.apache.cassandra` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Sensors - - - -### Moved sensors - -| Airflow 2.0 sensors: `airflow.providers.apache.cassandra` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:-------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [sensors.record.CassandraRecordSensor](https://github.com/apache/airflow/blob/master/airflow/providers/apache/cassandra/sensors/record.py) | [contrib.sensors.cassandra_record_sensor.CassandraRecordSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/cassandra_record_sensor.py) | -| [sensors.table.CassandraTableSensor](https://github.com/apache/airflow/blob/master/airflow/providers/apache/cassandra/sensors/table.py) | [contrib.sensors.cassandra_table_sensor.CassandraTableSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/cassandra_table_sensor.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.apache.cassandra` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:-------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------| -| [hooks.cassandra.CassandraHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/cassandra/hooks/cassandra.py) | [contrib.hooks.cassandra_hook.CassandraHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/cassandra_hook.py) | - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [0646849e3](https://github.com/apache/airflow/commit/0646849e3dacdc2bc62705ae136f3ad3b16232e9) | 2020-10-14 | Add protocol_version to conn_config for Cassandrahook (#11036) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| 
[0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05 | Enforce keyword only arguments on apache operators (#10170) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [750555f26](https://github.com/apache/airflow/commit/750555f261616d809d24b8550b9482a713ba3171) | 2020-07-19 | Add guide for Cassandra Operators (#9877) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23 | Add call to Super call in apache providers (#7820) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [2f2f89c14](https://github.com/apache/airflow/commit/2f2f89c148e2b694aee9402707f68065ee7320f8) | 2019-12-01 | [AIRFLOW-6139] Consistent spaces in pylint enable/disable (#6701) | -| [f88f06c86](https://github.com/apache/airflow/commit/f88f06c862b6096e974871decd14b86811cc4bc6) | 2019-11-30 | [AIRFLOW-6131] Make Cassandra hooks/sensors pylint compatible (#6693) | -| [f987646d7](https://github.com/apache/airflow/commit/f987646d7d85683cdc73ae9438a2a8c4a2992c7f) | 2019-11-22 | [AIRFLOW-5950] AIP-21 Change import paths for "apache/cassandra" modules (#6609) | diff --git a/airflow/providers/apache/cassandra/CHANGELOG.rst b/airflow/providers/apache/cassandra/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/apache/cassandra/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. 
diff --git a/airflow/providers/apache/cassandra/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/apache/cassandra/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index e413e973100b1..0000000000000 --- a/airflow/providers/apache/cassandra/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,50 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [bd90136aa](https://github.com/apache/airflow/commit/bd90136aaf5035e3234fe545b79a3e4aad21efe2) | 2020-11-30 | Move operator guides to provider documentation packages (#12681) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [0646849e3](https://github.com/apache/airflow/commit/0646849e3dacdc2bc62705ae136f3ad3b16232e9) | 2020-10-14 | Add protocol_version to conn_config for Cassandrahook (#11036) | -| 
[16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05 | Enforce keyword only arguments on apache operators (#10170) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [750555f26](https://github.com/apache/airflow/commit/750555f261616d809d24b8550b9482a713ba3171) | 2020-07-19 | Add guide for Cassandra Operators (#9877) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23 | Add call to Super call in apache providers (#7820) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [2f2f89c14](https://github.com/apache/airflow/commit/2f2f89c148e2b694aee9402707f68065ee7320f8) | 2019-12-01 | [AIRFLOW-6139] Consistent spaces in pylint enable/disable (#6701) | -| [f88f06c86](https://github.com/apache/airflow/commit/f88f06c862b6096e974871decd14b86811cc4bc6) | 2019-11-30 | [AIRFLOW-6131] Make Cassandra hooks/sensors pylint compatible (#6693) | -| [f987646d7](https://github.com/apache/airflow/commit/f987646d7d85683cdc73ae9438a2a8c4a2992c7f) | 2019-11-22 | [AIRFLOW-5950] AIP-21 Change import paths for "apache/cassandra" modules (#6609) | diff --git a/airflow/providers/apache/cassandra/README.md b/airflow/providers/apache/cassandra/README.md deleted file mode 100644 index 83f3b99352680..0000000000000 --- a/airflow/providers/apache/cassandra/README.md +++ /dev/null @@ -1,144 +0,0 @@ - - - -# Package apache-airflow-providers-apache-cassandra - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Sensors](#sensors) - - [Moved sensors](#moved-sensors) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for `apache.cassandra` provider. All classes for this provider package -are in `airflow.providers.apache.cassandra` python package. - - - -## Installation - -NOTE! - -On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option -`--use-deprecated legacy-resolver` to your pip install command. - -You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-apache-cassandra` - -## PIP requirements - -| PIP package | Version required | -|:-----------------|:-------------------| -| cassandra-driver | >=3.13.0,<3.21.0 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `apache.cassandra` provider -are in the `airflow.providers.apache.cassandra` package. 
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Sensors - - - -### Moved sensors - -| Airflow 2.0 sensors: `airflow.providers.apache.cassandra` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:-------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [sensors.record.CassandraRecordSensor](https://github.com/apache/airflow/blob/master/airflow/providers/apache/cassandra/sensors/record.py) | [contrib.sensors.cassandra_record_sensor.CassandraRecordSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/cassandra_record_sensor.py) | -| [sensors.table.CassandraTableSensor](https://github.com/apache/airflow/blob/master/airflow/providers/apache/cassandra/sensors/table.py) | [contrib.sensors.cassandra_table_sensor.CassandraTableSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/cassandra_table_sensor.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.apache.cassandra` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:-------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------| -| [hooks.cassandra.CassandraHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/cassandra/hooks/cassandra.py) | [contrib.hooks.cassandra_hook.CassandraHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/cassandra_hook.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [bd90136aa](https://github.com/apache/airflow/commit/bd90136aaf5035e3234fe545b79a3e4aad21efe2) | 2020-11-30 | Move operator guides to provider documentation packages (#12681) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| 
[6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [0646849e3](https://github.com/apache/airflow/commit/0646849e3dacdc2bc62705ae136f3ad3b16232e9) | 2020-10-14 | Add protocol_version to conn_config for Cassandrahook (#11036) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05 | Enforce keyword only arguments on apache operators (#10170) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package 
(#9729) | -| [750555f26](https://github.com/apache/airflow/commit/750555f261616d809d24b8550b9482a713ba3171) | 2020-07-19 | Add guide for Cassandra Operators (#9877) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23 | Add call to Super call in apache providers (#7820) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [2f2f89c14](https://github.com/apache/airflow/commit/2f2f89c148e2b694aee9402707f68065ee7320f8) | 2019-12-01 | [AIRFLOW-6139] Consistent spaces in pylint enable/disable (#6701) | -| [f88f06c86](https://github.com/apache/airflow/commit/f88f06c862b6096e974871decd14b86811cc4bc6) | 2019-11-30 | [AIRFLOW-6131] Make Cassandra hooks/sensors pylint compatible (#6693) | -| [f987646d7](https://github.com/apache/airflow/commit/f987646d7d85683cdc73ae9438a2a8c4a2992c7f) | 2019-11-22 | [AIRFLOW-5950] AIP-21 Change import paths for "apache/cassandra" modules (#6609) | diff --git a/airflow/providers/apache/cassandra/provider.yaml b/airflow/providers/apache/cassandra/provider.yaml index 276505abbb7e5..13edafeef2fc0 100644 --- a/airflow/providers/apache/cassandra/provider.yaml +++ b/airflow/providers/apache/cassandra/provider.yaml @@ -29,6 +29,7 @@ integrations: external-doc-url: http://cassandra.apache.org/ how-to-guide: - /docs/apache-airflow-providers-apache-cassandra/operators.rst + logo: /integration-logos/apache/cassandra-3.png tags: [apache] sensors: diff --git a/airflow/providers/apache/druid/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/apache/druid/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index 628201b7ba1c8..0000000000000 --- 
a/airflow/providers/apache/druid/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,21 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23 | Add call to Super call in apache providers (#7820) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [057f3ae3a](https://github.com/apache/airflow/commit/057f3ae3a4afedf6d462ecf58b01dd6304d3e135) | 2020-01-29 | [AIRFLOW-6670][depends on AIRFLOW-6669] Move contrib operators to providers package (#7286) | -| [086d731ce](https://github.com/apache/airflow/commit/086d731ce0066b3037d96df2a05cea1101ed3c17) | 2020-01-14 | [AIRFLOW-6510] Fix druid operator templating (#7127) | -| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12 | [AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142) | diff --git a/airflow/providers/apache/druid/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/apache/druid/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 7e2d48fc7606d..0000000000000 --- a/airflow/providers/apache/druid/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,18 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | 
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22 | Replace assigment with Augmented assignment (#10468) | -| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05 | Enforce keyword only arguments on apache operators (#10170) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/apache/druid/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/apache/druid/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 528a7e085f91f..0000000000000 --- a/airflow/providers/apache/druid/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,10 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/apache/druid/BACKPORT_PROVIDER_README.md b/airflow/providers/apache/druid/BACKPORT_PROVIDER_README.md deleted file mode 100644 index 
3e5ca0b13e633..0000000000000 --- a/airflow/providers/apache/druid/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,172 +0,0 @@ - - - -# Package apache-airflow-backport-providers-apache-druid - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Cross provider package dependencies](#cross-provider-package-dependencies) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Transfer operators](#transfer-operators) - - [Moved transfer operators](#moved-transfer-operators) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for `apache.druid` provider. All classes for this provider package -are in `airflow.providers.apache.druid` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-apache-druid` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| pydruid | >=0.4.1 | - -## Cross provider package dependencies - -Those are dependencies that might be needed in order to use all the features of the package. -You need to install the specified backport providers package in order to use them. - -You can install such cross-provider dependencies when installing from PyPI. For example: - -```bash -pip install apache-airflow-backport-providers-apache-druid[apache.hive] -``` - -| Dependent package | Extra | -|:-----------------------------------------------------------------------------------------------------------------------------|:------------| -| [apache-airflow-backport-providers-apache-hive](https://github.com/apache/airflow/tree/master/airflow/providers/apache/hive) | apache.hive | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `apache.druid` provider -are in the `airflow.providers.apache.druid` package. 
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.apache.druid` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.druid.DruidOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/druid/operators/druid.py) | [contrib.operators.druid_operator.DruidOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/druid_operator.py) | -| [operators.druid_check.DruidCheckOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/druid/operators/druid_check.py) | [operators.druid_check_operator.DruidCheckOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/druid_check_operator.py) | - - -## Transfer operators - - - -### Moved transfer operators - -| Airflow 2.0 transfers: `airflow.providers.apache.druid` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:-------------------------------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------| -| [transfers.hive_to_druid.HiveToDruidOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/druid/transfers/hive_to_druid.py) | [operators.hive_to_druid.HiveToDruidTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/hive_to_druid.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.apache.druid` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------| -| [hooks.druid.DruidDbApiHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/druid/hooks/druid.py) | [hooks.druid_hook.DruidDbApiHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/druid_hook.py) | -| [hooks.druid.DruidHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/druid/hooks/druid.py) | [hooks.druid_hook.DruidHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/druid_hook.py) | - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| 
[16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22 | Replace assigment with Augmented assignment (#10468) | -| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05 | Enforce keyword only arguments on apache operators (#10170) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| 
[00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23 | Add call to Super call in apache providers (#7820) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [057f3ae3a](https://github.com/apache/airflow/commit/057f3ae3a4afedf6d462ecf58b01dd6304d3e135) | 2020-01-29 | [AIRFLOW-6670][depends on AIRFLOW-6669] Move contrib operators to providers package (#7286) | -| [086d731ce](https://github.com/apache/airflow/commit/086d731ce0066b3037d96df2a05cea1101ed3c17) | 2020-01-14 | [AIRFLOW-6510] Fix druid operator templating (#7127) | -| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12 | [AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142) | diff --git a/airflow/providers/apache/druid/CHANGELOG.rst b/airflow/providers/apache/druid/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/apache/druid/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. 
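
The backport README removed above records the AIP-21 class moves in its "Moved operators" and "Moved hooks" tables. As a quick smoke test (a sketch, not part of this diff; it assumes an Airflow 1.10.* environment and uses only the package name and import paths quoted in that README), the new `airflow.providers.apache.druid` paths can be verified after installing the backport package:

```bash
# Hypothetical check: install the backport package named in the README,
# then confirm the post-AIP-21 import paths from the "Moved" tables resolve.
pip install apache-airflow-backport-providers-apache-druid
python -c "from airflow.providers.apache.druid.operators.druid import DruidOperator"
python -c "from airflow.providers.apache.druid.hooks.druid import DruidHook, DruidDbApiHook"
```

On Airflow 2.*, the same import paths ship in `apache-airflow-providers-apache-druid` instead, per the provider README deleted further below.
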
diff --git a/airflow/providers/apache/druid/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/apache/druid/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index 4c0a7b17c2be4..0000000000000 --- a/airflow/providers/apache/druid/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,52 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [3a72fc824](https://github.com/apache/airflow/commit/3a72fc82475df3b745a00a7b5e34eef9d27b3329) | 2020-11-14 | Fix Description of Provider Docs (#12361) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [dd2095f4a](https://github.com/apache/airflow/commit/dd2095f4a8b07c9b1a4c279a3578cd1e23b71a1b) | 2020-11-10 | Simplify string expressions & Use f-string (#12216) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 
2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22 | Replace assigment with Augmented assignment (#10468) | -| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05 | Enforce keyword only arguments on apache operators (#10170) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23 | Add call to Super call in apache providers (#7820) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [057f3ae3a](https://github.com/apache/airflow/commit/057f3ae3a4afedf6d462ecf58b01dd6304d3e135) | 2020-01-29 | [AIRFLOW-6670][depends on AIRFLOW-6669] Move contrib operators to providers package (#7286) | -| [086d731ce](https://github.com/apache/airflow/commit/086d731ce0066b3037d96df2a05cea1101ed3c17) | 2020-01-14 | [AIRFLOW-6510] Fix druid operator templating (#7127) | -| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12 | [AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142) | diff --git a/airflow/providers/apache/druid/README.md b/airflow/providers/apache/druid/README.md deleted file mode 100644 index 11aa2ac89c49a..0000000000000 --- a/airflow/providers/apache/druid/README.md +++ /dev/null @@ -1,176 +0,0 @@ - - - -# Package apache-airflow-providers-apache-druid - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Cross provider package dependencies](#cross-provider-package-dependencies) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Transfer operators](#transfer-operators) - - [Moved transfer operators](#moved-transfer-operators) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for `apache.druid` provider. All classes for this provider package -are in `airflow.providers.apache.druid` python package. - - - -## Installation - -NOTE! - -On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option -`--use-deprecated legacy-resolver` to your pip install command. 
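
For reference, the two workarounds described in the deleted installation note above translate to the following commands (a minimal sketch assuming a fresh environment; only commands quoted in the note itself are used):

```bash
# Workaround 1 from the note: pin pip below the 20.3 resolver, then install.
pip install --upgrade pip==20.2.4
pip install apache-airflow-providers-apache-druid

# Workaround 2 from the note: keep pip 20.3 and opt into the legacy resolver.
pip install --use-deprecated legacy-resolver apache-airflow-providers-apache-druid
```
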
- -You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-apache-druid` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| pydruid | >=0.4.1 | - -## Cross provider package dependencies - -Those are dependencies that might be needed in order to use all the features of the package. -You need to install the specified backport providers package in order to use them. - -You can install such cross-provider dependencies when installing from PyPI. For example: - -```bash -pip install apache-airflow-providers-apache-druid[apache.hive] -``` - -| Dependent package | Extra | -|:------------------------------------------------------------------------------------------------------|:------------| -| [apache-airflow-providers-apache-hive](https://pypi.org/project/apache-airflow-providers-apache-hive) | apache.hive | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `apache.druid` provider -are in the `airflow.providers.apache.druid` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.apache.druid` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.druid.DruidOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/druid/operators/druid.py) | [contrib.operators.druid_operator.DruidOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/druid_operator.py) | -| [operators.druid_check.DruidCheckOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/druid/operators/druid_check.py) | [operators.druid_check_operator.DruidCheckOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/druid_check_operator.py) | - - -## Transfer operators - - - -### Moved transfer operators - -| Airflow 2.0 transfers: `airflow.providers.apache.druid` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:-------------------------------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------| -| [transfers.hive_to_druid.HiveToDruidOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/druid/transfers/hive_to_druid.py) | [operators.hive_to_druid.HiveToDruidTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/hive_to_druid.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.apache.druid` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | 
-|:--------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------| -| [hooks.druid.DruidDbApiHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/druid/hooks/druid.py) | [hooks.druid_hook.DruidDbApiHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/druid_hook.py) | -| [hooks.druid.DruidHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/druid/hooks/druid.py) | [hooks.druid_hook.DruidHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/druid_hook.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [3a72fc824](https://github.com/apache/airflow/commit/3a72fc82475df3b745a00a7b5e34eef9d27b3329) | 2020-11-14 | Fix Description of Provider Docs (#12361) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [dd2095f4a](https://github.com/apache/airflow/commit/dd2095f4a8b07c9b1a4c279a3578cd1e23b71a1b) | 2020-11-10 | Simplify string expressions & Use f-string (#12216) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| 
[872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22 | Replace assigment with Augmented assignment (#10468) | -| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05 | Enforce keyword only arguments on apache operators (#10170) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23 | Add call to Super call in apache providers (#7820) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [057f3ae3a](https://github.com/apache/airflow/commit/057f3ae3a4afedf6d462ecf58b01dd6304d3e135) | 2020-01-29 | [AIRFLOW-6670][depends on AIRFLOW-6669] Move contrib operators to providers package (#7286) | -| [086d731ce](https://github.com/apache/airflow/commit/086d731ce0066b3037d96df2a05cea1101ed3c17) | 2020-01-14 | [AIRFLOW-6510] Fix druid operator templating (#7127) | -| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12 | [AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142) | diff --git a/airflow/providers/apache/druid/provider.yaml b/airflow/providers/apache/druid/provider.yaml index c9d8e6949b342..faa7534803b40 100644 --- a/airflow/providers/apache/druid/provider.yaml +++ b/airflow/providers/apache/druid/provider.yaml @@ -27,6 +27,7 @@ versions: integrations: - integration-name: Apache Druid external-doc-url: https://druid.apache.org/ + logo: /integration-logos/apache/druid-1.png tags: [apache] operators: diff --git a/airflow/providers/apache/hdfs/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/apache/hdfs/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index d8e6f69f1b96e..0000000000000 --- a/airflow/providers/apache/hdfs/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,22 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| 
[00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09 | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170) | -| [6c670870a](https://github.com/apache/airflow/commit/6c670870aa6ea5d82a86f912bb6de8b88e711ca5) | 2020-03-25 | [AIRFLOW-6833] HA for webhdfs connection (#7454) | -| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23 | Add call to Super call in apache providers (#7820) | -| [f3ad5cf61](https://github.com/apache/airflow/commit/f3ad5cf6185b9d406d0fb0a4ecc0b5536f79217a) | 2020-02-03 | [AIRFLOW-4681] Make sensors module pylint compatible (#7309) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02 | [AIRFLOW-6708] Set unique logger names (#7330) | -| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12 | [AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142) | diff --git a/airflow/providers/apache/hdfs/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/apache/hdfs/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 101b7435ab94c..0000000000000 --- a/airflow/providers/apache/hdfs/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,18 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:----------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [d1bce91bb](https://github.com/apache/airflow/commit/d1bce91bb21d5a468fa6a0207156c28fe1ca6513) | 2020-08-25 | PyDocStyle: Enable D403: Capitalized first word of docstring (#10530) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| 
[2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25 | Remove all "noinspection" comments native to IntelliJ (#10525) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05 | Enforce keyword only arguments on apache operators (#10170) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [44d4ae809](https://github.com/apache/airflow/commit/44d4ae809c1e3784ff95b6a5e95113c3412e56b3) | 2020-07-06 | Upgrade to latest pre-commit checks (#9686) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/apache/hdfs/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/apache/hdfs/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 528a7e085f91f..0000000000000 --- a/airflow/providers/apache/hdfs/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,10 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/apache/hdfs/BACKPORT_PROVIDER_README.md b/airflow/providers/apache/hdfs/BACKPORT_PROVIDER_README.md deleted file mode 100644 index 020e941966b2c..0000000000000 --- a/airflow/providers/apache/hdfs/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,146 +0,0 @@ - - - -# Package apache-airflow-backport-providers-apache-hdfs - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Sensors](#sensors) - - [Moved sensors](#moved-sensors) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for `apache.hdfs` provider. All classes for this provider package -are in `airflow.providers.apache.hdfs` python package. 
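For illustration, here is a minimal sketch of importing and instantiating the relocated classes under the new package layout. The module paths follow the moved-classes tables below; the task id, file path, and connection id are hypothetical placeholders, not values from this package:

```python
# Minimal sketch, assuming the backport package is installed on Airflow 1.10.*.
# Import paths follow the airflow.providers.apache.hdfs layout shown in the
# tables below; the filepath and connection id are illustrative assumptions.
from airflow.providers.apache.hdfs.hooks.webhdfs import WebHDFSHook
from airflow.providers.apache.hdfs.sensors.web_hdfs import WebHdfsSensor

wait_for_file = WebHdfsSensor(
    task_id="wait_for_input_file",        # hypothetical task id
    filepath="/data/incoming/input.csv",  # hypothetical HDFS path
    webhdfs_conn_id="webhdfs_default",    # default WebHDFS connection id
)

hook = WebHDFSHook(webhdfs_conn_id="webhdfs_default")
```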
- -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-apache-hdfs` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| snakebite-py3 | | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `apache.hdfs` provider -are in the `airflow.providers.apache.hdfs` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Sensors - - - -### Moved sensors - -| Airflow 2.0 sensors: `airflow.providers.apache.hdfs` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------| -| [sensors.hdfs.HdfsFolderSensor](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hdfs/sensors/hdfs.py) | [contrib.sensors.hdfs_sensor.HdfsSensorFolder](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/hdfs_sensor.py) | -| [sensors.hdfs.HdfsRegexSensor](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hdfs/sensors/hdfs.py) | [contrib.sensors.hdfs_sensor.HdfsSensorRegex](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/hdfs_sensor.py) | -| [sensors.hdfs.HdfsSensor](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hdfs/sensors/hdfs.py) | [sensors.hdfs_sensor.HdfsSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/sensors/hdfs_sensor.py) | -| [sensors.web_hdfs.WebHdfsSensor](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hdfs/sensors/web_hdfs.py) | [sensors.web_hdfs_sensor.WebHdfsSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/sensors/web_hdfs_sensor.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.apache.hdfs` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------| -| [hooks.hdfs.HDFSHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hdfs/hooks/hdfs.py) | [hooks.hdfs_hook.HDFSHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/hdfs_hook.py) | -| [hooks.webhdfs.WebHDFSHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hdfs/hooks/webhdfs.py) | [hooks.webhdfs_hook.WebHDFSHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/webhdfs_hook.py) | - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| 
[b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:----------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [d1bce91bb](https://github.com/apache/airflow/commit/d1bce91bb21d5a468fa6a0207156c28fe1ca6513) | 2020-08-25 | PyDocStyle: Enable D403: Capitalized first word of docstring (#10530) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25 | Remove all "noinspection" comments native to IntelliJ (#10525) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05 | Enforce keyword only arguments on apache operators (#10170) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [44d4ae809](https://github.com/apache/airflow/commit/44d4ae809c1e3784ff95b6a5e95113c3412e56b3) | 2020-07-06 | Upgrade to latest pre-commit checks (#9686) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| 
[f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09 | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170) | -| [6c670870a](https://github.com/apache/airflow/commit/6c670870aa6ea5d82a86f912bb6de8b88e711ca5) | 2020-03-25 | [AIRFLOW-6833] HA for webhdfs connection (#7454) | -| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23 | Add call to Super call in apache providers (#7820) | -| [f3ad5cf61](https://github.com/apache/airflow/commit/f3ad5cf6185b9d406d0fb0a4ecc0b5536f79217a) | 2020-02-03 | [AIRFLOW-4681] Make sensors module pylint compatible (#7309) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02 | [AIRFLOW-6708] Set unique logger names (#7330) | -| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12 | [AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142) | diff --git a/airflow/providers/apache/hdfs/CHANGELOG.rst b/airflow/providers/apache/hdfs/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/apache/hdfs/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. 
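When reading a changelog like the one above, it can help to confirm which release of the provider is actually installed. A minimal standard-library sketch (nothing Airflow-specific is assumed beyond the distribution name used in the installation instructions):

```python
# Minimal sketch: read the installed provider's version from package metadata.
# importlib.metadata requires Python 3.8+; on older interpreters the
# importlib-metadata backport offers the same API.
from importlib.metadata import version

print(version("apache-airflow-providers-apache-hdfs"))  # e.g. "1.0.0"
```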
diff --git a/airflow/providers/apache/hdfs/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/apache/hdfs/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index d5863562554b4..0000000000000 --- a/airflow/providers/apache/hdfs/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,53 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [370e7d07d](https://github.com/apache/airflow/commit/370e7d07d1ed1a53b73fe878425fdcd4c71a7ed1) | 2020-11-21 | Fix Python Docstring parameters (#12513) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 
2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [d1bce91bb](https://github.com/apache/airflow/commit/d1bce91bb21d5a468fa6a0207156c28fe1ca6513) | 2020-08-25 | PyDocStyle: Enable D403: Capitalized first word of docstring (#10530) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25 | Remove all "noinspection" comments native to IntelliJ (#10525) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05 | Enforce keyword only arguments on apache operators (#10170) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [44d4ae809](https://github.com/apache/airflow/commit/44d4ae809c1e3784ff95b6a5e95113c3412e56b3) | 2020-07-06 | Upgrade to latest pre-commit checks (#9686) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09 | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170) | -| [6c670870a](https://github.com/apache/airflow/commit/6c670870aa6ea5d82a86f912bb6de8b88e711ca5) | 2020-03-25 | [AIRFLOW-6833] HA for webhdfs connection (#7454) | -| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23 | Add call to Super call in apache providers (#7820) | -| [f3ad5cf61](https://github.com/apache/airflow/commit/f3ad5cf6185b9d406d0fb0a4ecc0b5536f79217a) | 2020-02-03 | [AIRFLOW-4681] Make sensors module pylint compatible (#7309) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02 | [AIRFLOW-6708] Set unique logger names (#7330) | -| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12 | [AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142) | diff --git a/airflow/providers/apache/hdfs/README.md b/airflow/providers/apache/hdfs/README.md deleted file mode 100644 index 805d28946faaf..0000000000000 --- a/airflow/providers/apache/hdfs/README.md +++ /dev/null @@ -1,150 +0,0 @@ - - - -# Package apache-airflow-providers-apache-hdfs - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Sensors](#sensors) - - [Moved sensors](#moved-sensors) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for `apache.hdfs` provider. All classes for this provider package -are in `airflow.providers.apache.hdfs` python package. - - - -## Installation - -NOTE! - -In November 2020, a new version of pip (20.3) was released with a new 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors during installation, depending on your choice -of extras. To install Airflow you need to either downgrade pip to version 20.2.4 -(`pip install --upgrade pip==20.2.4`) or, if you use pip 20.3, add the option -`--use-deprecated legacy-resolver` to your pip install command.
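For scripted environments, one way to apply the workaround above is to pin pip before installing the provider. A hedged sketch follows; the helper function is hypothetical and simply shells out to pip for the current interpreter, choosing the pin-pip route rather than the legacy-resolver flag:

```python
# Minimal sketch of the pip 20.3 workaround quoted above, driven from Python.
# The helper name and the choice to pin pip (instead of passing
# --use-deprecated legacy-resolver) are assumptions, not project conventions.
import subprocess
import sys

def pip_install(*args: str) -> None:
    """Run `pip install ...` with the pip of the current interpreter."""
    subprocess.check_call([sys.executable, "-m", "pip", "install", *args])

pip_install("--upgrade", "pip==20.2.4")              # downgrade pip, per the note
pip_install("apache-airflow-providers-apache-hdfs")  # then install the provider
```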
- -You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-apache-hdfs` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| snakebite-py3 | | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `apache.hdfs` provider -are in the `airflow.providers.apache.hdfs` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Sensors - - - -### Moved sensors - -| Airflow 2.0 sensors: `airflow.providers.apache.hdfs` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------| -| [sensors.hdfs.HdfsFolderSensor](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hdfs/sensors/hdfs.py) | [contrib.sensors.hdfs_sensor.HdfsSensorFolder](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/hdfs_sensor.py) | -| [sensors.hdfs.HdfsRegexSensor](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hdfs/sensors/hdfs.py) | [contrib.sensors.hdfs_sensor.HdfsSensorRegex](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/hdfs_sensor.py) | -| [sensors.hdfs.HdfsSensor](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hdfs/sensors/hdfs.py) | [sensors.hdfs_sensor.HdfsSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/sensors/hdfs_sensor.py) | -| [sensors.web_hdfs.WebHdfsSensor](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hdfs/sensors/web_hdfs.py) | [sensors.web_hdfs_sensor.WebHdfsSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/sensors/web_hdfs_sensor.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.apache.hdfs` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------| -| [hooks.hdfs.HDFSHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hdfs/hooks/hdfs.py) | [hooks.hdfs_hook.HDFSHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/hdfs_hook.py) | -| [hooks.webhdfs.WebHDFSHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hdfs/hooks/webhdfs.py) | [hooks.webhdfs_hook.WebHDFSHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/webhdfs_hook.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) 
| 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [370e7d07d](https://github.com/apache/airflow/commit/370e7d07d1ed1a53b73fe878425fdcd4c71a7ed1) | 2020-11-21 | Fix Python Docstring parameters (#12513) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [d1bce91bb](https://github.com/apache/airflow/commit/d1bce91bb21d5a468fa6a0207156c28fe1ca6513) | 2020-08-25 | 
PyDocStyle: Enable D403: Capitalized first word of docstring (#10530) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25 | Remove all "noinspection" comments native to IntelliJ (#10525) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05 | Enforce keyword only arguments on apache operators (#10170) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [44d4ae809](https://github.com/apache/airflow/commit/44d4ae809c1e3784ff95b6a5e95113c3412e56b3) | 2020-07-06 | Upgrade to latest pre-commit checks (#9686) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09 | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170) | -| [6c670870a](https://github.com/apache/airflow/commit/6c670870aa6ea5d82a86f912bb6de8b88e711ca5) | 2020-03-25 | [AIRFLOW-6833] HA for webhdfs connection (#7454) | -| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23 | Add call to Super call in apache providers (#7820) | -| [f3ad5cf61](https://github.com/apache/airflow/commit/f3ad5cf6185b9d406d0fb0a4ecc0b5536f79217a) | 2020-02-03 | [AIRFLOW-4681] Make sensors module pylint compatible (#7309) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02 | [AIRFLOW-6708] Set unique logger names (#7330) | -| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12 | [AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142) | diff --git a/airflow/providers/apache/hdfs/provider.yaml b/airflow/providers/apache/hdfs/provider.yaml index f71126fa6f84c..0bda8a57f706c 100644 --- a/airflow/providers/apache/hdfs/provider.yaml +++ b/airflow/providers/apache/hdfs/provider.yaml @@ -28,9 +28,11 @@ versions: integrations: - integration-name: Hadoop Distributed File System (HDFS) external-doc-url: https://hadoop.apache.org/docs/r1.2.1/hdfs_design.html + logo: /integration-logos/apache/hadoop.png tags: [apache] - integration-name: WebHDFS external-doc-url: https://hadoop.apache.org/docs/current/hadoop-project-dist/hadoop-hdfs/WebHDFS.html + logo: /integration-logos/apache/hadoop.png tags: [apache] sensors: diff --git a/airflow/providers/apache/hive/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/apache/hive/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index 01ba354f76f8d..0000000000000 --- a/airflow/providers/apache/hive/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,32 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 
2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [c78e2a5fe](https://github.com/apache/airflow/commit/c78e2a5feae15e84b05430cfc5935f0e289fb6b4) | 2020-06-16 | Make hive macros py3 compatible (#8598) | -| [6350fd6eb](https://github.com/apache/airflow/commit/6350fd6ebb9958982cb3fa1d466168fc31708035) | 2020-06-08 | Don't use the term "whitelist" - language matters (#9174) | -| [10796cb7c](https://github.com/apache/airflow/commit/10796cb7ce52c8ac2f68024e531fdda779547bdf) | 2020-06-03 | Remove Hive/Hadoop/Java dependency from unit tests (#9029) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [cdb3f2545](https://github.com/apache/airflow/commit/cdb3f25456e49d0199cd7ccd680626dac01c9be6) | 2020-05-26 | All classes in backport providers are now importable in Airflow 1.10 (#8991) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [93ea05880](https://github.com/apache/airflow/commit/93ea05880283a56e3d42ab07db7453977a3de8ec) | 2020-04-21 | [AIRFLOW-7059] pass hive_conf to get_pandas_df in HiveServer2Hook (#8380) | -| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09 | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170) | -| [cb0bf4a14](https://github.com/apache/airflow/commit/cb0bf4a142656ee40b43a01660b6f6b08a9840fa) | 2020-03-30 | Remove sql like function in base_hook (#7901) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23 | Add call to Super call in apache providers (#7820) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [f3ad5cf61](https://github.com/apache/airflow/commit/f3ad5cf6185b9d406d0fb0a4ecc0b5536f79217a) | 2020-02-03 | [AIRFLOW-4681] Make sensors module pylint compatible (#7309) | -| 
[97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [057f3ae3a](https://github.com/apache/airflow/commit/057f3ae3a4afedf6d462ecf58b01dd6304d3e135) | 2020-01-29 | [AIRFLOW-6670][depends on AIRFLOW-6669] Move contrib operators to providers package (#7286) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | -| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12 | [AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142) | diff --git a/airflow/providers/apache/hive/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/apache/hive/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 65ae03f219b00..0000000000000 --- a/airflow/providers/apache/hive/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,29 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24 | Fix incorrect Usage of Optional[bool] (#11138) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [ac943c9e1](https://github.com/apache/airflow/commit/ac943c9e18f75259d531dbda8c51e650f57faa4c) | 2020-09-08 | [AIRFLOW-3964][AIP-17] Consolidate and de-dup sensor tasks using Smart Sensor (#5499) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [d76026545](https://github.com/apache/airflow/commit/d7602654526fdd2876466371404784bd17cfe0d2) | 2020-08-25 | PyDocStyle: No whitespaces allowed surrounding docstring text (#10533) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [27339a5a0](https://github.com/apache/airflow/commit/27339a5a0f9e382dbc7d32a128f0831a48ef9a12) | 2020-08-22 | Remove mentions of Airflow Gitter (#10460) | -| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22 | Replace assigment with Augmented assignment (#10468) | -| 
[8f8db8959](https://github.com/apache/airflow/commit/8f8db8959e526be54d700845d36ee9f315bae2ea) | 2020-08-12 | DbApiHook: Support kwargs in get_pandas_df (#9730) | -| [b43f90abf](https://github.com/apache/airflow/commit/b43f90abf4c7219d5d59cccb0514256bd3f2fdc7) | 2020-08-09 | Fix various typos in the repo (#10263) | -| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05 | Enforce keyword only arguments on apache operators (#10170) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [c2db0dfeb](https://github.com/apache/airflow/commit/c2db0dfeb13ee679bf4d7b57874f0fcb39c0f0ed) | 2020-07-22 | More strict rules in mypy (#9705) (#9906) | -| [5013fda8f](https://github.com/apache/airflow/commit/5013fda8f072e633c114fb39fb59a22f60200b40) | 2020-07-20 | Add drop_partition functionality for HiveMetastoreHook (#9472) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [44d4ae809](https://github.com/apache/airflow/commit/44d4ae809c1e3784ff95b6a5e95113c3412e56b3) | 2020-07-06 | Upgrade to latest pre-commit checks (#9686) | -| [e13a14c87](https://github.com/apache/airflow/commit/e13a14c8730f4f633d996dd7d3468fe827136a84) | 2020-06-21 | Enable & Fix Whitespace related PyDocStyle Checks (#9458) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/apache/hive/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/apache/hive/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 528a7e085f91f..0000000000000 --- a/airflow/providers/apache/hive/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,10 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/apache/hive/BACKPORT_PROVIDER_README.md b/airflow/providers/apache/hive/BACKPORT_PROVIDER_README.md deleted file mode 100644 index 6abc7523484a9..0000000000000 --- a/airflow/providers/apache/hive/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,221 +0,0 @@ - - - -# Package apache-airflow-backport-providers-apache-hive - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) -- 
[Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Cross provider package dependencies](#cross-provider-package-dependencies) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Transfer operators](#transfer-operators) - - [Moved transfer operators](#moved-transfer-operators) - - [Sensors](#sensors) - - [Moved sensors](#moved-sensors) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for `apache.hive` provider. All classes for this provider package -are in `airflow.providers.apache.hive` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-apache-hive` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| hmsclient | >=0.1.0 | -| pyhive[hive] | >=0.6.0 | - -## Cross provider package dependencies - -Those are dependencies that might be needed in order to use all the features of the package. -You need to install the specified backport providers package in order to use them. - -You can install such cross-provider dependencies when installing from PyPI. For example: - -```bash -pip install apache-airflow-backport-providers-apache-hive[amazon] -``` - -| Dependent package | Extra | -|:-------------------------------------------------------------------------------------------------------------------------------------|:----------------| -| [apache-airflow-backport-providers-amazon](https://github.com/apache/airflow/tree/master/airflow/providers/amazon) | amazon | -| [apache-airflow-backport-providers-microsoft-mssql](https://github.com/apache/airflow/tree/master/airflow/providers/microsoft/mssql) | microsoft.mssql | -| [apache-airflow-backport-providers-mysql](https://github.com/apache/airflow/tree/master/airflow/providers/mysql) | mysql | -| [apache-airflow-backport-providers-presto](https://github.com/apache/airflow/tree/master/airflow/providers/presto) | presto | -| [apache-airflow-backport-providers-samba](https://github.com/apache/airflow/tree/master/airflow/providers/samba) | samba | -| [apache-airflow-backport-providers-vertica](https://github.com/apache/airflow/tree/master/airflow/providers/vertica) | vertica | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `apache.hive` provider -are in the `airflow.providers.apache.hive` package. 
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.apache.hive` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.hive.HiveOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/operators/hive.py) | [operators.hive_operator.HiveOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/hive_operator.py) | -| [operators.hive_stats.HiveStatsCollectionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/operators/hive_stats.py) | [operators.hive_stats_operator.HiveStatsCollectionOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/hive_stats_operator.py) | - - -## Transfer operators - - - -### Moved transfer operators - -| Airflow 2.0 transfers: `airflow.providers.apache.hive` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:------------------------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [transfers.hive_to_mysql.HiveToMySqlOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/transfers/hive_to_mysql.py) | [operators.hive_to_mysql.HiveToMySqlTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/hive_to_mysql.py) | -| [transfers.hive_to_samba.HiveToSambaOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/transfers/hive_to_samba.py) | [operators.hive_to_samba_operator.HiveToSambaOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/hive_to_samba_operator.py) | -| [transfers.mssql_to_hive.MsSqlToHiveOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/transfers/mssql_to_hive.py) | [operators.mssql_to_hive.MsSqlToHiveTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/mssql_to_hive.py) | -| [transfers.mysql_to_hive.MySqlToHiveOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/transfers/mysql_to_hive.py) | [operators.mysql_to_hive.MySqlToHiveTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/mysql_to_hive.py) | -| [transfers.s3_to_hive.S3ToHiveOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/transfers/s3_to_hive.py) | [operators.s3_to_hive_operator.S3ToHiveTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/s3_to_hive_operator.py) | -| [transfers.vertica_to_hive.VerticaToHiveOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/transfers/vertica_to_hive.py) | 
[contrib.operators.vertica_to_hive.VerticaToHiveTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/vertica_to_hive.py) | - - -## Sensors - - - -### Moved sensors - -| Airflow 2.0 sensors: `airflow.providers.apache.hive` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [sensors.hive_partition.HivePartitionSensor](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/sensors/hive_partition.py) | [sensors.hive_partition_sensor.HivePartitionSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/sensors/hive_partition_sensor.py) | -| [sensors.metastore_partition.MetastorePartitionSensor](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/sensors/metastore_partition.py) | [sensors.metastore_partition_sensor.MetastorePartitionSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/sensors/metastore_partition_sensor.py) | -| [sensors.named_hive_partition.NamedHivePartitionSensor](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/sensors/named_hive_partition.py) | [sensors.named_hive_partition_sensor.NamedHivePartitionSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/sensors/named_hive_partition_sensor.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.apache.hive` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------| -| [hooks.hive.HiveCliHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/hooks/hive.py) | [hooks.hive_hooks.HiveCliHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/hive_hooks.py) | -| [hooks.hive.HiveMetastoreHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/hooks/hive.py) | [hooks.hive_hooks.HiveMetastoreHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/hive_hooks.py) | -| [hooks.hive.HiveServer2Hook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/hooks/hive.py) | [hooks.hive_hooks.HiveServer2Hook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/hive_hooks.py) | - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| 
[0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24 | Fix incorrect Usage of Optional[bool] (#11138) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [ac943c9e1](https://github.com/apache/airflow/commit/ac943c9e18f75259d531dbda8c51e650f57faa4c) | 2020-09-08 | [AIRFLOW-3964][AIP-17] Consolidate and de-dup sensor tasks using Smart Sensor (#5499) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [d76026545](https://github.com/apache/airflow/commit/d7602654526fdd2876466371404784bd17cfe0d2) | 2020-08-25 | PyDocStyle: No whitespaces allowed surrounding docstring text (#10533) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [27339a5a0](https://github.com/apache/airflow/commit/27339a5a0f9e382dbc7d32a128f0831a48ef9a12) | 2020-08-22 | Remove mentions of Airflow Gitter (#10460) | -| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22 | Replace assigment with Augmented assignment (#10468) | -| [8f8db8959](https://github.com/apache/airflow/commit/8f8db8959e526be54d700845d36ee9f315bae2ea) | 2020-08-12 | DbApiHook: Support kwargs in get_pandas_df (#9730) | -| [b43f90abf](https://github.com/apache/airflow/commit/b43f90abf4c7219d5d59cccb0514256bd3f2fdc7) | 2020-08-09 | Fix various typos in the repo (#10263) | -| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05 | Enforce keyword only arguments on apache operators (#10170) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [c2db0dfeb](https://github.com/apache/airflow/commit/c2db0dfeb13ee679bf4d7b57874f0fcb39c0f0ed) | 2020-07-22 | More strict rules in mypy (#9705) (#9906) | -| [5013fda8f](https://github.com/apache/airflow/commit/5013fda8f072e633c114fb39fb59a22f60200b40) | 2020-07-20 | Add 
drop_partition functionality for HiveMetastoreHook (#9472) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [44d4ae809](https://github.com/apache/airflow/commit/44d4ae809c1e3784ff95b6a5e95113c3412e56b3) | 2020-07-06 | Upgrade to latest pre-commit checks (#9686) | -| [e13a14c87](https://github.com/apache/airflow/commit/e13a14c8730f4f633d996dd7d3468fe827136a84) | 2020-06-21 | Enable & Fix Whitespace related PyDocStyle Checks (#9458) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [c78e2a5fe](https://github.com/apache/airflow/commit/c78e2a5feae15e84b05430cfc5935f0e289fb6b4) | 2020-06-16 | Make hive macros py3 compatible (#8598) | -| [6350fd6eb](https://github.com/apache/airflow/commit/6350fd6ebb9958982cb3fa1d466168fc31708035) | 2020-06-08 | Don't use the term "whitelist" - language matters (#9174) | -| [10796cb7c](https://github.com/apache/airflow/commit/10796cb7ce52c8ac2f68024e531fdda779547bdf) | 2020-06-03 | Remove Hive/Hadoop/Java dependency from unit tests (#9029) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [cdb3f2545](https://github.com/apache/airflow/commit/cdb3f25456e49d0199cd7ccd680626dac01c9be6) | 2020-05-26 | All classes in backport providers are now importable in Airflow 1.10 (#8991) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [93ea05880](https://github.com/apache/airflow/commit/93ea05880283a56e3d42ab07db7453977a3de8ec) | 2020-04-21 | [AIRFLOW-7059] pass hive_conf to get_pandas_df in HiveServer2Hook (#8380) | -| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09 | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170) | -| [cb0bf4a14](https://github.com/apache/airflow/commit/cb0bf4a142656ee40b43a01660b6f6b08a9840fa) | 2020-03-30 | Remove sql like function in base_hook (#7901) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23 | Add call to Super call in apache providers (#7820) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [f3ad5cf61](https://github.com/apache/airflow/commit/f3ad5cf6185b9d406d0fb0a4ecc0b5536f79217a) | 2020-02-03 | [AIRFLOW-4681] Make sensors module pylint compatible (#7309) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [057f3ae3a](https://github.com/apache/airflow/commit/057f3ae3a4afedf6d462ecf58b01dd6304d3e135) | 2020-01-29 | [AIRFLOW-6670][depends on AIRFLOW-6669] Move contrib operators to providers package (#7286) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | -| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12 | [AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142) | diff --git a/airflow/providers/apache/hive/CHANGELOG.rst b/airflow/providers/apache/hive/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ 
b/airflow/providers/apache/hive/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. diff --git a/airflow/providers/apache/hive/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/apache/hive/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index 0f9b0f19e6d5b..0000000000000 --- a/airflow/providers/apache/hive/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,77 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [a075b6df9](https://github.com/apache/airflow/commit/a075b6df99a4f5e21d198f7be56b577432e6f9db) | 2020-12-09 | Rename remaining Sensors to match AIP-21 (#12927) | -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [250436d96](https://github.com/apache/airflow/commit/250436d962c8c950d38c1eb5e54a998891648cc9) | 2020-11-10 | Fix spelling in Python files (#12230) | -| [502ba309e](https://github.com/apache/airflow/commit/502ba309ea470943f0e99c634269e3d2d13ce6ca) | 2020-11-10 | Enable Markdownlint rule - MD022/blanks-around-headings (#12225) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi 
project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24 | Fix incorrect Usage of Optional[bool] (#11138) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [ac943c9e1](https://github.com/apache/airflow/commit/ac943c9e18f75259d531dbda8c51e650f57faa4c) | 2020-09-08 | [AIRFLOW-3964][AIP-17] Consolidate and de-dup sensor tasks using Smart Sensor (#5499) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [d76026545](https://github.com/apache/airflow/commit/d7602654526fdd2876466371404784bd17cfe0d2) | 2020-08-25 | PyDocStyle: No whitespaces allowed surrounding docstring text (#10533) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [27339a5a0](https://github.com/apache/airflow/commit/27339a5a0f9e382dbc7d32a128f0831a48ef9a12) | 2020-08-22 | Remove mentions of Airflow Gitter (#10460) | -| 
[7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22 | Replace assigment with Augmented assignment (#10468) | -| [8f8db8959](https://github.com/apache/airflow/commit/8f8db8959e526be54d700845d36ee9f315bae2ea) | 2020-08-12 | DbApiHook: Support kwargs in get_pandas_df (#9730) | -| [b43f90abf](https://github.com/apache/airflow/commit/b43f90abf4c7219d5d59cccb0514256bd3f2fdc7) | 2020-08-09 | Fix various typos in the repo (#10263) | -| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05 | Enforce keyword only arguments on apache operators (#10170) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [c2db0dfeb](https://github.com/apache/airflow/commit/c2db0dfeb13ee679bf4d7b57874f0fcb39c0f0ed) | 2020-07-22 | More strict rules in mypy (#9705) (#9906) | -| [5013fda8f](https://github.com/apache/airflow/commit/5013fda8f072e633c114fb39fb59a22f60200b40) | 2020-07-20 | Add drop_partition functionality for HiveMetastoreHook (#9472) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [44d4ae809](https://github.com/apache/airflow/commit/44d4ae809c1e3784ff95b6a5e95113c3412e56b3) | 2020-07-06 | Upgrade to latest pre-commit checks (#9686) | -| [e13a14c87](https://github.com/apache/airflow/commit/e13a14c8730f4f633d996dd7d3468fe827136a84) | 2020-06-21 | Enable & Fix Whitespace related PyDocStyle Checks (#9458) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [c78e2a5fe](https://github.com/apache/airflow/commit/c78e2a5feae15e84b05430cfc5935f0e289fb6b4) | 2020-06-16 | Make hive macros py3 compatible (#8598) | -| [6350fd6eb](https://github.com/apache/airflow/commit/6350fd6ebb9958982cb3fa1d466168fc31708035) | 2020-06-08 | Don't use the term "whitelist" - language matters (#9174) | -| [10796cb7c](https://github.com/apache/airflow/commit/10796cb7ce52c8ac2f68024e531fdda779547bdf) | 2020-06-03 | Remove Hive/Hadoop/Java dependency from unit tests (#9029) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [cdb3f2545](https://github.com/apache/airflow/commit/cdb3f25456e49d0199cd7ccd680626dac01c9be6) | 2020-05-26 | All classes in backport providers are now importable in Airflow 1.10 (#8991) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [93ea05880](https://github.com/apache/airflow/commit/93ea05880283a56e3d42ab07db7453977a3de8ec) | 2020-04-21 | [AIRFLOW-7059] pass hive_conf to get_pandas_df in HiveServer2Hook (#8380) | -| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09 | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170) | -| [cb0bf4a14](https://github.com/apache/airflow/commit/cb0bf4a142656ee40b43a01660b6f6b08a9840fa) | 2020-03-30 | Remove sql like function in base_hook (#7901) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23 | Add call to Super call in apache providers (#7820) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [f3ad5cf61](https://github.com/apache/airflow/commit/f3ad5cf6185b9d406d0fb0a4ecc0b5536f79217a) | 2020-02-03 | [AIRFLOW-4681] Make sensors module pylint compatible (#7309) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [057f3ae3a](https://github.com/apache/airflow/commit/057f3ae3a4afedf6d462ecf58b01dd6304d3e135) | 2020-01-29 | [AIRFLOW-6670][depends on AIRFLOW-6669] Move contrib operators to providers package (#7286) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | -| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12 | [AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142) | diff --git a/airflow/providers/apache/hive/README.md b/airflow/providers/apache/hive/README.md deleted file mode 100644 index d232982634f98..0000000000000 --- 
a/airflow/providers/apache/hive/README.md +++ /dev/null @@ -1,228 +0,0 @@ - - - -# Package apache-airflow-providers-apache-hive - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Cross provider package dependencies](#cross-provider-package-dependencies) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Transfer operators](#transfer-operators) - - [Moved transfer operators](#moved-transfer-operators) - - [Sensors](#sensors) - - [Moved sensors](#moved-sensors) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for `apache.hive` provider. All classes for this provider package -are in `airflow.providers.apache.hive` python package. - - - -## Installation - -NOTE! - -On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option -`--use-deprecated legacy-resolver` to your pip install command. - -You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-apache-hive` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| hmsclient | >=0.1.0 | -| pyhive[hive] | >=0.6.0 | - -## Cross provider package dependencies - -Those are dependencies that might be needed in order to use all the features of the package. -You need to install the specified backport providers package in order to use them. - -You can install such cross-provider dependencies when installing from PyPI. For example: - -```bash -pip install apache-airflow-providers-apache-hive[amazon] -``` - -| Dependent package | Extra | -|:--------------------------------------------------------------------------------------------------------------|:----------------| -| [apache-airflow-providers-amazon](https://pypi.org/project/apache-airflow-providers-amazon) | amazon | -| [apache-airflow-providers-microsoft-mssql](https://pypi.org/project/apache-airflow-providers-microsoft-mssql) | microsoft.mssql | -| [apache-airflow-providers-mysql](https://pypi.org/project/apache-airflow-providers-mysql) | mysql | -| [apache-airflow-providers-presto](https://pypi.org/project/apache-airflow-providers-presto) | presto | -| [apache-airflow-providers-samba](https://pypi.org/project/apache-airflow-providers-samba) | samba | -| [apache-airflow-providers-vertica](https://pypi.org/project/apache-airflow-providers-vertica) | vertica | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `apache.hive` provider -are in the `airflow.providers.apache.hive` package. 
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.apache.hive` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.hive.HiveOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/operators/hive.py) | [operators.hive_operator.HiveOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/hive_operator.py) | -| [operators.hive_stats.HiveStatsCollectionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/operators/hive_stats.py) | [operators.hive_stats_operator.HiveStatsCollectionOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/hive_stats_operator.py) | - - -## Transfer operators - - - -### Moved transfer operators - -| Airflow 2.0 transfers: `airflow.providers.apache.hive` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:------------------------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [transfers.hive_to_mysql.HiveToMySqlOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/transfers/hive_to_mysql.py) | [operators.hive_to_mysql.HiveToMySqlTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/hive_to_mysql.py) | -| [transfers.hive_to_samba.HiveToSambaOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/transfers/hive_to_samba.py) | [operators.hive_to_samba_operator.HiveToSambaOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/hive_to_samba_operator.py) | -| [transfers.mssql_to_hive.MsSqlToHiveOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/transfers/mssql_to_hive.py) | [operators.mssql_to_hive.MsSqlToHiveTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/mssql_to_hive.py) | -| [transfers.mysql_to_hive.MySqlToHiveOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/transfers/mysql_to_hive.py) | [operators.mysql_to_hive.MySqlToHiveTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/mysql_to_hive.py) | -| [transfers.s3_to_hive.S3ToHiveOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/transfers/s3_to_hive.py) | [operators.s3_to_hive_operator.S3ToHiveTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/s3_to_hive_operator.py) | -| [transfers.vertica_to_hive.VerticaToHiveOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/transfers/vertica_to_hive.py) | 
[contrib.operators.vertica_to_hive.VerticaToHiveTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/vertica_to_hive.py) | - - -## Sensors - - - -### Moved sensors - -| Airflow 2.0 sensors: `airflow.providers.apache.hive` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [sensors.hive_partition.HivePartitionSensor](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/sensors/hive_partition.py) | [sensors.hive_partition_sensor.HivePartitionSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/sensors/hive_partition_sensor.py) | -| [sensors.metastore_partition.MetastorePartitionSensor](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/sensors/metastore_partition.py) | [sensors.metastore_partition_sensor.MetastorePartitionSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/sensors/metastore_partition_sensor.py) | -| [sensors.named_hive_partition.NamedHivePartitionSensor](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/sensors/named_hive_partition.py) | [sensors.named_hive_partition_sensor.NamedHivePartitionSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/sensors/named_hive_partition_sensor.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.apache.hive` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------| -| [hooks.hive.HiveCliHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/hooks/hive.py) | [hooks.hive_hooks.HiveCliHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/hive_hooks.py) | -| [hooks.hive.HiveMetastoreHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/hooks/hive.py) | [hooks.hive_hooks.HiveMetastoreHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/hive_hooks.py) | -| [hooks.hive.HiveServer2Hook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/hive/hooks/hive.py) | [hooks.hive_hooks.HiveServer2Hook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/hive_hooks.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [a075b6df9](https://github.com/apache/airflow/commit/a075b6df99a4f5e21d198f7be56b577432e6f9db) | 2020-12-09 | Rename remaining Sensors to match AIP-21 (#12927) | -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 
2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [250436d96](https://github.com/apache/airflow/commit/250436d962c8c950d38c1eb5e54a998891648cc9) | 2020-11-10 | Fix spelling in Python files (#12230) | -| [502ba309e](https://github.com/apache/airflow/commit/502ba309ea470943f0e99c634269e3d2d13ce6ca) | 2020-11-10 | Enable Markdownlint rule - MD022/blanks-around-headings (#12225) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| 
[5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24 | Fix incorrect Usage of Optional[bool] (#11138) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [ac943c9e1](https://github.com/apache/airflow/commit/ac943c9e18f75259d531dbda8c51e650f57faa4c) | 2020-09-08 | [AIRFLOW-3964][AIP-17] Consolidate and de-dup sensor tasks using Smart Sensor (#5499) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [d76026545](https://github.com/apache/airflow/commit/d7602654526fdd2876466371404784bd17cfe0d2) | 2020-08-25 | PyDocStyle: No whitespaces allowed surrounding docstring text (#10533) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [27339a5a0](https://github.com/apache/airflow/commit/27339a5a0f9e382dbc7d32a128f0831a48ef9a12) | 2020-08-22 | Remove mentions of Airflow Gitter (#10460) | -| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22 | Replace assigment with Augmented assignment (#10468) | -| [8f8db8959](https://github.com/apache/airflow/commit/8f8db8959e526be54d700845d36ee9f315bae2ea) | 2020-08-12 | DbApiHook: Support kwargs in get_pandas_df (#9730) | -| [b43f90abf](https://github.com/apache/airflow/commit/b43f90abf4c7219d5d59cccb0514256bd3f2fdc7) | 2020-08-09 | Fix various typos in the repo (#10263) | -| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05 | Enforce keyword only arguments on apache operators (#10170) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [c2db0dfeb](https://github.com/apache/airflow/commit/c2db0dfeb13ee679bf4d7b57874f0fcb39c0f0ed) | 2020-07-22 | More strict rules in mypy (#9705) (#9906) | -| [5013fda8f](https://github.com/apache/airflow/commit/5013fda8f072e633c114fb39fb59a22f60200b40) | 2020-07-20 | Add drop_partition functionality for HiveMetastoreHook (#9472) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [44d4ae809](https://github.com/apache/airflow/commit/44d4ae809c1e3784ff95b6a5e95113c3412e56b3) | 2020-07-06 | Upgrade to latest pre-commit checks (#9686) | -| [e13a14c87](https://github.com/apache/airflow/commit/e13a14c8730f4f633d996dd7d3468fe827136a84) | 2020-06-21 | Enable & Fix Whitespace related PyDocStyle Checks (#9458) | -| 
[d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [c78e2a5fe](https://github.com/apache/airflow/commit/c78e2a5feae15e84b05430cfc5935f0e289fb6b4) | 2020-06-16 | Make hive macros py3 compatible (#8598) | -| [6350fd6eb](https://github.com/apache/airflow/commit/6350fd6ebb9958982cb3fa1d466168fc31708035) | 2020-06-08 | Don't use the term "whitelist" - language matters (#9174) | -| [10796cb7c](https://github.com/apache/airflow/commit/10796cb7ce52c8ac2f68024e531fdda779547bdf) | 2020-06-03 | Remove Hive/Hadoop/Java dependency from unit tests (#9029) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [cdb3f2545](https://github.com/apache/airflow/commit/cdb3f25456e49d0199cd7ccd680626dac01c9be6) | 2020-05-26 | All classes in backport providers are now importable in Airflow 1.10 (#8991) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [93ea05880](https://github.com/apache/airflow/commit/93ea05880283a56e3d42ab07db7453977a3de8ec) | 2020-04-21 | [AIRFLOW-7059] pass hive_conf to get_pandas_df in HiveServer2Hook (#8380) | -| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09 | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170) | -| [cb0bf4a14](https://github.com/apache/airflow/commit/cb0bf4a142656ee40b43a01660b6f6b08a9840fa) | 2020-03-30 | Remove sql like function in base_hook (#7901) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23 | Add call to Super call in apache providers (#7820) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| 
[4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [f3ad5cf61](https://github.com/apache/airflow/commit/f3ad5cf6185b9d406d0fb0a4ecc0b5536f79217a) | 2020-02-03 | [AIRFLOW-4681] Make sensors module pylint compatible (#7309) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [057f3ae3a](https://github.com/apache/airflow/commit/057f3ae3a4afedf6d462ecf58b01dd6304d3e135) | 2020-01-29 | [AIRFLOW-6670][depends on AIRFLOW-6669] Move contrib operators to providers package (#7286) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | -| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12 | [AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142) | diff --git a/airflow/providers/apache/hive/example_dags/example_twitter_README.md b/airflow/providers/apache/hive/example_dags/example_twitter_README.md index ff68856b66b31..c22ca2c85d201 100644 --- a/airflow/providers/apache/hive/example_dags/example_twitter_README.md +++ b/airflow/providers/apache/hive/example_dags/example_twitter_README.md @@ -50,7 +50,7 @@ CREATE TABLE toTwitter_A(id BIGINT, id_str STRING alter table toTwitter_A SET serdeproperties ('skip.header.line.count' = '1'); ``` -When you review the code for the DAG, you will notice that these tasks are generated using for loop. These two for loops could be combined into one loop. However, in most cases, you will be running different analysis on your incoming incoming and outgoing tweets, and hence they are kept separated in this example. +When you review the code for the DAG, you will notice that these tasks are generated using for loop. These two for loops could be combined into one loop. However, in most cases, you will be running different analysis on your incoming and outgoing tweets, and hence they are kept separated in this example. The final step is running the broker script, brokerapi.py, which runs queries in Hive and, in our case, stores the summarized data in MySQL. To connect to Hive, the pyhs2 library is extremely useful and easy to use. To insert data into MySQL from Python, sqlalchemy is also a good choice. I hope you find this tutorial useful. If you have questions, feel free to ask me on [Twitter](https://twitter.com/EkhtiarSyed).
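For a concrete picture of the summarize-and-store step just described, here is a minimal sketch using the provider's `HiveServer2Hook` rather than raw pyhs2. The connection id, HQL query, table names, and MySQL DSN are illustrative assumptions, not the contents of the actual brokerapi.py:

```python
# A minimal sketch of a broker-style step: query Hive, store the summary in MySQL.
# The connection id, HQL, table names and MySQL DSN below are assumptions for
# illustration only -- this is not the actual brokerapi.py.
from airflow.providers.apache.hive.hooks.hive import HiveServer2Hook
from sqlalchemy import create_engine


def summarize_to_mysql() -> None:
    hive = HiveServer2Hook(hiveserver2_conn_id="hiveserver2_default")
    # get_pandas_df() runs the HQL and returns the result as a pandas DataFrame
    df = hive.get_pandas_df("SELECT dt, COUNT(*) AS n FROM toTwitter_A GROUP BY dt")
    # Hand the DataFrame to MySQL via SQLAlchemy (placeholder DSN)
    engine = create_engine("mysql://user:password@localhost/summary_db")
    df.to_sql("twitter_summary", engine, if_exists="replace", index=False)
```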

-Ekhtiar Syed diff --git a/airflow/providers/apache/hive/example_dags/example_twitter_dag.py b/airflow/providers/apache/hive/example_dags/example_twitter_dag.py index 8c9d1f390f1c8..b336d6f4d3278 100644 --- a/airflow/providers/apache/hive/example_dags/example_twitter_dag.py +++ b/airflow/providers/apache/hive/example_dags/example_twitter_dag.py @@ -132,7 +132,7 @@ def transfertodb(): # The following tasks are generated using for loop. The first task puts the eight # csv files to HDFS. The second task loads these files from HDFS to respected Hive # tables. These two for loops could be combined into one loop. However, in most cases, - # you will be running different analysis on your incoming incoming and outgoing tweets, + # you will be running different analysis on your incoming and outgoing tweets, # and hence they are kept separated in this example. # -------------------------------------------------------------------------------- diff --git a/airflow/providers/apache/hive/hooks/hive.py b/airflow/providers/apache/hive/hooks/hive.py index ab7b7b74e1098..d261ab22b0f2a 100644 --- a/airflow/providers/apache/hive/hooks/hive.py +++ b/airflow/providers/apache/hive/hooks/hive.py @@ -487,7 +487,7 @@ def __init__(self, metastore_conn_id: str = default_conn_name) -> None: def __getstate__(self) -> Dict[str, Any]: # This is for pickling to work despite the thrift hive client not - # being pickable + # being picklable state = dict(self.__dict__) del state['metastore'] return state diff --git a/airflow/providers/apache/hive/provider.yaml b/airflow/providers/apache/hive/provider.yaml index fdcb14d340f85..4dc4315c611a6 100644 --- a/airflow/providers/apache/hive/provider.yaml +++ b/airflow/providers/apache/hive/provider.yaml @@ -27,6 +27,7 @@ versions: integrations: - integration-name: Apache Hive external-doc-url: https://hive.apache.org/ + logo: /integration-logos/apache/hive.png tags: [apache] operators: diff --git a/airflow/providers/apache/kylin/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/apache/kylin/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 9ae75b4448fe2..0000000000000 --- a/airflow/providers/apache/kylin/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,19 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:----------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [99accec29](https://github.com/apache/airflow/commit/99accec29d71b0a57fd4e90151b9d4d10321be07) | 2020-09-25 | Fix incorrect Usage of Optional[str] & Optional[int] (#11141) | -| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24 | Fix incorrect Usage of Optional[bool] (#11138) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [d1bce91bb](https://github.com/apache/airflow/commit/d1bce91bb21d5a468fa6a0207156c28fe1ca6513) | 2020-08-25 | PyDocStyle: Enable D403: Capitalized first word of docstring (#10530) | -| 
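The `__getstate__` hunk in `hooks/hive.py` above is one half of a standard pattern for pickling objects that hold an unpicklable member (here, the thrift metastore client): drop the member when serializing and rebuild it when loading. A self-contained sketch of the full pattern follows; `MetastoreHookSketch` is a hypothetical stand-in, not the real `HiveMetastoreHook`, whose restore logic may differ:

```python
# Sketch of the __getstate__/__setstate__ pairing behind the hooks/hive.py hunk
# above: drop the unpicklable client when serializing, rebuild it on unpickle.
# MetastoreHookSketch is a hypothetical stand-in, not the real HiveMetastoreHook.
import pickle
from typing import Any, Dict


class MetastoreHookSketch:
    def __init__(self) -> None:
        self.metastore_conn_id = "metastore_default"
        self.metastore = self._build_client()

    def _build_client(self) -> object:
        # Placeholder for constructing the real (unpicklable) thrift client
        return object()

    def __getstate__(self) -> Dict[str, Any]:
        state = dict(self.__dict__)
        del state["metastore"]  # the thrift client cannot be pickled
        return state

    def __setstate__(self, state: Dict[str, Any]) -> None:
        self.__dict__.update(state)
        self.metastore = self._build_client()  # recreate the client on unpickle


roundtripped = pickle.loads(pickle.dumps(MetastoreHookSketch()))
assert roundtripped.metastore is not None
```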
[f6734b3b8](https://github.com/apache/airflow/commit/f6734b3b850d33d3712763f93c114e80f5af9ffb) | 2020-08-12 | Enable Sphinx spellcheck for doc generation (#10280) | -| [b43f90abf](https://github.com/apache/airflow/commit/b43f90abf4c7219d5d59cccb0514256bd3f2fdc7) | 2020-08-09 | Fix various typos in the repo (#10263) | -| [edc51e313](https://github.com/apache/airflow/commit/edc51e313b50359e0258cce5f7f7283f69342fb9) | 2020-08-08 | Remove Unnecessary list literal in Tuple for Kylin Operator (#10252) | -| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05 | Enforce keyword only arguments on apache operators (#10170) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [a2c5389a6](https://github.com/apache/airflow/commit/a2c5389a60f68482a60eb40c67b1542d827c187e) | 2020-07-14 | Add kylin operator (#9149) | diff --git a/airflow/providers/apache/kylin/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/apache/kylin/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index b7467a0db0db0..0000000000000 --- a/airflow/providers/apache/kylin/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,9 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/apache/kylin/BACKPORT_PROVIDER_README.md b/airflow/providers/apache/kylin/BACKPORT_PROVIDER_README.md deleted file mode 100644 index d5b6a84fa7a2d..0000000000000 --- a/airflow/providers/apache/kylin/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,119 +0,0 @@ - - - -# Package apache-airflow-backport-providers-apache-kylin - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [New operators](#new-operators) - - [Hooks](#hooks) - - [New hooks](#new-hooks) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - -## Backport package - -This is a backport providers package for `apache.kylin` provider. All classes for this provider package -are in `airflow.providers.apache.kylin` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. 
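A note on the `__getstate__` fix in `airflow/providers/apache/hive/hooks/hive.py` above: the hook drops its thrift client from the pickled state because that client is not picklable. Below is a minimal, self-contained sketch of the same pattern, using a `threading.Lock` as a stand-in for the unpicklable client and assuming a `__setstate__` counterpart that recreates the dropped resource (the hunk itself does not show that half):

```python
import pickle
import threading
from typing import Any, Dict


class MetastoreClientHolder:
    """Toy object holding one picklable and one unpicklable attribute."""

    def __init__(self) -> None:
        self.metastore_conn_id = "metastore_default"
        self.metastore = threading.Lock()  # stand-in for the unpicklable thrift client

    def __getstate__(self) -> Dict[str, Any]:
        # Copy the instance dict and drop the unpicklable member, as the hook does.
        state = dict(self.__dict__)
        del state["metastore"]
        return state

    def __setstate__(self, state: Dict[str, Any]) -> None:
        # Restore the plain attributes, then recreate the dropped resource.
        # (Assumed behaviour; the real hook would reconnect to the metastore instead.)
        self.__dict__.update(state)
        self.metastore = threading.Lock()


holder = pickle.loads(pickle.dumps(MetastoreClientHolder()))
assert holder.metastore_conn_id == "metastore_default"
```

Without `__getstate__`, `pickle.dumps` would raise on the lock; dropping and rebuilding the resource is what keeps the hook usable across serialization boundaries.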
- - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-apache-kylin` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| kylinpy | >=2.6 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `apache.kylin` provider -are in the `airflow.providers.apache.kylin` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - -### New operators - -| New Airflow 2.0 operators: `airflow.providers.apache.kylin` package | -|:-----------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.kylin_cube.KylinCubeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/kylin/operators/kylin_cube.py) | - - - -## Hooks - - -### New hooks - -| New Airflow 2.0 hooks: `airflow.providers.apache.kylin` package | -|:---------------------------------------------------------------------------------------------------------------------| -| [hooks.kylin.KylinHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/kylin/hooks/kylin.py) | - - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:----------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [99accec29](https://github.com/apache/airflow/commit/99accec29d71b0a57fd4e90151b9d4d10321be07) | 2020-09-25 | Fix incorrect Usage of Optional[str] & Optional[int] (#11141) | -| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24 | Fix incorrect Usage of Optional[bool] (#11138) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [d1bce91bb](https://github.com/apache/airflow/commit/d1bce91bb21d5a468fa6a0207156c28fe1ca6513) | 2020-08-25 | PyDocStyle: Enable D403: Capitalized first word of docstring (#10530) | -| 
[f6734b3b8](https://github.com/apache/airflow/commit/f6734b3b850d33d3712763f93c114e80f5af9ffb) | 2020-08-12 | Enable Sphinx spellcheck for doc generation (#10280) | -| [b43f90abf](https://github.com/apache/airflow/commit/b43f90abf4c7219d5d59cccb0514256bd3f2fdc7) | 2020-08-09 | Fix various typos in the repo (#10263) | -| [edc51e313](https://github.com/apache/airflow/commit/edc51e313b50359e0258cce5f7f7283f69342fb9) | 2020-08-08 | Remove Unnecessary list literal in Tuple for Kylin Operator (#10252) | -| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05 | Enforce keyword only arguments on apache operators (#10170) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [a2c5389a6](https://github.com/apache/airflow/commit/a2c5389a60f68482a60eb40c67b1542d827c187e) | 2020-07-14 | Add kylin operator (#9149) | diff --git a/airflow/providers/apache/kylin/CHANGELOG.rst b/airflow/providers/apache/kylin/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/apache/kylin/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. 
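Since the per-release markdown files above are being folded into `CHANGELOG.rst`, the class paths they documented remain the useful reference. A quick sketch of what the kylin package exposes, with import paths taken from the operator and hook tables in the deleted README (constructor arguments are omitted because this diff does not document them):

```python
# Available after `pip install apache-airflow-backport-providers-apache-kylin`
# (Airflow 1.10.*, Python 3.6+) or `pip install apache-airflow-providers-apache-kylin`
# (Airflow 2.*); both expose the same 2.0-style module layout.
from airflow.providers.apache.kylin.hooks.kylin import KylinHook
from airflow.providers.apache.kylin.operators.kylin_cube import KylinCubeOperator
```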
diff --git a/airflow/providers/apache/kylin/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/apache/kylin/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index 9dc4fabcc4c28..0000000000000 --- a/airflow/providers/apache/kylin/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,35 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [02d94349b](https://github.com/apache/airflow/commit/02d94349be3d201ce9d37d7358573c937fd010df) | 2020-11-29 | Don't use time.time() or timezone.utcnow() for duration calculations (#12353) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| 
[5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [99accec29](https://github.com/apache/airflow/commit/99accec29d71b0a57fd4e90151b9d4d10321be07) | 2020-09-25 | Fix incorrect Usage of Optional[str] & Optional[int] (#11141) | -| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24 | Fix incorrect Usage of Optional[bool] (#11138) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [d1bce91bb](https://github.com/apache/airflow/commit/d1bce91bb21d5a468fa6a0207156c28fe1ca6513) | 2020-08-25 | PyDocStyle: Enable D403: Capitalized first word of docstring (#10530) | -| [f6734b3b8](https://github.com/apache/airflow/commit/f6734b3b850d33d3712763f93c114e80f5af9ffb) | 2020-08-12 | Enable Sphinx spellcheck for doc generation (#10280) | -| [b43f90abf](https://github.com/apache/airflow/commit/b43f90abf4c7219d5d59cccb0514256bd3f2fdc7) | 2020-08-09 | Fix various typos in the repo (#10263) | -| [edc51e313](https://github.com/apache/airflow/commit/edc51e313b50359e0258cce5f7f7283f69342fb9) | 2020-08-08 | Remove Unnecessary list literal in Tuple for Kylin Operator (#10252) | -| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05 | Enforce keyword only arguments on apache operators (#10170) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [a2c5389a6](https://github.com/apache/airflow/commit/a2c5389a60f68482a60eb40c67b1542d827c187e) | 2020-07-14 | Add kylin operator (#9149) | diff --git a/airflow/providers/apache/kylin/README.md b/airflow/providers/apache/kylin/README.md deleted file mode 100644 index f4efc35ba0a9b..0000000000000 --- a/airflow/providers/apache/kylin/README.md +++ /dev/null @@ -1,128 +0,0 @@ - - - -# Package apache-airflow-providers-apache-kylin - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [New operators](#new-operators) - - [Hooks](#hooks) - - [New hooks](#new-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for `apache.kylin` provider. All classes for this provider package -are in `airflow.providers.apache.kylin` python package. - - - -## Installation - -NOTE! - -On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option -`--use-deprecated legacy-resolver` to your pip install command. 
- -You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-apache-kylin` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| kylinpy | >=2.6 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `apache.kylin` provider -are in the `airflow.providers.apache.kylin` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - -### New operators - -| New Airflow 2.0 operators: `airflow.providers.apache.kylin` package | -|:-----------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.kylin_cube.KylinCubeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/kylin/operators/kylin_cube.py) | - - - -## Hooks - - -### New hooks - -| New Airflow 2.0 hooks: `airflow.providers.apache.kylin` package | -|:---------------------------------------------------------------------------------------------------------------------| -| [hooks.kylin.KylinHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/kylin/hooks/kylin.py) | - - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [02d94349b](https://github.com/apache/airflow/commit/02d94349be3d201ce9d37d7358573c937fd010df) | 2020-11-29 | Don't use time.time() or timezone.utcnow() for duration calculations (#12353) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - 
Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [99accec29](https://github.com/apache/airflow/commit/99accec29d71b0a57fd4e90151b9d4d10321be07) | 2020-09-25 | Fix incorrect Usage of Optional[str] & Optional[int] (#11141) | -| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24 | Fix incorrect Usage of Optional[bool] (#11138) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [d1bce91bb](https://github.com/apache/airflow/commit/d1bce91bb21d5a468fa6a0207156c28fe1ca6513) | 2020-08-25 | PyDocStyle: Enable D403: Capitalized first word of docstring (#10530) | -| [f6734b3b8](https://github.com/apache/airflow/commit/f6734b3b850d33d3712763f93c114e80f5af9ffb) | 2020-08-12 | Enable Sphinx spellcheck for doc generation (#10280) | -| [b43f90abf](https://github.com/apache/airflow/commit/b43f90abf4c7219d5d59cccb0514256bd3f2fdc7) | 2020-08-09 | Fix various typos in the repo (#10263) | -| [edc51e313](https://github.com/apache/airflow/commit/edc51e313b50359e0258cce5f7f7283f69342fb9) | 2020-08-08 | Remove Unnecessary list literal in Tuple for Kylin Operator (#10252) | -| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05 | Enforce keyword only arguments on apache operators (#10170) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [a2c5389a6](https://github.com/apache/airflow/commit/a2c5389a60f68482a60eb40c67b1542d827c187e) | 2020-07-14 | Add kylin operator (#9149) | diff --git a/airflow/providers/apache/livy/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/apache/livy/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index 64145b42ce89b..0000000000000 --- a/airflow/providers/apache/livy/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,18 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | 
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [d3cf23dc0](https://github.com/apache/airflow/commit/d3cf23dc07b5fb92ee2a5be07b0685a4fca36f86) | 2020-02-19 | [AIRFLOW-5470] Add Apache Livy REST operator (#6090) | diff --git a/airflow/providers/apache/livy/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/apache/livy/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index f776b80888629..0000000000000 --- a/airflow/providers/apache/livy/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,17 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| 
[fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05 | Enforce keyword only arguments on apache operators (#10170) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/apache/livy/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/apache/livy/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 0155169a85ad2..0000000000000 --- a/airflow/providers/apache/livy/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,11 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [483068745](https://github.com/apache/airflow/commit/48306874538eea7cfd42358d5ebb59705204bfc4) | 2020-10-24 | Use Python 3 style super classes (#11806) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/apache/livy/BACKPORT_PROVIDER_README.md b/airflow/providers/apache/livy/BACKPORT_PROVIDER_README.md deleted file mode 100644 index 55948ee899eda..0000000000000 --- a/airflow/providers/apache/livy/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,160 +0,0 @@ - - - -# Package apache-airflow-backport-providers-apache-livy - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [Cross provider package dependencies](#cross-provider-package-dependencies) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [New operators](#new-operators) - - [Sensors](#sensors) - - [New sensors](#new-sensors) - - [Hooks](#hooks) - - [New hooks](#new-hooks) -- [Releases](#releases) - - 
[Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for `apache.livy` provider. All classes for this provider package -are in `airflow.providers.apache.livy` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-apache-livy` - -## Cross provider package dependencies - -Those are dependencies that might be needed in order to use all the features of the package. -You need to install the specified backport providers package in order to use them. - -You can install such cross-provider dependencies when installing from PyPI. For example: - -```bash -pip install apache-airflow-backport-providers-apache-livy[http] -``` - -| Dependent package | Extra | -|:---------------------------------------------------------------------------------------------------------------|:--------| -| [apache-airflow-backport-providers-http](https://github.com/apache/airflow/tree/master/airflow/providers/http) | http | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `apache.livy` provider -are in the `airflow.providers.apache.livy` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - -### New operators - -| New Airflow 2.0 operators: `airflow.providers.apache.livy` package | -|:-----------------------------------------------------------------------------------------------------------------------------| -| [operators.livy.LivyOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/livy/operators/livy.py) | - - - -## Sensors - - -### New sensors - -| New Airflow 2.0 sensors: `airflow.providers.apache.livy` package | -|:-----------------------------------------------------------------------------------------------------------------------| -| [sensors.livy.LivySensor](https://github.com/apache/airflow/blob/master/airflow/providers/apache/livy/sensors/livy.py) | - - - -## Hooks - - -### New hooks - -| New Airflow 2.0 hooks: `airflow.providers.apache.livy` package | -|:-----------------------------------------------------------------------------------------------------------------| -| [hooks.livy.LivyHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/livy/hooks/livy.py) | - - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [483068745](https://github.com/apache/airflow/commit/48306874538eea7cfd42358d5ebb59705204bfc4) | 2020-10-24 | Use Python 3 style super classes (#11806) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check 
(#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05 | Enforce keyword only arguments on apache operators (#10170) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 
2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [d3cf23dc0](https://github.com/apache/airflow/commit/d3cf23dc07b5fb92ee2a5be07b0685a4fca36f86) | 2020-02-19 | [AIRFLOW-5470] Add Apache Livy REST operator (#6090) | diff --git a/airflow/providers/apache/livy/CHANGELOG.rst b/airflow/providers/apache/livy/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/apache/livy/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. 
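The deleted livy README above lists `LivyOperator`, `LivySensor`, and `LivyHook` as the provider's public classes. A hedged sketch of how the operator and sensor might be wired together follows; the `file` parameter, the XCom contract between the two tasks, and the connection id are assumptions, not something this diff documents:

```python
from datetime import datetime

from airflow import DAG
from airflow.providers.apache.livy.operators.livy import LivyOperator
from airflow.providers.apache.livy.sensors.livy import LivySensor

with DAG(dag_id="livy_sketch", start_date=datetime(2021, 1, 1), schedule_interval=None) as dag:
    # Submit a Spark job to Livy's batch REST API (parameter names assumed).
    submit = LivyOperator(
        task_id="submit_batch",
        file="/path/to/spark-job.jar",
        livy_conn_id="livy_default",
    )
    # Poll the batch until it finishes; assumes the operator pushes the
    # batch id to XCom and that batch_id is a templated field.
    wait = LivySensor(
        task_id="wait_for_batch",
        batch_id="{{ ti.xcom_pull(task_ids='submit_batch') }}",
        livy_conn_id="livy_default",
    )
    submit >> wait
```

Splitting submit and wait into two tasks keeps the worker slot free while the Spark job runs, which is the usual reason for pairing an operator with a sensor like this.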
diff --git a/airflow/providers/apache/livy/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/apache/livy/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index c3ad1ccc15ed5..0000000000000 --- a/airflow/providers/apache/livy/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,47 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [483068745](https://github.com/apache/airflow/commit/48306874538eea7cfd42358d5ebb59705204bfc4) | 2020-10-24 | Use Python 3 style super classes (#11806) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| 
[0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05 | Enforce keyword only arguments on apache operators (#10170) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [d3cf23dc0](https://github.com/apache/airflow/commit/d3cf23dc07b5fb92ee2a5be07b0685a4fca36f86) | 2020-02-19 | [AIRFLOW-5470] Add Apache Livy REST operator (#6090) | diff --git a/airflow/providers/apache/livy/README.md b/airflow/providers/apache/livy/README.md deleted file mode 100644 index b2fed118a3558..0000000000000 --- a/airflow/providers/apache/livy/README.md +++ /dev/null @@ -1,162 +0,0 @@ - - - -# Package apache-airflow-providers-apache-livy - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [Cross provider package dependencies](#cross-provider-package-dependencies) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [New operators](#new-operators) - - [Sensors](#sensors) - - [New sensors](#new-sensors) - - [Hooks](#hooks) - - [New hooks](#new-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for `apache.livy` provider. All classes for this provider package -are in `airflow.providers.apache.livy` python package. - - - -## Installation - -NOTE! - -On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option -`--use-deprecated legacy-resolver` to your pip install command. - -You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-apache-livy` - -## Cross provider package dependencies - -Those are dependencies that might be needed in order to use all the features of the package. -You need to install the specified backport providers package in order to use them. - -You can install such cross-provider dependencies when installing from PyPI. 
For example: - -```bash -pip install apache-airflow-providers-apache-livy[http] -``` - -| Dependent package | Extra | -|:----------------------------------------------------------------------------------------|:--------| -| [apache-airflow-providers-http](https://pypi.org/project/apache-airflow-providers-http) | http | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `apache.livy` provider -are in the `airflow.providers.apache.livy` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - -### New operators - -| New Airflow 2.0 operators: `airflow.providers.apache.livy` package | -|:-----------------------------------------------------------------------------------------------------------------------------| -| [operators.livy.LivyOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/livy/operators/livy.py) | - - - -## Sensors - - -### New sensors - -| New Airflow 2.0 sensors: `airflow.providers.apache.livy` package | -|:-----------------------------------------------------------------------------------------------------------------------| -| [sensors.livy.LivySensor](https://github.com/apache/airflow/blob/master/airflow/providers/apache/livy/sensors/livy.py) | - - - -## Hooks - - -### New hooks - -| New Airflow 2.0 hooks: `airflow.providers.apache.livy` package | -|:-----------------------------------------------------------------------------------------------------------------| -| [hooks.livy.LivyHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/livy/hooks/livy.py) | - - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| 
[59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [483068745](https://github.com/apache/airflow/commit/48306874538eea7cfd42358d5ebb59705204bfc4) | 2020-10-24 | Use Python 3 style super classes (#11806) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05 | Enforce keyword only arguments on apache operators (#10170) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| 
[12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [d3cf23dc0](https://github.com/apache/airflow/commit/d3cf23dc07b5fb92ee2a5be07b0685a4fca36f86) | 2020-02-19 | [AIRFLOW-5470] Add Apache Livy REST operator (#6090) | diff --git a/airflow/providers/apache/pig/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/apache/pig/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index 4ab13f6bcb148..0000000000000 --- a/airflow/providers/apache/pig/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,22 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | 
Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23 | Add call to Super call in apache providers (#7820) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18 | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12 | [AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142) | diff --git a/airflow/providers/apache/pig/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/apache/pig/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index f28d4c469ab28..0000000000000 --- a/airflow/providers/apache/pig/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,18 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | 
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05 | Enforce keyword only arguments on apache operators (#10170) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/apache/pig/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/apache/pig/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 528a7e085f91f..0000000000000 --- a/airflow/providers/apache/pig/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,10 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/apache/pig/BACKPORT_PROVIDER_README.md b/airflow/providers/apache/pig/BACKPORT_PROVIDER_README.md deleted file mode 100644 index aa3ac815f39b4..0000000000000 --- a/airflow/providers/apache/pig/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,135 +0,0 @@ - - - -# Package apache-airflow-backport-providers-apache-pig - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) - [Installation](#installation) - [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) - [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for the `apache.pig` provider. All classes for this provider package -are in the `airflow.providers.apache.pig` Python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade Python to 3.6+ if you -want to use this backport package.
- - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-apache-pig` - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `apache.pig` provider -are in the `airflow.providers.apache.pig` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.apache.pig` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:-------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------| -| [operators.pig.PigOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/pig/operators/pig.py) | [operators.pig_operator.PigOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/pig_operator.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.apache.pig` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------| -| [hooks.pig.PigCliHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/pig/hooks/pig.py) | [hooks.pig_hook.PigCliHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/pig_hook.py) | - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| 
[9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05 | Enforce keyword only arguments on apache operators (#10170) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23 | Add call to Super call in apache providers (#7820) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18 | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12 | [AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142) | diff --git a/airflow/providers/apache/pig/CHANGELOG.rst b/airflow/providers/apache/pig/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/apache/pig/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. 
diff --git a/airflow/providers/apache/pig/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/apache/pig/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index 3d34579ff65a0..0000000000000 --- a/airflow/providers/apache/pig/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,51 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| 
[ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05 | Enforce keyword only arguments on apache operators (#10170) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23 | Add call to Super call in apache providers (#7820) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18 | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12 | [AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142) | diff --git a/airflow/providers/apache/pig/README.md b/airflow/providers/apache/pig/README.md deleted file mode 100644 index 3e25474839227..0000000000000 --- a/airflow/providers/apache/pig/README.md +++ /dev/null @@ -1,137 +0,0 @@ - - - -# Package apache-airflow-providers-apache-pig - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) - [Installation](#installation) - [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) - [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for the `apache.pig` provider. All classes for this provider package -are in the `airflow.providers.apache.pig` Python package. - - - -## Installation - -NOTE! - -In November 2020, a new version of pip (20.3) was released with a new 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to installation errors - depending on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -`pip install --upgrade pip==20.2.4` or, if you use pip 20.3, add the option -`--use-deprecated legacy-resolver` to your pip install command.
- -You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-apache-pig` - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `apache.pig` provider -are in the `airflow.providers.apache.pig` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.apache.pig` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:-------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------| -| [operators.pig.PigOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/pig/operators/pig.py) | [operators.pig_operator.PigOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/pig_operator.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.apache.pig` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------| -| [hooks.pig.PigCliHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/pig/hooks/pig.py) | [hooks.pig_hook.PigCliHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/pig_hook.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| 
[85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05 | Enforce keyword only arguments on apache operators (#10170) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| 
[12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23 | Add call to Super call in apache providers (#7820) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18 | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12 | [AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142) | diff --git a/airflow/providers/apache/pig/provider.yaml b/airflow/providers/apache/pig/provider.yaml index cdfa6f2e9c0c0..7838aab896006 100644 --- a/airflow/providers/apache/pig/provider.yaml +++ b/airflow/providers/apache/pig/provider.yaml @@ -26,6 +26,7 @@ versions: integrations: - integration-name: Apache Pig external-doc-url: https://pig.apache.org/ + logo: /integration-logos/apache/pig.png tags: [apache] operators: diff --git a/airflow/providers/apache/pinot/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/apache/pinot/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index 833e46d4987f8..0000000000000 --- a/airflow/providers/apache/pinot/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ 
/dev/null @@ -1,20 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [45c898330](https://github.com/apache/airflow/commit/45c8983306ab1c54abdacd8f870e790fad25cb37) | 2020-04-13 | Less aggressive eager upgrade of requirements (#8267) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23 | Add call to Super call in apache providers (#7820) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12 | [AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142) | diff --git a/airflow/providers/apache/pinot/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/apache/pinot/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index b0e6e4d2e962f..0000000000000 --- a/airflow/providers/apache/pinot/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,14 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare 
Backport release 2020.09.07 (#11238) | -| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24 | Fix incorrect Usage of Optional[bool] (#11138) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/apache/pinot/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/apache/pinot/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 528a7e085f91f..0000000000000 --- a/airflow/providers/apache/pinot/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,10 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/apache/pinot/BACKPORT_PROVIDER_CHANGES_2020.11.23.md b/airflow/providers/apache/pinot/BACKPORT_PROVIDER_CHANGES_2020.11.23.md deleted file mode 100644 index 195cce519c10c..0000000000000 --- a/airflow/providers/apache/pinot/BACKPORT_PROVIDER_CHANGES_2020.11.23.md +++ /dev/null @@ -1,18 +0,0 @@ - - -### Release 2020.11.23 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [19b7e4565](https://github.com/apache/airflow/commit/19b7e4565e6372d50ef0fbb5678a484a7afbdbee) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [309b325c1](https://github.com/apache/airflow/commit/309b325c177d46f3cea8e1812a8728623c633604) | 2020-11-13 | Update deprecated Apache Pinot Broker API (#12333) | -| 
[7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | diff --git a/airflow/providers/apache/pinot/BACKPORT_PROVIDER_README.md b/airflow/providers/apache/pinot/BACKPORT_PROVIDER_README.md deleted file mode 100644 index 0107ac3107841..0000000000000 --- a/airflow/providers/apache/pinot/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,143 +0,0 @@ - - - -# Package apache-airflow-backport-providers-apache-pinot - -Release: 2020.11.23 - -**Table of contents** - -- [Backport package](#backport-package) - [Installation](#installation) - [PIP requirements](#pip-requirements) - [Provider class summary](#provider-classes-summary) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) - [Releases](#releases) - - [Release 2020.11.23](#release-20201123) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for the `apache.pinot` provider. All classes for this provider package -are in the `airflow.providers.apache.pinot` Python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade Python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-apache-pinot` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| pinotdb | ==0.1.1 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `apache.pinot` provider -are in the `airflow.providers.apache.pinot` package. 
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.apache.pinot` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------| -| [hooks.pinot.PinotAdminHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/pinot/hooks/pinot.py) | [contrib.hooks.pinot_hook.PinotAdminHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/pinot_hook.py) | -| [hooks.pinot.PinotDbApiHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/pinot/hooks/pinot.py) | [contrib.hooks.pinot_hook.PinotDbApiHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/pinot_hook.py) | - - - -## Releases - -### Release 2020.11.23 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [19b7e4565](https://github.com/apache/airflow/commit/19b7e4565e6372d50ef0fbb5678a484a7afbdbee) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [309b325c1](https://github.com/apache/airflow/commit/309b325c177d46f3cea8e1812a8728623c633604) | 2020-11-13 | Update deprecated Apache Pinot Broker API (#12333) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | - - -### Release 2020.10.29 - -| Commit | Committed | Subject | 
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24 | Fix incorrect Usage of Optional[bool] (#11138) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [45c898330](https://github.com/apache/airflow/commit/45c8983306ab1c54abdacd8f870e790fad25cb37) | 2020-04-13 | Less aggressive eager upgrade of requirements (#8267) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23 | Add call to Super call in apache providers (#7820) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12 | [AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142) | diff --git a/airflow/providers/apache/pinot/CHANGELOG.rst b/airflow/providers/apache/pinot/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/apache/pinot/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. 
diff --git a/airflow/providers/apache/pinot/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/apache/pinot/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index d6470d3bf49df..0000000000000 --- a/airflow/providers/apache/pinot/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,47 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [7ca0b6f12](https://github.com/apache/airflow/commit/7ca0b6f121c9cec6e25de130f86a56d7c7fbe38c) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) (#12438) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [309b325c1](https://github.com/apache/airflow/commit/309b325c177d46f3cea8e1812a8728623c633604) | 2020-11-13 | Update deprecated Apache Pinot Broker API (#12333) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider 
packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24 | Fix incorrect Usage of Optional[bool] (#11138) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [45c898330](https://github.com/apache/airflow/commit/45c8983306ab1c54abdacd8f870e790fad25cb37) | 2020-04-13 | Less aggressive eager upgrade of requirements (#8267) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23 | Add call to Super call in apache providers (#7820) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12 | [AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142) | diff --git a/airflow/providers/apache/pinot/README.md b/airflow/providers/apache/pinot/README.md deleted file mode 100644 index a6f6d96a676e9..0000000000000 --- a/airflow/providers/apache/pinot/README.md +++ /dev/null @@ -1,128 +0,0 @@ - - - -# Package apache-airflow-providers-apache-pinot - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for the `apache.pinot` provider. All classes for this provider package -are in the `airflow.providers.apache.pinot` Python package. - - - -## Installation - -NOTE! - -In November 2020, a new version of pip (20.3) was released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to installation errors, depending on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -(`pip install --upgrade pip==20.2.4`) or, if you use pip 20.3, add the option -`--use-deprecated legacy-resolver` to your pip install command. - -You can install this package on top of an existing Airflow 2.* installation via -`pip install apache-airflow-providers-apache-pinot` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| pinotdb | ==0.1.1 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `apache.pinot` provider -are in the `airflow.providers.apache.pinot` package.
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.apache.pinot` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------| -| [hooks.pinot.PinotAdminHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/pinot/hooks/pinot.py) | [contrib.hooks.pinot_hook.PinotAdminHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/pinot_hook.py) | -| [hooks.pinot.PinotDbApiHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/pinot/hooks/pinot.py) | [contrib.hooks.pinot_hook.PinotDbApiHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/pinot_hook.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [7ca0b6f12](https://github.com/apache/airflow/commit/7ca0b6f121c9cec6e25de130f86a56d7c7fbe38c) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) (#12438) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [309b325c1](https://github.com/apache/airflow/commit/309b325c177d46f3cea8e1812a8728623c633604) | 2020-11-13 | Update deprecated Apache Pinot Broker API (#12333) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| 
[4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24 | Fix incorrect Usage of Optional[bool] (#11138) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [45c898330](https://github.com/apache/airflow/commit/45c8983306ab1c54abdacd8f870e790fad25cb37) | 2020-04-13 | Less aggressive eager upgrade of requirements (#8267) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23 | Add call to Super call in apache providers (#7820) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12 | [AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142) | diff --git a/airflow/providers/apache/pinot/provider.yaml b/airflow/providers/apache/pinot/provider.yaml index 7aff6a4a921ec..be0714aa08174 100644 --- a/airflow/providers/apache/pinot/provider.yaml +++ b/airflow/providers/apache/pinot/provider.yaml @@ -26,6 +26,7 @@ versions: integrations: - integration-name: Apache Pinot external-doc-url: https://pinot.apache.org/ + logo: /integration-logos/apache/pinot.png tags: [apache] hooks: diff --git a/airflow/providers/apache/spark/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/apache/spark/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index 255595568e657..0000000000000 --- a/airflow/providers/apache/spark/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,27 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [40bf8f28f](https://github.com/apache/airflow/commit/40bf8f28f97f17f40d993d207ea740eba54593ee) | 2020-06-18 | Detect automatically the lack of reference to the guide in the operator descriptions (#9290) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| 
[00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [7506c73f1](https://github.com/apache/airflow/commit/7506c73f1721151e9c50ef8bdb70d2136a16190b) | 2020-05-10 | Add default `conf` parameter to Spark JDBC Hook (#8787) | -| [487b5cc50](https://github.com/apache/airflow/commit/487b5cc50c5b28a045cb12a1527a5453b0a6a7af) | 2020-05-06 | Add guide for Apache Spark operators (#8305) | -| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09 | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170) | -| [be1451b0e](https://github.com/apache/airflow/commit/be1451b0e1b7e33f4621e24649f6a4fa87c34e01) | 2020-04-02 | [AIRFLOW-7026] Improve SparkSqlHook's error message (#7749) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23 | Add call to Super call in apache providers (#7820) | -| [2327aa5a2](https://github.com/apache/airflow/commit/2327aa5a263f25beeaf4ba79670f10f001daf0bf) | 2020-03-12 | [AIRFLOW-7025] Fix SparkSqlHook.run_query to handle its parameter properly (#7677) | -| [024b4bf96](https://github.com/apache/airflow/commit/024b4bf962bc30ecb70da9650e68b523a0dbcff8) | 2020-03-10 | [AIRFLOW-7024] Add the verbose parameter support to SparkSqlOperator (#7676) | -| [b59042b5a](https://github.com/apache/airflow/commit/b59042b5ab083c77ba08ba804df76b7c728815dc) | 2020-02-28 | [AIRFLOW-6949] Respect explicit `spark.kubernetes.namespace` conf to SparkSubmitOperator (#7575) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12 | [AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142) | diff --git a/airflow/providers/apache/spark/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/apache/spark/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index b0f9f802dea85..0000000000000 --- a/airflow/providers/apache/spark/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,24 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------------------------------| -| 
[ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [d76026545](https://github.com/apache/airflow/commit/d7602654526fdd2876466371404784bd17cfe0d2) | 2020-08-25 | PyDocStyle: No whitespaces allowed surrounding docstring text (#10533) | -| [d1bce91bb](https://github.com/apache/airflow/commit/d1bce91bb21d5a468fa6a0207156c28fe1ca6513) | 2020-08-25 | PyDocStyle: Enable D403: Capitalized first word of docstring (#10530) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22 | Replace assigment with Augmented assignment (#10468) | -| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05 | Enforce keyword only arguments on apache operators (#10170) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [1427e4acb](https://github.com/apache/airflow/commit/1427e4acb4a1dc5be28cfeef75c90032d515aab6) | 2020-07-22 | Update Spark submit operator for Spark 3 support (#8730) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [0873070e0](https://github.com/apache/airflow/commit/0873070e08f7216b6949e7de4e2329175a764321) | 2020-07-11 | Mask other forms of password arguments in SparkSubmitOperator (#9615) | -| [13a827d80](https://github.com/apache/airflow/commit/13a827d80fef738e25f30ea20c095ad4dbd401f6) | 2020-07-09 | Ensure Kerberos token is valid in SparkSubmitOperator before running `yarn kill` (#9044) | -| [067806d59](https://github.com/apache/airflow/commit/067806d5985301f21da78f0a81056dbec348e6ba) | 2020-06-29 | Add tests for spark_jdbc_script (#9491) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/apache/spark/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/apache/spark/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 50ad6077367a3..0000000000000 --- a/airflow/providers/apache/spark/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,11 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | 
-|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12 | Remove redundant None provided as default to dict.get() (#11448) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/apache/spark/BACKPORT_PROVIDER_README.md b/airflow/providers/apache/spark/BACKPORT_PROVIDER_README.md deleted file mode 100644 index aa98a86033be4..0000000000000 --- a/airflow/providers/apache/spark/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,158 +0,0 @@ - - - -# Package apache-airflow-backport-providers-apache-spark - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for the `apache.spark` provider. All classes for this provider package -are in the `airflow.providers.apache.spark` Python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+, you need to upgrade Python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing Airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-apache-spark` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| pyspark | | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `apache.spark` provider -are in the `airflow.providers.apache.spark` package.
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.apache.spark` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:-----------------------------------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.spark_jdbc.SparkJDBCOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/spark/operators/spark_jdbc.py) | [contrib.operators.spark_jdbc_operator.SparkJDBCOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/spark_jdbc_operator.py) | -| [operators.spark_sql.SparkSqlOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/spark/operators/spark_sql.py) | [contrib.operators.spark_sql_operator.SparkSqlOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/spark_sql_operator.py) | -| [operators.spark_submit.SparkSubmitOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/spark/operators/spark_submit.py) | [contrib.operators.spark_submit_operator.SparkSubmitOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/spark_submit_operator.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.apache.spark` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:-----------------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------| -| [hooks.spark_jdbc.SparkJDBCHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/spark/hooks/spark_jdbc.py) | [contrib.hooks.spark_jdbc_hook.SparkJDBCHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/spark_jdbc_hook.py) | -| [hooks.spark_sql.SparkSqlHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/spark/hooks/spark_sql.py) | [contrib.hooks.spark_sql_hook.SparkSqlHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/spark_sql_hook.py) | -| [hooks.spark_submit.SparkSubmitHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/spark/hooks/spark_submit.py) | [contrib.hooks.spark_submit_hook.SparkSubmitHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/spark_submit_hook.py) | - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| 
[16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12 | Remove redundant None provided as default to dict.get() (#11448) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [d76026545](https://github.com/apache/airflow/commit/d7602654526fdd2876466371404784bd17cfe0d2) | 2020-08-25 | PyDocStyle: No whitespaces allowed surrounding docstring text (#10533) | -| [d1bce91bb](https://github.com/apache/airflow/commit/d1bce91bb21d5a468fa6a0207156c28fe1ca6513) | 2020-08-25 | PyDocStyle: Enable D403: Capitalized first word of docstring (#10530) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22 | Replace assigment with Augmented assignment (#10468) | -| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05 | Enforce keyword only arguments on apache operators (#10170) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [1427e4acb](https://github.com/apache/airflow/commit/1427e4acb4a1dc5be28cfeef75c90032d515aab6) | 2020-07-22 | Update Spark submit operator for Spark 3 support (#8730) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [0873070e0](https://github.com/apache/airflow/commit/0873070e08f7216b6949e7de4e2329175a764321) | 2020-07-11 | Mask other forms of password arguments in SparkSubmitOperator (#9615) | -| [13a827d80](https://github.com/apache/airflow/commit/13a827d80fef738e25f30ea20c095ad4dbd401f6) | 2020-07-09 | Ensure Kerberos token is valid in SparkSubmitOperator before running `yarn 
kill` (#9044) | -| [067806d59](https://github.com/apache/airflow/commit/067806d5985301f21da78f0a81056dbec348e6ba) | 2020-06-29 | Add tests for spark_jdbc_script (#9491) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [40bf8f28f](https://github.com/apache/airflow/commit/40bf8f28f97f17f40d993d207ea740eba54593ee) | 2020-06-18 | Detect automatically the lack of reference to the guide in the operator descriptions (#9290) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [7506c73f1](https://github.com/apache/airflow/commit/7506c73f1721151e9c50ef8bdb70d2136a16190b) | 2020-05-10 | Add default `conf` parameter to Spark JDBC Hook (#8787) | -| [487b5cc50](https://github.com/apache/airflow/commit/487b5cc50c5b28a045cb12a1527a5453b0a6a7af) | 2020-05-06 | Add guide for Apache Spark operators (#8305) | -| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09 | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170) | -| [be1451b0e](https://github.com/apache/airflow/commit/be1451b0e1b7e33f4621e24649f6a4fa87c34e01) | 2020-04-02 | [AIRFLOW-7026] Improve SparkSqlHook's error message (#7749) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23 | Add call to Super call in apache providers (#7820) | -| [2327aa5a2](https://github.com/apache/airflow/commit/2327aa5a263f25beeaf4ba79670f10f001daf0bf) | 2020-03-12 | [AIRFLOW-7025] Fix SparkSqlHook.run_query to handle its 
parameter properly (#7677) | -| [024b4bf96](https://github.com/apache/airflow/commit/024b4bf962bc30ecb70da9650e68b523a0dbcff8) | 2020-03-10 | [AIRFLOW-7024] Add the verbose parameter support to SparkSqlOperator (#7676) | -| [b59042b5a](https://github.com/apache/airflow/commit/b59042b5ab083c77ba08ba804df76b7c728815dc) | 2020-02-28 | [AIRFLOW-6949] Respect explicit `spark.kubernetes.namespace` conf to SparkSubmitOperator (#7575) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12 | [AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142) | diff --git a/airflow/providers/apache/spark/CHANGELOG.rst b/airflow/providers/apache/spark/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/apache/spark/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. 
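For DAG authors, the practical upshot of the "Moved operators" and "Moved hooks" tables in the README files removed above is an import-path change: class names stay the same, only the module paths move from `airflow.contrib` to the provider package. The sketch below is illustrative only; the DAG id, task id, and application path are placeholders rather than values taken from this change, but the import locations match the tables.

```python
# Minimal migration sketch (ids and paths are placeholders, not part of this change).
from datetime import datetime

from airflow import DAG

# Airflow 1.10.* location, per the "Moved operators" table:
# from airflow.contrib.operators.spark_submit_operator import SparkSubmitOperator

# Airflow 2.0 provider-package location:
from airflow.providers.apache.spark.operators.spark_submit import SparkSubmitOperator

with DAG(
    dag_id="example_spark_submit",  # placeholder DAG id
    start_date=datetime(2021, 1, 1),
    schedule_interval=None,
) as dag:
    submit_app = SparkSubmitOperator(
        task_id="submit_app",
        application="/opt/spark-apps/my_app.py",  # placeholder application path
        conn_id="spark_default",  # the provider's default Spark connection id
    )
```

The hooks follow the same pattern, e.g. `airflow.contrib.hooks.spark_submit_hook.SparkSubmitHook` becomes `airflow.providers.apache.spark.hooks.spark_submit.SparkSubmitHook`, with the class name unchanged.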
diff --git a/airflow/providers/apache/spark/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/apache/spark/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index 25acd98caafef..0000000000000 --- a/airflow/providers/apache/spark/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,64 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [bd90136aa](https://github.com/apache/airflow/commit/bd90136aaf5035e3234fe545b79a3e4aad21efe2) | 2020-11-30 | Move operator guides to provider documentation packages (#12681) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| 
[d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12 | Remove redundant None provided as default to dict.get() (#11448) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [d76026545](https://github.com/apache/airflow/commit/d7602654526fdd2876466371404784bd17cfe0d2) | 2020-08-25 | PyDocStyle: No whitespaces allowed surrounding docstring text (#10533) | -| [d1bce91bb](https://github.com/apache/airflow/commit/d1bce91bb21d5a468fa6a0207156c28fe1ca6513) | 2020-08-25 | PyDocStyle: Enable D403: Capitalized first word of docstring (#10530) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22 | Replace assigment with Augmented assignment (#10468) | -| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05 | Enforce keyword only arguments on apache operators (#10170) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [1427e4acb](https://github.com/apache/airflow/commit/1427e4acb4a1dc5be28cfeef75c90032d515aab6) | 2020-07-22 | Update Spark submit operator for Spark 3 support (#8730) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [0873070e0](https://github.com/apache/airflow/commit/0873070e08f7216b6949e7de4e2329175a764321) | 2020-07-11 | Mask other forms of password arguments in SparkSubmitOperator (#9615) | -| [13a827d80](https://github.com/apache/airflow/commit/13a827d80fef738e25f30ea20c095ad4dbd401f6) | 2020-07-09 | Ensure Kerberos token is valid in SparkSubmitOperator before running `yarn kill` (#9044) | -| [067806d59](https://github.com/apache/airflow/commit/067806d5985301f21da78f0a81056dbec348e6ba) | 2020-06-29 | Add tests for spark_jdbc_script (#9491) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | 
Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [40bf8f28f](https://github.com/apache/airflow/commit/40bf8f28f97f17f40d993d207ea740eba54593ee) | 2020-06-18 | Detect automatically the lack of reference to the guide in the operator descriptions (#9290) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [7506c73f1](https://github.com/apache/airflow/commit/7506c73f1721151e9c50ef8bdb70d2136a16190b) | 2020-05-10 | Add default `conf` parameter to Spark JDBC Hook (#8787) | -| [487b5cc50](https://github.com/apache/airflow/commit/487b5cc50c5b28a045cb12a1527a5453b0a6a7af) | 2020-05-06 | Add guide for Apache Spark operators (#8305) | -| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09 | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170) | -| [be1451b0e](https://github.com/apache/airflow/commit/be1451b0e1b7e33f4621e24649f6a4fa87c34e01) | 2020-04-02 | [AIRFLOW-7026] Improve SparkSqlHook's error message (#7749) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23 | Add call to Super call in apache providers (#7820) | -| [2327aa5a2](https://github.com/apache/airflow/commit/2327aa5a263f25beeaf4ba79670f10f001daf0bf) | 2020-03-12 | [AIRFLOW-7025] Fix SparkSqlHook.run_query to handle its parameter properly (#7677) | -| [024b4bf96](https://github.com/apache/airflow/commit/024b4bf962bc30ecb70da9650e68b523a0dbcff8) | 2020-03-10 | [AIRFLOW-7024] Add the verbose parameter support to SparkSqlOperator (#7676) | -| [b59042b5a](https://github.com/apache/airflow/commit/b59042b5ab083c77ba08ba804df76b7c728815dc) | 2020-02-28 | [AIRFLOW-6949] Respect explicit `spark.kubernetes.namespace` conf to SparkSubmitOperator (#7575) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12 | 
[AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142) | diff --git a/airflow/providers/apache/spark/README.md b/airflow/providers/apache/spark/README.md deleted file mode 100644 index 02308e77a9319..0000000000000 --- a/airflow/providers/apache/spark/README.md +++ /dev/null @@ -1,161 +0,0 @@ - - - -# Package apache-airflow-providers-apache-spark - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for `apache.spark` provider. All classes for this provider package -are in `airflow.providers.apache.spark` python package. - - - -## Installation - -NOTE! - -On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option -`--use-deprecated legacy-resolver` to your pip install command. - -You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-apache-spark` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| pyspark | | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `apache.spark` provider -are in the `airflow.providers.apache.spark` package. 
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.apache.spark` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:-----------------------------------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.spark_jdbc.SparkJDBCOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/spark/operators/spark_jdbc.py) | [contrib.operators.spark_jdbc_operator.SparkJDBCOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/spark_jdbc_operator.py) | -| [operators.spark_sql.SparkSqlOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/spark/operators/spark_sql.py) | [contrib.operators.spark_sql_operator.SparkSqlOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/spark_sql_operator.py) | -| [operators.spark_submit.SparkSubmitOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/spark/operators/spark_submit.py) | [contrib.operators.spark_submit_operator.SparkSubmitOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/spark_submit_operator.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.apache.spark` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:-----------------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------| -| [hooks.spark_jdbc.SparkJDBCHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/spark/hooks/spark_jdbc.py) | [contrib.hooks.spark_jdbc_hook.SparkJDBCHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/spark_jdbc_hook.py) | -| [hooks.spark_sql.SparkSqlHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/spark/hooks/spark_sql.py) | [contrib.hooks.spark_sql_hook.SparkSqlHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/spark_sql_hook.py) | -| [hooks.spark_submit.SparkSubmitHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/spark/hooks/spark_submit.py) | [contrib.hooks.spark_submit_hook.SparkSubmitHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/spark_submit_hook.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per 
provider (#12558) | -| [bd90136aa](https://github.com/apache/airflow/commit/bd90136aaf5035e3234fe545b79a3e4aad21efe2) | 2020-11-30 | Move operator guides to provider documentation packages (#12681) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12 | Remove redundant None provided as default to dict.get() (#11448) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers 
Packages (#10543) | -| [d76026545](https://github.com/apache/airflow/commit/d7602654526fdd2876466371404784bd17cfe0d2) | 2020-08-25 | PyDocStyle: No whitespaces allowed surrounding docstring text (#10533) | -| [d1bce91bb](https://github.com/apache/airflow/commit/d1bce91bb21d5a468fa6a0207156c28fe1ca6513) | 2020-08-25 | PyDocStyle: Enable D403: Capitalized first word of docstring (#10530) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22 | Replace assigment with Augmented assignment (#10468) | -| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05 | Enforce keyword only arguments on apache operators (#10170) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [1427e4acb](https://github.com/apache/airflow/commit/1427e4acb4a1dc5be28cfeef75c90032d515aab6) | 2020-07-22 | Update Spark submit operator for Spark 3 support (#8730) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [0873070e0](https://github.com/apache/airflow/commit/0873070e08f7216b6949e7de4e2329175a764321) | 2020-07-11 | Mask other forms of password arguments in SparkSubmitOperator (#9615) | -| [13a827d80](https://github.com/apache/airflow/commit/13a827d80fef738e25f30ea20c095ad4dbd401f6) | 2020-07-09 | Ensure Kerberos token is valid in SparkSubmitOperator before running `yarn kill` (#9044) | -| [067806d59](https://github.com/apache/airflow/commit/067806d5985301f21da78f0a81056dbec348e6ba) | 2020-06-29 | Add tests for spark_jdbc_script (#9491) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [40bf8f28f](https://github.com/apache/airflow/commit/40bf8f28f97f17f40d993d207ea740eba54593ee) | 2020-06-18 | Detect automatically the lack of reference to the guide in the operator descriptions (#9290) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [7506c73f1](https://github.com/apache/airflow/commit/7506c73f1721151e9c50ef8bdb70d2136a16190b) | 2020-05-10 | Add default `conf` parameter to Spark JDBC Hook (#8787) | -| [487b5cc50](https://github.com/apache/airflow/commit/487b5cc50c5b28a045cb12a1527a5453b0a6a7af) | 2020-05-06 | Add guide for Apache Spark operators (#8305) | -| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09 | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170) | -| [be1451b0e](https://github.com/apache/airflow/commit/be1451b0e1b7e33f4621e24649f6a4fa87c34e01) | 2020-04-02 | [AIRFLOW-7026] Improve SparkSqlHook's error message (#7749) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23 | Add call to Super call in apache providers (#7820) | -| [2327aa5a2](https://github.com/apache/airflow/commit/2327aa5a263f25beeaf4ba79670f10f001daf0bf) | 2020-03-12 | [AIRFLOW-7025] Fix SparkSqlHook.run_query to handle its parameter properly (#7677) | -| [024b4bf96](https://github.com/apache/airflow/commit/024b4bf962bc30ecb70da9650e68b523a0dbcff8) | 2020-03-10 | [AIRFLOW-7024] Add the verbose parameter support to SparkSqlOperator (#7676) | -| [b59042b5a](https://github.com/apache/airflow/commit/b59042b5ab083c77ba08ba804df76b7c728815dc) | 2020-02-28 | [AIRFLOW-6949] Respect explicit `spark.kubernetes.namespace` conf to SparkSubmitOperator (#7575) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12 | [AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142) | diff --git a/airflow/providers/apache/spark/provider.yaml b/airflow/providers/apache/spark/provider.yaml index 193ec8c47ea80..0933ac2effe23 100644 --- a/airflow/providers/apache/spark/provider.yaml +++ b/airflow/providers/apache/spark/provider.yaml @@ -28,6 +28,7 @@ integrations: external-doc-url: https://spark.apache.org/ how-to-guide: - /docs/apache-airflow-providers-apache-spark/operators.rst + logo: /integration-logos/apache/spark.png tags: [apache] operators: diff --git a/airflow/providers/apache/sqoop/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/apache/sqoop/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index 899a6ae561765..0000000000000 --- a/airflow/providers/apache/sqoop/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,19 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | 
Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23 | Add call to Super call in apache providers (#7820) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12 | [AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142) | diff --git a/airflow/providers/apache/sqoop/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/apache/sqoop/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 699945a30f52d..0000000000000 --- a/airflow/providers/apache/sqoop/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,15 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| 
[3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05 | Enforce keyword only arguments on apache operators (#10170) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/apache/sqoop/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/apache/sqoop/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 528a7e085f91f..0000000000000 --- a/airflow/providers/apache/sqoop/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,10 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/apache/sqoop/BACKPORT_PROVIDER_README.md b/airflow/providers/apache/sqoop/BACKPORT_PROVIDER_README.md deleted file mode 100644 index 368cc1139e11c..0000000000000 --- a/airflow/providers/apache/sqoop/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,129 +0,0 @@ - - - -# Package apache-airflow-backport-providers-apache-sqoop - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for the `apache.sqoop` provider. All classes for this provider package -are in the `airflow.providers.apache.sqoop` Python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+, you need to upgrade Python to 3.6+ if you -want to use this backport package. 
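The Python 3.6+ requirement bites at import time, so it can help to fail fast. A minimal, hypothetical guard (not shipped with the package) that a DAG module could run before importing any backport classes:

```python
import sys

# Backport provider packages are Python 3.6+ only; abort early on older
# interpreters instead of failing later with an obscure SyntaxError/ImportError.
if sys.version_info < (3, 6):
    raise RuntimeError(
        "apache-airflow-backport-providers-apache-sqoop requires Python 3.6+"
    )
```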
- - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-apache-sqoop` - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `apache.sqoop` provider -are in the `airflow.providers.apache.sqoop` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.apache.sqoop` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:---------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.sqoop.SqoopOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/sqoop/operators/sqoop.py) | [contrib.operators.sqoop_operator.SqoopOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sqoop_operator.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.apache.sqoop` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:---------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------| -| [hooks.sqoop.SqoopHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/sqoop/hooks/sqoop.py) | [contrib.hooks.sqoop_hook.SqoopHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/sqoop_hook.py) | - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| 
[fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05 | Enforce keyword only arguments on apache operators (#10170) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23 | Add call to Super call in apache providers (#7820) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12 | [AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142) | diff --git a/airflow/providers/apache/sqoop/CHANGELOG.rst b/airflow/providers/apache/sqoop/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/apache/sqoop/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. 
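The moved-classes tables in the sqoop backport README above amount to a one-line import change per class. A minimal migration sketch, using only the module locations listed in those tables (no behavior change is implied):

```python
# Airflow 1.10.* locations (deprecated with the move to provider packages):
# from airflow.contrib.operators.sqoop_operator import SqoopOperator
# from airflow.contrib.hooks.sqoop_hook import SqoopHook

# Airflow 2.0 provider package locations:
from airflow.providers.apache.sqoop.operators.sqoop import SqoopOperator
from airflow.providers.apache.sqoop.hooks.sqoop import SqoopHook
```

The class names themselves are unchanged, so existing task definitions keep working once the imports are updated.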
diff --git a/airflow/providers/apache/sqoop/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/apache/sqoop/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index c878b4f70cac0..0000000000000 --- a/airflow/providers/apache/sqoop/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,46 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown 
links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05 | Enforce keyword only arguments on apache operators (#10170) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23 | Add call to Super call in apache providers (#7820) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12 | [AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142) | diff --git a/airflow/providers/apache/sqoop/README.md b/airflow/providers/apache/sqoop/README.md deleted file mode 100644 index aeb24498e2c90..0000000000000 --- a/airflow/providers/apache/sqoop/README.md +++ /dev/null @@ -1,132 +0,0 @@ - - - -# Package apache-airflow-providers-apache-sqoop - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) - [Installation](#installation) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for the `apache.sqoop` provider. All classes for this provider package -are in the `airflow.providers.apache.sqoop` Python package. - - - -## Installation - -NOTE! - -In November 2020, a new version of pip (20.3) was released with a new 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors during installation, depending on your choice -of extras. To install Airflow, you need to either downgrade pip to version 20.2.4 -(`pip install --upgrade pip==20.2.4`) or, if you use pip 20.3, add the option -`--use-deprecated legacy-resolver` to your pip install command. - -You can install this package on top of an existing Airflow 2.* installation via -`pip install apache-airflow-providers-apache-sqoop` - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `apache.sqoop` provider -are in the `airflow.providers.apache.sqoop` package. 
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.apache.sqoop` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:---------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.sqoop.SqoopOperator](https://github.com/apache/airflow/blob/master/airflow/providers/apache/sqoop/operators/sqoop.py) | [contrib.operators.sqoop_operator.SqoopOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sqoop_operator.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.apache.sqoop` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:---------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------| -| [hooks.sqoop.SqoopHook](https://github.com/apache/airflow/blob/master/airflow/providers/apache/sqoop/hooks/sqoop.py) | [contrib.hooks.sqoop_hook.SqoopHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/sqoop_hook.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| 
[b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [3b3287d7a](https://github.com/apache/airflow/commit/3b3287d7acc76430f12b758d52cec61c7f74e726) | 2020-08-05 | Enforce keyword only arguments on apache operators (#10170) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| 
[0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [7e6372a68](https://github.com/apache/airflow/commit/7e6372a681a2a543f4710b083219aeb53b074388) | 2020-03-23 | Add call to Super call in apache providers (#7820) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [0481b9a95](https://github.com/apache/airflow/commit/0481b9a95786a62de4776a735ae80e746583ef2b) | 2020-01-12 | [AIRFLOW-6539][AIP-21] Move Apache classes to providers.apache package (#7142) | diff --git a/airflow/providers/apache/sqoop/provider.yaml b/airflow/providers/apache/sqoop/provider.yaml index a04a09c79cce1..f9b12fa4be467 100644 --- a/airflow/providers/apache/sqoop/provider.yaml +++ b/airflow/providers/apache/sqoop/provider.yaml @@ -26,6 +26,7 @@ versions: integrations: - integration-name: Apache Sqoop external-doc-url: https://sqoop.apache.org/ + logo: /integration-logos/apache/sqoop.png tags: [apache] operators: diff --git a/airflow/providers/celery/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/celery/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index 1c62754585b00..0000000000000 --- a/airflow/providers/celery/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,17 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named 
operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/celery/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/celery/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 5f8de4a19dc1c..0000000000000 --- a/airflow/providers/celery/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,15 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [5bb228d84](https://github.com/apache/airflow/commit/5bb228d841585cd1780c15f6175c6d64cd98aeab) | 2020-07-11 | improve type hinting for celery provider (#9762) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/celery/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/celery/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index b7467a0db0db0..0000000000000 --- a/airflow/providers/celery/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,9 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | 
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/celery/BACKPORT_PROVIDER_README.md b/airflow/providers/celery/BACKPORT_PROVIDER_README.md deleted file mode 100644 index 50786dfa3ae38..0000000000000 --- a/airflow/providers/celery/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,122 +0,0 @@ - - - -# Package apache-airflow-backport-providers-celery - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) - [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Sensors](#sensors) - - [Moved sensors](#moved-sensors) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for the `celery` provider. All classes for this provider package -are in the `airflow.providers.celery` Python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+, you need to upgrade Python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing Airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-celery` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| celery | ~=4.4.2 | -| flower | >=0.7.3, <1.0 | -| vine | ~=1.3 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `celery` provider -are in the `airflow.providers.celery` package. 
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Sensors - - - -### Moved sensors - -| Airflow 2.0 sensors: `airflow.providers.celery` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:-----------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [sensors.celery_queue.CeleryQueueSensor](https://github.com/apache/airflow/blob/master/airflow/providers/celery/sensors/celery_queue.py) | [contrib.sensors.celery_queue_sensor.CeleryQueueSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/celery_queue_sensor.py) | - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [5bb228d84](https://github.com/apache/airflow/commit/5bb228d841585cd1780c15f6175c6d64cd98aeab) | 2020-07-11 | improve type hinting for celery provider (#9762) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | 
Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/celery/CHANGELOG.rst b/airflow/providers/celery/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/celery/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. 
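As with the other providers, the celery move is a pure import-path change. A minimal sketch based on the moved-sensors table in the celery backport README above; the constructor arguments shown are illustrative for the 1.10-era sensor:

```python
# Airflow 1.10.* location (deprecated):
# from airflow.contrib.sensors.celery_queue_sensor import CeleryQueueSensor

# Airflow 2.0 provider package location:
from airflow.providers.celery.sensors.celery_queue import CeleryQueueSensor

# Wait until a (hypothetical) Celery queue has no pending or active tasks.
wait_for_queue = CeleryQueueSensor(
    task_id="wait_for_celery_queue",
    celery_queue="default",
)
```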
diff --git a/airflow/providers/celery/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/celery/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index 1b0859b804537..0000000000000 --- a/airflow/providers/celery/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,41 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers 
Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [5bb228d84](https://github.com/apache/airflow/commit/5bb228d841585cd1780c15f6175c6d64cd98aeab) | 2020-07-11 | improve type hinting for celery provider (#9762) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) |
-| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) |
diff --git a/airflow/providers/celery/README.md b/airflow/providers/celery/README.md
deleted file mode 100644
index f25829969c816..0000000000000
--- a/airflow/providers/celery/README.md
+++ /dev/null
@@ -1,123 +0,0 @@
-
-
-# Package apache-airflow-providers-celery
-
-Release: 1.0.0
-
-**Table of contents**
-
-- [Provider package](#provider-package)
-- [Installation](#installation)
-- [PIP requirements](#pip-requirements)
-- [Provider class summary](#provider-classes-summary)
-    - [Sensors](#sensors)
-        - [Moved sensors](#moved-sensors)
-- [Releases](#releases)
-    - [Release 1.0.0](#release-100)
-
-## Provider package
-
-This is a provider package for the `celery` provider. All classes for this provider package
-are in the `airflow.providers.celery` Python package.
-
-## Installation
-
-NOTE!
-
-In November 2020, a new version of pip (20.3) was released with a new, 2020 resolver. This resolver
-does not yet work with Apache Airflow and might lead to installation errors, depending on your choice
-of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4
-(`pip install --upgrade pip==20.2.4`) or, if you use pip 20.3, add the option
-`--use-deprecated legacy-resolver` to your pip install command.
-
-You can install this package on top of an existing Airflow 2.* installation via
-`pip install apache-airflow-providers-celery`
-
-## PIP requirements
-
-| PIP package | Version required |
-|:--------------|:-------------------|
-| celery | ~=4.4.2 |
-| flower | >=0.7.3, <1.0 |
-| vine | ~=1.3 |
-
-## Provider classes summary
-
-In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `celery` provider
-are in the `airflow.providers.celery` package.
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Sensors - - - -### Moved sensors - -| Airflow 2.0 sensors: `airflow.providers.celery` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:-----------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [sensors.celery_queue.CeleryQueueSensor](https://github.com/apache/airflow/blob/master/airflow/providers/celery/sensors/celery_queue.py) | [contrib.sensors.celery_queue_sensor.CeleryQueueSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/celery_queue_sensor.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| 
[16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [5bb228d84](https://github.com/apache/airflow/commit/5bb228d841585cd1780c15f6175c6d64cd98aeab) | 2020-07-11 | improve type hinting for celery provider (#9762) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/cloudant/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/cloudant/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index 1158b21df9980..0000000000000 --- a/airflow/providers/cloudant/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,18 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:----------------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [5648dfbc3](https://github.com/apache/airflow/commit/5648dfbc300337b10567ef4e07045ea29d33ec06) | 2020-03-23 | Add missing call to Super class in 'amazon', 'cloudant & 'databricks' providers (#7827) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/cloudant/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/cloudant/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 2be1e646bc266..0000000000000 --- a/airflow/providers/cloudant/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,13 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [35fe97225](https://github.com/apache/airflow/commit/35fe97225ee0a29aa350bb6ed805428fd707ab2f) | 2020-07-15 | Improve type hinting to provider cloudant (#9825) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/cloudant/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/cloudant/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index b7467a0db0db0..0000000000000 --- a/airflow/providers/cloudant/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,9 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| 
[b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) |
diff --git a/airflow/providers/cloudant/BACKPORT_PROVIDER_README.md b/airflow/providers/cloudant/BACKPORT_PROVIDER_README.md
deleted file mode 100644
index 3c23bdb7d3650..0000000000000
--- a/airflow/providers/cloudant/BACKPORT_PROVIDER_README.md
+++ /dev/null
@@ -1,119 +0,0 @@
-
-
-# Package apache-airflow-backport-providers-cloudant
-
-Release: 2020.10.29
-
-**Table of contents**
-
-- [Backport package](#backport-package)
-- [Installation](#installation)
-- [PIP requirements](#pip-requirements)
-- [Provider class summary](#provider-classes-summary)
-    - [Hooks](#hooks)
-        - [Moved hooks](#moved-hooks)
-- [Releases](#releases)
-    - [Release 2020.10.29](#release-20201029)
-    - [Release 2020.10.5](#release-2020105)
-    - [Release 2020.6.24](#release-2020624)
-
-## Backport package
-
-This is a backport providers package for the `cloudant` provider. All classes for this provider package
-are in the `airflow.providers.cloudant` Python package.
-
-**Only Python 3.6+ is supported for this backport package.**
-
-While Airflow 1.10.* continues to support Python 2.7+, you need to upgrade Python to 3.6+ if you
-want to use this backport package.
-
-## Installation
-
-You can install this package on top of an existing Airflow 1.10.* installation via
-`pip install apache-airflow-backport-providers-cloudant`
-
-## PIP requirements
-
-| PIP package | Version required |
-|:--------------|:-------------------|
-| cloudant | >=2.0 |
-
-## Provider classes summary
-
-In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `cloudant` provider
-are in the `airflow.providers.cloudant` package.
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.cloudant` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------| -| [hooks.cloudant.CloudantHook](https://github.com/apache/airflow/blob/master/airflow/providers/cloudant/hooks/cloudant.py) | [contrib.hooks.cloudant_hook.CloudantHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/cloudant_hook.py) | - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [35fe97225](https://github.com/apache/airflow/commit/35fe97225ee0a29aa350bb6ed805428fd707ab2f) | 2020-07-15 | Improve type hinting to provider cloudant (#9825) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:----------------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| 
[c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [5648dfbc3](https://github.com/apache/airflow/commit/5648dfbc300337b10567ef4e07045ea29d33ec06) | 2020-03-23 | Add missing call to Super class in 'amazon', 'cloudant & 'databricks' providers (#7827) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/cloudant/CHANGELOG.rst b/airflow/providers/cloudant/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/cloudant/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. 
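The moved-hooks tables above are the whole migration surface for DAG authors: the class name is unchanged and only the import path moves out of `airflow.contrib`. A minimal sketch of the change (the connection id below is illustrative, not something defined in this diff):

```python
# Airflow 1.10.* (deprecated contrib location):
# from airflow.contrib.hooks.cloudant_hook import CloudantHook

# Airflow 2.0 / backport provider package location:
from airflow.providers.cloudant.hooks.cloudant import CloudantHook

# "cloudant_default" is an example connection id used for illustration only.
hook = CloudantHook(cloudant_conn_id="cloudant_default")
```

On Airflow 1.10.* with the backport package installed, both import paths resolve, which lets you switch DAGs to the new paths before upgrading the core.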
diff --git a/airflow/providers/cloudant/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/cloudant/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index 2ab48accc51b5..0000000000000 --- a/airflow/providers/cloudant/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,42 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:----------------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| 
[ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [35fe97225](https://github.com/apache/airflow/commit/35fe97225ee0a29aa350bb6ed805428fd707ab2f) | 2020-07-15 | Improve type hinting to provider cloudant (#9825) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) |
-| [5648dfbc3](https://github.com/apache/airflow/commit/5648dfbc300337b10567ef4e07045ea29d33ec06) | 2020-03-23 | Add missing call to Super class in 'amazon', 'cloudant & 'databricks' providers (#7827) |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) |
-| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) |
diff --git a/airflow/providers/cloudant/README.md b/airflow/providers/cloudant/README.md
deleted file mode 100644
index b7c6ac4470235..0000000000000
--- a/airflow/providers/cloudant/README.md
+++ /dev/null
@@ -1,122 +0,0 @@
-
-
-# Package apache-airflow-providers-cloudant
-
-Release: 1.0.0
-
-**Table of contents**
-
-- [Provider package](#provider-package)
-- [Installation](#installation)
-- [PIP requirements](#pip-requirements)
-- [Provider class summary](#provider-classes-summary)
-    - [Hooks](#hooks)
-        - [Moved hooks](#moved-hooks)
-- [Releases](#releases)
-    - [Release 1.0.0](#release-100)
-
-## Provider package
-
-This is a provider package for the `cloudant` provider. All classes for this provider package
-are in the `airflow.providers.cloudant` Python package.
-
-## Installation
-
-NOTE!
-
-In November 2020, a new version of pip (20.3) was released with a new, 2020 resolver. This resolver
-does not yet work with Apache Airflow and might lead to installation errors, depending on your choice
-of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4
-(`pip install --upgrade pip==20.2.4`) or, if you use pip 20.3, add the option
-`--use-deprecated legacy-resolver` to your pip install command.
-
-You can install this package on top of an existing Airflow 2.* installation via
-`pip install apache-airflow-providers-cloudant`
-
-## PIP requirements
-
-| PIP package | Version required |
-|:--------------|:-------------------|
-| cloudant | >=2.0 |
-
-## Provider classes summary
-
-In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `cloudant` provider
-are in the `airflow.providers.cloudant` package.
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.cloudant` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------| -| [hooks.cloudant.CloudantHook](https://github.com/apache/airflow/blob/master/airflow/providers/cloudant/hooks/cloudant.py) | [contrib.hooks.cloudant_hook.CloudantHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/cloudant_hook.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:----------------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers 
release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [35fe97225](https://github.com/apache/airflow/commit/35fe97225ee0a29aa350bb6ed805428fd707ab2f) | 2020-07-15 | Improve type hinting to provider cloudant (#9825) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) |
-| [5648dfbc3](https://github.com/apache/airflow/commit/5648dfbc300337b10567ef4e07045ea29d33ec06) | 2020-03-23 | Add missing call to Super class in 'amazon', 'cloudant & 'databricks' providers (#7827) |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) |
-| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) |
diff --git a/airflow/providers/cncf/kubernetes/ADDITIONAL_INFO.md b/airflow/providers/cncf/kubernetes/ADDITIONAL_INFO.md
deleted file mode 100644
index 824bc480e08ca..0000000000000
--- a/airflow/providers/cncf/kubernetes/ADDITIONAL_INFO.md
+++ /dev/null
@@ -1,24 +0,0 @@
-
-
-## Additional limitations
-
-This provider is only usable with Apache Airflow >= 1.10.12 due to refactorings implemented in
-Apache Airflow 1.10.11 and fixes to them implemented in 1.10.12. The package has appropriate requirements
-set, so you should not be able to install it with Apache Airflow < 1.10.12.
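The version floor above is normally enforced by the package's install requirements, so pip should refuse to install the backport provider alongside an older Airflow. Where packages are vendored or installed with dependency checks disabled, a runtime guard can catch the mismatch early; a minimal, hypothetical sketch (not part of the provider itself):

```python
from packaging.version import Version

import airflow

# Illustrative only: mirrors the apache-airflow>=1.10.12 floor that the
# cncf.kubernetes backport package declares in its install metadata.
if Version(airflow.__version__) < Version("1.10.12"):
    raise RuntimeError(
        "The cncf.kubernetes backport provider requires Apache Airflow >= 1.10.12, "
        f"found {airflow.__version__}"
    )
```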
diff --git a/airflow/providers/cncf/kubernetes/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/cncf/kubernetes/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 18a8f1b16c667..0000000000000 --- a/airflow/providers/cncf/kubernetes/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,62 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [a888198c2](https://github.com/apache/airflow/commit/a888198c27bcdbc4538c02360c308ffcaca182fa) | 2020-09-27 | Allow overrides for pod_template_file (#11162) | -| [0161b5ea2](https://github.com/apache/airflow/commit/0161b5ea2b805d62a0317e5cab6f797b92c8abf1) | 2020-09-26 | Increasing type coverage for multiple provider (#11159) | -| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24 | Fix incorrect Usage of Optional[bool] (#11138) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [b61225a88](https://github.com/apache/airflow/commit/b61225a8850b20be17842c2428b91d873584c4da) | 2020-09-21 | Add D204 pydocstyle check (#11031) | -| [cba51d49e](https://github.com/apache/airflow/commit/cba51d49eea6a0563044191c8111978836d697ef) | 2020-09-17 | Simplify the K8sExecutor and K8sPodOperator (#10393) | -| [1294e15d4](https://github.com/apache/airflow/commit/1294e15d44c08498e7f1022fdd6f0bc5e50e533f) | 2020-09-16 | KubernetesPodOperator template fix (#10963) | -| [5d6d5a2f7](https://github.com/apache/airflow/commit/5d6d5a2f7d330c83297e1dc35728a0ba803aa866) | 2020-09-14 | Allow to specify path to kubeconfig in KubernetesHook (#10453) | -| [7edfac957](https://github.com/apache/airflow/commit/7edfac957bc17c9abcdcfe8d524772bd2783ac5a) | 2020-09-09 | Add connection caching to KubernetesHook (#10447) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [90c150568](https://github.com/apache/airflow/commit/90c1505686b063332dba87c0c948a8b29d8fd1d4) | 2020-09-04 | Make grace_period_seconds option on K8sPodOperator (#10727) | -| [338b412c0](https://github.com/apache/airflow/commit/338b412c04abc3fef8126f9724b448d1a9fd0bbc) | 2020-09-02 | Add on_kill support for the KubernetesPodOperator (#10666) | -| [596bc1337](https://github.com/apache/airflow/commit/596bc1337988f9377571295ddb748ef8703c19c0) | 2020-08-31 | Adds 'cncf.kubernetes' package back to backport provider packages. 
(#10659) | -| [1e5aa4465](https://github.com/apache/airflow/commit/1e5aa4465c5ef8f05745bda64da62fe542f2fe28) | 2020-08-26 | Spark-on-K8S sensor - add driver logs (#10023) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25 | Remove all "noinspection" comments native to IntelliJ (#10525) | -| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22 | Replace assigment with Augmented assignment (#10468) | -| [8cd2be9e1](https://github.com/apache/airflow/commit/8cd2be9e161635480581a0dc723b69ed24166f8d) | 2020-08-11 | Fix KubernetesPodOperator reattachment (#10230) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [f1fd3e2c4](https://github.com/apache/airflow/commit/f1fd3e2c453ddce3e87ce63787598fea0707ffcf) | 2020-07-31 | Fix typo on reattach property of kubernetespodoperator (#10056) | -| [03c435174](https://github.com/apache/airflow/commit/03c43517445019081c55b4ac5fad3b0debdee336) | 2020-07-31 | Allow `image` in `KubernetesPodOperator` to be templated (#10068) | -| [88c160306](https://github.com/apache/airflow/commit/88c1603060fd484d4145bc253c0dc0e6797e13dd) | 2020-07-31 | Improve docstring note about GKEStartPodOperator on KubernetesPodOperator (#10049) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [c2db0dfeb](https://github.com/apache/airflow/commit/c2db0dfeb13ee679bf4d7b57874f0fcb39c0f0ed) | 2020-07-22 | More strict rules in mypy (#9705) (#9906) | -| [719ae2bf6](https://github.com/apache/airflow/commit/719ae2bf6227894c3e926f717eb4dc669549d615) | 2020-07-22 | Dump Pod as YAML in logs for KubernetesPodOperator (#9895) | -| [840799d55](https://github.com/apache/airflow/commit/840799d5597f0d005e1deec154f6c95bad6dce61) | 2020-07-20 | Improve KubernetesPodOperator guide (#9079) | -| [44d4ae809](https://github.com/apache/airflow/commit/44d4ae809c1e3784ff95b6a5e95113c3412e56b3) | 2020-07-06 | Upgrade to latest pre-commit checks (#9686) | -| [8bd15ef63](https://github.com/apache/airflow/commit/8bd15ef634cca40f3cf6ca3442262f3e05144512) | 2020-07-01 | Switches to Helm Chart for Kubernetes tests (#9468) | -| [40bf8f28f](https://github.com/apache/airflow/commit/40bf8f28f97f17f40d993d207ea740eba54593ee) | 2020-06-18 | Detect automatically the lack of reference to the guide in the operator descriptions (#9290) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| 
[e742ef7c7](https://github.com/apache/airflow/commit/e742ef7c704c18bf69b7a7235adb7f75e742f902) | 2020-05-23 | Fix typo in test_project_structure (#8978) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [8985df0bf](https://github.com/apache/airflow/commit/8985df0bfcb5f2b2cd69a21b9814021f9f8ce953) | 2020-05-16 | Monitor pods by labels instead of names (#6377) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [f82ad452b](https://github.com/apache/airflow/commit/f82ad452b0f4ebd1428bc9669641a632dc87bb8c) | 2020-05-15 | Fix KubernetesPodOperator pod name length validation (#8829) | -| [1ccafc617](https://github.com/apache/airflow/commit/1ccafc617c4cb9622e3460ad7c190f3ee67c3b32) | 2020-04-02 | Add spark_kubernetes system test (#7875) | -| [cd546b664](https://github.com/apache/airflow/commit/cd546b664fa35a2bf85acd77af578c909a327d92) | 2020-03-23 | Add missing call to Super class in 'cncf' & 'docker' providers (#7825) | -| [6c39a3bf9](https://github.com/apache/airflow/commit/6c39a3bf97414ba2438669894db65c36ccbeb61a) | 2020-03-10 | [AIRFLOW-6542] Add spark-on-k8s operator/hook/sensor (#7163) | -| [42eef3821](https://github.com/apache/airflow/commit/42eef38217e709bc7a7f71bf0286e9e61293a43e) | 2020-03-07 | [AIRFLOW-6877] Add cross-provider dependencies as extras (#7506) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [0ec277412](https://github.com/apache/airflow/commit/0ec2774120d43fa667a371b384e6006e1d1c7821) | 2020-02-24 | [AIRFLOW-5629] Implement Kubernetes priorityClassName in KubernetesPodOperator (#7395) | -| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18 | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) | -| [967930c0c](https://github.com/apache/airflow/commit/967930c0cb6e2293f2a49e5c9add5aa1917f3527) | 2020-02-11 | [AIRFLOW-5413] Allow K8S worker pod to be configured from JSON/YAML file (#6230) | -| [96f834389](https://github.com/apache/airflow/commit/96f834389e03884025534fabd862155061f53fd0) | 2020-02-03 | [AIRFLOW-6678] Pull event logs from Kubernetes (#7292) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02 | [AIRFLOW-6708] Set unique logger names (#7330) | -| [373c6aa4a](https://github.com/apache/airflow/commit/373c6aa4a208284b5ff72987e4bd8f4e2ada1a1b) | 2020-01-30 | [AIRFLOW-6682] Move GCP classes to providers package (#7295) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 
(#7287) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/cncf/kubernetes/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/cncf/kubernetes/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index cd14f3365c10f..0000000000000 --- a/airflow/providers/cncf/kubernetes/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,16 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [53e606210](https://github.com/apache/airflow/commit/53e6062105be0ae1761a354e2055eb0779d12e73) | 2020-10-21 | Enforce strict rules for yamllint (#11709) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [eee4e30f2](https://github.com/apache/airflow/commit/eee4e30f2caf02e16088ff5d1af1ea380a73e982) | 2020-10-15 | Add better debug logging to K8sexec and K8sPodOp (#11502) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [8640fb6c1](https://github.com/apache/airflow/commit/8640fb6c100a2c6aa231798559ba194331576975) | 2020-10-09 | fix tests (#11368) | -| [298052fce](https://github.com/apache/airflow/commit/298052fcee9d30b1f60b8dc1c9006398cd16645e) | 2020-10-10 | [airflow/providers/cncf/kubernetes] correct hook methods name (#11008) | -| [49aad025b](https://github.com/apache/airflow/commit/49aad025b53211a5815b10aa35f7d7b489cb5316) | 2020-10-09 | Users can specify sub-secrets and paths k8spodop (#11369) | -| [b93b6c5be](https://github.com/apache/airflow/commit/b93b6c5be3ab60960f650d0d4ee6c91271ac7909) | 2020-10-05 | Allow labels in KubernetesPodOperator to be templated (#10796) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/cncf/kubernetes/BACKPORT_PROVIDER_CHANGES_2020.11.23.md b/airflow/providers/cncf/kubernetes/BACKPORT_PROVIDER_CHANGES_2020.11.23.md deleted file mode 100644 index 18206b748f2fc..0000000000000 --- a/airflow/providers/cncf/kubernetes/BACKPORT_PROVIDER_CHANGES_2020.11.23.md +++ /dev/null @@ -1,23 +0,0 @@ - - -### Release 2020.11.23 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [19b7e4565](https://github.com/apache/airflow/commit/19b7e4565e6372d50ef0fbb5678a484a7afbdbee) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) | -| [763b40d22](https://github.com/apache/airflow/commit/763b40d223e5e5512494a97f8335e16960e6adc3) | 2020-11-18 | Raise correct Warning in kubernetes/backcompat/volume_mount.py (#12432) | -| [bc4bb3058](https://github.com/apache/airflow/commit/bc4bb30588607b10b069ab63ddf2ba7b7ee673ed) | 2020-11-18 | Fix docstrings for 
Kubernetes Backcompat module (#12422) |
-| [cab86d80d](https://github.com/apache/airflow/commit/cab86d80d48227849906319917126f6d558b2e00) | 2020-11-17 | Make K8sPodOperator backwards compatible (#12384) |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) |
-| [221f809c1](https://github.com/apache/airflow/commit/221f809c1b4e4b78d5a437d012aa7daffd8410a4) | 2020-11-14 | Fix full_pod_spec for k8spodoperator (#12354) |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) |
-| [3f59e75cd](https://github.com/apache/airflow/commit/3f59e75cdf4a95829ac60b151135e03267e63a12) | 2020-11-09 | KubernetesPodOperator: use randomized name to get the failure status (#12171) |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) |
-| [7825be50d](https://github.com/apache/airflow/commit/7825be50d80d04da0db8fcee55df5e1339864c88) | 2020-11-05 | Randomize pod name (#12117) |
-| [91a64db50](https://github.com/apache/airflow/commit/91a64db505e50712cd53928b4f2b84aece3cc1c0) | 2020-11-04 | Format all files (without excepions) by black (#12091) |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) |
diff --git a/airflow/providers/cncf/kubernetes/BACKPORT_PROVIDER_README.md b/airflow/providers/cncf/kubernetes/BACKPORT_PROVIDER_README.md
deleted file mode 100644
index 32b206c843464..0000000000000
--- a/airflow/providers/cncf/kubernetes/BACKPORT_PROVIDER_README.md
+++ /dev/null
@@ -1,220 +0,0 @@
-
-
-# Package apache-airflow-backport-providers-cncf-kubernetes
-
-Release: 2020.11.23
-
-**Table of contents**
-
-- [Backport package](#backport-package)
-- [Installation](#installation)
-- [PIP requirements](#pip-requirements)
-- [Provider class summary](#provider-classes-summary)
-    - [Operators](#operators)
-        - [New operators](#new-operators)
-        - [Moved operators](#moved-operators)
-    - [Sensors](#sensors)
-        - [New sensors](#new-sensors)
-    - [Hooks](#hooks)
-        - [New hooks](#new-hooks)
-- [Releases](#releases)
-    - [Release 2020.11.23](#release-20201123)
-    - [Release 2020.10.29](#release-20201029)
-    - [Release 2020.10.5](#release-2020105)
-
-## Backport package
-
-This is a backport providers package for the `cncf.kubernetes` provider. All classes for this provider package
-are in the `airflow.providers.cncf.kubernetes` Python package.
- -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+, you need to upgrade Python to 3.6+ if you -want to use this backport package. - - -## Additional limitations - -This provider is only usable with Apache Airflow >= 1.10.12 due to refactorings implemented in -Apache Airflow 1.10.11 and fixes implemented in 1.10.12. The package has appropriate requirements -set, so you should not be able to install it with Apache Airflow < 1.10.12. - - -## Installation - -You can install this package on top of an existing Airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-cncf-kubernetes` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| cryptography | >=2.0.0 | -| kubernetes | >=3.0.0, <12.0.0 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, and secrets for the `cncf.kubernetes` provider -are in the `airflow.providers.cncf.kubernetes` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - -### New operators - -| New Airflow 2.0 operators: `airflow.providers.cncf.kubernetes` package | -|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.spark_kubernetes.SparkKubernetesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py) | - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.cncf.kubernetes` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.kubernetes_pod.KubernetesPodOperator](https://github.com/apache/airflow/blob/master/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py) | [contrib.operators.kubernetes_pod_operator.KubernetesPodOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/kubernetes_pod_operator.py) | - - -## Sensors - - -### New sensors - -| New Airflow 2.0 sensors: `airflow.providers.cncf.kubernetes` package | -|:--------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [sensors.spark_kubernetes.SparkKubernetesSensor](https://github.com/apache/airflow/blob/master/airflow/providers/cncf/kubernetes/sensors/spark_kubernetes.py) | - - - -## Hooks - - -### New hooks - -| New Airflow 2.0 hooks: `airflow.providers.cncf.kubernetes` package | -|:---------------------------------------------------------------------------------------------------------------------------------------| -| [hooks.kubernetes.KubernetesHook](https://github.com/apache/airflow/blob/master/airflow/providers/cncf/kubernetes/hooks/kubernetes.py) | - - - - -## Releases - -### Release 2020.11.23 - -| Commit | Committed | Subject | 
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [19b7e4565](https://github.com/apache/airflow/commit/19b7e4565e6372d50ef0fbb5678a484a7afbdbee) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) | -| [763b40d22](https://github.com/apache/airflow/commit/763b40d223e5e5512494a97f8335e16960e6adc3) | 2020-11-18 | Raise correct Warning in kubernetes/backcompat/volume_mount.py (#12432) | -| [bc4bb3058](https://github.com/apache/airflow/commit/bc4bb30588607b10b069ab63ddf2ba7b7ee673ed) | 2020-11-18 | Fix docstrings for Kubernetes Backcompat module (#12422) | -| [cab86d80d](https://github.com/apache/airflow/commit/cab86d80d48227849906319917126f6d558b2e00) | 2020-11-17 | Make K8sPodOperator backwards compatible (#12384) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [221f809c1](https://github.com/apache/airflow/commit/221f809c1b4e4b78d5a437d012aa7daffd8410a4) | 2020-11-14 | Fix full_pod_spec for k8spodoperator (#12354) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [3f59e75cd](https://github.com/apache/airflow/commit/3f59e75cdf4a95829ac60b151135e03267e63a12) | 2020-11-09 | KubernetesPodOperator: use randomized name to get the failure status (#12171) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [7825be50d](https://github.com/apache/airflow/commit/7825be50d80d04da0db8fcee55df5e1339864c88) | 2020-11-05 | Randomize pod name (#12117) | -| [91a64db50](https://github.com/apache/airflow/commit/91a64db505e50712cd53928b4f2b84aece3cc1c0) | 2020-11-04 | Format all files (without excepions) by black (#12091) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| 
[53e606210](https://github.com/apache/airflow/commit/53e6062105be0ae1761a354e2055eb0779d12e73) | 2020-10-21 | Enforce strict rules for yamllint (#11709) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [eee4e30f2](https://github.com/apache/airflow/commit/eee4e30f2caf02e16088ff5d1af1ea380a73e982) | 2020-10-15 | Add better debug logging to K8sexec and K8sPodOp (#11502) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [8640fb6c1](https://github.com/apache/airflow/commit/8640fb6c100a2c6aa231798559ba194331576975) | 2020-10-09 | fix tests (#11368) | -| [298052fce](https://github.com/apache/airflow/commit/298052fcee9d30b1f60b8dc1c9006398cd16645e) | 2020-10-10 | [airflow/providers/cncf/kubernetes] correct hook methods name (#11008) | -| [49aad025b](https://github.com/apache/airflow/commit/49aad025b53211a5815b10aa35f7d7b489cb5316) | 2020-10-09 | Users can specify sub-secrets and paths k8spodop (#11369) | -| [b93b6c5be](https://github.com/apache/airflow/commit/b93b6c5be3ab60960f650d0d4ee6c91271ac7909) | 2020-10-05 | Allow labels in KubernetesPodOperator to be templated (#10796) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [a888198c2](https://github.com/apache/airflow/commit/a888198c27bcdbc4538c02360c308ffcaca182fa) | 2020-09-27 | Allow overrides for pod_template_file (#11162) | -| [0161b5ea2](https://github.com/apache/airflow/commit/0161b5ea2b805d62a0317e5cab6f797b92c8abf1) | 2020-09-26 | Increasing type coverage for multiple provider (#11159) | -| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24 | Fix incorrect Usage of Optional[bool] (#11138) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [b61225a88](https://github.com/apache/airflow/commit/b61225a8850b20be17842c2428b91d873584c4da) | 2020-09-21 | Add D204 pydocstyle check (#11031) | -| [cba51d49e](https://github.com/apache/airflow/commit/cba51d49eea6a0563044191c8111978836d697ef) | 2020-09-17 | Simplify the K8sExecutor and K8sPodOperator (#10393) | -| [1294e15d4](https://github.com/apache/airflow/commit/1294e15d44c08498e7f1022fdd6f0bc5e50e533f) | 2020-09-16 | KubernetesPodOperator template fix (#10963) | -| [5d6d5a2f7](https://github.com/apache/airflow/commit/5d6d5a2f7d330c83297e1dc35728a0ba803aa866) | 2020-09-14 | Allow to specify path to kubeconfig in KubernetesHook (#10453) | -| [7edfac957](https://github.com/apache/airflow/commit/7edfac957bc17c9abcdcfe8d524772bd2783ac5a) | 2020-09-09 | Add connection caching to KubernetesHook 
(#10447) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [90c150568](https://github.com/apache/airflow/commit/90c1505686b063332dba87c0c948a8b29d8fd1d4) | 2020-09-04 | Make grace_period_seconds option on K8sPodOperator (#10727) | -| [338b412c0](https://github.com/apache/airflow/commit/338b412c04abc3fef8126f9724b448d1a9fd0bbc) | 2020-09-02 | Add on_kill support for the KubernetesPodOperator (#10666) | -| [596bc1337](https://github.com/apache/airflow/commit/596bc1337988f9377571295ddb748ef8703c19c0) | 2020-08-31 | Adds 'cncf.kubernetes' package back to backport provider packages. (#10659) | -| [1e5aa4465](https://github.com/apache/airflow/commit/1e5aa4465c5ef8f05745bda64da62fe542f2fe28) | 2020-08-26 | Spark-on-K8S sensor - add driver logs (#10023) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25 | Remove all "noinspection" comments native to IntelliJ (#10525) | -| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22 | Replace assigment with Augmented assignment (#10468) | -| [8cd2be9e1](https://github.com/apache/airflow/commit/8cd2be9e161635480581a0dc723b69ed24166f8d) | 2020-08-11 | Fix KubernetesPodOperator reattachment (#10230) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [f1fd3e2c4](https://github.com/apache/airflow/commit/f1fd3e2c453ddce3e87ce63787598fea0707ffcf) | 2020-07-31 | Fix typo on reattach property of kubernetespodoperator (#10056) | -| [03c435174](https://github.com/apache/airflow/commit/03c43517445019081c55b4ac5fad3b0debdee336) | 2020-07-31 | Allow `image` in `KubernetesPodOperator` to be templated (#10068) | -| [88c160306](https://github.com/apache/airflow/commit/88c1603060fd484d4145bc253c0dc0e6797e13dd) | 2020-07-31 | Improve docstring note about GKEStartPodOperator on KubernetesPodOperator (#10049) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [c2db0dfeb](https://github.com/apache/airflow/commit/c2db0dfeb13ee679bf4d7b57874f0fcb39c0f0ed) | 2020-07-22 | More strict rules in mypy (#9705) (#9906) | -| [719ae2bf6](https://github.com/apache/airflow/commit/719ae2bf6227894c3e926f717eb4dc669549d615) | 2020-07-22 | Dump Pod as YAML in logs for KubernetesPodOperator (#9895) | -| [840799d55](https://github.com/apache/airflow/commit/840799d5597f0d005e1deec154f6c95bad6dce61) | 2020-07-20 | Improve KubernetesPodOperator guide (#9079) | -| 
[44d4ae809](https://github.com/apache/airflow/commit/44d4ae809c1e3784ff95b6a5e95113c3412e56b3) | 2020-07-06 | Upgrade to latest pre-commit checks (#9686) | -| [8bd15ef63](https://github.com/apache/airflow/commit/8bd15ef634cca40f3cf6ca3442262f3e05144512) | 2020-07-01 | Switches to Helm Chart for Kubernetes tests (#9468) | -| [40bf8f28f](https://github.com/apache/airflow/commit/40bf8f28f97f17f40d993d207ea740eba54593ee) | 2020-06-18 | Detect automatically the lack of reference to the guide in the operator descriptions (#9290) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [e742ef7c7](https://github.com/apache/airflow/commit/e742ef7c704c18bf69b7a7235adb7f75e742f902) | 2020-05-23 | Fix typo in test_project_structure (#8978) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [8985df0bf](https://github.com/apache/airflow/commit/8985df0bfcb5f2b2cd69a21b9814021f9f8ce953) | 2020-05-16 | Monitor pods by labels instead of names (#6377) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [f82ad452b](https://github.com/apache/airflow/commit/f82ad452b0f4ebd1428bc9669641a632dc87bb8c) | 2020-05-15 | Fix KubernetesPodOperator pod name length validation (#8829) | -| [1ccafc617](https://github.com/apache/airflow/commit/1ccafc617c4cb9622e3460ad7c190f3ee67c3b32) | 2020-04-02 | Add spark_kubernetes system test (#7875) | -| [cd546b664](https://github.com/apache/airflow/commit/cd546b664fa35a2bf85acd77af578c909a327d92) | 2020-03-23 | Add missing call to Super class in 'cncf' & 'docker' providers (#7825) | -| [6c39a3bf9](https://github.com/apache/airflow/commit/6c39a3bf97414ba2438669894db65c36ccbeb61a) | 2020-03-10 | [AIRFLOW-6542] Add spark-on-k8s operator/hook/sensor (#7163) | -| [42eef3821](https://github.com/apache/airflow/commit/42eef38217e709bc7a7f71bf0286e9e61293a43e) | 2020-03-07 | [AIRFLOW-6877] Add cross-provider dependencies as extras (#7506) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [0ec277412](https://github.com/apache/airflow/commit/0ec2774120d43fa667a371b384e6006e1d1c7821) | 2020-02-24 | [AIRFLOW-5629] Implement Kubernetes priorityClassName in KubernetesPodOperator (#7395) | -| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18 | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) | -| [967930c0c](https://github.com/apache/airflow/commit/967930c0cb6e2293f2a49e5c9add5aa1917f3527) | 2020-02-11 | [AIRFLOW-5413] Allow K8S worker pod to be configured from JSON/YAML file (#6230) | -| [96f834389](https://github.com/apache/airflow/commit/96f834389e03884025534fabd862155061f53fd0) | 2020-02-03 | [AIRFLOW-6678] Pull event logs from Kubernetes (#7292) | -| 
[97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02 | [AIRFLOW-6708] Set unique logger names (#7330) | -| [373c6aa4a](https://github.com/apache/airflow/commit/373c6aa4a208284b5ff72987e4bd8f4e2ada1a1b) | 2020-01-30 | [AIRFLOW-6682] Move GCP classes to providers package (#7295) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/cncf/kubernetes/CHANGELOG.rst b/airflow/providers/cncf/kubernetes/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/cncf/kubernetes/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. 
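To make the class relocations documented in these READMEs concrete, here is a minimal, hypothetical DAG sketch that imports `KubernetesPodOperator` from its Airflow 2.0 provider location (formerly `airflow.contrib.operators.kubernetes_pod_operator`); the DAG id, namespace, image, and command are illustrative assumptions rather than values taken from this change set.

```python
from datetime import datetime

from airflow import DAG
# Airflow 2.0 provider-package location; the 1.10.* location was
# airflow.contrib.operators.kubernetes_pod_operator.KubernetesPodOperator
from airflow.providers.cncf.kubernetes.operators.kubernetes_pod import KubernetesPodOperator

with DAG(
    dag_id="cncf_kubernetes_example",  # illustrative dag id
    start_date=datetime(2021, 1, 1),
    schedule_interval=None,
) as dag:
    run_pod = KubernetesPodOperator(
        task_id="run_pod",
        name="example-pod",  # pod names are randomized at launch (#12117)
        namespace="default",
        image="python:3.8-slim",  # `image` is a templated field (#10068)
        cmds=["python", "-c", "print('hello from the pod')"],
    )
```

The same relocation applies to `KubernetesHook` (now under `airflow.providers.cncf.kubernetes.hooks.kubernetes`) and to the Spark-on-K8s `SparkKubernetesOperator` and `SparkKubernetesSensor` listed in the tables above.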
diff --git a/airflow/providers/cncf/kubernetes/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/cncf/kubernetes/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index 217060eda9f5c..0000000000000 --- a/airflow/providers/cncf/kubernetes/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,101 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [bd90136aa](https://github.com/apache/airflow/commit/bd90136aaf5035e3234fe545b79a3e4aad21efe2) | 2020-11-30 | Move operator guides to provider documentation packages (#12681) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [de3b1e687](https://github.com/apache/airflow/commit/de3b1e687b26c524c6909b7b4dfbb60d25019751) | 2020-11-28 | Move connection guides to provider documentation packages (#12653) | -| [c02a3f59e](https://github.com/apache/airflow/commit/c02a3f59e45d3cdd0e4c1c3bda2c62b951bcbea3) | 2020-11-23 | Spark-on-k8s sensor logs - properly pass defined namespace to pod log call (#11199) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [9e089ab89](https://github.com/apache/airflow/commit/9e089ab89567b0a52b232f22ed3e708a05137924) | 2020-11-19 | Fix Kube tests (#12479) | -| [d32fe78c0](https://github.com/apache/airflow/commit/d32fe78c0d9d14f016df70a462dc3972f28abe9d) | 2020-11-18 | Update readmes for cncf.kube provider fixes (#12457) | -| [d84a52dc8](https://github.com/apache/airflow/commit/d84a52dc8fc597d89c5bb4941df67f5f35b70a29) | 2020-11-18 | Fix broken example_kubernetes DAG (#12455) | -| [7c8b71d20](https://github.com/apache/airflow/commit/7c8b71d2012d56888f21b24c4844a6838dc3e4b1) | 2020-11-18 | Fix backwards compatibility further (#12451) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [7ca0b6f12](https://github.com/apache/airflow/commit/7ca0b6f121c9cec6e25de130f86a56d7c7fbe38c) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) (#12438) | -| [763b40d22](https://github.com/apache/airflow/commit/763b40d223e5e5512494a97f8335e16960e6adc3) | 2020-11-18 | Raise correct Warning in kubernetes/backcompat/volume_mount.py (#12432) | -| [bc4bb3058](https://github.com/apache/airflow/commit/bc4bb30588607b10b069ab63ddf2ba7b7ee673ed) | 2020-11-18 | Fix docstrings for Kubernetes Backcompat module (#12422) | -| [cab86d80d](https://github.com/apache/airflow/commit/cab86d80d48227849906319917126f6d558b2e00) | 2020-11-17 | Make K8sPodOperator backwards compatible (#12384) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| 
[6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [221f809c1](https://github.com/apache/airflow/commit/221f809c1b4e4b78d5a437d012aa7daffd8410a4) | 2020-11-14 | Fix full_pod_spec for k8spodoperator (#12354) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [3f59e75cd](https://github.com/apache/airflow/commit/3f59e75cdf4a95829ac60b151135e03267e63a12) | 2020-11-09 | KubernetesPodOperator: use randomized name to get the failure status (#12171) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [7825be50d](https://github.com/apache/airflow/commit/7825be50d80d04da0db8fcee55df5e1339864c88) | 2020-11-05 | Randomize pod name (#12117) | -| [91a64db50](https://github.com/apache/airflow/commit/91a64db505e50712cd53928b4f2b84aece3cc1c0) | 2020-11-04 | Format all files (without excepions) by black (#12091) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [53e606210](https://github.com/apache/airflow/commit/53e6062105be0ae1761a354e2055eb0779d12e73) | 2020-10-21 | Enforce strict rules for yamllint (#11709) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [eee4e30f2](https://github.com/apache/airflow/commit/eee4e30f2caf02e16088ff5d1af1ea380a73e982) | 2020-10-15 | Add better debug logging to K8sexec and K8sPodOp (#11502) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [8640fb6c1](https://github.com/apache/airflow/commit/8640fb6c100a2c6aa231798559ba194331576975) | 2020-10-09 | fix tests (#11368) | -| [298052fce](https://github.com/apache/airflow/commit/298052fcee9d30b1f60b8dc1c9006398cd16645e) | 2020-10-10 | [airflow/providers/cncf/kubernetes] correct hook methods name (#11008) | -| [49aad025b](https://github.com/apache/airflow/commit/49aad025b53211a5815b10aa35f7d7b489cb5316) | 2020-10-09 | Users can specify sub-secrets and paths k8spodop (#11369) | -| [b93b6c5be](https://github.com/apache/airflow/commit/b93b6c5be3ab60960f650d0d4ee6c91271ac7909) | 2020-10-05 | Allow labels in KubernetesPodOperator to be templated (#10796) | -| 
[0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [a888198c2](https://github.com/apache/airflow/commit/a888198c27bcdbc4538c02360c308ffcaca182fa) | 2020-09-27 | Allow overrides for pod_template_file (#11162) | -| [0161b5ea2](https://github.com/apache/airflow/commit/0161b5ea2b805d62a0317e5cab6f797b92c8abf1) | 2020-09-26 | Increasing type coverage for multiple provider (#11159) | -| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24 | Fix incorrect Usage of Optional[bool] (#11138) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [b61225a88](https://github.com/apache/airflow/commit/b61225a8850b20be17842c2428b91d873584c4da) | 2020-09-21 | Add D204 pydocstyle check (#11031) | -| [cba51d49e](https://github.com/apache/airflow/commit/cba51d49eea6a0563044191c8111978836d697ef) | 2020-09-17 | Simplify the K8sExecutor and K8sPodOperator (#10393) | -| [1294e15d4](https://github.com/apache/airflow/commit/1294e15d44c08498e7f1022fdd6f0bc5e50e533f) | 2020-09-16 | KubernetesPodOperator template fix (#10963) | -| [5d6d5a2f7](https://github.com/apache/airflow/commit/5d6d5a2f7d330c83297e1dc35728a0ba803aa866) | 2020-09-14 | Allow to specify path to kubeconfig in KubernetesHook (#10453) | -| [7edfac957](https://github.com/apache/airflow/commit/7edfac957bc17c9abcdcfe8d524772bd2783ac5a) | 2020-09-09 | Add connection caching to KubernetesHook (#10447) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [90c150568](https://github.com/apache/airflow/commit/90c1505686b063332dba87c0c948a8b29d8fd1d4) | 2020-09-04 | Make grace_period_seconds option on K8sPodOperator (#10727) | -| [338b412c0](https://github.com/apache/airflow/commit/338b412c04abc3fef8126f9724b448d1a9fd0bbc) | 2020-09-02 | Add on_kill support for the KubernetesPodOperator (#10666) | -| [596bc1337](https://github.com/apache/airflow/commit/596bc1337988f9377571295ddb748ef8703c19c0) | 2020-08-31 | Adds 'cncf.kubernetes' package back to backport provider packages. 
(#10659) | -| [1e5aa4465](https://github.com/apache/airflow/commit/1e5aa4465c5ef8f05745bda64da62fe542f2fe28) | 2020-08-26 | Spark-on-K8S sensor - add driver logs (#10023) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25 | Remove all "noinspection" comments native to IntelliJ (#10525) | -| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22 | Replace assigment with Augmented assignment (#10468) | -| [8cd2be9e1](https://github.com/apache/airflow/commit/8cd2be9e161635480581a0dc723b69ed24166f8d) | 2020-08-11 | Fix KubernetesPodOperator reattachment (#10230) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [f1fd3e2c4](https://github.com/apache/airflow/commit/f1fd3e2c453ddce3e87ce63787598fea0707ffcf) | 2020-07-31 | Fix typo on reattach property of kubernetespodoperator (#10056) | -| [03c435174](https://github.com/apache/airflow/commit/03c43517445019081c55b4ac5fad3b0debdee336) | 2020-07-31 | Allow `image` in `KubernetesPodOperator` to be templated (#10068) | -| [88c160306](https://github.com/apache/airflow/commit/88c1603060fd484d4145bc253c0dc0e6797e13dd) | 2020-07-31 | Improve docstring note about GKEStartPodOperator on KubernetesPodOperator (#10049) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [c2db0dfeb](https://github.com/apache/airflow/commit/c2db0dfeb13ee679bf4d7b57874f0fcb39c0f0ed) | 2020-07-22 | More strict rules in mypy (#9705) (#9906) | -| [719ae2bf6](https://github.com/apache/airflow/commit/719ae2bf6227894c3e926f717eb4dc669549d615) | 2020-07-22 | Dump Pod as YAML in logs for KubernetesPodOperator (#9895) | -| [840799d55](https://github.com/apache/airflow/commit/840799d5597f0d005e1deec154f6c95bad6dce61) | 2020-07-20 | Improve KubernetesPodOperator guide (#9079) | -| [44d4ae809](https://github.com/apache/airflow/commit/44d4ae809c1e3784ff95b6a5e95113c3412e56b3) | 2020-07-06 | Upgrade to latest pre-commit checks (#9686) | -| [8bd15ef63](https://github.com/apache/airflow/commit/8bd15ef634cca40f3cf6ca3442262f3e05144512) | 2020-07-01 | Switches to Helm Chart for Kubernetes tests (#9468) | -| [40bf8f28f](https://github.com/apache/airflow/commit/40bf8f28f97f17f40d993d207ea740eba54593ee) | 2020-06-18 | Detect automatically the lack of reference to the guide in the operator descriptions (#9290) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| 
[e742ef7c7](https://github.com/apache/airflow/commit/e742ef7c704c18bf69b7a7235adb7f75e742f902) | 2020-05-23 | Fix typo in test_project_structure (#8978) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [8985df0bf](https://github.com/apache/airflow/commit/8985df0bfcb5f2b2cd69a21b9814021f9f8ce953) | 2020-05-16 | Monitor pods by labels instead of names (#6377) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [f82ad452b](https://github.com/apache/airflow/commit/f82ad452b0f4ebd1428bc9669641a632dc87bb8c) | 2020-05-15 | Fix KubernetesPodOperator pod name length validation (#8829) | -| [1ccafc617](https://github.com/apache/airflow/commit/1ccafc617c4cb9622e3460ad7c190f3ee67c3b32) | 2020-04-02 | Add spark_kubernetes system test (#7875) | -| [cd546b664](https://github.com/apache/airflow/commit/cd546b664fa35a2bf85acd77af578c909a327d92) | 2020-03-23 | Add missing call to Super class in 'cncf' & 'docker' providers (#7825) | -| [6c39a3bf9](https://github.com/apache/airflow/commit/6c39a3bf97414ba2438669894db65c36ccbeb61a) | 2020-03-10 | [AIRFLOW-6542] Add spark-on-k8s operator/hook/sensor (#7163) | -| [42eef3821](https://github.com/apache/airflow/commit/42eef38217e709bc7a7f71bf0286e9e61293a43e) | 2020-03-07 | [AIRFLOW-6877] Add cross-provider dependencies as extras (#7506) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [0ec277412](https://github.com/apache/airflow/commit/0ec2774120d43fa667a371b384e6006e1d1c7821) | 2020-02-24 | [AIRFLOW-5629] Implement Kubernetes priorityClassName in KubernetesPodOperator (#7395) | -| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18 | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) | -| [967930c0c](https://github.com/apache/airflow/commit/967930c0cb6e2293f2a49e5c9add5aa1917f3527) | 2020-02-11 | [AIRFLOW-5413] Allow K8S worker pod to be configured from JSON/YAML file (#6230) | -| [96f834389](https://github.com/apache/airflow/commit/96f834389e03884025534fabd862155061f53fd0) | 2020-02-03 | [AIRFLOW-6678] Pull event logs from Kubernetes (#7292) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02 | [AIRFLOW-6708] Set unique logger names (#7330) | -| [373c6aa4a](https://github.com/apache/airflow/commit/373c6aa4a208284b5ff72987e4bd8f4e2ada1a1b) | 2020-01-30 | [AIRFLOW-6682] Move GCP classes to providers package (#7295) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 
(#7287) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/cncf/kubernetes/README.md b/airflow/providers/cncf/kubernetes/README.md deleted file mode 100644 index ba940e98a1dd3..0000000000000 --- a/airflow/providers/cncf/kubernetes/README.md +++ /dev/null @@ -1,221 +0,0 @@ - - - -# Package apache-airflow-providers-cncf-kubernetes - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) - -[Installation](#installation) - -[PIP requirements](#pip-requirements) - -[Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [New operators](#new-operators) - - [Moved operators](#moved-operators) - - [Sensors](#sensors) - - [New sensors](#new-sensors) - - [Hooks](#hooks) - - [New hooks](#new-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for the `cncf.kubernetes` provider. All classes for this provider package -are in the `airflow.providers.cncf.kubernetes` Python package. - - -## Additional limitations - -This provider is only usable with Apache Airflow >= 1.10.12 due to refactorings implemented in -Apache Airflow 1.10.11 and fixes implemented in 1.10.12. The package has appropriate requirements -set, so you should not be able to install it with Apache Airflow < 1.10.12. - - -## Installation - -NOTE! - -In November 2020, a new version of pip (20.3) was released with a new 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to installation errors, depending on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -(`pip install --upgrade pip==20.2.4`) or, if you use pip 20.3, add the option -`--use-deprecated legacy-resolver` to your pip install command. - -You can install this package on top of an existing Airflow 2.* installation via -`pip install apache-airflow-providers-cncf-kubernetes` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| cryptography | >=2.0.0 | -| kubernetes | >=3.0.0, <12.0.0 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, and secrets for the `cncf.kubernetes` provider -are in the `airflow.providers.cncf.kubernetes` package. 
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - -### New operators - -| New Airflow 2.0 operators: `airflow.providers.cncf.kubernetes` package | -|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.spark_kubernetes.SparkKubernetesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/cncf/kubernetes/operators/spark_kubernetes.py) | - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.cncf.kubernetes` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.kubernetes_pod.KubernetesPodOperator](https://github.com/apache/airflow/blob/master/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py) | [contrib.operators.kubernetes_pod_operator.KubernetesPodOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/kubernetes_pod_operator.py) | - - -## Sensors - - -### New sensors - -| New Airflow 2.0 sensors: `airflow.providers.cncf.kubernetes` package | -|:--------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [sensors.spark_kubernetes.SparkKubernetesSensor](https://github.com/apache/airflow/blob/master/airflow/providers/cncf/kubernetes/sensors/spark_kubernetes.py) | - - - -## Hooks - - -### New hooks - -| New Airflow 2.0 hooks: `airflow.providers.cncf.kubernetes` package | -|:---------------------------------------------------------------------------------------------------------------------------------------| -| [hooks.kubernetes.KubernetesHook](https://github.com/apache/airflow/blob/master/airflow/providers/cncf/kubernetes/hooks/kubernetes.py) | - - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [bd90136aa](https://github.com/apache/airflow/commit/bd90136aaf5035e3234fe545b79a3e4aad21efe2) | 2020-11-30 | Move operator guides to provider documentation packages (#12681) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [de3b1e687](https://github.com/apache/airflow/commit/de3b1e687b26c524c6909b7b4dfbb60d25019751) | 2020-11-28 | Move connection guides to provider documentation packages (#12653) | -| 
[c02a3f59e](https://github.com/apache/airflow/commit/c02a3f59e45d3cdd0e4c1c3bda2c62b951bcbea3) | 2020-11-23 | Spark-on-k8s sensor logs - properly pass defined namespace to pod log call (#11199) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [9e089ab89](https://github.com/apache/airflow/commit/9e089ab89567b0a52b232f22ed3e708a05137924) | 2020-11-19 | Fix Kube tests (#12479) | -| [d32fe78c0](https://github.com/apache/airflow/commit/d32fe78c0d9d14f016df70a462dc3972f28abe9d) | 2020-11-18 | Update readmes for cncf.kube provider fixes (#12457) | -| [d84a52dc8](https://github.com/apache/airflow/commit/d84a52dc8fc597d89c5bb4941df67f5f35b70a29) | 2020-11-18 | Fix broken example_kubernetes DAG (#12455) | -| [7c8b71d20](https://github.com/apache/airflow/commit/7c8b71d2012d56888f21b24c4844a6838dc3e4b1) | 2020-11-18 | Fix backwards compatibility further (#12451) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [7ca0b6f12](https://github.com/apache/airflow/commit/7ca0b6f121c9cec6e25de130f86a56d7c7fbe38c) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) (#12438) | -| [763b40d22](https://github.com/apache/airflow/commit/763b40d223e5e5512494a97f8335e16960e6adc3) | 2020-11-18 | Raise correct Warning in kubernetes/backcompat/volume_mount.py (#12432) | -| [bc4bb3058](https://github.com/apache/airflow/commit/bc4bb30588607b10b069ab63ddf2ba7b7ee673ed) | 2020-11-18 | Fix docstrings for Kubernetes Backcompat module (#12422) | -| [cab86d80d](https://github.com/apache/airflow/commit/cab86d80d48227849906319917126f6d558b2e00) | 2020-11-17 | Make K8sPodOperator backwards compatible (#12384) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [221f809c1](https://github.com/apache/airflow/commit/221f809c1b4e4b78d5a437d012aa7daffd8410a4) | 2020-11-14 | Fix full_pod_spec for k8spodoperator (#12354) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [3f59e75cd](https://github.com/apache/airflow/commit/3f59e75cdf4a95829ac60b151135e03267e63a12) | 2020-11-09 | KubernetesPodOperator: use randomized name to get the failure status (#12171) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [7825be50d](https://github.com/apache/airflow/commit/7825be50d80d04da0db8fcee55df5e1339864c88) | 2020-11-05 | Randomize pod name (#12117) | -| [91a64db50](https://github.com/apache/airflow/commit/91a64db505e50712cd53928b4f2b84aece3cc1c0) | 2020-11-04 | Format all files 
(without excepions) by black (#12091) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [53e606210](https://github.com/apache/airflow/commit/53e6062105be0ae1761a354e2055eb0779d12e73) | 2020-10-21 | Enforce strict rules for yamllint (#11709) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [eee4e30f2](https://github.com/apache/airflow/commit/eee4e30f2caf02e16088ff5d1af1ea380a73e982) | 2020-10-15 | Add better debug logging to K8sexec and K8sPodOp (#11502) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [8640fb6c1](https://github.com/apache/airflow/commit/8640fb6c100a2c6aa231798559ba194331576975) | 2020-10-09 | fix tests (#11368) | -| [298052fce](https://github.com/apache/airflow/commit/298052fcee9d30b1f60b8dc1c9006398cd16645e) | 2020-10-10 | [airflow/providers/cncf/kubernetes] correct hook methods name (#11008) | -| [49aad025b](https://github.com/apache/airflow/commit/49aad025b53211a5815b10aa35f7d7b489cb5316) | 2020-10-09 | Users can specify sub-secrets and paths k8spodop (#11369) | -| [b93b6c5be](https://github.com/apache/airflow/commit/b93b6c5be3ab60960f650d0d4ee6c91271ac7909) | 2020-10-05 | Allow labels in KubernetesPodOperator to be templated (#10796) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [a888198c2](https://github.com/apache/airflow/commit/a888198c27bcdbc4538c02360c308ffcaca182fa) | 2020-09-27 | Allow overrides for pod_template_file (#11162) | -| [0161b5ea2](https://github.com/apache/airflow/commit/0161b5ea2b805d62a0317e5cab6f797b92c8abf1) | 2020-09-26 | Increasing type coverage for multiple provider (#11159) | -| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24 | Fix incorrect Usage of Optional[bool] (#11138) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [b61225a88](https://github.com/apache/airflow/commit/b61225a8850b20be17842c2428b91d873584c4da) | 2020-09-21 | Add D204 pydocstyle check (#11031) | -| [cba51d49e](https://github.com/apache/airflow/commit/cba51d49eea6a0563044191c8111978836d697ef) | 2020-09-17 | Simplify the K8sExecutor and K8sPodOperator (#10393) | -| 
[1294e15d4](https://github.com/apache/airflow/commit/1294e15d44c08498e7f1022fdd6f0bc5e50e533f) | 2020-09-16 | KubernetesPodOperator template fix (#10963) | -| [5d6d5a2f7](https://github.com/apache/airflow/commit/5d6d5a2f7d330c83297e1dc35728a0ba803aa866) | 2020-09-14 | Allow to specify path to kubeconfig in KubernetesHook (#10453) | -| [7edfac957](https://github.com/apache/airflow/commit/7edfac957bc17c9abcdcfe8d524772bd2783ac5a) | 2020-09-09 | Add connection caching to KubernetesHook (#10447) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [90c150568](https://github.com/apache/airflow/commit/90c1505686b063332dba87c0c948a8b29d8fd1d4) | 2020-09-04 | Make grace_period_seconds option on K8sPodOperator (#10727) | -| [338b412c0](https://github.com/apache/airflow/commit/338b412c04abc3fef8126f9724b448d1a9fd0bbc) | 2020-09-02 | Add on_kill support for the KubernetesPodOperator (#10666) | -| [596bc1337](https://github.com/apache/airflow/commit/596bc1337988f9377571295ddb748ef8703c19c0) | 2020-08-31 | Adds 'cncf.kubernetes' package back to backport provider packages. (#10659) | -| [1e5aa4465](https://github.com/apache/airflow/commit/1e5aa4465c5ef8f05745bda64da62fe542f2fe28) | 2020-08-26 | Spark-on-K8S sensor - add driver logs (#10023) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25 | Remove all "noinspection" comments native to IntelliJ (#10525) | -| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22 | Replace assigment with Augmented assignment (#10468) | -| [8cd2be9e1](https://github.com/apache/airflow/commit/8cd2be9e161635480581a0dc723b69ed24166f8d) | 2020-08-11 | Fix KubernetesPodOperator reattachment (#10230) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [f1fd3e2c4](https://github.com/apache/airflow/commit/f1fd3e2c453ddce3e87ce63787598fea0707ffcf) | 2020-07-31 | Fix typo on reattach property of kubernetespodoperator (#10056) | -| [03c435174](https://github.com/apache/airflow/commit/03c43517445019081c55b4ac5fad3b0debdee336) | 2020-07-31 | Allow `image` in `KubernetesPodOperator` to be templated (#10068) | -| [88c160306](https://github.com/apache/airflow/commit/88c1603060fd484d4145bc253c0dc0e6797e13dd) | 2020-07-31 | Improve docstring note about GKEStartPodOperator on KubernetesPodOperator (#10049) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| 
[c2db0dfeb](https://github.com/apache/airflow/commit/c2db0dfeb13ee679bf4d7b57874f0fcb39c0f0ed) | 2020-07-22 | More strict rules in mypy (#9705) (#9906) | -| [719ae2bf6](https://github.com/apache/airflow/commit/719ae2bf6227894c3e926f717eb4dc669549d615) | 2020-07-22 | Dump Pod as YAML in logs for KubernetesPodOperator (#9895) | -| [840799d55](https://github.com/apache/airflow/commit/840799d5597f0d005e1deec154f6c95bad6dce61) | 2020-07-20 | Improve KubernetesPodOperator guide (#9079) | -| [44d4ae809](https://github.com/apache/airflow/commit/44d4ae809c1e3784ff95b6a5e95113c3412e56b3) | 2020-07-06 | Upgrade to latest pre-commit checks (#9686) | -| [8bd15ef63](https://github.com/apache/airflow/commit/8bd15ef634cca40f3cf6ca3442262f3e05144512) | 2020-07-01 | Switches to Helm Chart for Kubernetes tests (#9468) | -| [40bf8f28f](https://github.com/apache/airflow/commit/40bf8f28f97f17f40d993d207ea740eba54593ee) | 2020-06-18 | Detect automatically the lack of reference to the guide in the operator descriptions (#9290) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [e742ef7c7](https://github.com/apache/airflow/commit/e742ef7c704c18bf69b7a7235adb7f75e742f902) | 2020-05-23 | Fix typo in test_project_structure (#8978) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [8985df0bf](https://github.com/apache/airflow/commit/8985df0bfcb5f2b2cd69a21b9814021f9f8ce953) | 2020-05-16 | Monitor pods by labels instead of names (#6377) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [f82ad452b](https://github.com/apache/airflow/commit/f82ad452b0f4ebd1428bc9669641a632dc87bb8c) | 2020-05-15 | Fix KubernetesPodOperator pod name length validation (#8829) | -| [1ccafc617](https://github.com/apache/airflow/commit/1ccafc617c4cb9622e3460ad7c190f3ee67c3b32) | 2020-04-02 | Add spark_kubernetes system test (#7875) | -| [cd546b664](https://github.com/apache/airflow/commit/cd546b664fa35a2bf85acd77af578c909a327d92) | 2020-03-23 | Add missing call to Super class in 'cncf' & 'docker' providers (#7825) | -| [6c39a3bf9](https://github.com/apache/airflow/commit/6c39a3bf97414ba2438669894db65c36ccbeb61a) | 2020-03-10 | [AIRFLOW-6542] Add spark-on-k8s operator/hook/sensor (#7163) | -| [42eef3821](https://github.com/apache/airflow/commit/42eef38217e709bc7a7f71bf0286e9e61293a43e) | 2020-03-07 | [AIRFLOW-6877] Add cross-provider dependencies as extras (#7506) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [0ec277412](https://github.com/apache/airflow/commit/0ec2774120d43fa667a371b384e6006e1d1c7821) | 2020-02-24 | [AIRFLOW-5629] Implement Kubernetes priorityClassName in KubernetesPodOperator (#7395) | -| 
[9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18 | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) | -| [967930c0c](https://github.com/apache/airflow/commit/967930c0cb6e2293f2a49e5c9add5aa1917f3527) | 2020-02-11 | [AIRFLOW-5413] Allow K8S worker pod to be configured from JSON/YAML file (#6230) | -| [96f834389](https://github.com/apache/airflow/commit/96f834389e03884025534fabd862155061f53fd0) | 2020-02-03 | [AIRFLOW-6678] Pull event logs from Kubernetes (#7292) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02 | [AIRFLOW-6708] Set unique logger names (#7330) | -| [373c6aa4a](https://github.com/apache/airflow/commit/373c6aa4a208284b5ff72987e4bd8f4e2ada1a1b) | 2020-01-30 | [AIRFLOW-6682] Move GCP classes to providers package (#7295) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/cncf/kubernetes/hooks/kubernetes.py b/airflow/providers/cncf/kubernetes/hooks/kubernetes.py index cf27713a991fd..10c751078454a 100644 --- a/airflow/providers/cncf/kubernetes/hooks/kubernetes.py +++ b/airflow/providers/cncf/kubernetes/hooks/kubernetes.py @@ -17,10 +17,14 @@ import tempfile from typing import Any, Dict, Generator, Optional, Tuple, Union -import yaml from cached_property import cached_property from kubernetes import client, config, watch +try: + import airflow.utils.yaml as yaml +except ImportError: + import yaml + from airflow.exceptions import AirflowException from airflow.hooks.base import BaseHook diff --git a/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py b/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py index 3f42ab10630eb..e6d1bae02b83e 100644 --- a/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py +++ b/airflow/providers/cncf/kubernetes/operators/kubernetes_pod.py @@ -19,9 +19,13 @@ import warnings from typing import Any, Dict, Iterable, List, Optional, Tuple -import yaml from kubernetes.client import CoreV1Api, models as k8s +try: + import airflow.utils.yaml as yaml +except ImportError: + import yaml + from airflow.exceptions import AirflowException from airflow.kubernetes import kube_client, pod_generator, pod_launcher from airflow.kubernetes.pod_generator import PodGenerator diff --git a/airflow/providers/databricks/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/databricks/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index f945b0a982413..0000000000000 --- a/airflow/providers/databricks/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,24 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| 
[12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [f1073381e](https://github.com/apache/airflow/commit/f1073381ed764a218b2502d15ca28a5b326f9f2d) | 2020-05-22 | Add support for spark python and submit tasks in Databricks operator(#8846) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [649935e8c](https://github.com/apache/airflow/commit/649935e8ce906759fdd08884ab1e3db0a03f6953) | 2020-04-27 | [AIRFLOW-8472]: `PATCH` for Databricks hook `_do_api_call` (#8473) | -| [16903ba3a](https://github.com/apache/airflow/commit/16903ba3a6ee5e61f1c6b5d17a8c6cf3c3a9a7f6) | 2020-04-24 | [AIRFLOW-8474]: Adding possibility to get job_id from Databricks run (#8475) | -| [5648dfbc3](https://github.com/apache/airflow/commit/5648dfbc300337b10567ef4e07045ea29d33ec06) | 2020-03-23 | Add missing call to Super class in 'amazon', 'cloudant & 'databricks' providers (#7827) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/databricks/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/databricks/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index f5f76596d7367..0000000000000 --- 
a/airflow/providers/databricks/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,20 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:----------------------------------------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [54353f874](https://github.com/apache/airflow/commit/54353f874589f9be236458995147d13e0e763ffc) | 2020-09-27 | Increase type coverage for five different providers (#11170) | -| [966a06d96](https://github.com/apache/airflow/commit/966a06d96bbfe330f1d2825f7b7eaa16d43b7a00) | 2020-09-18 | Fetching databricks host from connection if not supplied in extras. (#10762) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [bfefcce0c](https://github.com/apache/airflow/commit/bfefcce0c9f273042dd79ff50eb9af032ecacf59) | 2020-08-25 | Updated REST API call so GET requests pass payload in query string instead of request body (#10462) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25 | Remove all "noinspection" comments native to IntelliJ (#10525) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [e13a14c87](https://github.com/apache/airflow/commit/e13a14c8730f4f633d996dd7d3468fe827136a84) | 2020-06-21 | Enable & Fix Whitespace related PyDocStyle Checks (#9458) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/databricks/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/databricks/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 528a7e085f91f..0000000000000 --- a/airflow/providers/databricks/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,10 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| 
[349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/databricks/BACKPORT_PROVIDER_CHANGES_2020.11.23.md b/airflow/providers/databricks/BACKPORT_PROVIDER_CHANGES_2020.11.23.md deleted file mode 100644 index 20dd200d83cc4..0000000000000 --- a/airflow/providers/databricks/BACKPORT_PROVIDER_CHANGES_2020.11.23.md +++ /dev/null @@ -1,18 +0,0 @@ - - -### Release 2020.11.23 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [19b7e4565](https://github.com/apache/airflow/commit/19b7e4565e6372d50ef0fbb5678a484a7afbdbee) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [b02722313](https://github.com/apache/airflow/commit/b0272231320a4975cc39968dec8f0abf7a5cca11) | 2020-11-13 | Add install/uninstall api to databricks hook (#12316) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [7e0d08e1f](https://github.com/apache/airflow/commit/7e0d08e1f074871307f0eb9e9ae7a66f7ce67626) | 2020-11-09 | Add how-to Guide for Databricks operators (#12175) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | diff --git a/airflow/providers/databricks/BACKPORT_PROVIDER_README.md b/airflow/providers/databricks/BACKPORT_PROVIDER_README.md deleted file mode 100644 index e86909a618abb..0000000000000 --- a/airflow/providers/databricks/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,166 +0,0 @@ - - - -# Package apache-airflow-backport-providers-databricks - -Release: 2020.11.23 - -**Table of contents** - -- [Backport package](#backport-package) 
-- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 2020.11.23](#release-20201123) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for `databricks` provider. All classes for this provider package -are in `airflow.providers.databricks` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-databricks` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| requests | >=2.20.0, <3 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `databricks` provider -are in the `airflow.providers.databricks` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.databricks` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:-------------------------------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.databricks.DatabricksRunNowOperator](https://github.com/apache/airflow/blob/master/airflow/providers/databricks/operators/databricks.py) | [contrib.operators.databricks_operator.DatabricksRunNowOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/databricks_operator.py) | -| [operators.databricks.DatabricksSubmitRunOperator](https://github.com/apache/airflow/blob/master/airflow/providers/databricks/operators/databricks.py) | [contrib.operators.databricks_operator.DatabricksSubmitRunOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/databricks_operator.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.databricks` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------| -| [hooks.databricks.DatabricksHook](https://github.com/apache/airflow/blob/master/airflow/providers/databricks/hooks/databricks.py) | [contrib.hooks.databricks_hook.DatabricksHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/databricks_hook.py) | - - - -## Releases - -### Release 2020.11.23 - -| Commit | Committed | Subject | 
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [19b7e4565](https://github.com/apache/airflow/commit/19b7e4565e6372d50ef0fbb5678a484a7afbdbee) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [b02722313](https://github.com/apache/airflow/commit/b0272231320a4975cc39968dec8f0abf7a5cca11) | 2020-11-13 | Add install/uninstall api to databricks hook (#12316) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [7e0d08e1f](https://github.com/apache/airflow/commit/7e0d08e1f074871307f0eb9e9ae7a66f7ce67626) | 2020-11-09 | Add how-to Guide for Databricks operators (#12175) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:----------------------------------------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed 
month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [54353f874](https://github.com/apache/airflow/commit/54353f874589f9be236458995147d13e0e763ffc) | 2020-09-27 | Increase type coverage for five different providers (#11170) | -| [966a06d96](https://github.com/apache/airflow/commit/966a06d96bbfe330f1d2825f7b7eaa16d43b7a00) | 2020-09-18 | Fetching databricks host from connection if not supplied in extras. (#10762) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [bfefcce0c](https://github.com/apache/airflow/commit/bfefcce0c9f273042dd79ff50eb9af032ecacf59) | 2020-08-25 | Updated REST API call so GET requests pass payload in query string instead of request body (#10462) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25 | Remove all "noinspection" comments native to IntelliJ (#10525) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [e13a14c87](https://github.com/apache/airflow/commit/e13a14c8730f4f633d996dd7d3468fe827136a84) | 2020-06-21 | Enable & Fix Whitespace related PyDocStyle Checks (#9458) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [f1073381e](https://github.com/apache/airflow/commit/f1073381ed764a218b2502d15ca28a5b326f9f2d) | 2020-05-22 | Add support for spark python and submit tasks in Databricks operator(#8846) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [649935e8c](https://github.com/apache/airflow/commit/649935e8ce906759fdd08884ab1e3db0a03f6953) | 2020-04-27 | [AIRFLOW-8472]: `PATCH` for Databricks hook `_do_api_call` (#8473) | -| [16903ba3a](https://github.com/apache/airflow/commit/16903ba3a6ee5e61f1c6b5d17a8c6cf3c3a9a7f6) | 2020-04-24 | [AIRFLOW-8474]: Adding possibility to get job_id from Databricks run (#8475) | -| [5648dfbc3](https://github.com/apache/airflow/commit/5648dfbc300337b10567ef4e07045ea29d33ec06) | 2020-03-23 | Add missing call to Super class in 'amazon', 'cloudant & 'databricks' providers (#7827) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/databricks/CHANGELOG.rst b/airflow/providers/databricks/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/databricks/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. 
+ + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. diff --git a/airflow/providers/databricks/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/databricks/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index 37c6c751319a9..0000000000000 --- a/airflow/providers/databricks/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,59 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [bd90136aa](https://github.com/apache/airflow/commit/bd90136aaf5035e3234fe545b79a3e4aad21efe2) | 2020-11-30 | Move operator guides to provider documentation packages (#12681) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [7ca0b6f12](https://github.com/apache/airflow/commit/7ca0b6f121c9cec6e25de130f86a56d7c7fbe38c) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) (#12438) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [b02722313](https://github.com/apache/airflow/commit/b0272231320a4975cc39968dec8f0abf7a5cca11) | 2020-11-13 | Add install/uninstall api to databricks hook (#12316) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [7e0d08e1f](https://github.com/apache/airflow/commit/7e0d08e1f074871307f0eb9e9ae7a66f7ce67626) | 2020-11-09 | Add how-to Guide for Databricks operators (#12175) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| 
[5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [54353f874](https://github.com/apache/airflow/commit/54353f874589f9be236458995147d13e0e763ffc) | 2020-09-27 | Increase type coverage for five different providers (#11170) | -| [966a06d96](https://github.com/apache/airflow/commit/966a06d96bbfe330f1d2825f7b7eaa16d43b7a00) | 2020-09-18 | Fetching databricks host from connection if not supplied in extras. (#10762) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [bfefcce0c](https://github.com/apache/airflow/commit/bfefcce0c9f273042dd79ff50eb9af032ecacf59) | 2020-08-25 | Updated REST API call so GET requests pass payload in query string instead of request body (#10462) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25 | Remove all "noinspection" comments native to IntelliJ (#10525) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [e13a14c87](https://github.com/apache/airflow/commit/e13a14c8730f4f633d996dd7d3468fe827136a84) | 2020-06-21 | Enable & Fix Whitespace related PyDocStyle Checks (#9458) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare 
backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [f1073381e](https://github.com/apache/airflow/commit/f1073381ed764a218b2502d15ca28a5b326f9f2d) | 2020-05-22 | Add support for spark python and submit tasks in Databricks operator(#8846) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [649935e8c](https://github.com/apache/airflow/commit/649935e8ce906759fdd08884ab1e3db0a03f6953) | 2020-04-27 | [AIRFLOW-8472]: `PATCH` for Databricks hook `_do_api_call` (#8473) | -| [16903ba3a](https://github.com/apache/airflow/commit/16903ba3a6ee5e61f1c6b5d17a8c6cf3c3a9a7f6) | 2020-04-24 | [AIRFLOW-8474]: Adding possibility to get job_id from Databricks run (#8475) | -| [5648dfbc3](https://github.com/apache/airflow/commit/5648dfbc300337b10567ef4e07045ea29d33ec06) | 2020-03-23 | Add missing call to Super class in 'amazon', 'cloudant & 'databricks' providers (#7827) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/databricks/README.md b/airflow/providers/databricks/README.md deleted file mode 100644 index c4c2bc1270199..0000000000000 --- a/airflow/providers/databricks/README.md +++ /dev/null @@ -1,153 +0,0 @@ - - - -# Package apache-airflow-providers-databricks - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - 
[Operators](#operators) - - [Moved operators](#moved-operators) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for `databricks` provider. All classes for this provider package -are in `airflow.providers.databricks` python package. - - - -## Installation - -NOTE! - -On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option -`--use-deprecated legacy-resolver` to your pip install command. - -You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-databricks` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| requests | >=2.20.0, <3 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `databricks` provider -are in the `airflow.providers.databricks` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.databricks` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:-------------------------------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.databricks.DatabricksRunNowOperator](https://github.com/apache/airflow/blob/master/airflow/providers/databricks/operators/databricks.py) | [contrib.operators.databricks_operator.DatabricksRunNowOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/databricks_operator.py) | -| [operators.databricks.DatabricksSubmitRunOperator](https://github.com/apache/airflow/blob/master/airflow/providers/databricks/operators/databricks.py) | [contrib.operators.databricks_operator.DatabricksSubmitRunOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/databricks_operator.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.databricks` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------| -| [hooks.databricks.DatabricksHook](https://github.com/apache/airflow/blob/master/airflow/providers/databricks/hooks/databricks.py) | [contrib.hooks.databricks_hook.DatabricksHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/databricks_hook.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | 
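As a point of reference for the moved-class tables above, here is a minimal sketch of using one of the relocated operators from its new `airflow.providers.databricks` location; the DAG id, cluster spec, and notebook path are placeholder assumptions, not values taken from this diff:

```python
from datetime import datetime

from airflow import DAG
from airflow.providers.databricks.operators.databricks import DatabricksSubmitRunOperator

with DAG(
    dag_id="databricks_submit_run_example",  # hypothetical DAG id
    start_date=datetime(2021, 1, 1),
    schedule_interval=None,
) as dag:
    # Submit a one-off run; `json` mirrors the Databricks Runs Submit API payload.
    submit_run = DatabricksSubmitRunOperator(
        task_id="submit_run",
        json={
            "new_cluster": {
                "spark_version": "7.3.x-scala2.12",  # placeholder cluster spec
                "node_type_id": "i3.xlarge",
                "num_workers": 2,
            },
            "notebook_task": {"notebook_path": "/Users/user@example.com/demo"},
        },
    )
```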
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [bd90136aa](https://github.com/apache/airflow/commit/bd90136aaf5035e3234fe545b79a3e4aad21efe2) | 2020-11-30 | Move operator guides to provider documentation packages (#12681) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [7ca0b6f12](https://github.com/apache/airflow/commit/7ca0b6f121c9cec6e25de130f86a56d7c7fbe38c) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) (#12438) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [b02722313](https://github.com/apache/airflow/commit/b0272231320a4975cc39968dec8f0abf7a5cca11) | 2020-11-13 | Add install/uninstall api to databricks hook (#12316) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [7e0d08e1f](https://github.com/apache/airflow/commit/7e0d08e1f074871307f0eb9e9ae7a66f7ce67626) | 2020-11-09 | Add how-to Guide for Databricks operators (#12175) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| 
[16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [54353f874](https://github.com/apache/airflow/commit/54353f874589f9be236458995147d13e0e763ffc) | 2020-09-27 | Increase type coverage for five different providers (#11170) | -| [966a06d96](https://github.com/apache/airflow/commit/966a06d96bbfe330f1d2825f7b7eaa16d43b7a00) | 2020-09-18 | Fetching databricks host from connection if not supplied in extras. (#10762) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [bfefcce0c](https://github.com/apache/airflow/commit/bfefcce0c9f273042dd79ff50eb9af032ecacf59) | 2020-08-25 | Updated REST API call so GET requests pass payload in query string instead of request body (#10462) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25 | Remove all "noinspection" comments native to IntelliJ (#10525) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [e13a14c87](https://github.com/apache/airflow/commit/e13a14c8730f4f633d996dd7d3468fe827136a84) | 2020-06-21 | Enable & Fix Whitespace related PyDocStyle Checks (#9458) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 
20 remaining wrongly named operators. (#8994) | -| [f1073381e](https://github.com/apache/airflow/commit/f1073381ed764a218b2502d15ca28a5b326f9f2d) | 2020-05-22 | Add support for spark python and submit tasks in Databricks operator(#8846) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [649935e8c](https://github.com/apache/airflow/commit/649935e8ce906759fdd08884ab1e3db0a03f6953) | 2020-04-27 | [AIRFLOW-8472]: `PATCH` for Databricks hook `_do_api_call` (#8473) | -| [16903ba3a](https://github.com/apache/airflow/commit/16903ba3a6ee5e61f1c6b5d17a8c6cf3c3a9a7f6) | 2020-04-24 | [AIRFLOW-8474]: Adding possibility to get job_id from Databricks run (#8475) | -| [5648dfbc3](https://github.com/apache/airflow/commit/5648dfbc300337b10567ef4e07045ea29d33ec06) | 2020-03-23 | Add missing call to Super class in 'amazon', 'cloudant & 'databricks' providers (#7827) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/datadog/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/datadog/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index 3fbe69dbbdbeb..0000000000000 --- a/airflow/providers/datadog/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,19 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 
2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/datadog/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/datadog/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 63e23bf0a7026..0000000000000 --- a/airflow/providers/datadog/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,15 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| 
[89fe5feb7](https://github.com/apache/airflow/commit/89fe5feb7b8088db7b5aaaec2b7a292cbc507209) | 2020-07-20 | improve typing for datadog provider (#9775) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/datadog/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/datadog/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 528a7e085f91f..0000000000000 --- a/airflow/providers/datadog/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,10 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/datadog/BACKPORT_PROVIDER_README.md b/airflow/providers/datadog/BACKPORT_PROVIDER_README.md deleted file mode 100644 index 543a06f36426e..0000000000000 --- a/airflow/providers/datadog/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,136 +0,0 @@ - - - -# Package apache-airflow-backport-providers-datadog - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Sensors](#sensors) - - [Moved sensors](#moved-sensors) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for `datadog` provider. All classes for this provider package -are in `airflow.providers.datadog` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-datadog` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| datadog | >=0.14.0 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `datadog` provider -are in the `airflow.providers.datadog` package. 
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Sensors - - - -### Moved sensors - -| Airflow 2.0 sensors: `airflow.providers.datadog` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------| -| [sensors.datadog.DatadogSensor](https://github.com/apache/airflow/blob/master/airflow/providers/datadog/sensors/datadog.py) | [contrib.sensors.datadog_sensor.DatadogSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/datadog_sensor.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.datadog` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------| -| [hooks.datadog.DatadogHook](https://github.com/apache/airflow/blob/master/airflow/providers/datadog/hooks/datadog.py) | [contrib.hooks.datadog_hook.DatadogHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/datadog_hook.py) | - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown 
refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [89fe5feb7](https://github.com/apache/airflow/commit/89fe5feb7b8088db7b5aaaec2b7a292cbc507209) | 2020-07-20 | improve typing for datadog provider (#9775) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/datadog/CHANGELOG.rst b/airflow/providers/datadog/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/datadog/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. 
Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. diff --git a/airflow/providers/datadog/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/datadog/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index 4fb2934a66b05..0000000000000 --- a/airflow/providers/datadog/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,44 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers 
readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [89fe5feb7](https://github.com/apache/airflow/commit/89fe5feb7b8088db7b5aaaec2b7a292cbc507209) | 2020-07-20 | improve typing for datadog provider (#9775) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/datadog/README.md b/airflow/providers/datadog/README.md deleted file mode 100644 index ff24ee3e249fe..0000000000000 --- a/airflow/providers/datadog/README.md +++ /dev/null @@ -1,137 +0,0 @@ - - - -# Package apache-airflow-providers-datadog - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Sensors](#sensors) - - [Moved sensors](#moved-sensors) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for `datadog` provider. All classes for this provider package -are in `airflow.providers.datadog` python package. - - - -## Installation - -NOTE! - -On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option -`--use-deprecated legacy-resolver` to your pip install command. - -You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-datadog` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| datadog | >=0.14.0 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `datadog` provider -are in the `airflow.providers.datadog` package. 
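For orientation, a hedged usage sketch of the moved hook (the import path comes from the table below; `send_metric` and its arguments reflect the hook as shipped upstream, but treat the exact signature, connection id, and metric details as assumptions):

```python
from airflow.providers.datadog.hooks.datadog import DatadogHook

# Connection id, metric name, and tags are illustrative only.
hook = DatadogHook(datadog_conn_id='datadog_default')
hook.send_metric(
    metric_name='airflow.example.heartbeat',  # made-up metric name
    datapoint=1,
    tags=['env:dev'],
)
```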
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Sensors - - - -### Moved sensors - -| Airflow 2.0 sensors: `airflow.providers.datadog` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------| -| [sensors.datadog.DatadogSensor](https://github.com/apache/airflow/blob/master/airflow/providers/datadog/sensors/datadog.py) | [contrib.sensors.datadog_sensor.DatadogSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/datadog_sensor.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.datadog` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------| -| [hooks.datadog.DatadogHook](https://github.com/apache/airflow/blob/master/airflow/providers/datadog/hooks/datadog.py) | [contrib.hooks.datadog_hook.DatadogHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/datadog_hook.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string 
expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [89fe5feb7](https://github.com/apache/airflow/commit/89fe5feb7b8088db7b5aaaec2b7a292cbc507209) | 2020-07-20 | improve typing for datadog provider (#9775) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/dependencies.json b/airflow/providers/dependencies.json index 748b1a50557bc..602765648b344 100644 --- a/airflow/providers/dependencies.json +++ b/airflow/providers/dependencies.json @@ -1,4 +1,7 @@ { + "airbyte": [ + "http" + ], "amazon": [ "apache.hive", "google", @@ -8,6 +11,9 @@ "postgres", "ssh" ], + "apache.beam": [ + "google" + ], "apache.druid": [ "apache.hive" ], @@ -30,6 +36,7 @@ ], "google": [ "amazon", + "apache.beam", "apache.cassandra", "cncf.kubernetes", "facebook", @@ -40,7 +47,8 @@ "presto", "salesforce", "sftp", - "ssh" + "ssh", + "trino" ], "hashicorp": [ "google" @@ -55,6 +63,7 @@ "mysql": [ "amazon", "presto", + "trino", "vertica" ], "opsgenie": [ @@ -63,6 +72,9 @@ "postgres": [ "amazon" ], + "salesforce": [ + "tableau" + ], "sftp": [ "ssh" ], diff --git a/airflow/providers/dingding/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/dingding/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index 6a47fe4c03bbb..0000000000000 --- a/airflow/providers/dingding/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,23 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| 
[00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [ceea293c1](https://github.com/apache/airflow/commit/ceea293c1652240e7e856c201e4341a87ef97a0f) | 2020-01-28 | [AIRFLOW-6656] Fix AIP-21 moving (#7272) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/dingding/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/dingding/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index f21767813a8e1..0000000000000 --- a/airflow/providers/dingding/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,17 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [720912f67](https://github.com/apache/airflow/commit/720912f67b3af0bdcbac64d6b8bf6d51c6247e26) | 2020-10-02 | Strict type check for multiple providers (#11229) | -| 
[ce19657ec](https://github.com/apache/airflow/commit/ce19657ec685abff5871df80c8d47f8585eeed99) | 2020-09-15 | Fix case of GitHub. (#10955) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/dingding/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/dingding/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 6da88e8c32ce3..0000000000000 --- a/airflow/providers/dingding/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,11 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [172820db4](https://github.com/apache/airflow/commit/172820db4d2009dd26fa8aef4a864fb8a3d7e78d) | 2020-10-21 | Fix case of GitHub (#11398) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/dingding/BACKPORT_PROVIDER_README.md b/airflow/providers/dingding/BACKPORT_PROVIDER_README.md deleted file mode 100644 index e6ed823325bc2..0000000000000 --- a/airflow/providers/dingding/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,152 +0,0 @@ - - - -# Package apache-airflow-backport-providers-dingding - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [Cross provider package dependencies](#cross-provider-package-dependencies) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 
2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for `dingding` provider. All classes for this provider package -are in `airflow.providers.dingding` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-dingding` - -## Cross provider package dependencies - -Those are dependencies that might be needed in order to use all the features of the package. -You need to install the specified backport providers package in order to use them. - -You can install such cross-provider dependencies when installing from PyPI. For example: - -```bash -pip install apache-airflow-backport-providers-dingding[http] -``` - -| Dependent package | Extra | -|:---------------------------------------------------------------------------------------------------------------|:--------| -| [apache-airflow-backport-providers-http](https://github.com/apache/airflow/tree/master/airflow/providers/http) | http | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `dingding` provider -are in the `airflow.providers.dingding` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.dingding` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.dingding.DingdingOperator](https://github.com/apache/airflow/blob/master/airflow/providers/dingding/operators/dingding.py) | [contrib.operators.dingding_operator.DingdingOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dingding_operator.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.dingding` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------| -| [hooks.dingding.DingdingHook](https://github.com/apache/airflow/blob/master/airflow/providers/dingding/hooks/dingding.py) | [contrib.hooks.dingding_hook.DingdingHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/dingding_hook.py) | - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| 
[172820db4](https://github.com/apache/airflow/commit/172820db4d2009dd26fa8aef4a864fb8a3d7e78d) | 2020-10-21 | Fix case of GitHub (#11398) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [720912f67](https://github.com/apache/airflow/commit/720912f67b3af0bdcbac64d6b8bf6d51c6247e26) | 2020-10-02 | Strict type check for multiple providers (#11229) | -| [ce19657ec](https://github.com/apache/airflow/commit/ce19657ec685abff5871df80c8d47f8585eeed99) | 2020-09-15 | Fix case of GitHub. (#10955) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages 
(#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [ceea293c1](https://github.com/apache/airflow/commit/ceea293c1652240e7e856c201e4341a87ef97a0f) | 2020-01-28 | [AIRFLOW-6656] Fix AIP-21 moving (#7272) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/dingding/CHANGELOG.rst b/airflow/providers/dingding/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/dingding/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. 
See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. diff --git a/airflow/providers/dingding/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/dingding/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index e6d3c58421956..0000000000000 --- a/airflow/providers/dingding/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,55 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [bd90136aa](https://github.com/apache/airflow/commit/bd90136aaf5035e3234fe545b79a3e4aad21efe2) | 2020-11-30 | Move operator guides to provider documentation packages (#12681) | -| [de15aa30d](https://github.com/apache/airflow/commit/de15aa30d476411379e33bc9d5ce4cf0544e858c) | 2020-11-22 | Deprecate Read the Docs (#12541) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [3a72fc824](https://github.com/apache/airflow/commit/3a72fc82475df3b745a00a7b5e34eef9d27b3329) | 2020-11-14 | Fix Description of Provider Docs (#12361) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [1dc709931](https://github.com/apache/airflow/commit/1dc7099315fce0ab3765fbfdde43f44500df08b7) | 2020-11-03 | Fixes import of BaseOperator in dinging (#12063) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| 
[872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [172820db4](https://github.com/apache/airflow/commit/172820db4d2009dd26fa8aef4a864fb8a3d7e78d) | 2020-10-21 | Fix case of GitHub (#11398) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [720912f67](https://github.com/apache/airflow/commit/720912f67b3af0bdcbac64d6b8bf6d51c6247e26) | 2020-10-02 | Strict type check for multiple providers (#11229) | -| [ce19657ec](https://github.com/apache/airflow/commit/ce19657ec685abff5871df80c8d47f8585eeed99) | 2020-09-15 | Fix case of GitHub. (#10955) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [ceea293c1](https://github.com/apache/airflow/commit/ceea293c1652240e7e856c201e4341a87ef97a0f) | 2020-01-28 | [AIRFLOW-6656] Fix AIP-21 moving (#7272) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/dingding/README.md b/airflow/providers/dingding/README.md deleted file mode 100644 index 409585e7455cb..0000000000000 --- a/airflow/providers/dingding/README.md +++ /dev/null @@ -1,157 +0,0 @@ - - - -# Package apache-airflow-providers-dingding - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [Cross provider package dependencies](#cross-provider-package-dependencies) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for `dingding` provider. All classes for this provider package -are in `airflow.providers.dingding` python package. - - - -## Installation - -NOTE! - -On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice -of extras. 
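The two workarounds spelled out in the next paragraph look roughly like this (a sketch: the package name is the one from this README, and the pip invocations assume the late-2020 pip releases the note refers to):

```bash
# Workaround 1: pin pip below 20.3 before installing the provider
pip install --upgrade pip==20.2.4
pip install apache-airflow-providers-dingding

# Workaround 2: keep pip 20.3 but opt back into the legacy resolver
pip install --use-deprecated legacy-resolver apache-airflow-providers-dingding
```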
In order to install Airflow you need to either downgrade pip to version 20.2.4 -`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option -`--use-deprecated legacy-resolver` to your pip install command. - -You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-dingding` - -## Cross provider package dependencies - -Those are dependencies that might be needed in order to use all the features of the package. -You need to install the specified backport providers package in order to use them. - -You can install such cross-provider dependencies when installing from PyPI. For example: - -```bash -pip install apache-airflow-providers-dingding[http] -``` - -| Dependent package | Extra | -|:----------------------------------------------------------------------------------------|:--------| -| [apache-airflow-providers-http](https://pypi.org/project/apache-airflow-providers-http) | http | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `dingding` provider -are in the `airflow.providers.dingding` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.dingding` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.dingding.DingdingOperator](https://github.com/apache/airflow/blob/master/airflow/providers/dingding/operators/dingding.py) | [contrib.operators.dingding_operator.DingdingOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dingding_operator.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.dingding` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------| -| [hooks.dingding.DingdingHook](https://github.com/apache/airflow/blob/master/airflow/providers/dingding/hooks/dingding.py) | [contrib.hooks.dingding_hook.DingdingHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/dingding_hook.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [bd90136aa](https://github.com/apache/airflow/commit/bd90136aaf5035e3234fe545b79a3e4aad21efe2) | 2020-11-30 | Move operator guides to provider documentation packages (#12681) | -| [de15aa30d](https://github.com/apache/airflow/commit/de15aa30d476411379e33bc9d5ce4cf0544e858c) | 2020-11-22 | Deprecate Read the Docs (#12541) | -| 
[c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [3a72fc824](https://github.com/apache/airflow/commit/3a72fc82475df3b745a00a7b5e34eef9d27b3329) | 2020-11-14 | Fix Description of Provider Docs (#12361) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [1dc709931](https://github.com/apache/airflow/commit/1dc7099315fce0ab3765fbfdde43f44500df08b7) | 2020-11-03 | Fixes import of BaseOperator in dinging (#12063) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [172820db4](https://github.com/apache/airflow/commit/172820db4d2009dd26fa8aef4a864fb8a3d7e78d) | 2020-10-21 | Fix case of GitHub (#11398) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| 
[720912f67](https://github.com/apache/airflow/commit/720912f67b3af0bdcbac64d6b8bf6d51c6247e26) | 2020-10-02 | Strict type check for multiple providers (#11229) | -| [ce19657ec](https://github.com/apache/airflow/commit/ce19657ec685abff5871df80c8d47f8585eeed99) | 2020-09-15 | Fix case of GitHub. (#10955) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [ceea293c1](https://github.com/apache/airflow/commit/ceea293c1652240e7e856c201e4341a87ef97a0f) | 2020-01-28 | [AIRFLOW-6656] Fix AIP-21 moving (#7272) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/discord/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/discord/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index f0ea5abbedb99..0000000000000 --- a/airflow/providers/discord/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,19 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| 
[00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [ceea293c1](https://github.com/apache/airflow/commit/ceea293c1652240e7e856c201e4341a87ef97a0f) | 2020-01-28 | [AIRFLOW-6656] Fix AIP-21 moving (#7272) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/discord/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/discord/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 6a3e58df9854c..0000000000000 --- a/airflow/providers/discord/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,16 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| 
[a518801f8](https://github.com/apache/airflow/commit/a518801f8d5abe4ceb8b8678c27e6858f51f288a) | 2020-07-12 | Add type hinting for discord provider (#9773) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/discord/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/discord/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 528a7e085f91f..0000000000000 --- a/airflow/providers/discord/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,10 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/discord/BACKPORT_PROVIDER_README.md b/airflow/providers/discord/BACKPORT_PROVIDER_README.md deleted file mode 100644 index bbc13c5448cec..0000000000000 --- a/airflow/providers/discord/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,146 +0,0 @@ - - - -# Package apache-airflow-backport-providers-discord - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) - -[Installation](#installation) - -[Cross provider package dependencies](#cross-provider-package-dependencies) - -[Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) - -[Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for the `discord` provider. All classes for this provider package -are in the `airflow.providers.discord` Python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+, you need to upgrade Python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing Airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-discord` - -## Cross provider package dependencies - -These are dependencies that might be needed in order to use all the features of the package. -You need to install the specified backport provider packages in order to use them. - -You can install such cross-provider dependencies when installing from PyPI.
For example: - -```bash -pip install apache-airflow-backport-providers-discord[http] -``` - -| Dependent package | Extra | -|:---------------------------------------------------------------------------------------------------------------|:--------| -| [apache-airflow-backport-providers-http](https://github.com/apache/airflow/tree/master/airflow/providers/http) | http | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `discord` provider -are in the `airflow.providers.discord` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.discord` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:---------------------------------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.discord_webhook.DiscordWebhookOperator](https://github.com/apache/airflow/blob/master/airflow/providers/discord/operators/discord_webhook.py) | [contrib.operators.discord_webhook_operator.DiscordWebhookOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/discord_webhook_operator.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.discord` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:---------------------------------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------| -| [hooks.discord_webhook.DiscordWebhookHook](https://github.com/apache/airflow/blob/master/airflow/providers/discord/hooks/discord_webhook.py) | [contrib.hooks.discord_webhook_hook.DiscordWebhookHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/discord_webhook_hook.py) | - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| 
[ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [a518801f8](https://github.com/apache/airflow/commit/a518801f8d5abe4ceb8b8678c27e6858f51f288a) | 2020-07-12 | Add type hinting for discord provider (#9773) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [ceea293c1](https://github.com/apache/airflow/commit/ceea293c1652240e7e856c201e4341a87ef97a0f) | 2020-01-28 | [AIRFLOW-6656] Fix AIP-21 moving (#7272) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/discord/CHANGELOG.rst b/airflow/providers/discord/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/discord/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. 
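The moved-class tables in the discord READMEs above map each Airflow 1.10.* `airflow.contrib` import path to its new home in the `airflow.providers.discord` package. Below is a minimal migration sketch built only from the paths documented in those tables; the task id, connection id, and message are illustrative assumptions, not values taken from this changeset:

```python
# Airflow 1.10.* location, per the "Moved operators" table above:
#   from airflow.contrib.operators.discord_webhook_operator import DiscordWebhookOperator
# Airflow 2.0 provider package location:
from airflow.providers.discord.operators.discord_webhook import DiscordWebhookOperator

# Illustrative usage only; the connection id and message are assumptions.
notify = DiscordWebhookOperator(
    task_id="notify_discord",
    http_conn_id="discord_webhook_default",
    message="Backfill finished",
)
```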
diff --git a/airflow/providers/discord/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/discord/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index e85fc42db86be..0000000000000 --- a/airflow/providers/discord/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,45 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [e9b2ff57b](https://github.com/apache/airflow/commit/e9b2ff57b81b12cfbf559d957a370d497015acc2) | 2020-12-05 | Add notes about PIP 20.3 breaking Airflow installation (#12840) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 
2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [a518801f8](https://github.com/apache/airflow/commit/a518801f8d5abe4ceb8b8678c27e6858f51f288a) | 2020-07-12 | Add type hinting for discord provider (#9773) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [ceea293c1](https://github.com/apache/airflow/commit/ceea293c1652240e7e856c201e4341a87ef97a0f) | 2020-01-28 | [AIRFLOW-6656] Fix AIP-21 moving (#7272) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/discord/README.md b/airflow/providers/discord/README.md deleted file mode 100644 index 40230dff8022c..0000000000000 --- a/airflow/providers/discord/README.md +++ /dev/null @@ -1,147 +0,0 @@ - - - -# Package apache-airflow-providers-discord - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) - -[Installation](#installation) - -[Cross provider package dependencies](#cross-provider-package-dependencies) - -[Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) - -[Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for the `discord` provider. All classes for this provider package -are in the `airflow.providers.discord` Python package. - - - -## Installation - -NOTE! - -In November 2020, a new version of pip (20.3) was released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation, depending on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -`pip install --upgrade pip==20.2.4` or, if you use pip 20.3, add the option -`--use-deprecated legacy-resolver` to your pip install command. - -You can install this package on top of an existing Airflow 2.* installation via -`pip install apache-airflow-providers-discord` - -## Cross provider package dependencies - -These are dependencies that might be needed in order to use all the features of the package. -You need to install the specified provider packages in order to use them. - -You can install such cross-provider dependencies when installing from PyPI.
For example: - -```bash -pip install apache-airflow-providers-discord[http] -``` - -| Dependent package | Extra | -|:----------------------------------------------------------------------------------------|:--------| -| [apache-airflow-providers-http](https://pypi.org/project/apache-airflow-providers-http) | http | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `discord` provider -are in the `airflow.providers.discord` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.discord` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:---------------------------------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.discord_webhook.DiscordWebhookOperator](https://github.com/apache/airflow/blob/master/airflow/providers/discord/operators/discord_webhook.py) | [contrib.operators.discord_webhook_operator.DiscordWebhookOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/discord_webhook_operator.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.discord` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:---------------------------------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------| -| [hooks.discord_webhook.DiscordWebhookHook](https://github.com/apache/airflow/blob/master/airflow/providers/discord/hooks/discord_webhook.py) | [contrib.hooks.discord_webhook_hook.DiscordWebhookHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/discord_webhook_hook.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [e9b2ff57b](https://github.com/apache/airflow/commit/e9b2ff57b81b12cfbf559d957a370d497015acc2) | 2020-12-05 | Add notes about PIP 20.3 breaking Airflow installation (#12840) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 
2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [a518801f8](https://github.com/apache/airflow/commit/a518801f8d5abe4ceb8b8678c27e6858f51f288a) | 2020-07-12 | Add type hinting for discord provider (#9773) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 
2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [ceea293c1](https://github.com/apache/airflow/commit/ceea293c1652240e7e856c201e4341a87ef97a0f) | 2020-01-28 | [AIRFLOW-6656] Fix AIP-21 moving (#7272) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/docker/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/docker/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index 104a267d438d0..0000000000000 --- a/airflow/providers/docker/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,31 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [4a74cf1a3](https://github.com/apache/airflow/commit/4a74cf1a34cf20e49383f27e7cdc3ae80b9b0cde) | 2020-06-08 | Fix xcom in DockerOperator when auto_remove is used (#9173) | -| [b4b84a193](https://github.com/apache/airflow/commit/b4b84a1933d055a2803b80b990482a7257a203ff) | 
2020-06-07 | Add kernel capabilities in DockerOperator(#9142) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [511d98e30](https://github.com/apache/airflow/commit/511d98e30ded2bcce9d246b358f806cea45ebcb7) | 2020-05-01 | [AIRFLOW-4363] Fix JSON encoding error (#8287) | -| [0a1de1668](https://github.com/apache/airflow/commit/0a1de16682da1d0a3fac668437434a72b3149fda) | 2020-04-27 | Stop DockerSwarmOperator from pulling Docker images (#8533) | -| [3237c7e31](https://github.com/apache/airflow/commit/3237c7e31d008f73e6ba0ecc1f2331c7c80f0e17) | 2020-04-26 | [AIRFLOW-5850] Capture task logs in DockerSwarmOperator (#6552) | -| [9626b03d1](https://github.com/apache/airflow/commit/9626b03d19905c6d1bfbd53064f85ffd3c39f0bf) | 2020-03-30 | [AIRFLOW-6574] Adding private_environment to docker operator. 
(#7671) | -| [733d3d3c3](https://github.com/apache/airflow/commit/733d3d3c32e0305691f82102cfc346e8e85478b0) | 2020-03-25 | [AIRFLOW-4363] Fix JSON encoding error (#7628) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [cd546b664](https://github.com/apache/airflow/commit/cd546b664fa35a2bf85acd77af578c909a327d92) | 2020-03-23 | Add missing call to Super class in 'cncf' & 'docker' providers (#7825) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [dbcd3d878](https://github.com/apache/airflow/commit/dbcd3d8787741fd8203b6d9bdbc5d1da4b10a15b) | 2020-02-18 | [AIRFLOW-6804] Add the basic test for all example DAGs (#7419) | -| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18 | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/docker/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/docker/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index ee50ae25ef104..0000000000000 --- a/airflow/providers/docker/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,21 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24 | Fix incorrect Usage of Optional[bool] (#11138) | -| [2e56ee7b2](https://github.com/apache/airflow/commit/2e56ee7b2283d9413cab6939ffbe241c154b39e2) | 2020-08-27 | DockerOperator extra_hosts argument support added (#10546) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 
2020-08-25 | Remove all "noinspection" comments native to IntelliJ (#10525) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [d79e7221d](https://github.com/apache/airflow/commit/d79e7221de76f01b5cd36c15224b59e8bb451c90) | 2020-08-06 | Type annotation for Docker operator (#9733) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [c2db0dfeb](https://github.com/apache/airflow/commit/c2db0dfeb13ee679bf4d7b57874f0fcb39c0f0ed) | 2020-07-22 | More strict rules in mypy (#9705) (#9906) | -| [5d61580c5](https://github.com/apache/airflow/commit/5d61580c572118ed97b9ff32d7e3684be1fcb755) | 2020-06-21 | Enable 'Public function Missing Docstrings' PyDocStyle Check (#9463) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/docker/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/docker/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 528a7e085f91f..0000000000000 --- a/airflow/providers/docker/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,10 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/docker/BACKPORT_PROVIDER_README.md b/airflow/providers/docker/BACKPORT_PROVIDER_README.md deleted file mode 100644 index e8fda913aa1a0..0000000000000 --- a/airflow/providers/docker/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,155 +0,0 @@ - - - -# Package apache-airflow-backport-providers-docker - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers 
package for the `docker` provider. All classes for this provider package -are in the `airflow.providers.docker` Python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+, you need to upgrade Python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing Airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-docker` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| docker | ~=3.0 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `docker` provider -are in the `airflow.providers.docker` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.docker` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:-----------------------------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.docker.DockerOperator](https://github.com/apache/airflow/blob/master/airflow/providers/docker/operators/docker.py) | [operators.docker_operator.DockerOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/docker_operator.py) | -| [operators.docker_swarm.DockerSwarmOperator](https://github.com/apache/airflow/blob/master/airflow/providers/docker/operators/docker_swarm.py) | [contrib.operators.docker_swarm_operator.DockerSwarmOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/docker_swarm_operator.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.docker` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------| -| [hooks.docker.DockerHook](https://github.com/apache/airflow/blob/master/airflow/providers/docker/hooks/docker.py) | [hooks.docker_hook.DockerHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/docker_hook.py) | - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 |
Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24 | Fix incorrect Usage of Optional[bool] (#11138) | -| [2e56ee7b2](https://github.com/apache/airflow/commit/2e56ee7b2283d9413cab6939ffbe241c154b39e2) | 2020-08-27 | DockerOperator extra_hosts argument support added (#10546) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25 | Remove all "noinspection" comments native to IntelliJ (#10525) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [d79e7221d](https://github.com/apache/airflow/commit/d79e7221de76f01b5cd36c15224b59e8bb451c90) | 2020-08-06 | Type annotation for Docker operator (#9733) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [c2db0dfeb](https://github.com/apache/airflow/commit/c2db0dfeb13ee679bf4d7b57874f0fcb39c0f0ed) | 2020-07-22 | More strict rules in mypy (#9705) (#9906) | -| [5d61580c5](https://github.com/apache/airflow/commit/5d61580c572118ed97b9ff32d7e3684be1fcb755) | 2020-06-21 | Enable 'Public function Missing Docstrings' PyDocStyle Check (#9463) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| 
[f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [4a74cf1a3](https://github.com/apache/airflow/commit/4a74cf1a34cf20e49383f27e7cdc3ae80b9b0cde) | 2020-06-08 | Fix xcom in DockerOperator when auto_remove is used (#9173) | -| [b4b84a193](https://github.com/apache/airflow/commit/b4b84a1933d055a2803b80b990482a7257a203ff) | 2020-06-07 | Add kernel capabilities in DockerOperator(#9142) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [511d98e30](https://github.com/apache/airflow/commit/511d98e30ded2bcce9d246b358f806cea45ebcb7) | 2020-05-01 | [AIRFLOW-4363] Fix JSON encoding error (#8287) | -| [0a1de1668](https://github.com/apache/airflow/commit/0a1de16682da1d0a3fac668437434a72b3149fda) | 2020-04-27 | Stop DockerSwarmOperator from pulling Docker images (#8533) | -| [3237c7e31](https://github.com/apache/airflow/commit/3237c7e31d008f73e6ba0ecc1f2331c7c80f0e17) | 2020-04-26 | [AIRFLOW-5850] Capture task logs in DockerSwarmOperator (#6552) | -| [9626b03d1](https://github.com/apache/airflow/commit/9626b03d19905c6d1bfbd53064f85ffd3c39f0bf) | 2020-03-30 | [AIRFLOW-6574] Adding private_environment to docker operator. 
(#7671) | -| [733d3d3c3](https://github.com/apache/airflow/commit/733d3d3c32e0305691f82102cfc346e8e85478b0) | 2020-03-25 | [AIRFLOW-4363] Fix JSON encoding error (#7628) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [cd546b664](https://github.com/apache/airflow/commit/cd546b664fa35a2bf85acd77af578c909a327d92) | 2020-03-23 | Add missing call to Super class in 'cncf' & 'docker' providers (#7825) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [dbcd3d878](https://github.com/apache/airflow/commit/dbcd3d8787741fd8203b6d9bdbc5d1da4b10a15b) | 2020-02-18 | [AIRFLOW-6804] Add the basic test for all example DAGs (#7419) | -| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18 | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/docker/CHANGELOG.rst b/airflow/providers/docker/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/docker/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. 
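For DAG authors, the moved-operators and moved-hooks tables above amount to an import-path change. The sketch below is illustrative only and is not part of the provider files in this diff: it assumes Airflow 2.0 with `apache-airflow-providers-docker` installed, and the task id, image, and command are made-up values.

```python
# Old Airflow 1.10.x import path (deprecated in Airflow 2.0):
#   from airflow.operators.docker_operator import DockerOperator
# New import path, provided by apache-airflow-providers-docker:
from airflow.providers.docker.operators.docker import DockerOperator

# Hypothetical task; in a real DAG file this would live inside a DAG context.
run_in_container = DockerOperator(
    task_id="run_in_container",   # made-up task id
    image="python:3.8-slim",      # any image the Docker daemon can pull
    command='python -c "print(\'hello from docker\')"',
    auto_remove=True,             # remove the container when the task finishes
)
```

On Airflow 1.10.* the backport package makes the same `airflow.providers.docker` path importable, so DAGs can be switched to the new imports before upgrading Airflow itself.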
diff --git a/airflow/providers/docker/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/docker/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index 735477d5b17f1..0000000000000 --- a/airflow/providers/docker/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,66 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [6b339c70c](https://github.com/apache/airflow/commit/6b339c70c45a2bad0e1e2c3f6638f4c59475569e) | 2020-12-03 | Avoid log spam & have more meaningful log when pull image in DockerOperator (#12763) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [0314a3a21](https://github.com/apache/airflow/commit/0314a3a218f864f78ec260cc66134e7acae34bc5) | 2020-11-01 | Allow airflow.providers to be installed in multiple python folders (#10806) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup 
for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24 | Fix incorrect Usage of Optional[bool] (#11138) | -| [2e56ee7b2](https://github.com/apache/airflow/commit/2e56ee7b2283d9413cab6939ffbe241c154b39e2) | 2020-08-27 | DockerOperator extra_hosts argument support added (#10546) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25 | Remove all "noinspection" comments native to IntelliJ (#10525) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [d79e7221d](https://github.com/apache/airflow/commit/d79e7221de76f01b5cd36c15224b59e8bb451c90) | 2020-08-06 | Type annotation for Docker operator (#9733) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [c2db0dfeb](https://github.com/apache/airflow/commit/c2db0dfeb13ee679bf4d7b57874f0fcb39c0f0ed) | 2020-07-22 | More strict rules in mypy (#9705) (#9906) | -| [5d61580c5](https://github.com/apache/airflow/commit/5d61580c572118ed97b9ff32d7e3684be1fcb755) | 2020-06-21 | Enable 'Public function Missing Docstrings' PyDocStyle Check (#9463) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages 
(#9320) | -| [4a74cf1a3](https://github.com/apache/airflow/commit/4a74cf1a34cf20e49383f27e7cdc3ae80b9b0cde) | 2020-06-08 | Fix xcom in DockerOperator when auto_remove is used (#9173) | -| [b4b84a193](https://github.com/apache/airflow/commit/b4b84a1933d055a2803b80b990482a7257a203ff) | 2020-06-07 | Add kernel capabilities in DockerOperator(#9142) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [511d98e30](https://github.com/apache/airflow/commit/511d98e30ded2bcce9d246b358f806cea45ebcb7) | 2020-05-01 | [AIRFLOW-4363] Fix JSON encoding error (#8287) | -| [0a1de1668](https://github.com/apache/airflow/commit/0a1de16682da1d0a3fac668437434a72b3149fda) | 2020-04-27 | Stop DockerSwarmOperator from pulling Docker images (#8533) | -| [3237c7e31](https://github.com/apache/airflow/commit/3237c7e31d008f73e6ba0ecc1f2331c7c80f0e17) | 2020-04-26 | [AIRFLOW-5850] Capture task logs in DockerSwarmOperator (#6552) | -| [9626b03d1](https://github.com/apache/airflow/commit/9626b03d19905c6d1bfbd53064f85ffd3c39f0bf) | 2020-03-30 | [AIRFLOW-6574] Adding private_environment to docker operator. 
(#7671) |
-| [733d3d3c3](https://github.com/apache/airflow/commit/733d3d3c32e0305691f82102cfc346e8e85478b0) | 2020-03-25 | [AIRFLOW-4363] Fix JSON encoding error (#7628) |
-| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) |
-| [cd546b664](https://github.com/apache/airflow/commit/cd546b664fa35a2bf85acd77af578c909a327d92) | 2020-03-23 | Add missing call to Super class in 'cncf' & 'docker' providers (#7825) |
-| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) |
-| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) |
-| [dbcd3d878](https://github.com/apache/airflow/commit/dbcd3d8787741fd8203b6d9bdbc5d1da4b10a15b) | 2020-02-18 | [AIRFLOW-6804] Add the basic test for all example DAGs (#7419) |
-| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18 | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) |
-| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) |
-| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) |
-| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) |
diff --git a/airflow/providers/docker/README.md b/airflow/providers/docker/README.md
deleted file mode 100644
index efbd747995f68..0000000000000
--- a/airflow/providers/docker/README.md
+++ /dev/null
@@ -1,160 +0,0 @@
-
-
-# Package apache-airflow-providers-docker
-
-Release: 1.0.0
-
-**Table of contents**
-
-- [Provider package](#provider-package)
-- [Installation](#installation)
-- [PIP requirements](#pip-requirements)
-- [Provider class summary](#provider-classes-summary)
-    - [Operators](#operators)
-        - [Moved operators](#moved-operators)
-    - [Hooks](#hooks)
-        - [Moved hooks](#moved-hooks)
-- [Releases](#releases)
-    - [Release 1.0.0](#release-100)
-
-## Provider package
-
-This is a provider package for the `docker` provider. All classes for this provider package
-are in the `airflow.providers.docker` Python package.
-
-
-
-## Installation
-
-NOTE!
-
-In November 2020, a new version of pip (20.3) was released with a new, 2020 resolver. This resolver
-does not yet work with Apache Airflow and might lead to errors in installation, depending on your choice
-of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4
-(`pip install --upgrade pip==20.2.4`) or, if you use pip 20.3, add the option
-`--use-deprecated legacy-resolver` to your pip install command.
- -You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-docker` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| docker | ~=3.0 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `docker` provider -are in the `airflow.providers.docker` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.docker` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:-----------------------------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.docker.DockerOperator](https://github.com/apache/airflow/blob/master/airflow/providers/docker/operators/docker.py) | [operators.docker_operator.DockerOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/docker_operator.py) | -| [operators.docker_swarm.DockerSwarmOperator](https://github.com/apache/airflow/blob/master/airflow/providers/docker/operators/docker_swarm.py) | [contrib.operators.docker_swarm_operator.DockerSwarmOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/docker_swarm_operator.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.docker` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------| -| [hooks.docker.DockerHook](https://github.com/apache/airflow/blob/master/airflow/providers/docker/hooks/docker.py) | [hooks.docker_hook.DockerHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/docker_hook.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [6b339c70c](https://github.com/apache/airflow/commit/6b339c70c45a2bad0e1e2c3f6638f4c59475569e) | 2020-12-03 | Avoid log spam & have more meaningful log when pull image in DockerOperator (#12763) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation 
building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [0314a3a21](https://github.com/apache/airflow/commit/0314a3a218f864f78ec260cc66134e7acae34bc5) | 2020-11-01 | Allow airflow.providers to be installed in multiple python folders (#10806) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24 | Fix incorrect Usage of Optional[bool] (#11138) | -| [2e56ee7b2](https://github.com/apache/airflow/commit/2e56ee7b2283d9413cab6939ffbe241c154b39e2) | 2020-08-27 | DockerOperator extra_hosts argument support added (#10546) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in 
the word "release" (#10528) | -| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25 | Remove all "noinspection" comments native to IntelliJ (#10525) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [d79e7221d](https://github.com/apache/airflow/commit/d79e7221de76f01b5cd36c15224b59e8bb451c90) | 2020-08-06 | Type annotation for Docker operator (#9733) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [c2db0dfeb](https://github.com/apache/airflow/commit/c2db0dfeb13ee679bf4d7b57874f0fcb39c0f0ed) | 2020-07-22 | More strict rules in mypy (#9705) (#9906) | -| [5d61580c5](https://github.com/apache/airflow/commit/5d61580c572118ed97b9ff32d7e3684be1fcb755) | 2020-06-21 | Enable 'Public function Missing Docstrings' PyDocStyle Check (#9463) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [4a74cf1a3](https://github.com/apache/airflow/commit/4a74cf1a34cf20e49383f27e7cdc3ae80b9b0cde) | 2020-06-08 | Fix xcom in DockerOperator when auto_remove is used (#9173) | -| [b4b84a193](https://github.com/apache/airflow/commit/b4b84a1933d055a2803b80b990482a7257a203ff) | 2020-06-07 | Add kernel capabilities in DockerOperator(#9142) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [511d98e30](https://github.com/apache/airflow/commit/511d98e30ded2bcce9d246b358f806cea45ebcb7) | 2020-05-01 | [AIRFLOW-4363] Fix JSON encoding error (#8287) | -| [0a1de1668](https://github.com/apache/airflow/commit/0a1de16682da1d0a3fac668437434a72b3149fda) | 2020-04-27 | Stop DockerSwarmOperator from pulling Docker images (#8533) | -| [3237c7e31](https://github.com/apache/airflow/commit/3237c7e31d008f73e6ba0ecc1f2331c7c80f0e17) | 2020-04-26 | [AIRFLOW-5850] Capture task logs in DockerSwarmOperator (#6552) | -| [9626b03d1](https://github.com/apache/airflow/commit/9626b03d19905c6d1bfbd53064f85ffd3c39f0bf) | 2020-03-30 | [AIRFLOW-6574] Adding private_environment to docker operator. (#7671) | -| [733d3d3c3](https://github.com/apache/airflow/commit/733d3d3c32e0305691f82102cfc346e8e85478b0) | 2020-03-25 | [AIRFLOW-4363] Fix JSON encoding error (#7628) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [cd546b664](https://github.com/apache/airflow/commit/cd546b664fa35a2bf85acd77af578c909a327d92) | 2020-03-23 | Add missing call to Super class in 'cncf' & 'docker' providers (#7825) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [dbcd3d878](https://github.com/apache/airflow/commit/dbcd3d8787741fd8203b6d9bdbc5d1da4b10a15b) | 2020-02-18 | [AIRFLOW-6804] Add the basic test for all example DAGs (#7419) | -| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18 | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/elasticsearch/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/elasticsearch/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index 51574a9b44f39..0000000000000 --- 
a/airflow/providers/elasticsearch/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,16 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [65dd28eb7](https://github.com/apache/airflow/commit/65dd28eb77d996ec8306c67d5ce1ccee2c14cc9d) | 2020-02-18 | [AIRFLOW-1202] Create Elasticsearch Hook (#7358) | diff --git a/airflow/providers/elasticsearch/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/elasticsearch/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index d51acdf773d6a..0000000000000 --- a/airflow/providers/elasticsearch/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,19 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [ac943c9e1](https://github.com/apache/airflow/commit/ac943c9e18f75259d531dbda8c51e650f57faa4c) | 2020-09-08 | [AIRFLOW-3964][AIP-17] Consolidate and de-dup sensor tasks using Smart Sensor (#5499) | -| [70f05ac67](https://github.com/apache/airflow/commit/70f05ac6775152d856d212f845e9561282232844) | 2020-09-01 | Add `log_id` field to log lines on ES handler (#10411) | -| 
[fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [d76026545](https://github.com/apache/airflow/commit/d7602654526fdd2876466371404784bd17cfe0d2) | 2020-08-25 | PyDocStyle: No whitespaces allowed surrounding docstring text (#10533) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [d5d119bab](https://github.com/apache/airflow/commit/d5d119babc97bbe3f3f690ad4a93e3b73bd3b172) | 2020-07-21 | Increase typing coverage for Elasticsearch (#9911) | -| [a79e2d4c4](https://github.com/apache/airflow/commit/a79e2d4c4aa105f3fac5ae6a28e29af9cd572407) | 2020-07-06 | Move provider's log task handlers to the provider package (#9604) | -| [e13a14c87](https://github.com/apache/airflow/commit/e13a14c8730f4f633d996dd7d3468fe827136a84) | 2020-06-21 | Enable & Fix Whitespace related PyDocStyle Checks (#9458) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/elasticsearch/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/elasticsearch/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 528a7e085f91f..0000000000000 --- a/airflow/providers/elasticsearch/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,10 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/elasticsearch/BACKPORT_PROVIDER_CHANGES_2020.11.13.md b/airflow/providers/elasticsearch/BACKPORT_PROVIDER_CHANGES_2020.11.13.md deleted file mode 100644 index 80d4bc94963ef..0000000000000 --- a/airflow/providers/elasticsearch/BACKPORT_PROVIDER_CHANGES_2020.11.13.md +++ /dev/null @@ -1,12 +0,0 @@ - - -### Release 2020.11.13 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------| -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| 
[8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) |
-| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 |
diff --git a/airflow/providers/elasticsearch/BACKPORT_PROVIDER_README.md b/airflow/providers/elasticsearch/BACKPORT_PROVIDER_README.md
deleted file mode 100644
index decdb3f71a96e..0000000000000
--- a/airflow/providers/elasticsearch/BACKPORT_PROVIDER_README.md
+++ /dev/null
@@ -1,130 +0,0 @@
-
-
-# Package apache-airflow-backport-providers-elasticsearch
-
-Release: 2020.11.13
-
-**Table of contents**
-
-- [Backport package](#backport-package)
-- [Installation](#installation)
-- [Provider class summary](#provider-classes-summary)
-    - [Hooks](#hooks)
-        - [New hooks](#new-hooks)
-- [Releases](#releases)
-    - [Release 2020.11.13](#release-20201113)
-    - [Release 2020.10.29](#release-20201029)
-    - [Release 2020.10.5](#release-2020105)
-    - [Release 2020.6.24](#release-2020624)
-
-## Backport package
-
-This is a backport providers package for the `elasticsearch` provider. All classes for this provider package
-are in the `airflow.providers.elasticsearch` Python package.
-
-**Only Python 3.6+ is supported for this backport package.**
-
-While Airflow 1.10.* continues to support Python 2.7+, you need to upgrade to Python 3.6+ if you
-want to use this backport package.
-
-
-
-## Installation
-
-You can install this package on top of an existing Airflow 1.10.* installation via
-`pip install apache-airflow-backport-providers-elasticsearch`
-
-# Provider classes summary
-
-In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `elasticsearch` provider
-are in the `airflow.providers.elasticsearch` package. 
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Hooks - - -### New hooks - -| New Airflow 2.0 hooks: `airflow.providers.elasticsearch` package | -|:----------------------------------------------------------------------------------------------------------------------------------------------| -| [hooks.elasticsearch.ElasticsearchHook](https://github.com/apache/airflow/blob/master/airflow/providers/elasticsearch/hooks/elasticsearch.py) | - - - - -## Releases - -### Release 2020.11.13 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------| -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| 
[ac943c9e1](https://github.com/apache/airflow/commit/ac943c9e18f75259d531dbda8c51e650f57faa4c) | 2020-09-08 | [AIRFLOW-3964][AIP-17] Consolidate and de-dup sensor tasks using Smart Sensor (#5499) | -| [70f05ac67](https://github.com/apache/airflow/commit/70f05ac6775152d856d212f845e9561282232844) | 2020-09-01 | Add `log_id` field to log lines on ES handler (#10411) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [d76026545](https://github.com/apache/airflow/commit/d7602654526fdd2876466371404784bd17cfe0d2) | 2020-08-25 | PyDocStyle: No whitespaces allowed surrounding docstring text (#10533) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [d5d119bab](https://github.com/apache/airflow/commit/d5d119babc97bbe3f3f690ad4a93e3b73bd3b172) | 2020-07-21 | Increase typing coverage for Elasticsearch (#9911) | -| [a79e2d4c4](https://github.com/apache/airflow/commit/a79e2d4c4aa105f3fac5ae6a28e29af9cd572407) | 2020-07-06 | Move provider's log task handlers to the provider package (#9604) | -| [e13a14c87](https://github.com/apache/airflow/commit/e13a14c8730f4f633d996dd7d3468fe827136a84) | 2020-06-21 | Enable & Fix Whitespace related PyDocStyle Checks (#9458) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [65dd28eb7](https://github.com/apache/airflow/commit/65dd28eb77d996ec8306c67d5ce1ccee2c14cc9d) | 2020-02-18 | [AIRFLOW-1202] Create Elasticsearch Hook (#7358) | diff --git a/airflow/providers/elasticsearch/CHANGELOG.rst b/airflow/providers/elasticsearch/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/elasticsearch/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. 
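The new-hooks table above introduces a single class, `ElasticsearchHook`. A minimal usage sketch follows; it is not part of this diff and assumes a configured `elasticsearch_default` connection, the DB-API style interface implied by the `elasticsearch-dbapi` dependency listed in the provider README below, and a made-up index name.

```python
from airflow.providers.elasticsearch.hooks.elasticsearch import ElasticsearchHook

# "elasticsearch_default" is the assumed default connection id; any configured
# Elasticsearch connection id can be passed instead.
hook = ElasticsearchHook(elasticsearch_conn_id="elasticsearch_default")

# The hook goes through elasticsearch-dbapi, so Elasticsearch SQL is the
# expected entry point; "my_index" is a hypothetical index.
rows = hook.get_records("SELECT * FROM my_index LIMIT 5")
for row in rows:
    print(row)
```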
diff --git a/airflow/providers/elasticsearch/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/elasticsearch/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index e63084e2fcbee..0000000000000 --- a/airflow/providers/elasticsearch/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,48 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [61feb6ec4](https://github.com/apache/airflow/commit/61feb6ec453f8dda1a0e1fe3ebcc0f1e3224b634) | 2020-11-09 | Provider's readmes generated for elasticsearch and google packages (#12194) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| 
[16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [ac943c9e1](https://github.com/apache/airflow/commit/ac943c9e18f75259d531dbda8c51e650f57faa4c) | 2020-09-08 | [AIRFLOW-3964][AIP-17] Consolidate and de-dup sensor tasks using Smart Sensor (#5499) | -| [70f05ac67](https://github.com/apache/airflow/commit/70f05ac6775152d856d212f845e9561282232844) | 2020-09-01 | Add `log_id` field to log lines on ES handler (#10411) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [d76026545](https://github.com/apache/airflow/commit/d7602654526fdd2876466371404784bd17cfe0d2) | 2020-08-25 | PyDocStyle: No whitespaces allowed surrounding docstring text (#10533) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [d5d119bab](https://github.com/apache/airflow/commit/d5d119babc97bbe3f3f690ad4a93e3b73bd3b172) | 2020-07-21 | Increase typing coverage for Elasticsearch (#9911) | -| [a79e2d4c4](https://github.com/apache/airflow/commit/a79e2d4c4aa105f3fac5ae6a28e29af9cd572407) | 2020-07-06 | Move provider's log task handlers to the provider package (#9604) | -| [e13a14c87](https://github.com/apache/airflow/commit/e13a14c8730f4f633d996dd7d3468fe827136a84) | 2020-06-21 | Enable & Fix Whitespace related PyDocStyle Checks (#9458) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) |
-| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) |
-| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) |
-| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) |
-| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) |
-| [65dd28eb7](https://github.com/apache/airflow/commit/65dd28eb77d996ec8306c67d5ce1ccee2c14cc9d) | 2020-02-18 | [AIRFLOW-1202] Create Elasticsearch Hook (#7358) |
diff --git a/airflow/providers/elasticsearch/README.md b/airflow/providers/elasticsearch/README.md
deleted file mode 100644
index 5b78d03efda3a..0000000000000
--- a/airflow/providers/elasticsearch/README.md
+++ /dev/null
@@ -1,130 +0,0 @@
-
-
-# Package apache-airflow-providers-elasticsearch
-
-Release: 1.0.0
-
-**Table of contents**
-
-- [Provider package](#provider-package)
-- [Installation](#installation)
-- [PIP requirements](#pip-requirements)
-- [Provider class summary](#provider-classes-summary)
-    - [Hooks](#hooks)
-        - [New hooks](#new-hooks)
-- [Releases](#releases)
-    - [Release 1.0.0](#release-100)
-
-## Provider package
-
-This is a provider package for the `elasticsearch` provider. All classes for this provider package
-are in the `airflow.providers.elasticsearch` Python package.
-
-
-
-## Installation
-
-NOTE!
-
-In November 2020, a new version of pip (20.3) was released with a new, 2020 resolver. This resolver
-does not yet work with Apache Airflow and might lead to errors in installation, depending on your choice
-of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4
-(`pip install --upgrade pip==20.2.4`) or, if you use pip 20.3, add the option
-`--use-deprecated legacy-resolver` to your pip install command.
-
-You can install this package on top of an existing Airflow 2.* installation via
-`pip install apache-airflow-providers-elasticsearch`
-
-## PIP requirements
-
-| PIP package         | Version required   |
-|:--------------------|:-------------------|
-| elasticsearch       | >7, <7.6.0         |
-| elasticsearch-dbapi | ==0.1.0            |
-| elasticsearch-dsl   | >=5.0.0            |
-
-# Provider classes summary
-
-In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `elasticsearch` provider
-are in the `airflow.providers.elasticsearch` package. 
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Hooks - - -### New hooks - -| New Airflow 2.0 hooks: `airflow.providers.elasticsearch` package | -|:----------------------------------------------------------------------------------------------------------------------------------------------| -| [hooks.elasticsearch.ElasticsearchHook](https://github.com/apache/airflow/blob/master/airflow/providers/elasticsearch/hooks/elasticsearch.py) | - - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [61feb6ec4](https://github.com/apache/airflow/commit/61feb6ec453f8dda1a0e1fe3ebcc0f1e3224b634) | 2020-11-09 | Provider's readmes generated for elasticsearch and google packages (#12194) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| 
[872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [ac943c9e1](https://github.com/apache/airflow/commit/ac943c9e18f75259d531dbda8c51e650f57faa4c) | 2020-09-08 | [AIRFLOW-3964][AIP-17] Consolidate and de-dup sensor tasks using Smart Sensor (#5499) | -| [70f05ac67](https://github.com/apache/airflow/commit/70f05ac6775152d856d212f845e9561282232844) | 2020-09-01 | Add `log_id` field to log lines on ES handler (#10411) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [d76026545](https://github.com/apache/airflow/commit/d7602654526fdd2876466371404784bd17cfe0d2) | 2020-08-25 | PyDocStyle: No whitespaces allowed surrounding docstring text (#10533) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [d5d119bab](https://github.com/apache/airflow/commit/d5d119babc97bbe3f3f690ad4a93e3b73bd3b172) | 2020-07-21 | Increase typing coverage for Elasticsearch (#9911) | -| [a79e2d4c4](https://github.com/apache/airflow/commit/a79e2d4c4aa105f3fac5ae6a28e29af9cd572407) | 2020-07-06 | Move provider's log task handlers to the provider package (#9604) | -| [e13a14c87](https://github.com/apache/airflow/commit/e13a14c8730f4f633d996dd7d3468fe827136a84) | 2020-06-21 | Enable & Fix Whitespace related PyDocStyle Checks (#9458) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| 
[00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [65dd28eb7](https://github.com/apache/airflow/commit/65dd28eb77d996ec8306c67d5ce1ccee2c14cc9d) | 2020-02-18 | [AIRFLOW-1202] Create Elasticsearch Hook (#7358) | diff --git a/airflow/providers/exasol/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/exasol/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index 70f74f796ae90..0000000000000 --- a/airflow/providers/exasol/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,17 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [69dc91b4e](https://github.com/apache/airflow/commit/69dc91b4ef92d0f89abe097afd27bbe7ec2febd0) | 2020-04-02 | [AIRFLOW-6982] add native python exasol support (#7621) | diff --git a/airflow/providers/exasol/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/exasol/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 48c7ad51ddc81..0000000000000 --- a/airflow/providers/exasol/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,16 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [54353f874](https://github.com/apache/airflow/commit/54353f874589f9be236458995147d13e0e763ffc) | 2020-09-27 | Increase type coverage for five different providers (#11170) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [8f8db8959](https://github.com/apache/airflow/commit/8f8db8959e526be54d700845d36ee9f315bae2ea) | 2020-08-12 | DbApiHook: Support kwargs in get_pandas_df (#9730) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/exasol/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/exasol/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 25170182e3347..0000000000000 --- a/airflow/providers/exasol/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ 
/dev/null @@ -1,12 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [483068745](https://github.com/apache/airflow/commit/48306874538eea7cfd42358d5ebb59705204bfc4) | 2020-10-24 | Use Python 3 style super classes (#11806) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [75071831b](https://github.com/apache/airflow/commit/75071831baa936d292354f98aac46cd808a4b2b8) | 2020-10-10 | Remove redundant parentheses from Python files (#10967) | -| [7b0a2f5d8](https://github.com/apache/airflow/commit/7b0a2f5d8e6c3ff17094a7c1e31440300defb0b7) | 2020-10-10 | Replaced basestring with str in the Exasol hook (#11360) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/exasol/BACKPORT_PROVIDER_README.md b/airflow/providers/exasol/BACKPORT_PROVIDER_README.md deleted file mode 100644 index a8fc6cc172b49..0000000000000 --- a/airflow/providers/exasol/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,137 +0,0 @@ - - - -# Package apache-airflow-backport-providers-exasol - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [New operators](#new-operators) - - [Hooks](#hooks) - - [New hooks](#new-hooks) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for the `exasol` provider. All classes for this provider package -are in the `airflow.providers.exasol` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+, you need to upgrade Python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing Airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-exasol` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| pyexasol | >=0.5.1,<1.0.0 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, and secrets for the `exasol` provider -are in the `airflow.providers.exasol` package.
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - -### New operators - -| New Airflow 2.0 operators: `airflow.providers.exasol` package | -|:------------------------------------------------------------------------------------------------------------------------------| -| [operators.exasol.ExasolOperator](https://github.com/apache/airflow/blob/master/airflow/providers/exasol/operators/exasol.py) | - - - -## Hooks - - -### New hooks - -| New Airflow 2.0 hooks: `airflow.providers.exasol` package | -|:------------------------------------------------------------------------------------------------------------------| -| [hooks.exasol.ExasolHook](https://github.com/apache/airflow/blob/master/airflow/providers/exasol/hooks/exasol.py) | - - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [483068745](https://github.com/apache/airflow/commit/48306874538eea7cfd42358d5ebb59705204bfc4) | 2020-10-24 | Use Python 3 style super classes (#11806) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [75071831b](https://github.com/apache/airflow/commit/75071831baa936d292354f98aac46cd808a4b2b8) | 2020-10-10 | Remove redundant parentheses from Python files (#10967) | -| [7b0a2f5d8](https://github.com/apache/airflow/commit/7b0a2f5d8e6c3ff17094a7c1e31440300defb0b7) | 2020-10-10 | Replaced basestring with str in the Exasol hook (#11360) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [54353f874](https://github.com/apache/airflow/commit/54353f874589f9be236458995147d13e0e763ffc) | 2020-09-27 | Increase type coverage for five different providers (#11170) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [8f8db8959](https://github.com/apache/airflow/commit/8f8db8959e526be54d700845d36ee9f315bae2ea) | 2020-08-12 | 
DbApiHook: Support kwargs in get_pandas_df (#9730) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [69dc91b4e](https://github.com/apache/airflow/commit/69dc91b4ef92d0f89abe097afd27bbe7ec2febd0) | 2020-04-02 | [AIRFLOW-6982] add native python exasol support (#7621) | diff --git a/airflow/providers/exasol/CHANGELOG.rst b/airflow/providers/exasol/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/exasol/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. 
Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. diff --git a/airflow/providers/exasol/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/exasol/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index f2e0d34e1e135..0000000000000 --- a/airflow/providers/exasol/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,47 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| 
[483068745](https://github.com/apache/airflow/commit/48306874538eea7cfd42358d5ebb59705204bfc4) | 2020-10-24 | Use Python 3 style super classes (#11806) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [75071831b](https://github.com/apache/airflow/commit/75071831baa936d292354f98aac46cd808a4b2b8) | 2020-10-10 | Remove redundant parentheses from Python files (#10967) | -| [7b0a2f5d8](https://github.com/apache/airflow/commit/7b0a2f5d8e6c3ff17094a7c1e31440300defb0b7) | 2020-10-10 | Replaced basestring with str in the Exasol hook (#11360) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [54353f874](https://github.com/apache/airflow/commit/54353f874589f9be236458995147d13e0e763ffc) | 2020-09-27 | Increase type coverage for five different providers (#11170) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [8f8db8959](https://github.com/apache/airflow/commit/8f8db8959e526be54d700845d36ee9f315bae2ea) | 2020-08-12 | DbApiHook: Support kwargs in get_pandas_df (#9730) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [69dc91b4e](https://github.com/apache/airflow/commit/69dc91b4ef92d0f89abe097afd27bbe7ec2febd0) | 2020-04-02 | [AIRFLOW-6982] add native python exasol support (#7621) | diff --git a/airflow/providers/exasol/README.md b/airflow/providers/exasol/README.md deleted file mode 100644 index 07e137fa9d3a3..0000000000000 --- a/airflow/providers/exasol/README.md +++ /dev/null @@ -1,140 +0,0 @@ - - - -# Package apache-airflow-providers-exasol - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) - -[Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [New operators](#new-operators) - - [Hooks](#hooks) - - [New hooks](#new-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for the `exasol` provider. All classes for this provider package -are in the `airflow.providers.exasol` python package. - - - -## Installation - -NOTE! - -In November 2020, a new version of pip (20.3) was released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors during installation, depending on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -(`pip install --upgrade pip==20.2.4`) or, if you use pip 20.3, add the option -`--use-deprecated legacy-resolver` to your pip install command. - -You can install this package on top of an existing Airflow 2.* installation via -`pip install apache-airflow-providers-exasol` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| pyexasol | >=0.5.1,<1.0.0 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, and secrets for the `exasol` provider -are in the `airflow.providers.exasol` package.
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - -### New operators - -| New Airflow 2.0 operators: `airflow.providers.exasol` package | -|:------------------------------------------------------------------------------------------------------------------------------| -| [operators.exasol.ExasolOperator](https://github.com/apache/airflow/blob/master/airflow/providers/exasol/operators/exasol.py) | - - - -## Hooks - - -### New hooks - -| New Airflow 2.0 hooks: `airflow.providers.exasol` package | -|:------------------------------------------------------------------------------------------------------------------| -| [hooks.exasol.ExasolHook](https://github.com/apache/airflow/blob/master/airflow/providers/exasol/hooks/exasol.py) | - - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare 
providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [483068745](https://github.com/apache/airflow/commit/48306874538eea7cfd42358d5ebb59705204bfc4) | 2020-10-24 | Use Python 3 style super classes (#11806) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [75071831b](https://github.com/apache/airflow/commit/75071831baa936d292354f98aac46cd808a4b2b8) | 2020-10-10 | Remove redundant parentheses from Python files (#10967) | -| [7b0a2f5d8](https://github.com/apache/airflow/commit/7b0a2f5d8e6c3ff17094a7c1e31440300defb0b7) | 2020-10-10 | Replaced basestring with str in the Exasol hook (#11360) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [54353f874](https://github.com/apache/airflow/commit/54353f874589f9be236458995147d13e0e763ffc) | 2020-09-27 | Increase type coverage for five different providers (#11170) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [8f8db8959](https://github.com/apache/airflow/commit/8f8db8959e526be54d700845d36ee9f315bae2ea) | 2020-08-12 | DbApiHook: Support kwargs in get_pandas_df (#9730) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining 
wrongly named operators. (#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [69dc91b4e](https://github.com/apache/airflow/commit/69dc91b4ef92d0f89abe097afd27bbe7ec2febd0) | 2020-04-02 | [AIRFLOW-6982] add native python exasol support (#7621) | diff --git a/airflow/providers/facebook/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/facebook/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index 9e892eed6d02a..0000000000000 --- a/airflow/providers/facebook/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,17 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [bc45fa675](https://github.com/apache/airflow/commit/bc45fa6759203b4c26b52e693dac97486a84204e) | 2020-05-03 | Add system test and docs for Facebook Ads operators (#8503) | -| [eee4ebaee](https://github.com/apache/airflow/commit/eee4ebaeeb1991480ee178ddb600bc69b2a88764) | 2020-04-14 | Added Facebook Ads Operator #7887 (#8008) | diff --git a/airflow/providers/facebook/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/facebook/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 89f3ea7348e71..0000000000000 --- a/airflow/providers/facebook/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,15 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [d76026545](https://github.com/apache/airflow/commit/d7602654526fdd2876466371404784bd17cfe0d2) | 2020-08-25 | PyDocStyle: No whitespaces allowed surrounding docstring text (#10533) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ef8df1734](https://github.com/apache/airflow/commit/ef8df17348e3c567e2d2f0aface641acae3896ba) | 2020-08-22 | Fix typo in Facebook Ads Provider (#10484) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/facebook/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/facebook/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 528a7e085f91f..0000000000000 --- a/airflow/providers/facebook/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,10 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | 
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/facebook/BACKPORT_PROVIDER_README.md b/airflow/providers/facebook/BACKPORT_PROVIDER_README.md deleted file mode 100644 index ca9b6726ca584..0000000000000 --- a/airflow/providers/facebook/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,121 +0,0 @@ - - - -# Package apache-airflow-backport-providers-facebook - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Hooks](#hooks) - - [New hooks](#new-hooks) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for the `facebook` provider. All classes for this provider package -are in the `airflow.providers.facebook` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+, you need to upgrade Python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing Airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-facebook` - -## PIP requirements - -| PIP package | Version required | -|:------------------|:-------------------| -| facebook-business | >=6.0.2 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, and secrets for the `facebook` provider -are in the `airflow.providers.facebook` package.
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Hooks - - -### New hooks - -| New Airflow 2.0 hooks: `airflow.providers.facebook` package | -|:------------------------------------------------------------------------------------------------------------------------------------| -| [ads.hooks.ads.FacebookAdsReportingHook](https://github.com/apache/airflow/blob/master/airflow/providers/facebook/ads/hooks/ads.py) | - - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [d76026545](https://github.com/apache/airflow/commit/d7602654526fdd2876466371404784bd17cfe0d2) | 2020-08-25 | PyDocStyle: No whitespaces allowed surrounding docstring text (#10533) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ef8df1734](https://github.com/apache/airflow/commit/ef8df17348e3c567e2d2f0aface641acae3896ba) | 2020-08-22 | Fix typo in Facebook Ads Provider (#10484) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| -| 
[12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [bc45fa675](https://github.com/apache/airflow/commit/bc45fa6759203b4c26b52e693dac97486a84204e) | 2020-05-03 | Add system test and docs for Facebook Ads operators (#8503) | -| [eee4ebaee](https://github.com/apache/airflow/commit/eee4ebaeeb1991480ee178ddb600bc69b2a88764) | 2020-04-14 | Added Facebook Ads Operator #7887 (#8008) | diff --git a/airflow/providers/facebook/CHANGELOG.rst b/airflow/providers/facebook/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/facebook/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. 
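

The deleted backport README above lists `ads.hooks.ads.FacebookAdsReportingHook` as this provider's only class. A minimal usage sketch follows; it assumes a configured `facebook_default` Airflow connection and that `bulk_facebook_report()` is the hook's reporting entry point, and the report parameters and field names are illustrative only, not taken from this diff:

```python
# Hypothetical sketch, not part of this diff. Assumes a "facebook_default"
# Airflow connection exists and that bulk_facebook_report() is the hook's
# public reporting method; params and fields below are illustrative.
from airflow.providers.facebook.ads.hooks.ads import FacebookAdsReportingHook

hook = FacebookAdsReportingHook(facebook_conn_id="facebook_default")
rows = hook.bulk_facebook_report(
    params={"level": "ad", "date_preset": "yesterday"},
    fields=["campaign_name", "impressions", "clicks"],
)
print(f"fetched {len(rows)} insight rows")
```
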
diff --git a/airflow/providers/facebook/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/facebook/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index 27a9319778d67..0000000000000 --- a/airflow/providers/facebook/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,43 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in 
backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [d76026545](https://github.com/apache/airflow/commit/d7602654526fdd2876466371404784bd17cfe0d2) | 2020-08-25 | PyDocStyle: No whitespaces allowed surrounding docstring text (#10533) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ef8df1734](https://github.com/apache/airflow/commit/ef8df17348e3c567e2d2f0aface641acae3896ba) | 2020-08-22 | Fix typo in Facebook Ads Provider (#10484) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [bc45fa675](https://github.com/apache/airflow/commit/bc45fa6759203b4c26b52e693dac97486a84204e) | 2020-05-03 | Add system test and docs for Facebook Ads operators (#8503) | -| [eee4ebaee](https://github.com/apache/airflow/commit/eee4ebaeeb1991480ee178ddb600bc69b2a88764) | 2020-04-14 | Added Facebook Ads Operator #7887 (#8008) | diff --git a/airflow/providers/facebook/README.md b/airflow/providers/facebook/README.md deleted file mode 100644 index c6d24544aaed7..0000000000000 --- a/airflow/providers/facebook/README.md +++ /dev/null @@ -1,123 +0,0 @@ - - - -# Package apache-airflow-providers-facebook - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Hooks](#hooks) - - [New hooks](#new-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for `facebook` provider. All classes for this provider package -are in `airflow.providers.facebook` python package. - - - -## Installation - -NOTE! - -On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option -`--use-deprecated legacy-resolver` to your pip install command. - -You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-facebook` - -## PIP requirements - -| PIP package | Version required | -|:------------------|:-------------------| -| facebook-business | >=6.0.2 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `facebook` provider -are in the `airflow.providers.facebook` package. 
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Hooks - - -### New hooks - -| New Airflow 2.0 hooks: `airflow.providers.facebook` package | -|:------------------------------------------------------------------------------------------------------------------------------------| -| [ads.hooks.ads.FacebookAdsReportingHook](https://github.com/apache/airflow/blob/master/airflow/providers/facebook/ads/hooks/ads.py) | - - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages 
for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [d76026545](https://github.com/apache/airflow/commit/d7602654526fdd2876466371404784bd17cfe0d2) | 2020-08-25 | PyDocStyle: No whitespaces allowed surrounding docstring text (#10533) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ef8df1734](https://github.com/apache/airflow/commit/ef8df17348e3c567e2d2f0aface641acae3896ba) | 2020-08-22 | Fix typo in Facebook Ads Provider (#10484) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [bc45fa675](https://github.com/apache/airflow/commit/bc45fa6759203b4c26b52e693dac97486a84204e) | 2020-05-03 | Add system test and docs for Facebook Ads operators (#8503) | -| [eee4ebaee](https://github.com/apache/airflow/commit/eee4ebaeeb1991480ee178ddb600bc69b2a88764) | 2020-04-14 | Added Facebook Ads Operator #7887 (#8008) | diff --git a/airflow/providers/ftp/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/ftp/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index e6d8535669a6c..0000000000000 --- a/airflow/providers/ftp/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,20 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09 | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [74c2a6ded](https://github.com/apache/airflow/commit/74c2a6ded4d615de8e1b1c04a25146344138e920) | 2020-03-23 | Add call to Super class in 'ftp' & 'ssh' providers (#7822) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | diff --git a/airflow/providers/ftp/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/ftp/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 2df40d3c52085..0000000000000 --- a/airflow/providers/ftp/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,17 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [bcdd3bb7b](https://github.com/apache/airflow/commit/bcdd3bb7bb0e73ec957fa4077b025eb5c1fef90d) | 2020-09-24 | Increasing type coverage FTP (#11107) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [c60e476fb](https://github.com/apache/airflow/commit/c60e476fb24d4fa2eb192f8fce51edea4166f1d0) | 2020-08-25 | Remove mlsd function from hooks/ftp.py (#10538) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 
2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [027cc1682](https://github.com/apache/airflow/commit/027cc1682c3b068dfeee143ca538b5e8dadfcd17) | 2020-07-17 | Improve type annotations for Ftp provider (#9868) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/ftp/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/ftp/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 528a7e085f91f..0000000000000 --- a/airflow/providers/ftp/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,10 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/ftp/BACKPORT_PROVIDER_README.md b/airflow/providers/ftp/BACKPORT_PROVIDER_README.md deleted file mode 100644 index 0efdd30472580..0000000000000 --- a/airflow/providers/ftp/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,134 +0,0 @@ - - - -# Package apache-airflow-backport-providers-ftp - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [Provider class summary](#provider-classes-summary) - - [Sensors](#sensors) - - [Moved sensors](#moved-sensors) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for `ftp` provider. All classes for this provider package -are in `airflow.providers.ftp` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-ftp` - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `ftp` provider -are in the `airflow.providers.ftp` package. 
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Sensors - - - -### Moved sensors - -| Airflow 2.0 sensors: `airflow.providers.ftp` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:-------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------| -| [sensors.ftp.FTPSSensor](https://github.com/apache/airflow/blob/master/airflow/providers/ftp/sensors/ftp.py) | [contrib.sensors.ftp_sensor.FTPSSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/ftp_sensor.py) | -| [sensors.ftp.FTPSensor](https://github.com/apache/airflow/blob/master/airflow/providers/ftp/sensors/ftp.py) | [contrib.sensors.ftp_sensor.FTPSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/ftp_sensor.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.ftp` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:-------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------| -| [hooks.ftp.FTPHook](https://github.com/apache/airflow/blob/master/airflow/providers/ftp/hooks/ftp.py) | [contrib.hooks.ftp_hook.FTPHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/ftp_hook.py) | -| [hooks.ftp.FTPSHook](https://github.com/apache/airflow/blob/master/airflow/providers/ftp/hooks/ftp.py) | [contrib.hooks.ftp_hook.FTPSHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/ftp_hook.py) | - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| 
[bcdd3bb7b](https://github.com/apache/airflow/commit/bcdd3bb7bb0e73ec957fa4077b025eb5c1fef90d) | 2020-09-24 | Increasing type coverage FTP (#11107) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [c60e476fb](https://github.com/apache/airflow/commit/c60e476fb24d4fa2eb192f8fce51edea4166f1d0) | 2020-08-25 | Remove mlsd function from hooks/ftp.py (#10538) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [027cc1682](https://github.com/apache/airflow/commit/027cc1682c3b068dfeee143ca538b5e8dadfcd17) | 2020-07-17 | Improve type annotations for Ftp provider (#9868) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09 | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [74c2a6ded](https://github.com/apache/airflow/commit/74c2a6ded4d615de8e1b1c04a25146344138e920) | 2020-03-23 | Add call to Super class in 'ftp' & 'ssh' providers (#7822) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | diff --git a/airflow/providers/ftp/CHANGELOG.rst b/airflow/providers/ftp/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/ftp/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. 
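
The classes these provider packages ship are ordinary Airflow hooks and operators. As a minimal sketch of the `FacebookAdsReportingHook` from the new-hooks table above — assuming a Facebook connection named `facebook_default` whose extras carry the app id, app secret, access token and account id; the `params` and `fields` values below are illustrative and are passed through to the Ads Insights API:

```python
from airflow.providers.facebook.ads.hooks.ads import FacebookAdsReportingHook

# Sketch only: the connection id and report parameters are assumptions,
# not values taken from this changeset.
hook = FacebookAdsReportingHook(facebook_conn_id="facebook_default")
rows = hook.bulk_facebook_report(
    params={"level": "ad", "date_preset": "yesterday"},  # forwarded to Ads Insights
    fields=["campaign_name", "impressions", "clicks"],   # report columns to request
)
for row in rows:
    print(row)  # each row is a facebook-business AdsInsights object
```
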
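For the `ftp` provider, the moved-classes tables above amount to an import-path change with the same runtime behaviour. A minimal sketch of the Airflow 2.0 paths, with the 1.10 `contrib` locations shown for contrast; the connection id and file paths are illustrative:

```python
# Airflow 1.10.* locations (superseded by the provider package):
#   from airflow.contrib.hooks.ftp_hook import FTPHook
#   from airflow.contrib.sensors.ftp_sensor import FTPSensor
# Airflow 2.0 provider-package locations:
from airflow.providers.ftp.hooks.ftp import FTPHook
from airflow.providers.ftp.sensors.ftp import FTPSensor

# Wait for a file to appear on the server (used inside a DAG definition).
wait_for_report = FTPSensor(
    task_id="wait_for_report",
    path="/incoming/report.csv",  # illustrative remote path
    ftp_conn_id="ftp_default",
)

def fetch_report() -> None:
    # Retrieve the file via the hook, e.g. from a PythonOperator callable.
    FTPHook(ftp_conn_id="ftp_default").retrieve_file(
        "/incoming/report.csv", "/tmp/report.csv"
    )
```
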
diff --git a/airflow/providers/ftp/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/ftp/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index 60d2e6b8752e7..0000000000000 --- a/airflow/providers/ftp/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,48 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to 
October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [bcdd3bb7b](https://github.com/apache/airflow/commit/bcdd3bb7bb0e73ec957fa4077b025eb5c1fef90d) | 2020-09-24 | Increasing type coverage FTP (#11107) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [c60e476fb](https://github.com/apache/airflow/commit/c60e476fb24d4fa2eb192f8fce51edea4166f1d0) | 2020-08-25 | Remove mlsd function from hooks/ftp.py (#10538) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [027cc1682](https://github.com/apache/airflow/commit/027cc1682c3b068dfeee143ca538b5e8dadfcd17) | 2020-07-17 | Improve type annotations for Ftp provider (#9868) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09 | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [74c2a6ded](https://github.com/apache/airflow/commit/74c2a6ded4d615de8e1b1c04a25146344138e920) | 2020-03-23 | Add call to Super class in 'ftp' & 'ssh' providers (#7822) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | diff --git a/airflow/providers/ftp/README.md b/airflow/providers/ftp/README.md deleted file mode 100644 index 07e590b896899..0000000000000 --- a/airflow/providers/ftp/README.md +++ /dev/null @@ -1,136 +0,0 @@ - - - -# Package apache-airflow-providers-ftp - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [Provider class summary](#provider-classes-summary) - - [Sensors](#sensors) - - [Moved sensors](#moved-sensors) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for `ftp` provider. All classes for this provider package -are in `airflow.providers.ftp` python package. - - - -## Installation - -NOTE! - -On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option -`--use-deprecated legacy-resolver` to your pip install command. - -You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-ftp` - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `ftp` provider -are in the `airflow.providers.ftp` package. 
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Sensors - - - -### Moved sensors - -| Airflow 2.0 sensors: `airflow.providers.ftp` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:-------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------| -| [sensors.ftp.FTPSSensor](https://github.com/apache/airflow/blob/master/airflow/providers/ftp/sensors/ftp.py) | [contrib.sensors.ftp_sensor.FTPSSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/ftp_sensor.py) | -| [sensors.ftp.FTPSensor](https://github.com/apache/airflow/blob/master/airflow/providers/ftp/sensors/ftp.py) | [contrib.sensors.ftp_sensor.FTPSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/ftp_sensor.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.ftp` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:-------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------| -| [hooks.ftp.FTPHook](https://github.com/apache/airflow/blob/master/airflow/providers/ftp/hooks/ftp.py) | [contrib.hooks.ftp_hook.FTPHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/ftp_hook.py) | -| [hooks.ftp.FTPSHook](https://github.com/apache/airflow/blob/master/airflow/providers/ftp/hooks/ftp.py) | [contrib.hooks.ftp_hook.FTPSHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/ftp_hook.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi 
project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [bcdd3bb7b](https://github.com/apache/airflow/commit/bcdd3bb7bb0e73ec957fa4077b025eb5c1fef90d) | 2020-09-24 | Increasing type coverage FTP (#11107) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [c60e476fb](https://github.com/apache/airflow/commit/c60e476fb24d4fa2eb192f8fce51edea4166f1d0) | 2020-08-25 | Remove mlsd function from hooks/ftp.py (#10538) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [027cc1682](https://github.com/apache/airflow/commit/027cc1682c3b068dfeee143ca538b5e8dadfcd17) | 2020-07-17 | Improve type annotations for Ftp provider (#9868) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) 
| -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09 | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [74c2a6ded](https://github.com/apache/airflow/commit/74c2a6ded4d615de8e1b1c04a25146344138e920) | 2020-03-23 | Add call to Super class in 'ftp' & 'ssh' providers (#7822) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | diff --git a/airflow/providers/google/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/google/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index 9457acb89f810..0000000000000 --- a/airflow/providers/google/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,194 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [416334e2e](https://github.com/apache/airflow/commit/416334e2ecd21d8a532af6102f1cfa9ac921a97a) | 2020-06-19 | Properly propagated warnings in operators (#9348) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [4e09c6442](https://github.com/apache/airflow/commit/4e09c64423bfaabd02a18b5fe7757dc15451ab73) | 2020-06-18 | Adds GCP Secret 
Manager Hook (#9368) | -| [40bf8f28f](https://github.com/apache/airflow/commit/40bf8f28f97f17f40d993d207ea740eba54593ee) | 2020-06-18 | Detect automatically the lack of reference to the guide in the operator descriptions (#9290) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [639972d99](https://github.com/apache/airflow/commit/639972d995d848b16a3f283576efdbde28b8fdef) | 2020-06-16 | Add support for latest Apache Beam SDK in Dataflow operators (#9323) | -| [1459970b3](https://github.com/apache/airflow/commit/1459970b3b9780e139ce029ae889fd8f69a37bc7) | 2020-06-15 | Rename CloudBuildCreateBuildOperator to CloudBuildCreateOperator (#9314) | -| [431ea3291](https://github.com/apache/airflow/commit/431ea3291c9bf236bccdf8446d753c630ada2b25) | 2020-06-15 | Resolve upstream tasks when template field is XComArg (#8805) | -| [aee6ab94e](https://github.com/apache/airflow/commit/aee6ab94eb956347ad560cfe2673bc6011074513) | 2020-06-15 | Wait for pipeline state in Data Fusion operators (#8954) | -| [fb1c8b83d](https://github.com/apache/airflow/commit/fb1c8b83d400506a16c10e3d6623a913847e5cf5) | 2020-06-10 | Add test for BQ operations using location (#9206) | -| [a26afbfa5](https://github.com/apache/airflow/commit/a26afbfa51b0981ae742c6171938b57a80aace2b) | 2020-06-10 | Make generated job_id more informative in BQ insert_job (#9203) | -| [c41192fa1](https://github.com/apache/airflow/commit/c41192fa1fc5c2b3e7b8414c59f656ab67bbef28) | 2020-06-10 | Upgrade pendulum to latest major version ~2.0 (#9184) | -| [b1c8c5ed5](https://github.com/apache/airflow/commit/b1c8c5ed5bba3a852a5446f3fdd1131b4b22637a) | 2020-06-09 | Allows using private endpoints in GKEStartPodOperator (#9169) | -| [5918efc86](https://github.com/apache/airflow/commit/5918efc86a2217caa641a6ada289eee1c21407f8) | 2020-06-05 | Add 3.8 to the test matrices (#8836) | -| [9bcdadaf7](https://github.com/apache/airflow/commit/9bcdadaf7e6e73d3d2246fbbd32a9f30a1b43ca9) | 2020-06-05 | Add 'main' param to template_fields in DataprocSubmitPySparkJobOperator (#9154) | -| [f56811dff](https://github.com/apache/airflow/commit/f56811dff3af66cbceb0418f11e00507bab58674) | 2020-06-05 | [AIRFLOW-6290] Create guide for GKE operators (#8883) | -| [76962867b](https://github.com/apache/airflow/commit/76962867b5877cf5ffd1b6004453f783c0732ab1) | 2020-06-04 | Fix sql_to_gcs hook gzip of schema_file (#9140) | -| [17adcea83](https://github.com/apache/airflow/commit/17adcea835cb7b0cf2d8da0ac7dda5549cfa3e45) | 2020-06-02 | Fix handling of subprocess error handling in s3_file_transform and gcs (#9106) | -| [789852546](https://github.com/apache/airflow/commit/78985254683c359f7444a7eb5f6ee4967c37d61f) | 2020-06-01 | Add BigQueryInsertJobOperator (#8868) | -| [29eb68b90](https://github.com/apache/airflow/commit/29eb68b90b5df692ac322be0939af5e7fa9b71bc) | 2020-05-31 | Create guide for Dataproc Operators (#9037) | -| [886afaf62](https://github.com/apache/airflow/commit/886afaf622602aa97f925bc3ee4fc27aa995c445) | 2020-05-29 | Add example dag and system test for LocalFilesystemToGCSOperator (#9043) | -| [a779c4dfc](https://github.com/apache/airflow/commit/a779c4dfc278d6ece480b012764ea5814dc78dee) | 2020-05-29 | add separate example dags and system tests for GCSToGoogleSheetsOperator (#9066) | -| [ada26be23](https://github.com/apache/airflow/commit/ada26be23c913796c2ae77b91cb7d113dfec75a6) | 2020-05-29 | Add correct description for dst param in LocalFilesystemToGCSOperator 
(#9055) | -| [81b2761b8](https://github.com/apache/airflow/commit/81b2761b86dae2d21a6ee859d49c08d46fea6def) | 2020-05-29 | add example dag and system test for GoogleSheetsToGCSOperator (#9056) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [3994030ea](https://github.com/apache/airflow/commit/3994030ea678727daaf9c2bfed0ca94a096f8d2a) | 2020-05-26 | Refactor BigQuery operators (#8858) | -| [cdb3f2545](https://github.com/apache/airflow/commit/cdb3f25456e49d0199cd7ccd680626dac01c9be6) | 2020-05-26 | All classes in backport providers are now importable in Airflow 1.10 (#8991) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [cf5cf45e1](https://github.com/apache/airflow/commit/cf5cf45e1c0dff9a40e02f0dc221542f974831a7) | 2020-05-23 | Support YAML input for CloudBuildCreateOperator (#8808) | -| [499493c5c](https://github.com/apache/airflow/commit/499493c5c5cf324ab8452ead80a10b71ce0c3b14) | 2020-05-19 | [AIRFLOW-6586] Improvements to gcs sensor (#7197) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [841d81664](https://github.com/apache/airflow/commit/841d81664737c25d73d095a7dab5de80d369c87c) | 2020-05-19 | Allow setting the pooling time in DLPHook (#8824) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [15273f0ea](https://github.com/apache/airflow/commit/15273f0ea05ec579c631ce26b5d620233ebdc4d2) | 2020-05-16 | Check for same task instead of Equality to detect Duplicate Tasks (#8828) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [e1e833bb2](https://github.com/apache/airflow/commit/e1e833bb260879ecb9a1f80f28450a3656c0e598) | 2020-05-13 | Update GoogleBaseHook to not follow 308 and use 60s timeout (#8816) | -| [8b5491971](https://github.com/apache/airflow/commit/8b54919711a203c3f35d98c6310a55d4df5da590) | 2020-05-12 | Refactor BigQuery hook methods to use python library (#8631) | -| [6911dfe83](https://github.com/apache/airflow/commit/6911dfe8372a33df67ce1fdd3c2bca1047718f60) | 2020-05-12 | Fix template fields in Google operators (#8840) | -| [4b06fde0f](https://github.com/apache/airflow/commit/4b06fde0f10ce178b3c336c5d901e3b089f2863d) | 2020-05-12 | Fix Flake8 errors (#8841) | -| [1d12c347c](https://github.com/apache/airflow/commit/1d12c347cb258e7081804da1f9f5ffdedc003163) | 2020-05-12 | Refactor BigQuery check operators (#8813) | -| [493b685d7](https://github.com/apache/airflow/commit/493b685d7879cfee532390ba0909d4b1d8764267) | 2020-05-10 | Add separate example DAGs and system tests for google cloud speech (#8778) | -| [79ef8bed8](https://github.com/apache/airflow/commit/79ef8bed891c22eb76adf99158288d1b44426dc0) | 2020-05-10 | Added 
Upload Multiple Entity Read Files to specified big query dataset (#8610) | -| [280f1f0c4](https://github.com/apache/airflow/commit/280f1f0c4cc49aba1b2f8b456326795733769d18) | 2020-05-10 | Correctly restore upstream_task_ids when deserializing Operators (#8775) | -| [58aefb23b](https://github.com/apache/airflow/commit/58aefb23b1d456bbb24876a4e3ff14f25d6274b0) | 2020-05-08 | Added SDFtoGCSOperator (#8740) | -| [723c52c94](https://github.com/apache/airflow/commit/723c52c942b49b0e8c8fa8667a4a6a45fa249498) | 2020-05-07 | Add documentation for SpannerDeployInstanceOperator (#8750) | -| [25ee4211b](https://github.com/apache/airflow/commit/25ee4211b345ce7c19fb7366fd230838c34f1d47) | 2020-05-06 | Support all RuntimeEnvironment parameters in DataflowTemplatedJobStartOperator (#8531) | -| [8d6f1aa4b](https://github.com/apache/airflow/commit/8d6f1aa4b5bb8809ffc55dc0c62e6d0e89f331e5) | 2020-05-05 | Support num_retries field in env var for GCP connection (#8700) | -| [67caae0f2](https://github.com/apache/airflow/commit/67caae0f25db4eec42b8e81c85683aabdd8d6c1a) | 2020-05-04 | Add system test for gcs_to_bigquery (#8556) | -| [bc45fa675](https://github.com/apache/airflow/commit/bc45fa6759203b4c26b52e693dac97486a84204e) | 2020-05-03 | Add system test and docs for Facebook Ads operators (#8503) | -| [a28c66f23](https://github.com/apache/airflow/commit/a28c66f23d373cd0f8bfc765a515f21d4b66a0e9) | 2020-04-30 | [AIRFLOW-4734] Upsert functionality for PostgresHook.insert_rows() (#8625) | -| [992a24ce4](https://github.com/apache/airflow/commit/992a24ce41067d3b73f293878e71835892cbb632) | 2020-04-28 | Split and improve BigQuery example DAG (#8529) | -| [c1fb28230](https://github.com/apache/airflow/commit/c1fb28230fa0d36ef86c452c70254b253a113f9c) | 2020-04-28 | Refactor BigQueryHook dataset operations (#8477) | -| [e8d0f8fea](https://github.com/apache/airflow/commit/e8d0f8feab0ec08e248cd381359112ad6a832f5b) | 2020-04-26 | Improve idempodency in CloudDataTransferServiceCreateJobOperator (#8430) | -| [37fdfa977](https://github.com/apache/airflow/commit/37fdfa9775f43a5fa15de9c53ab33ecdf97513c5) | 2020-04-26 | [AIRFLOW-6281] Create guide for GCS to GCS transfer operators (#8442) | -| [14b22e6ff](https://github.com/apache/airflow/commit/14b22e6ffeb3af1f68e8362a1d0061b41364019c) | 2020-04-25 | Add hook and operator for Google Cloud Life Sciences (#8481) | -| [72ddc94d1](https://github.com/apache/airflow/commit/72ddc94d1ee08b414102e0b8ac197a3d8e965707) | 2020-04-23 | Pass location using parmamter in Dataflow integration (#8382) | -| [912aa4b42](https://github.com/apache/airflow/commit/912aa4b4237695275db6379cf2f0a633ea6087bc) | 2020-04-23 | Added GoogleDisplayVideo360DownloadLineItemsOperator (#8174) | -| [57c8c0583](https://github.com/apache/airflow/commit/57c8c05839f66ed2909b1bee8ff6976432db82aa) | 2020-04-22 | Use python client in BQ hook create_empty_table/dataset and table_exists (#8377) | -| [5d3a7eef3](https://github.com/apache/airflow/commit/5d3a7eef30b30fa466d8173f13abe4c356d73aef) | 2020-04-20 | Allow multiple extra_packages in Dataflow (#8394) | -| [79c99b1b6](https://github.com/apache/airflow/commit/79c99b1b6ae2ff5b0c8ab892f7f3fb1b44724121) | 2020-04-18 | Added location parameter to BigQueryCheckOperator (#8273) | -| [79d3f33c1](https://github.com/apache/airflow/commit/79d3f33c1b65c9c7e7b1a75e25d38cab9aa4517f) | 2020-04-17 | Clean up temporary files in Dataflow operators (#8313) | -| [efcffa323](https://github.com/apache/airflow/commit/efcffa323ddb5aa9f5907aa86808f3f3b4f5bd87) | 2020-04-16 | Add Dataproc SparkR 
Example (#8240) | -| [b198a1fa9](https://github.com/apache/airflow/commit/b198a1fa94c44228dc7358552aeb6a5371ae0da2) | 2020-04-15 | Create guide for BigQuery operators (#8276) | -| [2636cc932](https://github.com/apache/airflow/commit/2636cc932c3b156644edd46635cf9ff995c83159) | 2020-04-14 | Raise exception when GCP credential doesn't support account impersonation (#8213) | -| [eee4ebaee](https://github.com/apache/airflow/commit/eee4ebaeeb1991480ee178ddb600bc69b2a88764) | 2020-04-14 | Added Facebook Ads Operator #7887 (#8008) | -| [8cae07ea1](https://github.com/apache/airflow/commit/8cae07ea1873a90516120d9ffbd28e7fdd2f78a4) | 2020-04-14 | fixed typo (#8294) | -| [45c898330](https://github.com/apache/airflow/commit/45c8983306ab1c54abdacd8f870e790fad25cb37) | 2020-04-13 | Less aggressive eager upgrade of requirements (#8267) | -| [1fd9ed384](https://github.com/apache/airflow/commit/1fd9ed3840361afa1e9456ccb0dfd5a60fba4e85) | 2020-04-13 | Add mypy plugin for decorators. (#8145) | -| [327b0a9f7](https://github.com/apache/airflow/commit/327b0a9f77bbcbe3f977a37de04264c2eff4bee1) | 2020-04-13 | Added GoogleDisplayVideo360UploadLineItemsOperator (#8216) | -| [bb5e403a3](https://github.com/apache/airflow/commit/bb5e403a320e7377e5040cb180f61b4f5a9ea558) | 2020-04-10 | Honor schema type for MySQL to GCS data pre-process (#8090) | -| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09 | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170) | -| [3fc89f29f](https://github.com/apache/airflow/commit/3fc89f29f5bcd1529089fa6cb9c44843614f9ec5) | 2020-04-06 | [AIRFLOW-7106] Cloud data fusion integration - Allow to pass args to start pipeline (#7849) | -| [7ef75d239](https://github.com/apache/airflow/commit/7ef75d2393f30d155de550e6d1ee8c055e2abfee) | 2020-04-03 | [AIRFLOW-7117] Honor self.schema in sql_to_gcs as schema to upload (#8049) | -| [ed2bc0057](https://github.com/apache/airflow/commit/ed2bc00576b39a88e3e1fb79092494f4bfdcbf5c) | 2020-04-02 | Add Google Ads list accounts operator (#8007) | -| [3808a6206](https://github.com/apache/airflow/commit/3808a6206e70d4af84b39ea7078df54f02c1435e) | 2020-04-01 | Unify Google class/package names (#8033) | -| [8a0240257](https://github.com/apache/airflow/commit/8a02402576f83869d5134b4bddef5d73c15a8320) | 2020-03-31 | Rename CloudBaseHook to GoogleBaseHook and move it to google.common (#8011) | -| [8e8978007](https://github.com/apache/airflow/commit/8e897800716c8ccedd1c53f2d083cb295786aa50) | 2020-03-31 | Add more refactor steps for providers.google (#8010) | -| [aae3b8fb2](https://github.com/apache/airflow/commit/aae3b8fb27870cb3cfba5ed73e35e08d520ef014) | 2020-03-31 | Individual package READMEs (#8012) | -| [779023968](https://github.com/apache/airflow/commit/779023968f983c91701f687bc823dc338934cdad) | 2020-03-30 | [AIRFLOW-7075] Operators for storing information from GCS into GA (#7743) | -| [49abce521](https://github.com/apache/airflow/commit/49abce52178c81954f8a25608f70ffe02fcf7b19) | 2020-03-30 | Improve system tests for Cloud Build (#8003) | -| [0f19a930d](https://github.com/apache/airflow/commit/0f19a930d1a7dec2a96bab0de144829f83cc0626) | 2020-03-29 | Remove GKEStartPodOperator when backporting (#7908) | -| [0e1c238b2](https://github.com/apache/airflow/commit/0e1c238b2fff3a092c93368125bc8d82abc4b308) | 2020-03-28 | Get Airflow Variables from GCP Secrets Manager (#7946) | -| [eb4af4f94](https://github.com/apache/airflow/commit/eb4af4f944c77e67e167bbb6b0a2aaf075a95b50) | 2020-03-28 | Make 
BaseSecretsBackend.build_path generic (#7948) | -| [01f99426f](https://github.com/apache/airflow/commit/01f99426fddd2a24552f352edcb271fa78cf3b15) | 2020-03-28 | Add download/upload operators for GCS and Google Sheets (#7866) | -| [892522f8e](https://github.com/apache/airflow/commit/892522f8e2aeedc1ad842a08aaea967b0cae077f) | 2020-03-26 | Change signature of GSheetsHook methods (#7853) | -| [bfd425157](https://github.com/apache/airflow/commit/bfd425157a746402b516f8fc9e48f4ddccd794ce) | 2020-03-26 | Improve idempotency in MLEngineHook.create_model (#7811) | -| [f9c226343](https://github.com/apache/airflow/commit/f9c226343d94a7732da280d1dd086bf1ba291c77) | 2020-03-26 | Fix CloudSecretsManagerBackend invalid connections_prefix (#7861) | -| [e3920f12f](https://github.com/apache/airflow/commit/e3920f12f483b53950507c50f6ab6a4318072859) | 2020-03-26 | Improve setUp/tearDown in Cloud Firestore system test (#7862) | -| [8ba8a7295](https://github.com/apache/airflow/commit/8ba8a7295a31f6b44894bfcaea36fa93b8d8c0d0) | 2020-03-26 | Improve example DAGs for Cloud Memorystore (#7855) | -| [f7d1a437c](https://github.com/apache/airflow/commit/f7d1a437c17461b5ab768b75d58f0cb026b2a818) | 2020-03-26 | Fix CloudMemorystoreCreateInstanceAndImportOperator operator (#7856) | -| [beef6c230](https://github.com/apache/airflow/commit/beef6c230e4ff266af7c16b639bfda659b2bf6c0) | 2020-03-26 | Improve authorization in GCP system tests (#7863) | -| [5f165f3e4](https://github.com/apache/airflow/commit/5f165f3e4231ebd420ce643211a93e1fecf4877e) | 2020-03-26 | [AIRFLOW-5801] Get GCP credentials from file instead of JSON blob (#7869) | -| [686d7d50b](https://github.com/apache/airflow/commit/686d7d50bd21622724d6818021355bc6885fd3de) | 2020-03-25 | Standardize SecretBackend class names (#7846) | -| [1982c3fdc](https://github.com/apache/airflow/commit/1982c3fdca1f04cfc41fc5b5e285d8f01c6b76ab) | 2020-03-24 | Run Dataflow for ML Engine summary in venv (#7809) | -| [eef87b995](https://github.com/apache/airflow/commit/eef87b9953347a65421f315a07dbef37ded9df66) | 2020-03-23 | [AIRFLOW-7105] Unify Secrets Backend method interfaces (#7830) | -| [529db07b2](https://github.com/apache/airflow/commit/529db07b2ee73d886e37e8b3415462c730187b15) | 2020-03-23 | Improve Google PubSub hook publish method (#7831) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [a001489b5](https://github.com/apache/airflow/commit/a001489b5928ebfc35f990a29d1c9c2ecb80bd61) | 2020-03-23 | Improve example DAG for ML Engine (#7810) | -| [9e5a8e7f8](https://github.com/apache/airflow/commit/9e5a8e7f83cf2368315fce62f8d81304f7ba2f04) | 2020-03-23 | Add call to Super class in 'google' providers (#7823) | -| [b86bf79bf](https://github.com/apache/airflow/commit/b86bf79bff615e61de98bead4d02eace5690d5fb) | 2020-03-23 | Fix typo in GCP credentials_provider's docstring (#7818) | -| [56c013ce9](https://github.com/apache/airflow/commit/56c013ce922eb18e5f7dd4410986afbcc6f29025) | 2020-03-23 | Add missing docstring in BigQueryHook.create_empty_table (#7817) | -| [426a79847](https://github.com/apache/airflow/commit/426a79847ced832ca3f67c135fd8830ebf1de7d2) | 2020-03-23 | Improve support for latest API in MLEngineStartTrainingJobOperator (#7812) | -| [cdf1809fc](https://github.com/apache/airflow/commit/cdf1809fce0e59c8379a799f1738d8d813abbf51) | 2020-03-23 | [AIRFLOW-7104] Add Secret backend for GCP Secrets Manager (#7795) | -|
[27dac00e1](https://github.com/apache/airflow/commit/27dac00e125b87626a0b87074d61e6d38031bf47) | 2020-03-22 | [AIRFLOW-7099] Improve system test for cloud transfer service (#7794) | -| [0daf5d729](https://github.com/apache/airflow/commit/0daf5d729acef4e9aef5226452dff774e80430cd) | 2020-03-22 | Add ability to specify a maximum modified time for objects in GCSToGCSOperator (#7791) | -| [c8088c2bd](https://github.com/apache/airflow/commit/c8088c2bd70a16605a5d4b1a66a22309359d6712) | 2020-03-20 | [AIRFLOW-7100] Add GoogleAnalyticsGetAdsLinkOperator (#7781) | -| [5106a2931](https://github.com/apache/airflow/commit/5106a29314b413d168bcba7a64bf91c04fdb5dfe) | 2020-03-20 | [AIRFLOW-6752] Add GoogleAnalyticsRetrieveAdsLinksListOperator (#7748) | -| [759ce2a80](https://github.com/apache/airflow/commit/759ce2a80c95832fe4773c9f4fde23e1b03cbc6f) | 2020-03-20 | [AIRFLOW-6978] Add PubSubPullOperator (#7766) | -| [6b9b214e4](https://github.com/apache/airflow/commit/6b9b214e4c3b3afa8ea2e1a5c1e24993013d60ac) | 2020-03-20 | [AIRFLOW-6732] Add GoogleAdsHook and GoogleAdsToGcsOperator (#7692) | -| [b11891696](https://github.com/apache/airflow/commit/b11891696946d1461174b385c88d6af8abb99768) | 2020-03-19 | [AIRFLOW-7069] Fix cloudsql system tests (#7770) | -| [ae854cae5](https://github.com/apache/airflow/commit/ae854cae5a2cf8cae37edf7e0813ad01bccfbc30) | 2020-03-19 | [AIRFLOW-7082] Remove catch_http_exception decorator in GCP hooks (#7756) | -| [7e1e954d2](https://github.com/apache/airflow/commit/7e1e954d23ce272b0a71188f0f535e20d54be443) | 2020-03-19 | [AIRFLOW-7085] Cache credentials, project_id in GCP Base Hook (#7759) | -| [6e21c139b](https://github.com/apache/airflow/commit/6e21c139b3cce3f895040939f0b02e3e0ba36141) | 2020-03-19 | [AIRFLOW-XXXX] Fix reference to GCP classes in guides (#7762) | -| [ce022a3f7](https://github.com/apache/airflow/commit/ce022a3f72b7735087d4c3bbe81d293a0ab75327) | 2020-03-19 | [AIRFLOW-XXXX] Add cross-references for operators guide (#7760) | -| [029c84e55](https://github.com/apache/airflow/commit/029c84e5527b6db6bdbdbe026f455da325bedef3) | 2020-03-18 | [AIRFLOW-5421] Add Presto to GCS transfer operator (#7718) | -| [63a3102ed](https://github.com/apache/airflow/commit/63a3102ede8fb8f764d251b20cad5ee5bef84f50) | 2020-03-18 | [AIRFLOW-7064] Add CloudFirestoreExportDatabaseOperator (#7725) | -| [73305c7bd](https://github.com/apache/airflow/commit/73305c7bd57f14444804c13b8b290f479832d3db) | 2020-03-18 | [AIRFLOW-7081] Remove env variables from GCP guide (#7755) | -| [60fdbf6d9](https://github.com/apache/airflow/commit/60fdbf6d9255d34a8967400e9585b1cd5d29d3e9) | 2020-03-18 | [AIRFLOW-5610] Add ability to specify multiple objects to copy in GCSToGCSOperator (#7728) | -| [de7e934ca](https://github.com/apache/airflow/commit/de7e934ca3f21ce82f67accf92811b3ac044476f) | 2020-03-17 | [AIRFLOW-7079] Remove redundant code for storing template_fields (#7750) | -| [0de0347b2](https://github.com/apache/airflow/commit/0de0347b27a961c46ee49da6dfa9205321657749) | 2020-03-17 | [AIRFLOW-6855]: Escape project_dataset_table in SQL query in gcs to bq … (#7475) | -| [91557c6f8](https://github.com/apache/airflow/commit/91557c6f87529c010b8ad1110ece35fd7fd751e4) | 2020-03-17 | [AIRFLOW-7073] GKEStartPodOperator always use connection credentials (#7738) | -| [51161dbd9](https://github.com/apache/airflow/commit/51161dbd9de0c966016cec4d5036877890daee7c) | 2020-03-16 | [AIRFLOW-5664] Store timestamps with microseconds precision (#6354) | -| 
[2bc020c43](https://github.com/apache/airflow/commit/2bc020c43112dd3a769311de8d5012e8e8f399ee) | 2020-03-14 | [AIRFLOW-7055] Verbose logging option for google provider (#7711) | -| [c997cab42](https://github.com/apache/airflow/commit/c997cab42d8695ac444e63dfe4b948a7ea82ed89) | 2020-03-13 | [AIRFLOW-6724] Add Google Analytics 360 Accounts Retrieve Operator (#7630) | -| [137896f32](https://github.com/apache/airflow/commit/137896f326cd29b59902a887e4c4e58f940ff62b) | 2020-03-12 | [AIRFLOW-7034] Remove feature: Assigning Dag to task using Bitshift Op (#7685) | -| [1f77f943d](https://github.com/apache/airflow/commit/1f77f943d5d85f66b6a988e8ef6506525eaf4732) | 2020-03-10 | [AIRFLOW-6980] Improve system tests and building providers package (#7615) | -| [bf9b6b6d7](https://github.com/apache/airflow/commit/bf9b6b6d70455352bbf807871c8eeb6324be7e54) | 2020-03-09 | [AIRFLOW-5013] Add GCP Data Catalog Hook and operators (#7664) | -| [e5130dc9f](https://github.com/apache/airflow/commit/e5130dc9fe89187e95071e678ea3b46600866762) | 2020-03-09 | [AIRFLOW-2911] Add job cancellation capability to Dataflow service (#7659) | -| [faf0df4b9](https://github.com/apache/airflow/commit/faf0df4b9460b7f037ee390addbd2c6effcae013) | 2020-03-09 | [AIRFLOW-XXXX] Fix upsert operator in BQ example DAG (#7666) | -| [42eef3821](https://github.com/apache/airflow/commit/42eef38217e709bc7a7f71bf0286e9e61293a43e) | 2020-03-07 | [AIRFLOW-6877] Add cross-provider dependencies as extras (#7506) | -| [b5b9795f0](https://github.com/apache/airflow/commit/b5b9795f0446bb484a91ee485f49ea456f1c26c4) | 2020-03-07 | [AIRFLOW-6973] Make GCSCreateBucketOperator idempotent (fix) (#7624) | -| [6b65038fb](https://github.com/apache/airflow/commit/6b65038fb409ba1040e70305444816d8f5cfdc47) | 2020-03-06 | [AIRFLOW-6990] Improve system tests for Google Marketing Platform (#7631) | -| [755fe5224](https://github.com/apache/airflow/commit/755fe52249ba1cd965cf2f87fa7a428b8197a38a) | 2020-03-05 | [AIRFLOW-6915] Add AI Platform Console Link for MLEngineStartTrainingJobOperator (#7535) | -| [cb2f33911](https://github.com/apache/airflow/commit/cb2f339116cf2093da447748892fac68aecbb888) | 2020-03-04 | [AIRFLOW-6973] Make GCSCreateBucketOperator idempotent (#7609) | -| [09fea3ce8](https://github.com/apache/airflow/commit/09fea3ce8e4d7816281963bb8f2cb06f4de6db5c) | 2020-03-04 | [AIRFLOW-6977] Fix BigQuery DTS example DAG (#7612) | -| [8230ccc48](https://github.com/apache/airflow/commit/8230ccc48b157c89b2b893d42c6fe1523b83363a) | 2020-03-04 | [AIRFLOW-6926] Fix Google Tasks operators return types and idempotency (#7547) | -| [0d1e3088a](https://github.com/apache/airflow/commit/0d1e3088aa9f16eaeeb7b18eccec8f35c79a53df) | 2020-03-04 | [AIRFLOW-6970] Improve GCP Video Intelligence system tests (#7604) | -| [ab6bb0012](https://github.com/apache/airflow/commit/ab6bb0012c38740b76e864d42d299c5c7a9972a3) | 2020-03-03 | [AIRFLOW-6971] Fix return type in CloudSpeechToTextRecognizeSpeechOperator (#7607) | -| [3db4ade3d](https://github.com/apache/airflow/commit/3db4ade3dc9660c21c28187100a22008552f2bd3) | 2020-02-29 | [AIRFLOW-6924] Fix Google DLP operators return types (#7546) | -| [008b4bab1](https://github.com/apache/airflow/commit/008b4bab14222da068b737d6332db4963b994007) | 2020-02-27 | [AIRFLOW-6730] Use total_seconds instead of seconds (#7363) | -| [bb552b2d9](https://github.com/apache/airflow/commit/bb552b2d9fd595cc3eb1b3a2f637f29b814878d7) | 2020-02-25 | [AIRFLOW-6908] Lazy load AirflowException (#7528) | -| 
[d1a34246a](https://github.com/apache/airflow/commit/d1a34246ac593901f8599b102dc3d7efa4dd61e4) | 2020-02-25 | [AIRFLOW-6593] Add GCP Stackdriver Alerting Hooks and Operators (#7322) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [dcf874352](https://github.com/apache/airflow/commit/dcf87435219307d4e916a8abc2b819ad75e2b1cf) | 2020-02-24 | [AIRFLOW-6894] Prevent db query in example_dags (#7516) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [35b961637](https://github.com/apache/airflow/commit/35b9616378d1cfba7c2eb3c71e20acb6734b7c77) | 2020-02-21 | [AIRFLOW-4973] Add Cloud Data Fusion Pipeline integration (#7486) | -| [aff3a361b](https://github.com/apache/airflow/commit/aff3a361b4092212c0757f9ce88fa2e40d25d1f4) | 2020-02-20 | [AIRFLOW-6558] Campaign Manager operators for conversions (#7420) | -| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18 | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) | -| [5b199cb86](https://github.com/apache/airflow/commit/5b199cb86be5b1aefbd8620185033d6f635713c1) | 2020-02-17 | [AIRFLOW-XXXX] Typo in example_bigquery DAG (#7429) | -| [2c9345a8e](https://github.com/apache/airflow/commit/2c9345a8e03d37a2676efa2f2ea7e8b7814c5345) | 2020-02-17 | [AIRFLOW-6759] Added MLEngine operator/hook to cancel MLEngine jobs (#7400) | -| [946bdc23c](https://github.com/apache/airflow/commit/946bdc23c039637b0383e1269f99bdd1b2426565) | 2020-02-16 | [AIRFLOW-6405] Add GCP BigQuery Table Upsert Operator (#7126) | -| [2381c820c](https://github.com/apache/airflow/commit/2381c820c8aaeffc1c9b4ed47832038833400eb8) | 2020-02-13 | [AIRFLOW-6505] Let emoji encoded properly for json.dumps() (#7399) | -| [04c1fefbf](https://github.com/apache/airflow/commit/04c1fefbf26a73ed13881d2ec14eada48028ff72) | 2020-02-03 | [AIRFLOW-6676] added GCSDeleteBucketOperator (#7307) | -| [a0252748f](https://github.com/apache/airflow/commit/a0252748ff312daede15c6f0a3d39e16c774461c) | 2020-02-03 | [AIRFLOW-6717] Remove non-existent field from templated_fields (#7340) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [9d8d07557](https://github.com/apache/airflow/commit/9d8d0755789d4aeadc5d3015f3cdde62901f85b8) | 2020-02-03 | [AIRFLOW-6715] Fix Google Cloud DLP Example DAG (#7337) | -| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02 | [AIRFLOW-6708] Set unique logger names (#7330) | -| [373c6aa4a](https://github.com/apache/airflow/commit/373c6aa4a208284b5ff72987e4bd8f4e2ada1a1b) | 2020-01-30 | [AIRFLOW-6682] Move GCP classes to providers package (#7295) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [057f3ae3a](https://github.com/apache/airflow/commit/057f3ae3a4afedf6d462ecf58b01dd6304d3e135) | 2020-01-29 | [AIRFLOW-6670][depends on AIRFLOW-6669] Move contrib operators to providers package (#7286) | -| 
[ceea293c1](https://github.com/apache/airflow/commit/ceea293c1652240e7e856c201e4341a87ef97a0f) | 2020-01-28 | [AIRFLOW-6656] Fix AIP-21 moving (#7272) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | -| [f4d3e5e54](https://github.com/apache/airflow/commit/f4d3e5e54507f52a00a9b95aa48eb0260e17224d) | 2020-01-13 | [AIRFLOW-6102] [AIP-21] Rename Dataproc operators (#7151) | -| [e7bf8ecb4](https://github.com/apache/airflow/commit/e7bf8ecb48f0299af8091433535ac573c2afd1cf) | 2020-01-13 | [AIRFLOW-6119] [AIP-21] Rename GCS operators, hooks and sensors (#7125) | -| [5b6772cb8](https://github.com/apache/airflow/commit/5b6772cb8391b248cb4b7be5fd3d5c035280fac1) | 2020-01-09 | [AIRFLOW-6125] [AIP-21] Rename S3 operator and SFTP operator (#7112) | -| [4f8592ae8](https://github.com/apache/airflow/commit/4f8592ae8f52ab7f42623d3b43eef0928c9aafb2) | 2020-01-08 | [AIRFLOW-6118] [AIP-21] Rename Pubsub operators and hook (#7046) | -| [20299473f](https://github.com/apache/airflow/commit/20299473f11add6531f607256ee8a0f7f9507ab8) | 2020-01-03 | [AIRFLOW-6115] [AIP-21] Rename GCP vision operators (#7020) | -| [18e8cea4e](https://github.com/apache/airflow/commit/18e8cea4e7487a7dfefc03661e5ebe54c4104ead) | 2020-01-03 | [AIRFLOW-6428] Fix import path for airflow.utils.dates.days_ago in Example DAGs (#7007) | -| [95087af14](https://github.com/apache/airflow/commit/95087af14091f28a83ced8ff1860b86dfd93f93d) | 2019-12-31 | [AIRFLOW-6110] [AIP-21] Rename natural_language service (#6968) | -| [69629a5a9](https://github.com/apache/airflow/commit/69629a5a948ab2c4ac04a4a4dca6ac86d19c11bd) | 2019-12-09 | [AIRFLOW-5807] Move SFTP from contrib to providers. 
(#6464) | -| [25e9047a4](https://github.com/apache/airflow/commit/25e9047a4a4da5fad4f85c366e3a6262c0a4f68e) | 2019-12-09 | [AIRFLOW-6193] Do not use asserts in Airflow main code (#6749) | -| [ed0a14f32](https://github.com/apache/airflow/commit/ed0a14f321b9dab3554ae395c11c147258536ce8) | 2019-12-09 | [AIRFLOW-6120] Rename GoogleCloudBaseHook (#6734) | -| [2f2f89c14](https://github.com/apache/airflow/commit/2f2f89c148e2b694aee9402707f68065ee7320f8) | 2019-12-01 | [AIRFLOW-6139] Consistent spaces in pylint enable/disable (#6701) | -| [03c870a61](https://github.com/apache/airflow/commit/03c870a6172ab232af6319a30ad8d46622359b10) | 2019-11-26 | [AIRFLOW-6010] Remove cyclic imports and pylint hacks (#6601) | -| [5c4cfea8c](https://github.com/apache/airflow/commit/5c4cfea8c0f488496c1cbcc4c6c5db13d8210979) | 2019-11-15 | [AIRFLOW-5718] Add SFTPToGoogleCloudStorageOperator (#6393) | -| [44a8c37a9](https://github.com/apache/airflow/commit/44a8c37a9a8668469aa825ad21057cca6ac2c186) | 2019-11-13 | [AIRFLOW-XXX] Fix the docstring for Dataproc get_job method (#6581) | -| [d633d3ac4](https://github.com/apache/airflow/commit/d633d3ac44c395e6c43cd388f98fba1ce1c435a3) | 2019-11-13 | [AIRFLOW-5691] Rewrite Dataproc operators to use python library (#6371) | -| [d985c02d9](https://github.com/apache/airflow/commit/d985c02d9fa3d9ec946abc1735b0551fd61fb9f0) | 2019-11-05 | [AIRFLOW-XXX] Add How-To-Guide to GCP PubSub (#6497) | -| [a296cdabd](https://github.com/apache/airflow/commit/a296cdabdb9c9c65cf9a48329cb776aed5c82d43) | 2019-11-04 | [AIRFLOW-5743] Move Google PubSub to providers package (#6476) | -| [470b2a779](https://github.com/apache/airflow/commit/470b2a779d031406a3d5925f2fa2ec40e5c3bccb) | 2019-10-30 | [AIRFLOW-5741] Move Cloud Natural Language to providers (#6421) | -| [f2caa451f](https://github.com/apache/airflow/commit/f2caa451fc2b8ee59163314f9ec1cc372acbadf1) | 2019-10-27 | [AIRFLOW-5742] Move Google Cloud Vision to providers package (#6424) | -| [16d7accb2](https://github.com/apache/airflow/commit/16d7accb22c866d4fbf368e4d979dc1c4a41d93c) | 2019-10-22 | [AIRFLOW-4971] Add Google Display & Video 360 integration (#6170) | -| [4e661f535](https://github.com/apache/airflow/commit/4e661f535dea613f9b2e0075676f9a73a97461fe) | 2019-10-22 | [AIRFLOW-5379] Add Google Search Ads 360 operators (#6228) | -| [19e32b4e2](https://github.com/apache/airflow/commit/19e32b4e2c798f662e5d8d1e7c65036c5e7ac125) | 2019-10-18 | [AIRFLOW-5656] Rename provider to providers module (#6333) | diff --git a/airflow/providers/google/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/google/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 1414c1743c34e..0000000000000 --- a/airflow/providers/google/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,112 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [cb52fb0ae](https://github.com/apache/airflow/commit/cb52fb0ae1de1f1140babaed0e97299e4aaf96bf) | 2020-09-27 | Add example DAG and system test for 
MySQLToGCSOperator (#10990) | -| [99accec29](https://github.com/apache/airflow/commit/99accec29d71b0a57fd4e90151b9d4d10321be07) | 2020-09-25 | Fix incorrect Usage of Optional[str] & Optional[int] (#11141) | -| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24 | Fix incorrect Usage of Optional[bool] (#11138) | -| [daf8f3108](https://github.com/apache/airflow/commit/daf8f31080f06c044b4336071bd383bbbcdc6085) | 2020-09-23 | Add template fields renderers for better UI rendering (#11061) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [cb979f9f2](https://github.com/apache/airflow/commit/cb979f9f213bb3c9835a3dc924f84a07f5387378) | 2020-09-22 | Get Airflow configs with sensitive data from CloudSecretManagerBackend (#11024) | -| [76545bb3d](https://github.com/apache/airflow/commit/76545bb3d6fa82ce8eae072dbc74a3b76d8fd53c) | 2020-09-16 | Add example dag and system test for S3ToGCSOperator (#10951) | -| [22c631625](https://github.com/apache/airflow/commit/22c631625fd68abe280528f33b7cfd7603ebf66c) | 2020-09-16 | Fix more docs spellings (#10965) | -| [12a652f53](https://github.com/apache/airflow/commit/12a652f5344c7f03c3d780556ca1829b235fdb2d) | 2020-09-13 | Fix parameter name collision in AutoMLBatchPredictOperator #10723 (#10869) | -| [41a62735e](https://github.com/apache/airflow/commit/41a62735edcebbd9c39e505280646ef5d25aa1d5) | 2020-09-11 | Add on_kill method to BigQueryInsertJobOperator (#10866) | -| [3e91da56e](https://github.com/apache/airflow/commit/3e91da56e8c63a90dc859d8996a896b5d9f8cd43) | 2020-09-11 | fix typo in firebase/example_filestore DAG (#10875) | -| [68cc7273b](https://github.com/apache/airflow/commit/68cc7273bf0c0f562748b5f663da5c12d2cba6a7) | 2020-09-10 | Add on_kill method to DataprocSubmitJobOperator (#10847) | -| [f92095721](https://github.com/apache/airflow/commit/f92095721450c14605c986e165544a7bfb712a3d) | 2020-09-10 | Fix and remove some more typos from spelling_wordlist.txt (#10845) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [078bfaf60](https://github.com/apache/airflow/commit/078bfaf60adc5aebac8c347e7f6e5339ab9b56c0) | 2020-09-08 | Extract missing gcs_to_local example DAG from gcs example (#10767) | -| [10ce31127](https://github.com/apache/airflow/commit/10ce31127f1ff87176158935925afce46a989917) | 2020-09-08 | Deprecate using global as the default region in Google Dataproc operators and hooks (#10772) | -| [f14f37971](https://github.com/apache/airflow/commit/f14f3797163cc45fdcdabfb36ee7d638f70e470d) | 2020-09-07 | [AIRFLOW-10672] Refactor BigQueryToGCSOperator to use new method (#10773) | -| [c8ee45568](https://github.com/apache/airflow/commit/c8ee4556851c36b3b6e644a7746a49583dd53db1) | 2020-09-07 | Refactor DataprocCreateCluster operator to use simpler interface (#10403) | -| [ece685b5b](https://github.com/apache/airflow/commit/ece685b5b895ad1175440b49bf9e620dffd8248d) | 2020-09-05 | Asynchronous execution of Dataproc jobs with a Sensor (#10673) | -| [6e3d7b63d](https://github.com/apache/airflow/commit/6e3d7b63d3b34c34f8b38a7b41f4a5876e1f731f) | 2020-09-04 | Add masterConfig parameter to MLEngineStartTrainingJobOperator (#10578) | -| [804548d58](https://github.com/apache/airflow/commit/804548d58f2036fd4516824a38d0639ba5d5ab0e) | 2020-09-01 | Add Dataprep operators (#10304) | -| 
[11c00bc82](https://github.com/apache/airflow/commit/11c00bc820483691a87cdb16d519dce8dc57c40e) | 2020-08-30 | Fix typos: duplicated "the" (#10647) | -| [2ca615cff](https://github.com/apache/airflow/commit/2ca615cffefe97dfa38e1b7f60d9ed33c6628992) | 2020-08-29 | Update Google Cloud branding (#10642) | -| [1b533f617](https://github.com/apache/airflow/commit/1b533f617e2e0200597d114d7570f6c0d69da1a0) | 2020-08-28 | Fix broken master - DLP (#10635) | -| [5ae82a56d](https://github.com/apache/airflow/commit/5ae82a56dab599de44f1be7027cecc4ef86f7bb6) | 2020-08-28 | Fix Google DLP example and improve ops idempotency (#10608) | -| [3867f7662](https://github.com/apache/airflow/commit/3867f7662559761864ec4e7be26b776c64c2f199) | 2020-08-28 | Update Google Cloud branding (#10615) | -| [91ff31ad1](https://github.com/apache/airflow/commit/91ff31ad1021235bd21c87ad9dbc0b216a908671) | 2020-08-27 | Documentation for Google Cloud Data Loss Prevention (#8201) (#9651) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [d76026545](https://github.com/apache/airflow/commit/d7602654526fdd2876466371404784bd17cfe0d2) | 2020-08-25 | PyDocStyle: No whitespaces allowed surrounding docstring text (#10533) | -| [d1bce91bb](https://github.com/apache/airflow/commit/d1bce91bb21d5a468fa6a0207156c28fe1ca6513) | 2020-08-25 | PyDocStyle: Enable D403: Capitalized first word of docstring (#10530) | -| [866701c80](https://github.com/apache/airflow/commit/866701c8019f49dcb02c9696e4f6e9ce67d13ca6) | 2020-08-25 | Fix typo in "Cloud" (#10534) | -| [47265e7b5](https://github.com/apache/airflow/commit/47265e7b58bc28bcbbffc981442b6cc27a3af39c) | 2020-08-24 | Fix typo in PostgresHook (#10529) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25 | Remove all "noinspection" comments native to IntelliJ (#10525) | -| [3734876d9](https://github.com/apache/airflow/commit/3734876d9898067ee933b84af522d53df6160d7f) | 2020-08-24 | Implement impersonation in google operators (#10052) | -| [b0598b535](https://github.com/apache/airflow/commit/b0598b5351d2d027286e2333231b6c0c0704dba2) | 2020-08-24 | Add support for creating multiple replicated clusters in Bigtable hook and operator (#10475) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown references in Providers README (#10483) | -| [515cc72c9](https://github.com/apache/airflow/commit/515cc72c995429c8c007f853ade385d79fcbac90) | 2020-08-22 | Fix typo in timed_out (#10459) | -| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22 | Replace assignment with Augmented assignment (#10468) | -| [88c7d2e52](https://github.com/apache/airflow/commit/88c7d2e526af4994066f65f830e2fa8edcbbce2e) | 2020-08-21 | Dataflow operators don't always create a virtualenv (#10373) | -| [083c3c129](https://github.com/apache/airflow/commit/083c3c129bc3458d410f5ff37d7f5a9a7ad548b7) | 2020-08-18 | Simplified GCSTaskHandler configuration (#10365) | -| [1ae5bdf23](https://github.com/apache/airflow/commit/1ae5bdf23e3ac7cca05325ef8b255a7cf067e18e) | 2020-08-17 | Add test for GCSTaskHandler (#9600) (#9861) | -|
[e195a980b](https://github.com/apache/airflow/commit/e195a980bc8e9d42f3eb4ac134950977b9e5158f) | 2020-08-16 | Add type annotations for mlengine_operator_utils (#10297) | -| [382c1011b](https://github.com/apache/airflow/commit/382c1011b6bcebd22760e2f98419281ef1a09d1b) | 2020-08-16 | Add Bigtable Update Instance Hook/Operator (#10340) | -| [bfa5a8d5f](https://github.com/apache/airflow/commit/bfa5a8d5f10458c14d380c4042ecfbac627d0639) | 2020-08-15 | CI: Fix failing docs-build (#10342) | -| [be46d20fb](https://github.com/apache/airflow/commit/be46d20fb431cc1d91c935e8894dfc7756c18993) | 2020-08-15 | Improve idempotency of BigQueryInsertJobOperator (#9590) | -| [47387a69e](https://github.com/apache/airflow/commit/47387a69e623676b57b6d42ff07e729da2d21bff) | 2020-08-14 | Catch Permission Denied exception when getting secret from GCP Secret Manager. (#10326) | -| [2f0613b0c](https://github.com/apache/airflow/commit/2f0613b0c2fdf176d9f13a8cd12162c60c64b644) | 2020-08-13 | Implement Google BigQuery Table Partition Sensor (#10218) | -| [f6734b3b8](https://github.com/apache/airflow/commit/f6734b3b850d33d3712763f93c114e80f5af9ffb) | 2020-08-12 | Enable Sphinx spellcheck for doc generation (#10280) | -| [8f8db8959](https://github.com/apache/airflow/commit/8f8db8959e526be54d700845d36ee9f315bae2ea) | 2020-08-12 | DbApiHook: Support kwargs in get_pandas_df (#9730) | -| [ef088314f](https://github.com/apache/airflow/commit/ef088314f8f1b29ac636a7584cf9dda04b1df816) | 2020-08-09 | Added DataprepGetJobsForJobGroupOperator (#10246) | -| [b43f90abf](https://github.com/apache/airflow/commit/b43f90abf4c7219d5d59cccb0514256bd3f2fdc7) | 2020-08-09 | Fix various typos in the repo (#10263) | -| [c29533888](https://github.com/apache/airflow/commit/c29533888fadd40f5e9ce63e728bd8691182e542) | 2020-08-08 | Add labels param to Google MLEngine Operators (#10222) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [eff0f0321](https://github.com/apache/airflow/commit/eff0f03210d30a4aed9ed457eaaea9c9f05d54d1) | 2020-08-06 | Update guide for Google Cloud Secret Manager Backend (#10172) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [010322692](https://github.com/apache/airflow/commit/010322692e6e3f0adc156f0beb81e267da0e97bb) | 2020-08-06 | Improve handling Dataproc cluster creation with ERROR state (#9593) | -| [1437cb749](https://github.com/apache/airflow/commit/1437cb74955f4e10af5d70ebadde1e6b163fb9b7) | 2020-08-04 | Add correct signatures for operators in google provider package (#10144) | -| [6efa1b9cb](https://github.com/apache/airflow/commit/6efa1b9cb763ae0bdbc884a54d24dbdc39d9e3a6) | 2020-08-03 | Add additional Cloud Datastore operators (#10032) | -| [27020f8e5](https://github.com/apache/airflow/commit/27020f8e588575d53e63f9f9daecd3a522656644) | 2020-08-03 | Add try clause to DataFusionHook.wait_for_pipeline_state (#10031) | -| [4e3799fec](https://github.com/apache/airflow/commit/4e3799fec4c23d0f43603a0489c5a6158aeba035) | 2020-08-02 | [AIRFLOW-4541] Replace os.mkdirs usage with pathlib.Path(path).mkdir (#10117) | -| [85c56b173](https://github.com/apache/airflow/commit/85c56b1737c2bf61751836571300445c0aebae1a) | 2020-08-02 | Add missing params to GCP Pub/Sub creation_subscription (#10106) | -| 
[b79466c12](https://github.com/apache/airflow/commit/b79466c12f3ae717c31804acc2e9ffcd60f9611c) | 2020-08-02 | Fix sensor not providing arguments for GCSHook (#10074) | -| [4ee35d027](https://github.com/apache/airflow/commit/4ee35d027988c6456767faeb108a7f686d5117f2) | 2020-08-02 | Fix hook not passing gcp_conn_id to base class (#10075) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [4c84661ad](https://github.com/apache/airflow/commit/4c84661adb5bb5c581bb4193b4c7e935cbe07758) | 2020-07-31 | Split Display Video 360 example into smaller DAGs (#10077) | -| [59cbff087](https://github.com/apache/airflow/commit/59cbff0874dd5318cda4b9ce7b7eeb1aad1dad4d) | 2020-07-29 | Fix docstrings in BigQueryGetDataOperator (#10042) | -| [81b87d48e](https://github.com/apache/airflow/commit/81b87d48ed002d7a7f7bcb72a58e82d40a176fe2) | 2020-07-27 | Add unit tests for GcpBodyFieldSanitizer in Google providers (#9996) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [8b10a4b35](https://github.com/apache/airflow/commit/8b10a4b35e45d536a6475bfe1491ee75fad50186) | 2020-07-25 | Stop using start_date in default_args in example_dags (#9982) | -| [ef98edf4d](https://github.com/apache/airflow/commit/ef98edf4da2d9b74d5cf5b21e81577b3151edb79) | 2020-07-23 | Add more information about using GoogleAdsHook (#9951) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [39a0288a4](https://github.com/apache/airflow/commit/39a0288a47536dfd9b651ecd075887d3e45fcfc4) | 2020-07-22 | Add Google Authentication for experimental API (#9848) | -| [c2db0dfeb](https://github.com/apache/airflow/commit/c2db0dfeb13ee679bf4d7b57874f0fcb39c0f0ed) | 2020-07-22 | More strict rules in mypy (#9705) (#9906) | -| [c4244e18b](https://github.com/apache/airflow/commit/c4244e18bb894eb2180b8972052e56110fe5cbc9) | 2020-07-22 | Fix calling `get_client` in BigQueryHook.table_exists (#9916) | -| [5eacc1642](https://github.com/apache/airflow/commit/5eacc164201a121cd06126aff613cbe0919d35cc) | 2020-07-22 | Add support for impersonation in GCP hooks (#9915) | -| [1cfdebf5f](https://github.com/apache/airflow/commit/1cfdebf5f8841d61a11540b88c7913686e89e085) | 2020-07-21 | Fix insert_job method of BigQueryHook (#9899) | -| [c8c52e69c](https://github.com/apache/airflow/commit/c8c52e69c8d9cc1f26f63d95aecc0a6498d40b6f) | 2020-07-21 | Remove type hint causing DeprecationWarning in Firestore operators (#9819) | -| [eb6f1d1cf](https://github.com/apache/airflow/commit/eb6f1d1cf0503fa763c0d8d34a2fe16efb390b9c) | 2020-07-16 | Fix typo in datafusion operator (#9859) | -| [b01d95ec2](https://github.com/apache/airflow/commit/b01d95ec22b01ed79123178acd74ef40d57aaa7c) | 2020-07-15 | Change DAG.clear to take dag_run_state (#9824) | -| [6d65c15d1](https://github.com/apache/airflow/commit/6d65c15d156a41d5e735e44a1170426559a17d1f) | 2020-07-15 | Add guide for AI Platform (previously Machine Learning Engine) Operators (#9798) | -| [770de53eb](https://github.com/apache/airflow/commit/770de53eb57bd57ffc555ad15b18f0c058dbebe7) | 2020-07-15 | BigQueryTableExistenceSensor needs to specify keyword arguments (#9832) | -|
[2d8dbacdf](https://github.com/apache/airflow/commit/2d8dbacdf6c19a598a7f55bcf65e28703aed6201) | 2020-07-15 | Add CloudVisionDeleteReferenceImageOperator (#9698) | -| [9f017951b](https://github.com/apache/airflow/commit/9f017951b94d9bf52b5ee66d72aa8dd822f07269) | 2020-07-15 | Add Google Deployment Manager Hook (#9159) | -| [ed5004cca](https://github.com/apache/airflow/commit/ed5004cca753650dc222fbb8e67573938c6c16d9) | 2020-07-14 | Allow `replace` flag in gcs_to_gcs operator. (#9667) | -| [553bb7af7](https://github.com/apache/airflow/commit/553bb7af7cb7a50f7141b5b89297713cee6d19f6) | 2020-07-13 | Keep functions signatures in decorators (#9786) | -| [68925904e](https://github.com/apache/airflow/commit/68925904e49aac6968defb6834863f4e6347fe59) | 2020-07-13 | Add multiple file upload functionality to GCS hook (#8849) | -| [1de78e8f9](https://github.com/apache/airflow/commit/1de78e8f97f48f8f4abd167a0120ffab8af6127a) | 2020-07-12 | Add Google Stackdriver link (#9765) | -| [092d33f29](https://github.com/apache/airflow/commit/092d33f298a7dbb871b1e1b4c17aad3989e89b79) | 2020-07-11 | Fix StackdriverTaskHandler + add system tests (#9761) | -| [b2305660f](https://github.com/apache/airflow/commit/b2305660f0eb55ebd31fdc7fe4e8aeed8c1f8c00) | 2020-07-09 | Update example DAG for AI Platform operators (#9727) | -| [23f80f34a](https://github.com/apache/airflow/commit/23f80f34adec86da24e4896168c53d213d01a7f6) | 2020-07-08 | Move gcs & wasb task handlers to their respective provider packages (#9714) | -| [44d4ae809](https://github.com/apache/airflow/commit/44d4ae809c1e3784ff95b6a5e95113c3412e56b3) | 2020-07-06 | Upgrade to latest pre-commit checks (#9686) | -| [a79e2d4c4](https://github.com/apache/airflow/commit/a79e2d4c4aa105f3fac5ae6a28e29af9cd572407) | 2020-07-06 | Move provider's log task handlers to the provider package (#9604) | -| [cd3d9d934](https://github.com/apache/airflow/commit/cd3d9d93402f06a08f35e3586802f11a18c4f1f3) | 2020-07-02 | Fix using .json template extension in GMP operators (#9566) | -| [4799af30e](https://github.com/apache/airflow/commit/4799af30ee02c596647d1538854769124f9f4961) | 2020-06-30 | Extend BigQuery example with include clause (#9572) | -| [e33f1a12d](https://github.com/apache/airflow/commit/e33f1a12d72ac234e4897f44b326a332acf85901) | 2020-06-30 | Add template_ext to BigQueryInsertJobOperator (#9568) | -| [40add26d4](https://github.com/apache/airflow/commit/40add26d459c2511a6d9d305ae7300f0d6104211) | 2020-06-29 | Remove almost all references to airflow.contrib (#9559) | -| [c420dbd6e](https://github.com/apache/airflow/commit/c420dbd6e13e17867eb4ccc4271b37966310ac0f) | 2020-06-27 | Bump Pylint to 2.5.3 (#9294) | -| [0051c89cb](https://github.com/apache/airflow/commit/0051c89cba02d55236c913ce0110f7d5111ba436) | 2020-06-26 | nitpick fix (#9527) | -| [87fdbd070](https://github.com/apache/airflow/commit/87fdbd0708d942af98d35604fe5962962e25d246) | 2020-06-25 | Use literal syntax instead of function calls to create data structure (#9516) | -| [7256f4caa](https://github.com/apache/airflow/commit/7256f4caa226f8f8632d6e2d38d8c94cb3250a6f) | 2020-06-22 | Pylint fixes and deprecation of rare used methods in Connection (#9419) | -| [e13a14c87](https://github.com/apache/airflow/commit/e13a14c8730f4f633d996dd7d3468fe827136a84) | 2020-06-21 | Enable & Fix Whitespace related PyDocStyle Checks (#9458) | -| [5b680e27e](https://github.com/apache/airflow/commit/5b680e27e8118861ef484c00a4b87c6885b0a518) | 2020-06-19 | Don't use connection to store task handler credentials (#9381) | -| 
[d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/google/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/google/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 66e6231ac9a95..0000000000000 --- a/airflow/providers/google/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,32 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [6ce855af1](https://github.com/apache/airflow/commit/6ce855af118daeaa4c249669079ab9d9aad23945) | 2020-10-24 | Fix spelling (#11821) | -| [483068745](https://github.com/apache/airflow/commit/48306874538eea7cfd42358d5ebb59705204bfc4) | 2020-10-24 | Use Python 3 style super classes (#11806) | -| [727c739af](https://github.com/apache/airflow/commit/727c739afb565d4d394a8faedc969334cb8e738e) | 2020-10-22 | Improve Cloud Memorystore for Redis example (#11735) | -| [1da8379c9](https://github.com/apache/airflow/commit/1da8379c913843834353b44861c62f332a461bdf) | 2020-10-22 | Fix static checks after merging #10121 (#11737) | -| [91503308c](https://github.com/apache/airflow/commit/91503308c723b186ce6f4026f2a3e2c21030f6e5) | 2020-10-22 | Add Google Cloud Memorystore Memcached Operators (#10121) | -| [950c16d0b](https://github.com/apache/airflow/commit/950c16d0b0ab67bb7af11909de751029faf0313a) | 2020-10-21 | Retry requests in case of error in Google ML Engine Hook (#11712) | -| [2bfc53b5e](https://github.com/apache/airflow/commit/2bfc53b5eb67406d418371b74dc9bc5a07be238e) | 2020-10-21 | Fix doc errors in google provider files. 
(#11713) | -| [53e606210](https://github.com/apache/airflow/commit/53e6062105be0ae1761a354e2055eb0779d12e73) | 2020-10-21 | Enforce strict rules for yamllint (#11709) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [2d854c350](https://github.com/apache/airflow/commit/2d854c3505ccad66e9a7d94267e51bed800433c2) | 2020-10-19 | Add service_account to Google ML Engine operator (#11619) | -| [46a121fb7](https://github.com/apache/airflow/commit/46a121fb7b77c0964e053b58750e2d8bc2bd0b2a) | 2020-10-18 | docs: Update Bigquery clustering docstrings (#11232) | -| [49c58147f](https://github.com/apache/airflow/commit/49c58147fed8a52869d0b0ecc00c102c11972ad0) | 2020-10-18 | Strict type checking for provider Google (#11609) | -| [0823d46a7](https://github.com/apache/airflow/commit/0823d46a7f267f2e45195a175021825367938add) | 2020-10-16 | Add type annotations for AWS operators and hooks (#11434) | -| [3c10ca650](https://github.com/apache/airflow/commit/3c10ca6504be37fabff9a10caefea3fe4df31a02) | 2020-10-16 | Add DataflowStartFlexTemplateOperator (#8550) | -| [8865d14df](https://github.com/apache/airflow/commit/8865d14df4d58dd5f1a4d2ff81c77469959f175a) | 2020-10-16 | Strict type checking for provider google cloud (#11548) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [06141d6d0](https://github.com/apache/airflow/commit/06141d6d01398115e5e54c5766a46ae5514ba2f7) | 2020-10-12 | Google cloud operator strict type check (#11450) | -| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12 | Remove redundant None provided as default to dict.get() (#11448) | -| [1845cd11b](https://github.com/apache/airflow/commit/1845cd11b77f302777ab854e84bef9c212c604a0) | 2020-10-11 | Strict type check for google ads and cloud hooks (#11390) | -| [bd204bb91](https://github.com/apache/airflow/commit/bd204bb91b4bc069284f9a44757c6baba8884140) | 2020-10-11 | Optionally set null marker in csv exports in BaseSQLToGCSOperator (#11409) | -| [75071831b](https://github.com/apache/airflow/commit/75071831baa936d292354f98aac46cd808a4b2b8) | 2020-10-10 | Remove redundant parentheses from Python files (#10967) | -| [8baf657fc](https://github.com/apache/airflow/commit/8baf657fc2b21a601b99b752e4f1176bf8a934ce) | 2020-10-09 | Fix regression in DataflowTemplatedJobStartOperator (#11167) | -| [b0fcf6755](https://github.com/apache/airflow/commit/b0fcf675595494b306800e1a516548dc0dc671f8) | 2020-10-07 | Add AzureFileShareToGCSOperator (#10991) | -| [47b05a87f](https://github.com/apache/airflow/commit/47b05a87f004dc273a4757ba49f03808a86f77e7) | 2020-10-07 | Improve handling of job_id in BigQuery operators (#11287) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/google/BACKPORT_PROVIDER_CHANGES_2020.11.13.md b/airflow/providers/google/BACKPORT_PROVIDER_CHANGES_2020.11.13.md deleted file mode 100644 index cf2e071094f72..0000000000000 --- a/airflow/providers/google/BACKPORT_PROVIDER_CHANGES_2020.11.13.md +++ /dev/null @@ -1,30 +0,0 @@ - - -### Release 2020.11.13 - -| Commit | Committed | Subject | 
-|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------------------------------------------| -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [fcb6b00ef](https://github.com/apache/airflow/commit/fcb6b00efef80c81272a30cfc618202a29e0c6a9) | 2020-11-08 | Add authentication to AWS with Google credentials (#12079) | -| [2ef3b7ef8](https://github.com/apache/airflow/commit/2ef3b7ef8cafe3bdc8bf8db70fbc519b98576366) | 2020-11-08 | Fix ERROR - Object of type 'bytes' is not JSON serializable when using store_to_xcom_key parameter (#12172) | -| [0caec9fd3](https://github.com/apache/airflow/commit/0caec9fd32bee2b3036b5d7bdcb56bd6a3b9dccf) | 2020-11-06 | Dataflow - add waiting for successful job cancel (#11501) | -| [cf9437d79](https://github.com/apache/airflow/commit/cf9437d79f9658d1309e4bfe847fe63d52ec7b99) | 2020-11-06 | Simplify string expressions (#12123) | -| [91a64db50](https://github.com/apache/airflow/commit/91a64db505e50712cd53928b4f2b84aece3cc1c0) | 2020-11-04 | Format all files (without exceptions) by black (#12091) | -| [fd3db778e](https://github.com/apache/airflow/commit/fd3db778e715d0f164dda7ee8f672d477a323291) | 2020-11-04 | Add server side cursor support for postgres to GCS operator (#11793) | -| [f1f194026](https://github.com/apache/airflow/commit/f1f1940261744b4fdb67b0b5654488494efa9c64) | 2020-11-04 | Add DataflowStartSQLQuery operator (#8553) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [5f5244b74](https://github.com/apache/airflow/commit/5f5244b74df93cadbb99643cec76281460ca4411) | 2020-11-04 | Add template fields renderers to BigQuery and Dataproc operators (#12067) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [45ae145c2](https://github.com/apache/airflow/commit/45ae145c25a19b4185c33ac0c4da920324b3987e) | 2020-11-03 | Log BigQuery job id in insert method of BigQueryHook (#12056) | -| [e324b37a6](https://github.com/apache/airflow/commit/e324b37a67e32c368df50604a00160d7766b5c33) | 2020-11-03 | Add job name and progress logs to Cloud Storage Transfer Hook (#12014) | -| [6071fdd58](https://github.com/apache/airflow/commit/6071fdd58470bb2a6c23fc16481e292b7247d0bb) | 2020-11-02 | Improve handling server errors in DataprocSubmitJobOperator (#11947) | -| [2f703df12](https://github.com/apache/airflow/commit/2f703df12dfd6511722ff9a82d5a569d092fccc2) | 2020-10-30 | Add SalesforceToGcsOperator (#10760) | -| [e5713e00b](https://github.com/apache/airflow/commit/e5713e00b3afcba6f78006ec0e360da317858e4d) | 2020-10-29 | Add drain option when canceling Dataflow pipelines (#11374) | -| [37eaac3c5](https://github.com/apache/airflow/commit/37eaac3c5dc93804413c10a6ca124fd7831befc0) | 2020-10-29 | The PRs which are not approved run subset of tests (#11828) | -| [79cb77199](https://github.com/apache/airflow/commit/79cb771992279d40ddd9eb6b0277382313a32898) | 2020-10-28 | Fixing re pattern and changing to use a single character class.
(#11857) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [240c7d4d7](https://github.com/apache/airflow/commit/240c7d4d72aac8f6aab98f5913e8f54c4f1372ff) | 2020-10-26 | Google Memcached hooks - improve protobuf messages handling (#11743) | -| [8afdb6ac6](https://github.com/apache/airflow/commit/8afdb6ac6a7997cb14806bc2734c81c00ed8da97) | 2020-10-26 | Fix spellings (#11825) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | diff --git a/airflow/providers/google/BACKPORT_PROVIDER_CHANGES_2020.11.23.md b/airflow/providers/google/BACKPORT_PROVIDER_CHANGES_2020.11.23.md deleted file mode 100644 index 83c5d07f5f5f0..0000000000000 --- a/airflow/providers/google/BACKPORT_PROVIDER_CHANGES_2020.11.23.md +++ /dev/null @@ -1,24 +0,0 @@ - - -### Release 2020.11.23 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------------| -| [19b7e4565](https://github.com/apache/airflow/commit/19b7e4565e6372d50ef0fbb5678a484a7afbdbee) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) | -| [8d0950646](https://github.com/apache/airflow/commit/8d09506464c8480fa42e8bfe6a36c6f631cd23f6) | 2020-11-18 | Fix download method in GCSToBigQueryOperator (#12442) | -| [2c0920fba](https://github.com/apache/airflow/commit/2c0920fba5d2f05d2e29cead91127686af277ec2) | 2020-11-17 | Adds mechanism for provider package discovery. 
(#12383) | -| [2cda2f2a0](https://github.com/apache/airflow/commit/2cda2f2a0a94e5aaed87f0998fa57b4f8bff5e43) | 2020-11-17 | Add missing pre-commit definition - provider-yamls (#12393) | -| [80a957f14](https://github.com/apache/airflow/commit/80a957f142f260daed262b8e93a4d02c12cfeabc) | 2020-11-17 | Add Dataflow sensors - job metrics (#12039) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [917e6c442](https://github.com/apache/airflow/commit/917e6c4424985271c53dd8c413b211896ee55726) | 2020-11-16 | Add provide_file_and_upload to GCSHook (#12310) | -| [cfa4ecfeb](https://github.com/apache/airflow/commit/cfa4ecfeb02661f40b4778733384ac085fb5f04b) | 2020-11-15 | Add DataflowJobStatusSensor and support non-blocking execution of jobs (#11726) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [32b59f835](https://github.com/apache/airflow/commit/32b59f8350f55793df6838a32de662a80483ecda) | 2020-11-12 | Fixes the sending of an empty list to BigQuery `list_rows` (#12307) | -| [250436d96](https://github.com/apache/airflow/commit/250436d962c8c950d38c1eb5e54a998891648cc9) | 2020-11-10 | Fix spelling in Python files (#12230) | -| [502ba309e](https://github.com/apache/airflow/commit/502ba309ea470943f0e99c634269e3d2d13ce6ca) | 2020-11-10 | Enable Markdownlint rule - MD022/blanks-around-headings (#12225) | -| [dd2095f4a](https://github.com/apache/airflow/commit/dd2095f4a8b07c9b1a4c279a3578cd1e23b71a1b) | 2020-11-10 | Simplify string expressions & Use f-string (#12216) | -| [f37c6e6fc](https://github.com/apache/airflow/commit/f37c6e6fce8b704f5af28caa16d0ed7d873a0e4a) | 2020-11-10 | Add Compute Engine SSH hook (#9879) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [61feb6ec4](https://github.com/apache/airflow/commit/61feb6ec453f8dda1a0e1fe3ebcc0f1e3224b634) | 2020-11-09 | Provider's readmes generated for elasticsearch and google packages (#12194) | diff --git a/airflow/providers/google/BACKPORT_PROVIDER_README.md b/airflow/providers/google/BACKPORT_PROVIDER_README.md deleted file mode 100644 index 0f8f7a2ac43be..0000000000000 --- a/airflow/providers/google/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,981 +0,0 @@ - - - -# Package apache-airflow-backport-providers-google - -Release: 2020.11.23 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Cross provider package dependencies](#cross-provider-package-dependencies) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [New operators](#new-operators) - - [Moved operators](#moved-operators) - - [Transfer operators](#transfer-operators) - - [New transfer operators](#new-transfer-operators) - - [Moved transfer operators](#moved-transfer-operators) - - [Sensors](#sensors) - - [New sensors](#new-sensors) 
- - [Moved sensors](#moved-sensors) - - [Hooks](#hooks) - - [New hooks](#new-hooks) - - [Moved hooks](#moved-hooks) - - [Secrets](#secrets) - - [Moved secrets](#moved-secrets) -- [Releases](#releases) - - [Release 2020.11.23](#release-20201123) - - [Release 2020.11.13](#release-20201113) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for the `google` provider. All classes for this provider package -are in the `airflow.providers.google` Python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+, you need to upgrade to Python 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing Airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-google` - -## PIP requirements - -| PIP package | Version required | -|:-----------------------------------|:-------------------| -| PyOpenSSL | | -| google-ads | >=4.0.0 | -| google-api-python-client | >=1.6.0,<2.0.0 | -| google-auth | >=1.0.0,<2.0.0 | -| google-auth-httplib2 | >=0.0.1 | -| google-cloud-automl | >=0.4.0,<2.0.0 | -| google-cloud-bigquery-datatransfer | >=0.4.0,<2.0.0 | -| google-cloud-bigtable | >=1.0.0,<2.0.0 | -| google-cloud-container | >=0.1.1,<2.0.0 | -| google-cloud-datacatalog | >=0.5.0, <0.8 | -| google-cloud-dataproc | >=1.0.1,<2.0.0 | -| google-cloud-dlp | >=0.11.0,<2.0.0 | -| google-cloud-kms | >=1.2.1,<2.0.0 | -| google-cloud-language | >=1.1.1,<2.0.0 | -| google-cloud-logging | >=1.14.0,<2.0.0 | -| google-cloud-memcache | >=0.2.0 | -| google-cloud-monitoring | >=0.34.0,<2.0.0 | -| google-cloud-os-login | >=1.0.0,<2.0.0 | -| google-cloud-pubsub | >=1.0.0,<2.0.0 | -| google-cloud-redis | >=0.3.0,<2.0.0 | -| google-cloud-secret-manager | >=0.2.0,<2.0.0 | -| google-cloud-spanner | >=1.10.0,<2.0.0 | -| google-cloud-speech | >=0.36.3,<2.0.0 | -| google-cloud-storage | >=1.16,<2.0.0 | -| google-cloud-tasks | >=1.2.1,<2.0.0 | -| google-cloud-texttospeech | >=0.4.0,<2.0.0 | -| google-cloud-translate | >=1.5.0,<2.0.0 | -| google-cloud-videointelligence | >=1.7.0,<2.0.0 | -| google-cloud-vision | >=0.35.2,<2.0.0 | -| grpcio-gcp | >=0.2.2 | -| pandas-gbq | | - -## Cross provider package dependencies - -These are dependencies that might be needed in order to use all the features of the package. -You need to install the specified backport providers package in order to use them. - -You can install such cross-provider dependencies when installing from PyPI.
For example: - -```bash -pip install apache-airflow-backport-providers-google[amazon] -``` - -| Dependent package | Extra | -|:---------------------------------------------------------------------------------------------------------------------------------------|:-----------------| -| [apache-airflow-backport-providers-amazon](https://github.com/apache/airflow/tree/master/airflow/providers/amazon) | amazon | -| [apache-airflow-backport-providers-apache-cassandra](https://github.com/apache/airflow/tree/master/airflow/providers/apache/cassandra) | apache.cassandra | -| [apache-airflow-backport-providers-cncf-kubernetes](https://github.com/apache/airflow/tree/master/airflow/providers/cncf/kubernetes) | cncf.kubernetes | -| [apache-airflow-backport-providers-facebook](https://github.com/apache/airflow/tree/master/airflow/providers/facebook) | facebook | -| [apache-airflow-backport-providers-microsoft-azure](https://github.com/apache/airflow/tree/master/airflow/providers/microsoft/azure) | microsoft.azure | -| [apache-airflow-backport-providers-microsoft-mssql](https://github.com/apache/airflow/tree/master/airflow/providers/microsoft/mssql) | microsoft.mssql | -| [apache-airflow-backport-providers-mysql](https://github.com/apache/airflow/tree/master/airflow/providers/mysql) | mysql | -| [apache-airflow-backport-providers-postgres](https://github.com/apache/airflow/tree/master/airflow/providers/postgres) | postgres | -| [apache-airflow-backport-providers-presto](https://github.com/apache/airflow/tree/master/airflow/providers/presto) | presto | -| [apache-airflow-backport-providers-salesforce](https://github.com/apache/airflow/tree/master/airflow/providers/salesforce) | salesforce | -| [apache-airflow-backport-providers-sftp](https://github.com/apache/airflow/tree/master/airflow/providers/sftp) | sftp | -| [apache-airflow-backport-providers-ssh](https://github.com/apache/airflow/tree/master/airflow/providers/ssh) | ssh | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, and secrets for the `google` provider -are in the `airflow.providers.google` package.
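For illustration, here is a minimal sketch of what using one of these classes on Airflow 1.10 can look like once the backport package is installed (the DAG id, bucket name, and prefix below are placeholders, not part of the package):

```python
# Minimal usage sketch, assuming Airflow 1.10.x with
# apache-airflow-backport-providers-google installed.
from airflow import DAG
from airflow.utils.dates import days_ago

# The backport package provides the same import path as Airflow 2.0:
from airflow.providers.google.cloud.operators.gcs import GCSListObjectsOperator

with DAG(
    dag_id="example_gcs_list",  # placeholder DAG id
    start_date=days_ago(1),
    schedule_interval=None,
) as dag:
    list_gcs_objects = GCSListObjectsOperator(
        task_id="list_gcs_objects",
        bucket="my-bucket",  # placeholder bucket name
        prefix="data/",      # placeholder object prefix
    )
```

Because the import path matches Airflow 2.0, DAGs written this way against the backport package should keep working unchanged after upgrading.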
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages). - - -## Operators - - -### New operators - -| New Airflow 2.0 operators: `airflow.providers.google` package | -|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [ads.operators.ads.GoogleAdsListAccountsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/ads/operators/ads.py) | -| [cloud.operators.automl.AutoMLBatchPredictOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py) | -| [cloud.operators.automl.AutoMLCreateDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py) | -| [cloud.operators.automl.AutoMLDeleteDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py) | -| [cloud.operators.automl.AutoMLDeleteModelOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py) | -| [cloud.operators.automl.AutoMLDeployModelOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py) | -| [cloud.operators.automl.AutoMLGetModelOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py) | -| [cloud.operators.automl.AutoMLImportDataOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py) | -| [cloud.operators.automl.AutoMLListDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py) | -| [cloud.operators.automl.AutoMLPredictOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py) | -| [cloud.operators.automl.AutoMLTablesListColumnSpecsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py) | -| [cloud.operators.automl.AutoMLTablesListTableSpecsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py) | -| [cloud.operators.automl.AutoMLTablesUpdateDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py) | -| [cloud.operators.automl.AutoMLTrainModelOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py) | -| [cloud.operators.bigquery.BigQueryInsertJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | -| [cloud.operators.bigquery_dts.BigQueryCreateDataTransferOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery_dts.py) | -| [cloud.operators.bigquery_dts.BigQueryDataTransferServiceStartTransferRunsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery_dts.py) | -| [cloud.operators.bigquery_dts.BigQueryDeleteDataTransferConfigOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery_dts.py) | -|
[cloud.operators.bigtable.BigtableUpdateInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigtable.py) | -| [cloud.operators.cloud_memorystore.CloudMemorystoreCreateInstanceAndImportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py) | -| [cloud.operators.cloud_memorystore.CloudMemorystoreCreateInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py) | -| [cloud.operators.cloud_memorystore.CloudMemorystoreDeleteInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py) | -| [cloud.operators.cloud_memorystore.CloudMemorystoreExportAndDeleteInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py) | -| [cloud.operators.cloud_memorystore.CloudMemorystoreExportInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py) | -| [cloud.operators.cloud_memorystore.CloudMemorystoreFailoverInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py) | -| [cloud.operators.cloud_memorystore.CloudMemorystoreGetInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py) | -| [cloud.operators.cloud_memorystore.CloudMemorystoreImportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py) | -| [cloud.operators.cloud_memorystore.CloudMemorystoreListInstancesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py) | -| [cloud.operators.cloud_memorystore.CloudMemorystoreMemcachedApplyParametersOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py) | -| [cloud.operators.cloud_memorystore.CloudMemorystoreMemcachedCreateInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py) | -| [cloud.operators.cloud_memorystore.CloudMemorystoreMemcachedDeleteInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py) | -| [cloud.operators.cloud_memorystore.CloudMemorystoreMemcachedGetInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py) | -| [cloud.operators.cloud_memorystore.CloudMemorystoreMemcachedListInstancesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py) | -| [cloud.operators.cloud_memorystore.CloudMemorystoreMemcachedUpdateInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py) | -| [cloud.operators.cloud_memorystore.CloudMemorystoreMemcachedUpdateParametersOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py) | -| [cloud.operators.cloud_memorystore.CloudMemorystoreScaleInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py) | -| 
[cloud.operators.cloud_memorystore.CloudMemorystoreUpdateInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py) | -| [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceGCSToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py) | -| [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceS3ToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py) | -| [cloud.operators.datacatalog.CloudDataCatalogCreateEntryGroupOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| [cloud.operators.datacatalog.CloudDataCatalogCreateEntryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| [cloud.operators.datacatalog.CloudDataCatalogCreateTagOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| [cloud.operators.datacatalog.CloudDataCatalogCreateTagTemplateFieldOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| [cloud.operators.datacatalog.CloudDataCatalogCreateTagTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| [cloud.operators.datacatalog.CloudDataCatalogDeleteEntryGroupOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| [cloud.operators.datacatalog.CloudDataCatalogDeleteEntryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| [cloud.operators.datacatalog.CloudDataCatalogDeleteTagOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| [cloud.operators.datacatalog.CloudDataCatalogDeleteTagTemplateFieldOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| [cloud.operators.datacatalog.CloudDataCatalogDeleteTagTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| [cloud.operators.datacatalog.CloudDataCatalogGetEntryGroupOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| [cloud.operators.datacatalog.CloudDataCatalogGetEntryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| [cloud.operators.datacatalog.CloudDataCatalogGetTagTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| [cloud.operators.datacatalog.CloudDataCatalogListTagsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| [cloud.operators.datacatalog.CloudDataCatalogLookupEntryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| [cloud.operators.datacatalog.CloudDataCatalogRenameTagTemplateFieldOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| 
[cloud.operators.datacatalog.CloudDataCatalogSearchCatalogOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| [cloud.operators.datacatalog.CloudDataCatalogUpdateEntryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| [cloud.operators.datacatalog.CloudDataCatalogUpdateTagOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| [cloud.operators.datacatalog.CloudDataCatalogUpdateTagTemplateFieldOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| [cloud.operators.datacatalog.CloudDataCatalogUpdateTagTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| [cloud.operators.dataflow.DataflowStartFlexTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataflow.py) | -| [cloud.operators.dataflow.DataflowStartSqlJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataflow.py) | -| [cloud.operators.datafusion.CloudDataFusionCreateInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datafusion.py) | -| [cloud.operators.datafusion.CloudDataFusionCreatePipelineOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datafusion.py) | -| [cloud.operators.datafusion.CloudDataFusionDeleteInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datafusion.py) | -| [cloud.operators.datafusion.CloudDataFusionDeletePipelineOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datafusion.py) | -| [cloud.operators.datafusion.CloudDataFusionGetInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datafusion.py) | -| [cloud.operators.datafusion.CloudDataFusionListPipelinesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datafusion.py) | -| [cloud.operators.datafusion.CloudDataFusionRestartInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datafusion.py) | -| [cloud.operators.datafusion.CloudDataFusionStartPipelineOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datafusion.py) | -| [cloud.operators.datafusion.CloudDataFusionStopPipelineOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datafusion.py) | -| [cloud.operators.datafusion.CloudDataFusionUpdateInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datafusion.py) | -| [cloud.operators.dataprep.DataprepGetJobGroupOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataprep.py) | -| [cloud.operators.dataprep.DataprepGetJobsForJobGroupOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataprep.py) | -| [cloud.operators.dataprep.DataprepRunJobGroupOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataprep.py) | -| 
[cloud.operators.dataproc.DataprocSubmitJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py) | -| [cloud.operators.dataproc.DataprocUpdateClusterOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py) | -| [cloud.operators.datastore.CloudDatastoreAllocateIdsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datastore.py) | -| [cloud.operators.datastore.CloudDatastoreBeginTransactionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datastore.py) | -| [cloud.operators.datastore.CloudDatastoreCommitOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datastore.py) | -| [cloud.operators.datastore.CloudDatastoreDeleteOperationOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datastore.py) | -| [cloud.operators.datastore.CloudDatastoreGetOperationOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datastore.py) | -| [cloud.operators.datastore.CloudDatastoreRollbackOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datastore.py) | -| [cloud.operators.datastore.CloudDatastoreRunQueryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datastore.py) | -| [cloud.operators.functions.CloudFunctionInvokeFunctionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/functions.py) | -| [cloud.operators.gcs.GCSDeleteBucketOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs.py) | -| [cloud.operators.gcs.GCSFileTransformOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs.py) | -| [cloud.operators.gcs.GCSSynchronizeBucketsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs.py) | -| [cloud.operators.life_sciences.LifeSciencesRunPipelineOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/life_sciences.py) | -| [cloud.operators.mlengine.MLEngineCreateModelOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py) | -| [cloud.operators.mlengine.MLEngineCreateVersionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py) | -| [cloud.operators.mlengine.MLEngineDeleteModelOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py) | -| [cloud.operators.mlengine.MLEngineDeleteVersionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py) | -| [cloud.operators.mlengine.MLEngineGetModelOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py) | -| [cloud.operators.mlengine.MLEngineListVersionsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py) | -| [cloud.operators.mlengine.MLEngineSetDefaultVersionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py) | -| 
[cloud.operators.mlengine.MLEngineTrainingCancelJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py) | -| [cloud.operators.pubsub.PubSubPullOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/pubsub.py) | -| [cloud.operators.stackdriver.StackdriverDeleteAlertOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/stackdriver.py) | -| [cloud.operators.stackdriver.StackdriverDeleteNotificationChannelOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/stackdriver.py) | -| [cloud.operators.stackdriver.StackdriverDisableAlertPoliciesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/stackdriver.py) | -| [cloud.operators.stackdriver.StackdriverDisableNotificationChannelsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/stackdriver.py) | -| [cloud.operators.stackdriver.StackdriverEnableAlertPoliciesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/stackdriver.py) | -| [cloud.operators.stackdriver.StackdriverEnableNotificationChannelsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/stackdriver.py) | -| [cloud.operators.stackdriver.StackdriverListAlertPoliciesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/stackdriver.py) | -| [cloud.operators.stackdriver.StackdriverListNotificationChannelsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/stackdriver.py) | -| [cloud.operators.stackdriver.StackdriverUpsertAlertOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/stackdriver.py) | -| [cloud.operators.stackdriver.StackdriverUpsertNotificationChannelOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/stackdriver.py) | -| [cloud.operators.tasks.CloudTasksQueueCreateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py) | -| [cloud.operators.tasks.CloudTasksQueueDeleteOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py) | -| [cloud.operators.tasks.CloudTasksQueueGetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py) | -| [cloud.operators.tasks.CloudTasksQueuePauseOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py) | -| [cloud.operators.tasks.CloudTasksQueuePurgeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py) | -| [cloud.operators.tasks.CloudTasksQueueResumeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py) | -| [cloud.operators.tasks.CloudTasksQueueUpdateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py) | -| [cloud.operators.tasks.CloudTasksQueuesListOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py) | -| [cloud.operators.tasks.CloudTasksTaskCreateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py) | -| 
[cloud.operators.tasks.CloudTasksTaskDeleteOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py) | -| [cloud.operators.tasks.CloudTasksTaskGetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py) | -| [cloud.operators.tasks.CloudTasksTaskRunOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py) | -| [cloud.operators.tasks.CloudTasksTasksListOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py) | -| [cloud.operators.vision.CloudVisionAddProductToProductSetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | -| [cloud.operators.vision.CloudVisionDeleteReferenceImageOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | -| [firebase.operators.firestore.CloudFirestoreExportDatabaseOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/firebase/operators/firestore.py) | -| [marketing_platform.operators.analytics.GoogleAnalyticsDataImportUploadOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/analytics.py) | -| [marketing_platform.operators.analytics.GoogleAnalyticsDeletePreviousDataUploadsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/analytics.py) | -| [marketing_platform.operators.analytics.GoogleAnalyticsGetAdsLinkOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/analytics.py) | -| [marketing_platform.operators.analytics.GoogleAnalyticsListAccountsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/analytics.py) | -| [marketing_platform.operators.analytics.GoogleAnalyticsModifyFileHeadersDataImportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/analytics.py) | -| [marketing_platform.operators.analytics.GoogleAnalyticsRetrieveAdsLinksListOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/analytics.py) | -| [marketing_platform.operators.campaign_manager.GoogleCampaignManagerBatchInsertConversionsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/campaign_manager.py) | -| [marketing_platform.operators.campaign_manager.GoogleCampaignManagerBatchUpdateConversionsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/campaign_manager.py) | -| [marketing_platform.operators.campaign_manager.GoogleCampaignManagerDeleteReportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/campaign_manager.py) | -| [marketing_platform.operators.campaign_manager.GoogleCampaignManagerDownloadReportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/campaign_manager.py) | -| [marketing_platform.operators.campaign_manager.GoogleCampaignManagerInsertReportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/campaign_manager.py) | -| 
[marketing_platform.operators.campaign_manager.GoogleCampaignManagerRunReportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/campaign_manager.py) | -| [marketing_platform.operators.display_video.GoogleDisplayVideo360CreateReportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/display_video.py) | -| [marketing_platform.operators.display_video.GoogleDisplayVideo360CreateSDFDownloadTaskOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/display_video.py) | -| [marketing_platform.operators.display_video.GoogleDisplayVideo360DeleteReportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/display_video.py) | -| [marketing_platform.operators.display_video.GoogleDisplayVideo360DownloadLineItemsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/display_video.py) | -| [marketing_platform.operators.display_video.GoogleDisplayVideo360DownloadReportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/display_video.py) | -| [marketing_platform.operators.display_video.GoogleDisplayVideo360RunReportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/display_video.py) | -| [marketing_platform.operators.display_video.GoogleDisplayVideo360SDFtoGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/display_video.py) | -| [marketing_platform.operators.display_video.GoogleDisplayVideo360UploadLineItemsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/display_video.py) | -| [marketing_platform.operators.search_ads.GoogleSearchAdsDownloadReportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/search_ads.py) | -| [marketing_platform.operators.search_ads.GoogleSearchAdsInsertReportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/search_ads.py) | -| [suite.operators.sheets.GoogleSheetsCreateSpreadsheetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/suite/operators/sheets.py) | - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.google` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [cloud.operators.bigquery.BigQueryCheckOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | [contrib.operators.bigquery_check_operator.BigQueryCheckOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_check_operator.py) | -| 
[cloud.operators.bigquery.BigQueryCreateEmptyDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | [contrib.operators.bigquery_operator.BigQueryCreateEmptyDatasetOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py) | -| [cloud.operators.bigquery.BigQueryCreateEmptyTableOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | [contrib.operators.bigquery_operator.BigQueryCreateEmptyTableOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py) | -| [cloud.operators.bigquery.BigQueryCreateExternalTableOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | [contrib.operators.bigquery_operator.BigQueryCreateExternalTableOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py) | -| [cloud.operators.bigquery.BigQueryDeleteDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | [contrib.operators.bigquery_operator.BigQueryDeleteDatasetOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py) | -| [cloud.operators.bigquery.BigQueryDeleteTableOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | [contrib.operators.bigquery_table_delete_operator.BigQueryTableDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_table_delete_operator.py) | -| [cloud.operators.bigquery.BigQueryExecuteQueryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | [contrib.operators.bigquery_operator.BigQueryOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py) | -| [cloud.operators.bigquery.BigQueryGetDataOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | [contrib.operators.bigquery_get_data.BigQueryGetDataOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_get_data.py) | -| [cloud.operators.bigquery.BigQueryGetDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | [contrib.operators.bigquery_operator.BigQueryGetDatasetOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py) | -| [cloud.operators.bigquery.BigQueryGetDatasetTablesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | [contrib.operators.bigquery_operator.BigQueryGetDatasetTablesOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py) | -| [cloud.operators.bigquery.BigQueryIntervalCheckOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | [contrib.operators.bigquery_check_operator.BigQueryIntervalCheckOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_check_operator.py) | -| [cloud.operators.bigquery.BigQueryPatchDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | 
[contrib.operators.bigquery_operator.BigQueryPatchDatasetOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py) | -| [cloud.operators.bigquery.BigQueryUpdateDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | [contrib.operators.bigquery_operator.BigQueryUpdateDatasetOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py) | -| [cloud.operators.bigquery.BigQueryUpsertTableOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | [contrib.operators.bigquery_operator.BigQueryUpsertTableOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py) | -| [cloud.operators.bigquery.BigQueryValueCheckOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | [contrib.operators.bigquery_check_operator.BigQueryValueCheckOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_check_operator.py) | -| [cloud.operators.bigtable.BigtableCreateInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigtable.py) | [contrib.operators.gcp_bigtable_operator.BigtableInstanceCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_bigtable_operator.py) | -| [cloud.operators.bigtable.BigtableCreateTableOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigtable.py) | [contrib.operators.gcp_bigtable_operator.BigtableTableCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_bigtable_operator.py) | -| [cloud.operators.bigtable.BigtableDeleteInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigtable.py) | [contrib.operators.gcp_bigtable_operator.BigtableInstanceDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_bigtable_operator.py) | -| [cloud.operators.bigtable.BigtableDeleteTableOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigtable.py) | [contrib.operators.gcp_bigtable_operator.BigtableTableDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_bigtable_operator.py) | -| [cloud.operators.bigtable.BigtableUpdateClusterOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigtable.py) | [contrib.operators.gcp_bigtable_operator.BigtableClusterUpdateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_bigtable_operator.py) | -| [cloud.operators.cloud_build.CloudBuildCreateBuildOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_build.py) | [contrib.operators.gcp_cloud_build_operator.CloudBuildCreateBuildOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_cloud_build_operator.py) | -| [cloud.operators.cloud_sql.CloudSQLBaseOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_sql.py) | [contrib.operators.gcp_sql_operator.CloudSqlBaseOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_sql_operator.py) | -| 
[cloud.operators.cloud_sql.CloudSQLCreateInstanceDatabaseOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_sql.py) | [contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabaseCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_sql_operator.py) | -| [cloud.operators.cloud_sql.CloudSQLCreateInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_sql.py) | [contrib.operators.gcp_sql_operator.CloudSqlInstanceCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_sql_operator.py) | -| [cloud.operators.cloud_sql.CloudSQLDeleteInstanceDatabaseOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_sql.py) | [contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabaseDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_sql_operator.py) | -| [cloud.operators.cloud_sql.CloudSQLDeleteInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_sql.py) | [contrib.operators.gcp_sql_operator.CloudSqlInstanceDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_sql_operator.py) | -| [cloud.operators.cloud_sql.CloudSQLExecuteQueryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_sql.py) | [contrib.operators.gcp_sql_operator.CloudSqlQueryOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_sql_operator.py) | -| [cloud.operators.cloud_sql.CloudSQLExportInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_sql.py) | [contrib.operators.gcp_sql_operator.CloudSqlInstanceExportOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_sql_operator.py) | -| [cloud.operators.cloud_sql.CloudSQLImportInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_sql.py) | [contrib.operators.gcp_sql_operator.CloudSqlInstanceImportOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_sql_operator.py) | -| [cloud.operators.cloud_sql.CloudSQLInstancePatchOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_sql.py) | [contrib.operators.gcp_sql_operator.CloudSqlInstancePatchOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_sql_operator.py) | -| [cloud.operators.cloud_sql.CloudSQLPatchInstanceDatabaseOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_sql.py) | [contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabasePatchOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_sql_operator.py) | -| [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceCancelOperationOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py) | [contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationCancelOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py) | -| 
[cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceCreateJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py) | [contrib.operators.gcp_transfer_operator.GcpTransferServiceJobCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py) | -| [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceDeleteJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py) | [contrib.operators.gcp_transfer_operator.GcpTransferServiceJobDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py) | -| [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceGetOperationOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py) | [contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationGetOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py) | -| [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceListOperationsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py) | [contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationsListOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py) | -| [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServicePauseOperationOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py) | [contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationPauseOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py) | -| [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceResumeOperationOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py) | [contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationResumeOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py) | -| [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceUpdateJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py) | [contrib.operators.gcp_transfer_operator.GcpTransferServiceJobUpdateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py) | -| [cloud.operators.compute.ComputeEngineBaseOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/compute.py) | [contrib.operators.gcp_compute_operator.GceBaseOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_compute_operator.py) | -| [cloud.operators.compute.ComputeEngineCopyInstanceTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/compute.py) | [contrib.operators.gcp_compute_operator.GceInstanceTemplateCopyOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_compute_operator.py) | -| 
[cloud.operators.compute.ComputeEngineInstanceGroupUpdateManagerTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/compute.py) | [contrib.operators.gcp_compute_operator.GceInstanceGroupManagerUpdateTemplateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_compute_operator.py) | -| [cloud.operators.compute.ComputeEngineSetMachineTypeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/compute.py) | [contrib.operators.gcp_compute_operator.GceSetMachineTypeOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_compute_operator.py) | -| [cloud.operators.compute.ComputeEngineStartInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/compute.py) | [contrib.operators.gcp_compute_operator.GceInstanceStartOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_compute_operator.py) | -| [cloud.operators.compute.ComputeEngineStopInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/compute.py) | [contrib.operators.gcp_compute_operator.GceInstanceStopOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_compute_operator.py) | -| [cloud.operators.dataflow.DataflowCreateJavaJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataflow.py) | [contrib.operators.dataflow_operator.DataFlowJavaOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataflow_operator.py) | -| [cloud.operators.dataflow.DataflowCreatePythonJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataflow.py) | [contrib.operators.dataflow_operator.DataFlowPythonOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataflow_operator.py) | -| [cloud.operators.dataflow.DataflowTemplatedJobStartOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataflow.py) | [contrib.operators.dataflow_operator.DataflowTemplateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataflow_operator.py) | -| [cloud.operators.dataproc.DataprocCreateClusterOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py) | [contrib.operators.dataproc_operator.DataprocClusterCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataproc_operator.py) | -| [cloud.operators.dataproc.DataprocDeleteClusterOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py) | [contrib.operators.dataproc_operator.DataprocClusterDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataproc_operator.py) | -| [cloud.operators.dataproc.DataprocInstantiateInlineWorkflowTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py) | [contrib.operators.dataproc_operator.DataprocWorkflowTemplateInstantiateInlineOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataproc_operator.py) | -| 
[cloud.operators.dataproc.DataprocInstantiateWorkflowTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py) | [contrib.operators.dataproc_operator.DataprocWorkflowTemplateInstantiateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataproc_operator.py) | -| [cloud.operators.dataproc.DataprocJobBaseOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py) | [contrib.operators.dataproc_operator.DataProcJobBaseOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataproc_operator.py) | -| [cloud.operators.dataproc.DataprocScaleClusterOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py) | [contrib.operators.dataproc_operator.DataprocClusterScaleOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataproc_operator.py) | -| [cloud.operators.dataproc.DataprocSubmitHadoopJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py) | [contrib.operators.dataproc_operator.DataProcHadoopOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataproc_operator.py) | -| [cloud.operators.dataproc.DataprocSubmitHiveJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py) | [contrib.operators.dataproc_operator.DataProcHiveOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataproc_operator.py) | -| [cloud.operators.dataproc.DataprocSubmitPigJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py) | [contrib.operators.dataproc_operator.DataProcPigOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataproc_operator.py) | -| [cloud.operators.dataproc.DataprocSubmitPySparkJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py) | [contrib.operators.dataproc_operator.DataProcPySparkOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataproc_operator.py) | -| [cloud.operators.dataproc.DataprocSubmitSparkJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py) | [contrib.operators.dataproc_operator.DataProcSparkOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataproc_operator.py) | -| [cloud.operators.dataproc.DataprocSubmitSparkSqlJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py) | [contrib.operators.dataproc_operator.DataProcSparkSqlOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataproc_operator.py) | -| [cloud.operators.datastore.CloudDatastoreExportEntitiesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datastore.py) | [contrib.operators.datastore_export_operator.DatastoreExportOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/datastore_export_operator.py) | -| [cloud.operators.datastore.CloudDatastoreImportEntitiesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datastore.py) | 
[contrib.operators.datastore_import_operator.DatastoreImportOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/datastore_import_operator.py) |
-| [cloud.operators.dlp.CloudDLPCancelDLPJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPCancelDLPJobOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) |
-| [cloud.operators.dlp.CloudDLPCreateDLPJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPCreateDLPJobOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) |
-| [cloud.operators.dlp.CloudDLPCreateDeidentifyTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPCreateDeidentifyTemplateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) |
-| [cloud.operators.dlp.CloudDLPCreateInspectTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPCreateInspectTemplateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) |
-| [cloud.operators.dlp.CloudDLPCreateJobTriggerOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPCreateJobTriggerOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) |
-| [cloud.operators.dlp.CloudDLPCreateStoredInfoTypeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPCreateStoredInfoTypeOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) |
-| [cloud.operators.dlp.CloudDLPDeidentifyContentOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPDeidentifyContentOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) |
-| [cloud.operators.dlp.CloudDLPDeleteDLPJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPDeleteDlpJobOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) |
-| [cloud.operators.dlp.CloudDLPDeleteDeidentifyTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPDeleteDeidentifyTemplateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) |
-| [cloud.operators.dlp.CloudDLPDeleteInspectTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPDeleteInspectTemplateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) |
-| [cloud.operators.dlp.CloudDLPDeleteJobTriggerOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPDeleteJobTriggerOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) |
-| [cloud.operators.dlp.CloudDLPDeleteStoredInfoTypeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPDeleteStoredInfoTypeOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) |
-| [cloud.operators.dlp.CloudDLPGetDLPJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPGetDlpJobOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) |
-| [cloud.operators.dlp.CloudDLPGetDLPJobTriggerOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPGetJobTripperOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) |
-| [cloud.operators.dlp.CloudDLPGetDeidentifyTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPGetDeidentifyTemplateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) |
-| [cloud.operators.dlp.CloudDLPGetInspectTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPGetInspectTemplateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) |
-| [cloud.operators.dlp.CloudDLPGetStoredInfoTypeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPGetStoredInfoTypeOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) |
-| [cloud.operators.dlp.CloudDLPInspectContentOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPInspectContentOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) |
-| [cloud.operators.dlp.CloudDLPListDLPJobsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPListDlpJobsOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) |
-| [cloud.operators.dlp.CloudDLPListDeidentifyTemplatesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPListDeidentifyTemplatesOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) |
-| [cloud.operators.dlp.CloudDLPListInfoTypesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPListInfoTypesOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) |
-| [cloud.operators.dlp.CloudDLPListInspectTemplatesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPListInspectTemplatesOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) |
-| [cloud.operators.dlp.CloudDLPListJobTriggersOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPListJobTriggersOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) |
-| [cloud.operators.dlp.CloudDLPListStoredInfoTypesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPListStoredInfoTypesOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) |
-| [cloud.operators.dlp.CloudDLPRedactImageOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPRedactImageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) |
-| [cloud.operators.dlp.CloudDLPReidentifyContentOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPReidentifyContentOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) |
-| [cloud.operators.dlp.CloudDLPUpdateDeidentifyTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPUpdateDeidentifyTemplateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) |
-| [cloud.operators.dlp.CloudDLPUpdateInspectTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPUpdateInspectTemplateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) |
-| [cloud.operators.dlp.CloudDLPUpdateJobTriggerOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPUpdateJobTriggerOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) |
-| [cloud.operators.dlp.CloudDLPUpdateStoredInfoTypeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPUpdateStoredInfoTypeOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) |
-| [cloud.operators.functions.CloudFunctionDeleteFunctionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/functions.py) | [contrib.operators.gcp_function_operator.GcfFunctionDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_function_operator.py) |
-| [cloud.operators.functions.CloudFunctionDeployFunctionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/functions.py) | [contrib.operators.gcp_function_operator.GcfFunctionDeployOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_function_operator.py) |
-| [cloud.operators.gcs.GCSBucketCreateAclEntryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs.py) | [contrib.operators.gcs_acl_operator.GoogleCloudStorageBucketCreateAclEntryOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_acl_operator.py) |
-| [cloud.operators.gcs.GCSCreateBucketOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs.py) | [contrib.operators.gcs_operator.GoogleCloudStorageCreateBucketOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_operator.py) |
-| [cloud.operators.gcs.GCSDeleteObjectsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs.py) | [contrib.operators.gcs_delete_operator.GoogleCloudStorageDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_delete_operator.py) |
-| [cloud.operators.gcs.GCSListObjectsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs.py) | [contrib.operators.gcs_list_operator.GoogleCloudStorageListOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_list_operator.py) |
-| [cloud.operators.gcs.GCSObjectCreateAclEntryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs.py) | [contrib.operators.gcs_acl_operator.GoogleCloudStorageObjectCreateAclEntryOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_acl_operator.py) |
-| [cloud.operators.kubernetes_engine.GKECreateClusterOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/kubernetes_engine.py) | [contrib.operators.gcp_container_operator.GKEClusterCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_container_operator.py) |
-| [cloud.operators.kubernetes_engine.GKEDeleteClusterOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/kubernetes_engine.py) | [contrib.operators.gcp_container_operator.GKEClusterDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_container_operator.py) |
-| [cloud.operators.kubernetes_engine.GKEStartPodOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/kubernetes_engine.py) | [contrib.operators.gcp_container_operator.GKEPodOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_container_operator.py) |
-| [cloud.operators.mlengine.MLEngineManageModelOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py) | [contrib.operators.mlengine_operator.MLEngineModelOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/mlengine_operator.py) |
-| [cloud.operators.mlengine.MLEngineManageVersionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py) | [contrib.operators.mlengine_operator.MLEngineVersionOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/mlengine_operator.py) |
-| [cloud.operators.mlengine.MLEngineStartBatchPredictionJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py) | [contrib.operators.mlengine_operator.MLEngineBatchPredictionOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/mlengine_operator.py) |
-| [cloud.operators.mlengine.MLEngineStartTrainingJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py) | [contrib.operators.mlengine_operator.MLEngineTrainingOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/mlengine_operator.py) |
-| [cloud.operators.natural_language.CloudNaturalLanguageAnalyzeEntitiesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/natural_language.py) | [contrib.operators.gcp_natural_language_operator.CloudLanguageAnalyzeEntitiesOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_natural_language_operator.py) |
-| [cloud.operators.natural_language.CloudNaturalLanguageAnalyzeEntitySentimentOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/natural_language.py) | [contrib.operators.gcp_natural_language_operator.CloudLanguageAnalyzeEntitySentimentOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_natural_language_operator.py) |
-| [cloud.operators.natural_language.CloudNaturalLanguageAnalyzeSentimentOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/natural_language.py) | [contrib.operators.gcp_natural_language_operator.CloudLanguageAnalyzeSentimentOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_natural_language_operator.py) |
-| [cloud.operators.natural_language.CloudNaturalLanguageClassifyTextOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/natural_language.py) | [contrib.operators.gcp_natural_language_operator.CloudLanguageClassifyTextOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_natural_language_operator.py) |
-| [cloud.operators.pubsub.PubSubCreateSubscriptionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/pubsub.py) | [contrib.operators.pubsub_operator.PubSubSubscriptionCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/pubsub_operator.py) |
-| [cloud.operators.pubsub.PubSubCreateTopicOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/pubsub.py) | [contrib.operators.pubsub_operator.PubSubTopicCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/pubsub_operator.py) |
-| [cloud.operators.pubsub.PubSubDeleteSubscriptionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/pubsub.py) | [contrib.operators.pubsub_operator.PubSubSubscriptionDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/pubsub_operator.py) |
-| [cloud.operators.pubsub.PubSubDeleteTopicOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/pubsub.py) | [contrib.operators.pubsub_operator.PubSubTopicDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/pubsub_operator.py) |
-| [cloud.operators.pubsub.PubSubPublishMessageOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/pubsub.py) | [contrib.operators.pubsub_operator.PubSubPublishOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/pubsub_operator.py) |
-| [cloud.operators.spanner.SpannerDeleteDatabaseInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/spanner.py) | [contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_spanner_operator.py) |
-| [cloud.operators.spanner.SpannerDeleteInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/spanner.py) | [contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_spanner_operator.py) |
-| [cloud.operators.spanner.SpannerDeployDatabaseInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/spanner.py) | [contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseDeployOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_spanner_operator.py) |
-| [cloud.operators.spanner.SpannerDeployInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/spanner.py) | [contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDeployOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_spanner_operator.py) |
-| [cloud.operators.spanner.SpannerQueryDatabaseInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/spanner.py) | [contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseQueryOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_spanner_operator.py) |
-| [cloud.operators.spanner.SpannerUpdateDatabaseInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/spanner.py) | [contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseUpdateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_spanner_operator.py) |
-| [cloud.operators.speech_to_text.CloudSpeechToTextRecognizeSpeechOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/speech_to_text.py) | [contrib.operators.gcp_speech_to_text_operator.GcpSpeechToTextRecognizeSpeechOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_speech_to_text_operator.py) |
-| [cloud.operators.text_to_speech.CloudTextToSpeechSynthesizeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/text_to_speech.py) | [contrib.operators.gcp_text_to_speech_operator.GcpTextToSpeechSynthesizeOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_text_to_speech_operator.py) |
-| [cloud.operators.translate.CloudTranslateTextOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/translate.py) | [contrib.operators.gcp_translate_operator.CloudTranslateTextOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_translate_operator.py) |
-| [cloud.operators.translate_speech.CloudTranslateSpeechOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/translate_speech.py) | [contrib.operators.gcp_translate_speech_operator.CloudTranslateSpeechOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_translate_speech_operator.py) |
-| [cloud.operators.video_intelligence.CloudVideoIntelligenceDetectVideoExplicitContentOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/video_intelligence.py) | [contrib.operators.gcp_video_intelligence_operator.CloudVideoIntelligenceDetectVideoExplicitContentOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_video_intelligence_operator.py) |
-| [cloud.operators.video_intelligence.CloudVideoIntelligenceDetectVideoLabelsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/video_intelligence.py) | [contrib.operators.gcp_video_intelligence_operator.CloudVideoIntelligenceDetectVideoLabelsOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_video_intelligence_operator.py) |
-| [cloud.operators.video_intelligence.CloudVideoIntelligenceDetectVideoShotsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/video_intelligence.py) | [contrib.operators.gcp_video_intelligence_operator.CloudVideoIntelligenceDetectVideoShotsOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_video_intelligence_operator.py) |
-| [cloud.operators.vision.CloudVisionCreateProductOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | [contrib.operators.gcp_vision_operator.CloudVisionProductCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py) |
-| [cloud.operators.vision.CloudVisionCreateProductSetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | [contrib.operators.gcp_vision_operator.CloudVisionProductSetCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py) |
-| [cloud.operators.vision.CloudVisionCreateReferenceImageOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | [contrib.operators.gcp_vision_operator.CloudVisionReferenceImageCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py) |
-| [cloud.operators.vision.CloudVisionDeleteProductOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | [contrib.operators.gcp_vision_operator.CloudVisionProductDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py) |
-| [cloud.operators.vision.CloudVisionDeleteProductSetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | [contrib.operators.gcp_vision_operator.CloudVisionProductSetDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py) |
-| [cloud.operators.vision.CloudVisionDetectImageLabelsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | [contrib.operators.gcp_vision_operator.CloudVisionDetectImageLabelsOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py) |
-| [cloud.operators.vision.CloudVisionDetectImageSafeSearchOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | [contrib.operators.gcp_vision_operator.CloudVisionDetectImageSafeSearchOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py) |
-| [cloud.operators.vision.CloudVisionDetectTextOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | [contrib.operators.gcp_vision_operator.CloudVisionDetectTextOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py) |
-| [cloud.operators.vision.CloudVisionGetProductOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | [contrib.operators.gcp_vision_operator.CloudVisionProductGetOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py) |
-| [cloud.operators.vision.CloudVisionGetProductSetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | [contrib.operators.gcp_vision_operator.CloudVisionProductSetGetOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py) |
-| [cloud.operators.vision.CloudVisionImageAnnotateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | [contrib.operators.gcp_vision_operator.CloudVisionAnnotateImageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py) |
-| [cloud.operators.vision.CloudVisionRemoveProductFromProductSetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | [contrib.operators.gcp_vision_operator.CloudVisionRemoveProductFromProductSetOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py) |
-| [cloud.operators.vision.CloudVisionTextDetectOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | [contrib.operators.gcp_vision_operator.CloudVisionDetectDocumentTextOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py) |
-| [cloud.operators.vision.CloudVisionUpdateProductOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | [contrib.operators.gcp_vision_operator.CloudVisionProductUpdateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py) |
-| [cloud.operators.vision.CloudVisionUpdateProductSetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | [contrib.operators.gcp_vision_operator.CloudVisionProductSetUpdateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py) |
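In user code each rename above is a pure import change; constructor arguments are generally unchanged. A minimal sketch of migrating one of the operators from the table (the task id and bucket name are illustrative, not from the source):

```python
# Airflow 1.10.x (deprecated contrib location):
# from airflow.contrib.operators.gcs_operator import GoogleCloudStorageCreateBucketOperator

# Airflow 2.0 location, per the table above:
from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator

create_bucket = GCSCreateBucketOperator(
    task_id="create_bucket",        # illustrative task id
    bucket_name="example-bucket",   # illustrative bucket name
)
```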
-
-
-## Transfer operators
-
-
-### New transfer operators
-
-| New Airflow 2.0 transfers: `airflow.providers.google` package |
-|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [ads.transfers.ads_to_gcs.GoogleAdsToGcsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/ads/transfers/ads_to_gcs.py) |
-| [cloud.transfers.azure_fileshare_to_gcs.AzureFileShareToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/azure_fileshare_to_gcs.py) |
-| [cloud.transfers.facebook_ads_to_gcs.FacebookAdsReportToGcsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py) |
-| [cloud.transfers.gcs_to_local.GCSToLocalFilesystemOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/gcs_to_local.py) |
-| [cloud.transfers.gcs_to_sftp.GCSToSFTPOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/gcs_to_sftp.py) |
-| [cloud.transfers.presto_to_gcs.PrestoToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/presto_to_gcs.py) |
-| [cloud.transfers.salesforce_to_gcs.SalesforceToGcsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/salesforce_to_gcs.py) |
-| [cloud.transfers.sftp_to_gcs.SFTPToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/sftp_to_gcs.py) |
-| [cloud.transfers.sheets_to_gcs.GoogleSheetsToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/sheets_to_gcs.py) |
-| [suite.transfers.gcs_to_sheets.GCSToGoogleSheetsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/suite/transfers/gcs_to_sheets.py) |
-
-
-### Moved transfer operators
-
-| Airflow 2.0 transfers: `airflow.providers.google` package | Airflow 1.10.* previous location (usually `airflow.contrib`) |
-|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [cloud.transfers.adls_to_gcs.ADLSToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/adls_to_gcs.py) | [contrib.operators.adls_to_gcs.AdlsToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/adls_to_gcs.py) |
-| [cloud.transfers.bigquery_to_bigquery.BigQueryToBigQueryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py) | [contrib.operators.bigquery_to_bigquery.BigQueryToBigQueryOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_to_bigquery.py) |
-| [cloud.transfers.bigquery_to_gcs.BigQueryToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/bigquery_to_gcs.py) | [contrib.operators.bigquery_to_gcs.BigQueryToCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_to_gcs.py) |
-| [cloud.transfers.bigquery_to_mysql.BigQueryToMySqlOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/bigquery_to_mysql.py) | [contrib.operators.bigquery_to_mysql_operator.BigQueryToMySqlOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_to_mysql_operator.py) |
-| [cloud.transfers.cassandra_to_gcs.CassandraToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/cassandra_to_gcs.py) | [contrib.operators.cassandra_to_gcs.CassandraToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/cassandra_to_gcs.py) |
-| [cloud.transfers.gcs_to_bigquery.GCSToBigQueryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py) | [contrib.operators.gcs_to_bq.GoogleCloudStorageToBigQueryOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_to_bq.py) |
-| [cloud.transfers.gcs_to_gcs.GCSToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/gcs_to_gcs.py) | [contrib.operators.gcs_to_gcs.GoogleCloudStorageToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_to_gcs.py) |
-| [cloud.transfers.local_to_gcs.LocalFilesystemToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/local_to_gcs.py) | [contrib.operators.file_to_gcs.FileToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/file_to_gcs.py) |
-| [cloud.transfers.mssql_to_gcs.MSSQLToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/mssql_to_gcs.py) | [contrib.operators.mssql_to_gcs.MsSqlToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/mssql_to_gcs.py) |
-| [cloud.transfers.mysql_to_gcs.MySQLToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/mysql_to_gcs.py) | [contrib.operators.mysql_to_gcs.MySqlToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/mysql_to_gcs.py) |
-| [cloud.transfers.postgres_to_gcs.PostgresToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/postgres_to_gcs.py) | [contrib.operators.postgres_to_gcs_operator.PostgresToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/postgres_to_gcs_operator.py) |
-| [cloud.transfers.s3_to_gcs.S3ToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/s3_to_gcs.py) | [contrib.operators.s3_to_gcs_operator.S3ToGCSOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/s3_to_gcs_operator.py) |
-| [cloud.transfers.sql_to_gcs.BaseSQLToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/sql_to_gcs.py) | [contrib.operators.sql_to_gcs.BaseSQLToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sql_to_gcs.py) |
-| [suite.transfers.gcs_to_gdrive.GCSToGoogleDriveOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/suite/transfers/gcs_to_gdrive.py) | [contrib.operators.gcs_to_gdrive_operator.GCSToGoogleDriveOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_to_gdrive_operator.py) |
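Because the backport package exposes the new `airflow.providers` paths on Airflow 1.10 as well, DAGs can target the new locations ahead of the upgrade. A sketch of a compatibility shim for environments where the backport package may not yet be installed (the aliasing is illustrative, not a required pattern):

```python
try:
    # New location: Airflow 2.0, or 1.10 with the google backport package installed
    from airflow.providers.google.cloud.transfers.gcs_to_bigquery import GCSToBigQueryOperator
except ImportError:
    # Fallback to the old contrib location on a bare Airflow 1.10, per the table above
    from airflow.contrib.operators.gcs_to_bq import (
        GoogleCloudStorageToBigQueryOperator as GCSToBigQueryOperator,
    )
```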
-
-
-## Sensors
-
-
-### New sensors
-
-| New Airflow 2.0 sensors: `airflow.providers.google` package |
-|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [cloud.sensors.bigquery.BigQueryTablePartitionExistenceSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/bigquery.py) |
-| [cloud.sensors.bigquery_dts.BigQueryDataTransferServiceTransferRunSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/bigquery_dts.py) |
-| [cloud.sensors.dataflow.DataflowJobMetricsSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/dataflow.py) |
-| [cloud.sensors.dataflow.DataflowJobStatusSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/dataflow.py) |
-| [cloud.sensors.dataproc.DataprocJobSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/dataproc.py) |
-| [marketing_platform.sensors.campaign_manager.GoogleCampaignManagerReportSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/sensors/campaign_manager.py) |
-| [marketing_platform.sensors.display_video.GoogleDisplayVideo360GetSDFDownloadOperationSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/sensors/display_video.py) |
-| [marketing_platform.sensors.display_video.GoogleDisplayVideo360ReportSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/sensors/display_video.py) |
-| [marketing_platform.sensors.search_ads.GoogleSearchAdsReportSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/sensors/search_ads.py) |
-
-
-### Moved sensors
-
-| Airflow 2.0 sensors: `airflow.providers.google` package | Airflow 1.10.* previous location (usually `airflow.contrib`) |
-|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [cloud.sensors.bigquery.BigQueryTableExistenceSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/bigquery.py) | [contrib.sensors.bigquery_sensor.BigQueryTableSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/bigquery_sensor.py) |
-| [cloud.sensors.bigtable.BigtableTableReplicationCompletedSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/bigtable.py) | [contrib.operators.gcp_bigtable_operator.BigtableTableWaitForReplicationSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_bigtable_operator.py) |
-| [cloud.sensors.cloud_storage_transfer_service.CloudDataTransferServiceJobStatusSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py) | [contrib.sensors.gcp_transfer_sensor.GCPTransferServiceWaitForJobStatusSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/gcp_transfer_sensor.py) |
-| [cloud.sensors.gcs.GCSObjectExistenceSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/gcs.py) | [contrib.sensors.gcs_sensor.GoogleCloudStorageObjectSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/gcs_sensor.py) |
-| [cloud.sensors.gcs.GCSObjectUpdateSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/gcs.py) | [contrib.sensors.gcs_sensor.GoogleCloudStorageObjectUpdatedSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/gcs_sensor.py) |
-| [cloud.sensors.gcs.GCSObjectsWtihPrefixExistenceSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/gcs.py) | [contrib.sensors.gcs_sensor.GoogleCloudStoragePrefixSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/gcs_sensor.py) |
-| [cloud.sensors.gcs.GCSUploadSessionCompleteSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/gcs.py) | [contrib.sensors.gcs_sensor.GoogleCloudStorageUploadSessionCompleteSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/gcs_sensor.py) |
-| [cloud.sensors.pubsub.PubSubPullSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/pubsub.py) | [contrib.sensors.pubsub_sensor.PubSubPullSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/pubsub_sensor.py) |
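The moved sensors keep their poke-style semantics; only the import path (and in some cases the class name) changes. A minimal sketch using one sensor from the table above (bucket, object, and interval are illustrative):

```python
from airflow.providers.google.cloud.sensors.gcs import GCSObjectExistenceSensor

wait_for_file = GCSObjectExistenceSensor(
    task_id="wait_for_file",
    bucket="example-bucket",       # illustrative bucket
    object="data/input.csv",       # illustrative object path
    poke_interval=60,              # re-check once a minute
)
```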
-
-
-## Hooks
-
-
-### New hooks
-
-| New Airflow 2.0 hooks: `airflow.providers.google` package |
-|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [ads.hooks.ads.GoogleAdsHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/ads/hooks/ads.py) |
-| [cloud.hooks.automl.CloudAutoMLHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/automl.py) |
-| [cloud.hooks.bigquery_dts.BiqQueryDataTransferServiceHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/bigquery_dts.py) |
-| [cloud.hooks.cloud_memorystore.CloudMemorystoreHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/cloud_memorystore.py) |
-| [cloud.hooks.cloud_memorystore.CloudMemorystoreMemcachedHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/cloud_memorystore.py) |
-| [cloud.hooks.compute_ssh.ComputeEngineSSHHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/compute_ssh.py) |
-| [cloud.hooks.datacatalog.CloudDataCatalogHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/datacatalog.py) |
-| [cloud.hooks.datafusion.DataFusionHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/datafusion.py) |
-| [cloud.hooks.dataprep.GoogleDataprepHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/dataprep.py) |
-| [cloud.hooks.gdm.GoogleDeploymentManagerHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/gdm.py) |
-| [cloud.hooks.life_sciences.LifeSciencesHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/life_sciences.py) |
-| [cloud.hooks.os_login.OSLoginHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/os_login.py) |
-| [cloud.hooks.secret_manager.SecretsManagerHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/secret_manager.py) |
-| [cloud.hooks.stackdriver.StackdriverHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/stackdriver.py) |
-| [common.hooks.discovery_api.GoogleDiscoveryApiHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/common/hooks/discovery_api.py) |
-| [firebase.hooks.firestore.CloudFirestoreHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/firebase/hooks/firestore.py) |
-| [marketing_platform.hooks.analytics.GoogleAnalyticsHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/hooks/analytics.py) |
-| [marketing_platform.hooks.campaign_manager.GoogleCampaignManagerHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/hooks/campaign_manager.py) |
-| [marketing_platform.hooks.display_video.GoogleDisplayVideo360Hook](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/hooks/display_video.py) |
-| [marketing_platform.hooks.search_ads.GoogleSearchAdsHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/hooks/search_ads.py) |
-| [suite.hooks.sheets.GSheetsHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/suite/hooks/sheets.py) |
-
-
-### Moved hooks
-
-| Airflow 2.0 hooks: `airflow.providers.google` package | Airflow 1.10.* previous location (usually `airflow.contrib`) |
-|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [cloud.hooks.bigquery.BigQueryHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/bigquery.py) | [contrib.hooks.bigquery_hook.BigQueryHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/bigquery_hook.py) |
-| [cloud.hooks.bigtable.BigtableHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/bigtable.py) | [contrib.hooks.gcp_bigtable_hook.BigtableHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_bigtable_hook.py) |
-| [cloud.hooks.cloud_build.CloudBuildHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/cloud_build.py) | [contrib.hooks.gcp_cloud_build_hook.CloudBuildHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_cloud_build_hook.py) |
-| [cloud.hooks.cloud_sql.CloudSQLDatabaseHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/cloud_sql.py) | [contrib.hooks.gcp_sql_hook.CloudSqlDatabaseHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_sql_hook.py) |
-| [cloud.hooks.cloud_sql.CloudSQLHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/cloud_sql.py) | [contrib.hooks.gcp_sql_hook.CloudSqlHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_sql_hook.py) |
-| [cloud.hooks.cloud_storage_transfer_service.CloudDataTransferServiceHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py) | [contrib.hooks.gcp_transfer_hook.GCPTransferServiceHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_transfer_hook.py) |
-| [cloud.hooks.compute.ComputeEngineHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/compute.py) | [contrib.hooks.gcp_compute_hook.GceHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_compute_hook.py) |
-| [cloud.hooks.dataflow.DataflowHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/dataflow.py) | [contrib.hooks.gcp_dataflow_hook.DataFlowHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_dataflow_hook.py) |
-| [cloud.hooks.dataproc.DataprocHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/dataproc.py) | [contrib.hooks.gcp_dataproc_hook.DataProcHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_dataproc_hook.py) |
-| [cloud.hooks.datastore.DatastoreHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/datastore.py) | [contrib.hooks.datastore_hook.DatastoreHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/datastore_hook.py) |
-| [cloud.hooks.dlp.CloudDLPHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/dlp.py) | [contrib.hooks.gcp_dlp_hook.CloudDLPHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_dlp_hook.py) |
-| [cloud.hooks.functions.CloudFunctionsHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/functions.py) | [contrib.hooks.gcp_function_hook.GcfHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_function_hook.py) |
-| [cloud.hooks.gcs.GCSHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/gcs.py) | [contrib.hooks.gcs_hook.GoogleCloudStorageHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcs_hook.py) |
-| [cloud.hooks.kms.CloudKMSHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/kms.py) | [contrib.hooks.gcp_kms_hook.GoogleCloudKMSHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_kms_hook.py) |
-| [cloud.hooks.kubernetes_engine.GKEHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/kubernetes_engine.py) | [contrib.hooks.gcp_container_hook.GKEClusterHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_container_hook.py) |
-| [cloud.hooks.mlengine.MLEngineHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/mlengine.py) | [contrib.hooks.gcp_mlengine_hook.MLEngineHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_mlengine_hook.py) |
-| [cloud.hooks.natural_language.CloudNaturalLanguageHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/natural_language.py) | [contrib.hooks.gcp_natural_language_hook.CloudNaturalLanguageHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_natural_language_hook.py) |
-| [cloud.hooks.pubsub.PubSubHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/pubsub.py) | [contrib.hooks.gcp_pubsub_hook.PubSubHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_pubsub_hook.py) |
-| [cloud.hooks.spanner.SpannerHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/spanner.py) | [contrib.hooks.gcp_spanner_hook.CloudSpannerHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_spanner_hook.py) |
-| [cloud.hooks.speech_to_text.CloudSpeechToTextHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/speech_to_text.py) | [contrib.hooks.gcp_speech_to_text_hook.GCPSpeechToTextHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_speech_to_text_hook.py) |
-| [cloud.hooks.tasks.CloudTasksHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/tasks.py) | [contrib.hooks.gcp_tasks_hook.CloudTasksHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_tasks_hook.py) |
-| [cloud.hooks.text_to_speech.CloudTextToSpeechHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/text_to_speech.py) | [contrib.hooks.gcp_text_to_speech_hook.GCPTextToSpeechHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_text_to_speech_hook.py) |
-| [cloud.hooks.translate.CloudTranslateHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/translate.py) | [contrib.hooks.gcp_translate_hook.CloudTranslateHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_translate_hook.py) |
-| [cloud.hooks.video_intelligence.CloudVideoIntelligenceHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/video_intelligence.py) | [contrib.hooks.gcp_video_intelligence_hook.CloudVideoIntelligenceHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_video_intelligence_hook.py) |
-| [cloud.hooks.vision.CloudVisionHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/vision.py) | [contrib.hooks.gcp_vision_hook.CloudVisionHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_vision_hook.py) |
-| [common.hooks.base_google.GoogleBaseHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/common/hooks/base_google.py) | [contrib.hooks.gcp_api_base_hook.GoogleBaseHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_api_base_hook.py) |
-| [suite.hooks.drive.GoogleDriveHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/suite/hooks/drive.py) | [contrib.hooks.gdrive_hook.GoogleDriveHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gdrive_hook.py) |
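Hooks follow the same pattern and keep their connection-id wiring. A sketch of calling the renamed GCS hook from a Python callable (the connection id shown is Airflow's default; bucket and paths are illustrative, and method signatures should be checked against the installed provider version):

```python
from airflow.providers.google.cloud.hooks.gcs import GCSHook  # was contrib.hooks.gcs_hook.GoogleCloudStorageHook

def copy_report(**_):
    hook = GCSHook(gcp_conn_id="google_cloud_default")
    # Keyword arguments avoid any ambiguity from historical argument-order changes
    if hook.exists(bucket_name="example-bucket", object_name="reports/latest.csv"):
        hook.download(
            bucket_name="example-bucket",
            object_name="reports/latest.csv",
            filename="/tmp/latest.csv",
        )
```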
-
-
-## Secrets
-
-
-
-### Moved secrets
-
-| Airflow 2.0 secrets: `airflow.providers.google` package | Airflow 1.10.* previous location (usually `airflow.contrib`) |
-|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [cloud.secrets.secret_manager.CloudSecretManagerBackend](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/secrets/secret_manager.py) | [contrib.secrets.gcp_secrets_manager.CloudSecretsManagerBackend](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/secrets/gcp_secrets_manager.py) |
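Unlike operators and hooks, the secrets backend is wired up through configuration rather than imports: the `[secrets]` section of `airflow.cfg`, or the matching `AIRFLOW__SECRETS__*` environment variables, points at the new class path. A sketch using environment variables, with an illustrative connections prefix:

```python
import os

# Equivalent to setting backend / backend_kwargs in the [secrets] section of airflow.cfg
os.environ["AIRFLOW__SECRETS__BACKEND"] = (
    "airflow.providers.google.cloud.secrets.secret_manager.CloudSecretManagerBackend"
)
os.environ["AIRFLOW__SECRETS__BACKEND_KWARGS"] = (
    '{"connections_prefix": "airflow-connections", "sep": "-"}'  # illustrative prefix
)
```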
-
-
-
-## Releases
-
-### Release 2020.11.23
-
-| Commit | Committed | Subject |
-|:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------------|
-| [19b7e4565](https://github.com/apache/airflow/commit/19b7e4565e6372d50ef0fbb5678a484a7afbdbee) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) |
-| [8d0950646](https://github.com/apache/airflow/commit/8d09506464c8480fa42e8bfe6a36c6f631cd23f6) | 2020-11-18 | Fix download method in GCSToBigQueryOperator (#12442) |
-| [2c0920fba](https://github.com/apache/airflow/commit/2c0920fba5d2f05d2e29cead91127686af277ec2) | 2020-11-17 | Adds mechanism for provider package discovery. (#12383) |
-| [2cda2f2a0](https://github.com/apache/airflow/commit/2cda2f2a0a94e5aaed87f0998fa57b4f8bff5e43) | 2020-11-17 | Add missing pre-commit definition - provider-yamls (#12393) |
-| [80a957f14](https://github.com/apache/airflow/commit/80a957f142f260daed262b8e93a4d02c12cfeabc) | 2020-11-17 | Add Dataflow sensors - job metrics (#12039) |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) |
-| [917e6c442](https://github.com/apache/airflow/commit/917e6c4424985271c53dd8c413b211896ee55726) | 2020-11-16 | Add provide_file_and_upload to GCSHook (#12310) |
-| [cfa4ecfeb](https://github.com/apache/airflow/commit/cfa4ecfeb02661f40b4778733384ac085fb5f04b) | 2020-11-15 | Add DataflowJobStatusSensor and support non-blocking execution of jobs (#11726) |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) |
-| [32b59f835](https://github.com/apache/airflow/commit/32b59f8350f55793df6838a32de662a80483ecda) | 2020-11-12 | Fixes the sending of an empty list to BigQuery `list_rows` (#12307) |
-| [250436d96](https://github.com/apache/airflow/commit/250436d962c8c950d38c1eb5e54a998891648cc9) | 2020-11-10 | Fix spelling in Python files (#12230) |
-| [502ba309e](https://github.com/apache/airflow/commit/502ba309ea470943f0e99c634269e3d2d13ce6ca) | 2020-11-10 | Enable Markdownlint rule - MD022/blanks-around-headings (#12225) |
-| [dd2095f4a](https://github.com/apache/airflow/commit/dd2095f4a8b07c9b1a4c279a3578cd1e23b71a1b) | 2020-11-10 | Simplify string expressions & Use f-string (#12216) |
-| [f37c6e6fc](https://github.com/apache/airflow/commit/f37c6e6fce8b704f5af28caa16d0ed7d873a0e4a) | 2020-11-10 | Add Compute Engine SSH hook (#9879) |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) |
-| [61feb6ec4](https://github.com/apache/airflow/commit/61feb6ec453f8dda1a0e1fe3ebcc0f1e3224b634) | 2020-11-09 | Provider's readmes generated for elasticsearch and google packages (#12194) |
-
-
-### Release 2020.11.13
-
-| Commit | Committed | Subject |
-|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------------------------------------------|
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) |
-| [fcb6b00ef](https://github.com/apache/airflow/commit/fcb6b00efef80c81272a30cfc618202a29e0c6a9) | 2020-11-08 | Add authentication to AWS with Google credentials (#12079) |
-| [2ef3b7ef8](https://github.com/apache/airflow/commit/2ef3b7ef8cafe3bdc8bf8db70fbc519b98576366) | 2020-11-08 | Fix ERROR - Object of type 'bytes' is not JSON serializable when using store_to_xcom_key parameter (#12172) |
-| [0caec9fd3](https://github.com/apache/airflow/commit/0caec9fd32bee2b3036b5d7bdcb56bd6a3b9dccf) | 2020-11-06 | Dataflow - add waiting for successful job cancel (#11501) |
-| [cf9437d79](https://github.com/apache/airflow/commit/cf9437d79f9658d1309e4bfe847fe63d52ec7b99) | 2020-11-06 | Simplify string expressions (#12123) |
-| [91a64db50](https://github.com/apache/airflow/commit/91a64db505e50712cd53928b4f2b84aece3cc1c0) | 2020-11-04 | Format all files (without excepions) by black (#12091) |
-| [fd3db778e](https://github.com/apache/airflow/commit/fd3db778e715d0f164dda7ee8f672d477a323291) | 2020-11-04 | Add server side cursor support for postgres to GCS operator (#11793) |
-| [f1f194026](https://github.com/apache/airflow/commit/f1f1940261744b4fdb67b0b5654488494efa9c64) | 2020-11-04 | Add DataflowStartSQLQuery operator (#8553) |
-| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) |
-| [5f5244b74](https://github.com/apache/airflow/commit/5f5244b74df93cadbb99643cec76281460ca4411) | 2020-11-04 | Add template fields renderers to Biguery and Dataproc operators (#12067) |
-| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) |
-| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) |
-| [45ae145c2](https://github.com/apache/airflow/commit/45ae145c25a19b4185c33ac0c4da920324b3987e) | 2020-11-03 | Log BigQuery job id in insert method of BigQueryHook (#12056) |
-| [e324b37a6](https://github.com/apache/airflow/commit/e324b37a67e32c368df50604a00160d7766b5c33) | 2020-11-03 | Add job name and progress logs to Cloud Storage Transfer Hook (#12014) |
-| [6071fdd58](https://github.com/apache/airflow/commit/6071fdd58470bb2a6c23fc16481e292b7247d0bb) | 2020-11-02 | Improve handling server errors in DataprocSubmitJobOperator (#11947) |
-| [2f703df12](https://github.com/apache/airflow/commit/2f703df12dfd6511722ff9a82d5a569d092fccc2) | 2020-10-30 | Add SalesforceToGcsOperator (#10760) |
-| [e5713e00b](https://github.com/apache/airflow/commit/e5713e00b3afcba6f78006ec0e360da317858e4d) | 2020-10-29 | Add drain option when canceling Dataflow pipelines (#11374) |
-| [37eaac3c5](https://github.com/apache/airflow/commit/37eaac3c5dc93804413c10a6ca124fd7831befc0) | 2020-10-29 | The PRs which are not approved run subset of tests (#11828) |
-| [79cb77199](https://github.com/apache/airflow/commit/79cb771992279d40ddd9eb6b0277382313a32898) | 2020-10-28 | Fixing re pattern and changing to use a single character class. (#11857) |
-| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) |
-| [240c7d4d7](https://github.com/apache/airflow/commit/240c7d4d72aac8f6aab98f5913e8f54c4f1372ff) | 2020-10-26 | Google Memcached hooks - improve protobuf messages handling (#11743) |
-| [8afdb6ac6](https://github.com/apache/airflow/commit/8afdb6ac6a7997cb14806bc2734c81c00ed8da97) | 2020-10-26 | Fix spellings (#11825) |
-| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) |
-| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 |
-
-
-### Release 2020.10.29
-
-| Commit | Committed | Subject |
-|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------|
-| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 |
-| [6ce855af1](https://github.com/apache/airflow/commit/6ce855af118daeaa4c249669079ab9d9aad23945) | 2020-10-24 | Fix spelling (#11821) |
-| [483068745](https://github.com/apache/airflow/commit/48306874538eea7cfd42358d5ebb59705204bfc4) | 2020-10-24 | Use Python 3 style super classes (#11806) |
-| [727c739af](https://github.com/apache/airflow/commit/727c739afb565d4d394a8faedc969334cb8e738e) | 2020-10-22 | Improve Cloud Memorystore for Redis example (#11735) |
-| [1da8379c9](https://github.com/apache/airflow/commit/1da8379c913843834353b44861c62f332a461bdf) | 2020-10-22 | Fix static checks after merging #10121 (#11737) |
-| [91503308c](https://github.com/apache/airflow/commit/91503308c723b186ce6f4026f2a3e2c21030f6e5) | 2020-10-22 | Add Google Cloud Memorystore Memcached Operators (#10121) |
-| [950c16d0b](https://github.com/apache/airflow/commit/950c16d0b0ab67bb7af11909de751029faf0313a) | 2020-10-21 | Retry requests in case of error in Google ML Engine Hook (#11712) |
-| [2bfc53b5e](https://github.com/apache/airflow/commit/2bfc53b5eb67406d418371b74dc9bc5a07be238e) | 2020-10-21 | Fix doc errors in google provider files. (#11713) |
-| [53e606210](https://github.com/apache/airflow/commit/53e6062105be0ae1761a354e2055eb0779d12e73) | 2020-10-21 | Enforce strict rules for yamllint (#11709) |
-| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) |
-| [2d854c350](https://github.com/apache/airflow/commit/2d854c3505ccad66e9a7d94267e51bed800433c2) | 2020-10-19 | Add service_account to Google ML Engine operator (#11619) |
-| [46a121fb7](https://github.com/apache/airflow/commit/46a121fb7b77c0964e053b58750e2d8bc2bd0b2a) | 2020-10-18 | docs: Update Bigquery clustering docstrings (#11232) |
-| [49c58147f](https://github.com/apache/airflow/commit/49c58147fed8a52869d0b0ecc00c102c11972ad0) | 2020-10-18 | Strict type checking for provider Google (#11609) |
-| [0823d46a7](https://github.com/apache/airflow/commit/0823d46a7f267f2e45195a175021825367938add) | 2020-10-16 | Add type annotations for AWS operators and hooks (#11434) |
-| [3c10ca650](https://github.com/apache/airflow/commit/3c10ca6504be37fabff9a10caefea3fe4df31a02) | 2020-10-16 | Add DataflowStartFlexTemplateOperator (#8550) |
-| [8865d14df](https://github.com/apache/airflow/commit/8865d14df4d58dd5f1a4d2ff81c77469959f175a) | 2020-10-16 | Strict type checking for provider google cloud (#11548) |
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) |
-| [06141d6d0](https://github.com/apache/airflow/commit/06141d6d01398115e5e54c5766a46ae5514ba2f7) | 2020-10-12 | Google cloud operator strict type check (#11450) |
-| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12 | Remove redundant None provided as default to dict.get() (#11448) |
-| [1845cd11b](https://github.com/apache/airflow/commit/1845cd11b77f302777ab854e84bef9c212c604a0) | 2020-10-11 | Strict type check for google ads and cloud hooks (#11390) |
-| [bd204bb91](https://github.com/apache/airflow/commit/bd204bb91b4bc069284f9a44757c6baba8884140) | 2020-10-11 | Optionally set null marker in csv exports in BaseSQLToGCSOperator (#11409) |
-| [75071831b](https://github.com/apache/airflow/commit/75071831baa936d292354f98aac46cd808a4b2b8) | 2020-10-10 | Remove redundant parentheses from Python files (#10967) |
-| [8baf657fc](https://github.com/apache/airflow/commit/8baf657fc2b21a601b99b752e4f1176bf8a934ce) | 2020-10-09 | Fix regression in DataflowTemplatedJobStartOperator (#11167) |
-| [b0fcf6755](https://github.com/apache/airflow/commit/b0fcf675595494b306800e1a516548dc0dc671f8) | 2020-10-07 | Add AzureFileShareToGCSOperator (#10991) |
-| [47b05a87f](https://github.com/apache/airflow/commit/47b05a87f004dc273a4757ba49f03808a86f77e7) | 2020-10-07 | Improve handling of job_id in BigQuery operators (#11287) |
-| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) |
-
-
-### Release 2020.10.5
-
-| Commit | Committed | Subject |
-|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------------------|
-| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) |
-| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) |
-| [cb52fb0ae](https://github.com/apache/airflow/commit/cb52fb0ae1de1f1140babaed0e97299e4aaf96bf) | 2020-09-27 | Add example DAG and system test for MySQLToGCSOperator (#10990) |
-| [99accec29](https://github.com/apache/airflow/commit/99accec29d71b0a57fd4e90151b9d4d10321be07) | 2020-09-25 | Fix incorrect Usage of Optional[str] & Optional[int] (#11141) |
-| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24 | Fix incorrect Usage of Optional[bool] (#11138) |
-| [daf8f3108](https://github.com/apache/airflow/commit/daf8f31080f06c044b4336071bd383bbbcdc6085) | 2020-09-23 | Add template fields renderers for better UI rendering (#11061) |
-| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) |
-| [cb979f9f2](https://github.com/apache/airflow/commit/cb979f9f213bb3c9835a3dc924f84a07f5387378) | 2020-09-22 | Get Airflow configs with sensitive data from CloudSecretManagerBackend (#11024) |
-| [76545bb3d](https://github.com/apache/airflow/commit/76545bb3d6fa82ce8eae072dbc74a3b76d8fd53c) | 2020-09-16 | Add example dag and system test for S3ToGCSOperator (#10951) |
-| [22c631625](https://github.com/apache/airflow/commit/22c631625fd68abe280528f33b7cfd7603ebf66c) | 2020-09-16 | Fix more docs spellings (#10965) |
-| [12a652f53](https://github.com/apache/airflow/commit/12a652f5344c7f03c3d780556ca1829b235fdb2d) | 2020-09-13 | Fix parameter name collision in AutoMLBatchPredictOperator #10723 (#10869) |
-| [41a62735e](https://github.com/apache/airflow/commit/41a62735edcebbd9c39e505280646ef5d25aa1d5) | 2020-09-11 | Add on_kill method to BigQueryInsertJobOperator (#10866) |
-| [3e91da56e](https://github.com/apache/airflow/commit/3e91da56e8c63a90dc859d8996a896b5d9f8cd43) | 2020-09-11 | fix typo in firebase/example_filestore DAG (#10875) |
-| [68cc7273b](https://github.com/apache/airflow/commit/68cc7273bf0c0f562748b5f663da5c12d2cba6a7) | 2020-09-10 | Add on_kill method to DataprocSubmitJobOperator (#10847) |
-| [f92095721](https://github.com/apache/airflow/commit/f92095721450c14605c986e165544a7bfb712a3d) | 2020-09-10 | Fix and remove some more typos from spelling_wordlist.txt (#10845) |
-| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) |
-| [078bfaf60](https://github.com/apache/airflow/commit/078bfaf60adc5aebac8c347e7f6e5339ab9b56c0) | 2020-09-08 | Extract missing gcs_to_local example DAG from gcs example (#10767) |
-| [10ce31127](https://github.com/apache/airflow/commit/10ce31127f1ff87176158935925afce46a989917) | 2020-09-08 | Deprecate using global as the default region in Google Dataproc operators and hooks (#10772) |
-| [f14f37971](https://github.com/apache/airflow/commit/f14f3797163cc45fdcdabfb36ee7d638f70e470d) | 2020-09-07 | [AIRFLOW-10672] Refactor BigQueryToGCSOperator to use new method (#10773) |
-| [c8ee45568](https://github.com/apache/airflow/commit/c8ee4556851c36b3b6e644a7746a49583dd53db1) | 2020-09-07 | Refactor DataprocCreateCluster operator to use simpler interface (#10403) |
-| [ece685b5b](https://github.com/apache/airflow/commit/ece685b5b895ad1175440b49bf9e620dffd8248d) | 2020-09-05 | Asynchronous execution of Dataproc jobs with a Sensor (#10673) |
-| [6e3d7b63d](https://github.com/apache/airflow/commit/6e3d7b63d3b34c34f8b38a7b41f4a5876e1f731f) | 2020-09-04 | Add masterConfig parameter to MLEngineStartTrainingJobOperator (#10578) |
-| [804548d58](https://github.com/apache/airflow/commit/804548d58f2036fd4516824a38d0639ba5d5ab0e) | 2020-09-01 | Add Dataprep operators (#10304) |
-| [11c00bc82](https://github.com/apache/airflow/commit/11c00bc820483691a87cdb16d519dce8dc57c40e) | 2020-08-30 | Fix typos: duplicated "the" (#10647) |
-| [2ca615cff](https://github.com/apache/airflow/commit/2ca615cffefe97dfa38e1b7f60d9ed33c6628992) | 2020-08-29 | Update Google Cloud branding (#10642) |
-| [1b533f617](https://github.com/apache/airflow/commit/1b533f617e2e0200597d114d7570f6c0d69da1a0) | 2020-08-28 | Fix broken master - DLP (#10635) |
-| [5ae82a56d](https://github.com/apache/airflow/commit/5ae82a56dab599de44f1be7027cecc4ef86f7bb6) | 2020-08-28 | Fix Google DLP example and improve ops idempotency (#10608) |
-| [3867f7662](https://github.com/apache/airflow/commit/3867f7662559761864ec4e7be26b776c64c2f199) | 2020-08-28 | Update Google Cloud branding (#10615) |
-| [91ff31ad1](https://github.com/apache/airflow/commit/91ff31ad1021235bd21c87ad9dbc0b216a908671) | 2020-08-27 | Documentation for Google Cloud Data Loss Prevention (#8201) (#9651) |
-| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) |
-| [d76026545](https://github.com/apache/airflow/commit/d7602654526fdd2876466371404784bd17cfe0d2) | 2020-08-25 | PyDocStyle: No whitespaces allowed surrounding docstring text (#10533) |
-| [d1bce91bb](https://github.com/apache/airflow/commit/d1bce91bb21d5a468fa6a0207156c28fe1ca6513) | 2020-08-25 | PyDocStyle: Enable D403: Capitalized first word of docstring (#10530) |
-| [866701c80](https://github.com/apache/airflow/commit/866701c8019f49dcb02c9696e4f6e9ce67d13ca6) | 2020-08-25 | Fix typo in "Cloud" (#10534) |
-| [47265e7b5](https://github.com/apache/airflow/commit/47265e7b58bc28bcbbffc981442b6cc27a3af39c) | 2020-08-24 | Fix typo in PostgresHook (#10529) |
-| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) |
-| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25 | Remove all "noinspection" comments native to IntelliJ (#10525) |
-| [3734876d9](https://github.com/apache/airflow/commit/3734876d9898067ee933b84af522d53df6160d7f) | 2020-08-24 | Implement impersonation in google operators (#10052) |
-| [b0598b535](https://github.com/apache/airflow/commit/b0598b5351d2d027286e2333231b6c0c0704dba2) | 2020-08-24 | Add support for creating multiple replicated clusters in Bigtable hook and operator (#10475) |
-| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) |
-| [515cc72c9](https://github.com/apache/airflow/commit/515cc72c995429c8c007f853ade385d79fcbac90) | 2020-08-22 | Fix typo in timed_out (#10459) |
-| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22 | Replace assigment with Augmented assignment (#10468) |
-| [88c7d2e52](https://github.com/apache/airflow/commit/88c7d2e526af4994066f65f830e2fa8edcbbce2e) | 2020-08-21 | Dataflow operators don't not always create a virtualenv (#10373) |
-| 
[083c3c129](https://github.com/apache/airflow/commit/083c3c129bc3458d410f5ff37d7f5a9a7ad548b7) | 2020-08-18 | Simplified GCSTaskHandler configuration (#10365) | -| [1ae5bdf23](https://github.com/apache/airflow/commit/1ae5bdf23e3ac7cca05325ef8b255a7cf067e18e) | 2020-08-17 | Add test for GCSTaskHandler (#9600) (#9861) | -| [e195a980b](https://github.com/apache/airflow/commit/e195a980bc8e9d42f3eb4ac134950977b9e5158f) | 2020-08-16 | Add type annotations for mlengine_operator_utils (#10297) | -| [382c1011b](https://github.com/apache/airflow/commit/382c1011b6bcebd22760e2f98419281ef1a09d1b) | 2020-08-16 | Add Bigtable Update Instance Hook/Operator (#10340) | -| [bfa5a8d5f](https://github.com/apache/airflow/commit/bfa5a8d5f10458c14d380c4042ecfbac627d0639) | 2020-08-15 | CI: Fix failing docs-build (#10342) | -| [be46d20fb](https://github.com/apache/airflow/commit/be46d20fb431cc1d91c935e8894dfc7756c18993) | 2020-08-15 | Improve idempotency of BigQueryInsertJobOperator (#9590) | -| [47387a69e](https://github.com/apache/airflow/commit/47387a69e623676b57b6d42ff07e729da2d21bff) | 2020-08-14 | Catch Permission Denied exception when getting secret from GCP Secret Manager. (#10326) | -| [2f0613b0c](https://github.com/apache/airflow/commit/2f0613b0c2fdf176d9f13a8cd12162c60c64b644) | 2020-08-13 | Implement Google BigQuery Table Partition Sensor (#10218) | -| [f6734b3b8](https://github.com/apache/airflow/commit/f6734b3b850d33d3712763f93c114e80f5af9ffb) | 2020-08-12 | Enable Sphinx spellcheck for doc generation (#10280) | -| [8f8db8959](https://github.com/apache/airflow/commit/8f8db8959e526be54d700845d36ee9f315bae2ea) | 2020-08-12 | DbApiHook: Support kwargs in get_pandas_df (#9730) | -| [ef088314f](https://github.com/apache/airflow/commit/ef088314f8f1b29ac636a7584cf9dda04b1df816) | 2020-08-09 | Added DataprepGetJobsForJobGroupOperator (#10246) | -| [b43f90abf](https://github.com/apache/airflow/commit/b43f90abf4c7219d5d59cccb0514256bd3f2fdc7) | 2020-08-09 | Fix various typos in the repo (#10263) | -| [c29533888](https://github.com/apache/airflow/commit/c29533888fadd40f5e9ce63e728bd8691182e542) | 2020-08-08 | Add labels param to Google MLEngine Operators (#10222) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [eff0f0321](https://github.com/apache/airflow/commit/eff0f03210d30a4aed9ed457eaaea9c9f05d54d1) | 2020-08-06 | Update guide for Google Cloud Secret Manager Backend (#10172) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [010322692](https://github.com/apache/airflow/commit/010322692e6e3f0adc156f0beb81e267da0e97bb) | 2020-08-06 | Improve handling Dataproc cluster creation with ERROR state (#9593) | -| [1437cb749](https://github.com/apache/airflow/commit/1437cb74955f4e10af5d70ebadde1e6b163fb9b7) | 2020-08-04 | Add correct signatures for operators in google provider package (#10144) | -| [6efa1b9cb](https://github.com/apache/airflow/commit/6efa1b9cb763ae0bdbc884a54d24dbdc39d9e3a6) | 2020-08-03 | Add additional Cloud Datastore operators (#10032) | -| [27020f8e5](https://github.com/apache/airflow/commit/27020f8e588575d53e63f9f9daecd3a522656644) | 2020-08-03 | Add try clause to DataFusionHook.wait_for_pipeline_state (#10031) | -| [4e3799fec](https://github.com/apache/airflow/commit/4e3799fec4c23d0f43603a0489c5a6158aeba035) | 2020-08-02 | 
[AIRFLOW-4541] Replace os.mkdirs usage with pathlib.Path(path).mkdir (#10117) | -| [85c56b173](https://github.com/apache/airflow/commit/85c56b1737c2bf61751836571300445c0aebae1a) | 2020-08-02 | Add missing params to GCP Pub/Sub creation_subscription (#10106) | -| [b79466c12](https://github.com/apache/airflow/commit/b79466c12f3ae717c31804acc2e9ffcd60f9611c) | 2020-08-02 | Fix sensor not providing arguments for GCSHook (#10074) | -| [4ee35d027](https://github.com/apache/airflow/commit/4ee35d027988c6456767faeb108a7f686d5117f2) | 2020-08-02 | Fix hook not passing gcp_conn_id to base class (#10075) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [4c84661ad](https://github.com/apache/airflow/commit/4c84661adb5bb5c581bb4193b4c7e935cbe07758) | 2020-07-31 | Split Display Video 360 example into smaler DAGs (#10077) | -| [59cbff087](https://github.com/apache/airflow/commit/59cbff0874dd5318cda4b9ce7b7eeb1aad1dad4d) | 2020-07-29 | Fix docstrings in BigQueryGetDataOperator (#10042) | -| [81b87d48e](https://github.com/apache/airflow/commit/81b87d48ed002d7a7f7bcb72a58e82d40a176fe2) | 2020-07-27 | Add unit tests for GcpBodyFieldSanitizer in Google providers (#9996) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [8b10a4b35](https://github.com/apache/airflow/commit/8b10a4b35e45d536a6475bfe1491ee75fad50186) | 2020-07-25 | Stop using start_date in default_args in example_dags (#9982) | -| [ef98edf4d](https://github.com/apache/airflow/commit/ef98edf4da2d9b74d5cf5b21e81577b3151edb79) | 2020-07-23 | Add more information about using GoogleAdsHook (#9951) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [39a0288a4](https://github.com/apache/airflow/commit/39a0288a47536dfd9b651ecd075887d3e45fcfc4) | 2020-07-22 | Add Google Authentication for experimental API (#9848) | -| [c2db0dfeb](https://github.com/apache/airflow/commit/c2db0dfeb13ee679bf4d7b57874f0fcb39c0f0ed) | 2020-07-22 | More strict rules in mypy (#9705) (#9906) | -| [c4244e18b](https://github.com/apache/airflow/commit/c4244e18bb894eb2180b8972052e56110fe5cbc9) | 2020-07-22 | Fix calling `get_client` in BigQueryHook.table_exists (#9916) | -| [5eacc1642](https://github.com/apache/airflow/commit/5eacc164201a121cd06126aff613cbe0919d35cc) | 2020-07-22 | Add support for impersonation in GCP hooks (#9915) | -| [1cfdebf5f](https://github.com/apache/airflow/commit/1cfdebf5f8841d61a11540b88c7913686e89e085) | 2020-07-21 | Fix insert_job method of BigQueryHook (#9899) | -| [c8c52e69c](https://github.com/apache/airflow/commit/c8c52e69c8d9cc1f26f63d95aecc0a6498d40b6f) | 2020-07-21 | Remove type hint causing DeprecationWarning in Firestore operators (#9819) | -| [eb6f1d1cf](https://github.com/apache/airflow/commit/eb6f1d1cf0503fa763c0d8d34a2fe16efb390b9c) | 2020-07-16 | Fix typo in datafusion operator (#9859) | -| [b01d95ec2](https://github.com/apache/airflow/commit/b01d95ec22b01ed79123178acd74ef40d57aaa7c) | 2020-07-15 | Change DAG.clear to take dag_run_state (#9824) | -| [6d65c15d1](https://github.com/apache/airflow/commit/6d65c15d156a41d5e735e44a1170426559a17d1f) | 2020-07-15 | Add guide for AI Platform (previously Machine Learning Engine) Operators (#9798) | -| 
[770de53eb](https://github.com/apache/airflow/commit/770de53eb57bd57ffc555ad15b18f0c058dbebe7) | 2020-07-15 | BigQueryTableExistenceSensor needs to specify keyword arguments (#9832) | -| [2d8dbacdf](https://github.com/apache/airflow/commit/2d8dbacdf6c19a598a7f55bcf65e28703aed6201) | 2020-07-15 | Add CloudVisionDeleteReferenceImageOperator (#9698) | -| [9f017951b](https://github.com/apache/airflow/commit/9f017951b94d9bf52b5ee66d72aa8dd822f07269) | 2020-07-15 | Add Google Deployment Manager Hook (#9159) | -| [ed5004cca](https://github.com/apache/airflow/commit/ed5004cca753650dc222fbb8e67573938c6c16d9) | 2020-07-14 | Allow `replace` flag in gcs_to_gcs operator. (#9667) | -| [553bb7af7](https://github.com/apache/airflow/commit/553bb7af7cb7a50f7141b5b89297713cee6d19f6) | 2020-07-13 | Keep functions signatures in decorators (#9786) | -| [68925904e](https://github.com/apache/airflow/commit/68925904e49aac6968defb6834863f4e6347fe59) | 2020-07-13 | Add multiple file upload functionality to GCS hook (#8849) | -| [1de78e8f9](https://github.com/apache/airflow/commit/1de78e8f97f48f8f4abd167a0120ffab8af6127a) | 2020-07-12 | Add Google Stackdriver link (#9765) | -| [092d33f29](https://github.com/apache/airflow/commit/092d33f298a7dbb871b1e1b4c17aad3989e89b79) | 2020-07-11 | Fix StackdriverTaskHandler + add system tests (#9761) | -| [b2305660f](https://github.com/apache/airflow/commit/b2305660f0eb55ebd31fdc7fe4e8aeed8c1f8c00) | 2020-07-09 | Update example DAG for AI Platform operators (#9727) | -| [23f80f34a](https://github.com/apache/airflow/commit/23f80f34adec86da24e4896168c53d213d01a7f6) | 2020-07-08 | Move gcs & wasb task handlers to their respective provider packages (#9714) | -| [44d4ae809](https://github.com/apache/airflow/commit/44d4ae809c1e3784ff95b6a5e95113c3412e56b3) | 2020-07-06 | Upgrade to latest pre-commit checks (#9686) | -| [a79e2d4c4](https://github.com/apache/airflow/commit/a79e2d4c4aa105f3fac5ae6a28e29af9cd572407) | 2020-07-06 | Move provider's log task handlers to the provider package (#9604) | -| [cd3d9d934](https://github.com/apache/airflow/commit/cd3d9d93402f06a08f35e3586802f11a18c4f1f3) | 2020-07-02 | Fix using .json template extension in GMP operators (#9566) | -| [4799af30e](https://github.com/apache/airflow/commit/4799af30ee02c596647d1538854769124f9f4961) | 2020-06-30 | Extend BigQuery example with include clause (#9572) | -| [e33f1a12d](https://github.com/apache/airflow/commit/e33f1a12d72ac234e4897f44b326a332acf85901) | 2020-06-30 | Add template_ext to BigQueryInsertJobOperator (#9568) | -| [40add26d4](https://github.com/apache/airflow/commit/40add26d459c2511a6d9d305ae7300f0d6104211) | 2020-06-29 | Remove almost all references to airflow.contrib (#9559) | -| [c420dbd6e](https://github.com/apache/airflow/commit/c420dbd6e13e17867eb4ccc4271b37966310ac0f) | 2020-06-27 | Bump Pylint to 2.5.3 (#9294) | -| [0051c89cb](https://github.com/apache/airflow/commit/0051c89cba02d55236c913ce0110f7d5111ba436) | 2020-06-26 | nitpick fix (#9527) | -| [87fdbd070](https://github.com/apache/airflow/commit/87fdbd0708d942af98d35604fe5962962e25d246) | 2020-06-25 | Use literal syntax instead of function calls to create data structure (#9516) | -| [7256f4caa](https://github.com/apache/airflow/commit/7256f4caa226f8f8632d6e2d38d8c94cb3250a6f) | 2020-06-22 | Pylint fixes and deprecation of rare used methods in Connection (#9419) | -| [e13a14c87](https://github.com/apache/airflow/commit/e13a14c8730f4f633d996dd7d3468fe827136a84) | 2020-06-21 | Enable & Fix Whitespace related PyDocStyle Checks (#9458) | -| 
[5b680e27e](https://github.com/apache/airflow/commit/5b680e27e8118861ef484c00a4b87c6885b0a518) | 2020-06-19 | Don't use connection to store task handler credentials (#9381) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [416334e2e](https://github.com/apache/airflow/commit/416334e2ecd21d8a532af6102f1cfa9ac921a97a) | 2020-06-19 | Properly propagated warnings in operators (#9348) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [4e09c6442](https://github.com/apache/airflow/commit/4e09c64423bfaabd02a18b5fe7757dc15451ab73) | 2020-06-18 | Adds GCP Secret Manager Hook (#9368) | -| [40bf8f28f](https://github.com/apache/airflow/commit/40bf8f28f97f17f40d993d207ea740eba54593ee) | 2020-06-18 | Detect automatically the lack of reference to the guide in the operator descriptions (#9290) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [639972d99](https://github.com/apache/airflow/commit/639972d995d848b16a3f283576efdbde28b8fdef) | 2020-06-16 | Add support for latest Apache Beam SDK in Dataflow operators (#9323) | -| [1459970b3](https://github.com/apache/airflow/commit/1459970b3b9780e139ce029ae889fd8f69a37bc7) | 2020-06-15 | Rename CloudBuildCreateBuildOperator to CloudBuildCreateOperator (#9314) | -| [431ea3291](https://github.com/apache/airflow/commit/431ea3291c9bf236bccdf8446d753c630ada2b25) | 2020-06-15 | Resolve upstream tasks when template field is XComArg (#8805) | -| [aee6ab94e](https://github.com/apache/airflow/commit/aee6ab94eb956347ad560cfe2673bc6011074513) | 2020-06-15 | Wait for pipeline state in Data Fusion operators (#8954) | -| [fb1c8b83d](https://github.com/apache/airflow/commit/fb1c8b83d400506a16c10e3d6623a913847e5cf5) | 2020-06-10 | Add test for BQ operations using location (#9206) | -| [a26afbfa5](https://github.com/apache/airflow/commit/a26afbfa51b0981ae742c6171938b57a80aace2b) | 2020-06-10 | Make generated job_id more informative in BQ insert_job (#9203) | -| [c41192fa1](https://github.com/apache/airflow/commit/c41192fa1fc5c2b3e7b8414c59f656ab67bbef28) | 2020-06-10 | Upgrade pendulum to latest major version ~2.0 (#9184) | -| [b1c8c5ed5](https://github.com/apache/airflow/commit/b1c8c5ed5bba3a852a5446f3fdd1131b4b22637a) | 2020-06-09 | Allows using private endpoints in GKEStartPodOperator (#9169) | -| [5918efc86](https://github.com/apache/airflow/commit/5918efc86a2217caa641a6ada289eee1c21407f8) | 2020-06-05 | Add 3.8 to the test matrices (#8836) | -| [9bcdadaf7](https://github.com/apache/airflow/commit/9bcdadaf7e6e73d3d2246fbbd32a9f30a1b43ca9) | 2020-06-05 | Add 'main' param to template_fields in DataprocSubmitPySparkJobOperator (#9154) | -| 
[f56811dff](https://github.com/apache/airflow/commit/f56811dff3af66cbceb0418f11e00507bab58674) | 2020-06-05 | [AIRFLOW-6290] Create guide for GKE operators (#8883) | -| [76962867b](https://github.com/apache/airflow/commit/76962867b5877cf5ffd1b6004453f783c0732ab1) | 2020-06-04 | Fix sql_to_gcs hook gzip of schema_file (#9140) | -| [17adcea83](https://github.com/apache/airflow/commit/17adcea835cb7b0cf2d8da0ac7dda5549cfa3e45) | 2020-06-02 | Fix handling of subprocess error handling in s3_file_transform and gcs (#9106) | -| [789852546](https://github.com/apache/airflow/commit/78985254683c359f7444a7eb5f6ee4967c37d61f) | 2020-06-01 | Add BigQueryInsertJobOperator (#8868) | -| [29eb68b90](https://github.com/apache/airflow/commit/29eb68b90b5df692ac322be0939af5e7fa9b71bc) | 2020-05-31 | Create guide for Dataproc Operators (#9037) | -| [886afaf62](https://github.com/apache/airflow/commit/886afaf622602aa97f925bc3ee4fc27aa995c445) | 2020-05-29 | Add example dag and system test for LocalFilesystemToGCSOperator (#9043) | -| [a779c4dfc](https://github.com/apache/airflow/commit/a779c4dfc278d6ece480b012764ea5814dc78dee) | 2020-05-29 | add separate example dags and system tests for GCSToGoogleSheetsOperator (#9066) | -| [ada26be23](https://github.com/apache/airflow/commit/ada26be23c913796c2ae77b91cb7d113dfec75a6) | 2020-05-29 | Add correct description for dst param in LocalFilesystemToGCSOperator (#9055) | -| [81b2761b8](https://github.com/apache/airflow/commit/81b2761b86dae2d21a6ee859d49c08d46fea6def) | 2020-05-29 | add example dag and system test for GoogleSheetsToGCSOperator (#9056) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [3994030ea](https://github.com/apache/airflow/commit/3994030ea678727daaf9c2bfed0ca94a096f8d2a) | 2020-05-26 | Refactor BigQuery operators (#8858) | -| [cdb3f2545](https://github.com/apache/airflow/commit/cdb3f25456e49d0199cd7ccd680626dac01c9be6) | 2020-05-26 | All classes in backport providers are now importable in Airflow 1.10 (#8991) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [cf5cf45e1](https://github.com/apache/airflow/commit/cf5cf45e1c0dff9a40e02f0dc221542f974831a7) | 2020-05-23 | Support YAML input for CloudBuildCreateOperator (#8808) | -| [499493c5c](https://github.com/apache/airflow/commit/499493c5c5cf324ab8452ead80a10b71ce0c3b14) | 2020-05-19 | [AIRFLOW-6586] Improvements to gcs sensor (#7197) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [841d81664](https://github.com/apache/airflow/commit/841d81664737c25d73d095a7dab5de80d369c87c) | 2020-05-19 | Allow setting the pooling time in DLPHook (#8824) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [15273f0ea](https://github.com/apache/airflow/commit/15273f0ea05ec579c631ce26b5d620233ebdc4d2) | 2020-05-16 | Check for same task instead of Equality to detect Duplicate Tasks (#8828) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [e1e833bb2](https://github.com/apache/airflow/commit/e1e833bb260879ecb9a1f80f28450a3656c0e598) | 2020-05-13 | Update GoogleBaseHook to not follow 308 and use 60s timeout (#8816) | -| [8b5491971](https://github.com/apache/airflow/commit/8b54919711a203c3f35d98c6310a55d4df5da590) | 2020-05-12 | Refactor BigQuery hook methods to use python library (#8631) | -| [6911dfe83](https://github.com/apache/airflow/commit/6911dfe8372a33df67ce1fdd3c2bca1047718f60) | 2020-05-12 | Fix template fields in Google operators (#8840) | -| [4b06fde0f](https://github.com/apache/airflow/commit/4b06fde0f10ce178b3c336c5d901e3b089f2863d) | 2020-05-12 | Fix Flake8 errors (#8841) | -| [1d12c347c](https://github.com/apache/airflow/commit/1d12c347cb258e7081804da1f9f5ffdedc003163) | 2020-05-12 | Refactor BigQuery check operators (#8813) | -| [493b685d7](https://github.com/apache/airflow/commit/493b685d7879cfee532390ba0909d4b1d8764267) | 2020-05-10 | Add separate example DAGs and system tests for google cloud speech (#8778) | -| [79ef8bed8](https://github.com/apache/airflow/commit/79ef8bed891c22eb76adf99158288d1b44426dc0) | 2020-05-10 | Added Upload Multiple Entity Read Files to specified big query dataset (#8610) | -| [280f1f0c4](https://github.com/apache/airflow/commit/280f1f0c4cc49aba1b2f8b456326795733769d18) | 2020-05-10 | Correctly restore upstream_task_ids when deserializing Operators (#8775) | -| [58aefb23b](https://github.com/apache/airflow/commit/58aefb23b1d456bbb24876a4e3ff14f25d6274b0) | 2020-05-08 | Added SDFtoGCSOperator (#8740) | -| [723c52c94](https://github.com/apache/airflow/commit/723c52c942b49b0e8c8fa8667a4a6a45fa249498) | 2020-05-07 | Add 
documentation for SpannerDeployInstanceOperator (#8750) | -| [25ee4211b](https://github.com/apache/airflow/commit/25ee4211b345ce7c19fb7366fd230838c34f1d47) | 2020-05-06 | Support all RuntimeEnvironment parameters in DataflowTemplatedJobStartOperator (#8531) | -| [8d6f1aa4b](https://github.com/apache/airflow/commit/8d6f1aa4b5bb8809ffc55dc0c62e6d0e89f331e5) | 2020-05-05 | Support num_retries field in env var for GCP connection (#8700) | -| [67caae0f2](https://github.com/apache/airflow/commit/67caae0f25db4eec42b8e81c85683aabdd8d6c1a) | 2020-05-04 | Add system test for gcs_to_bigquery (#8556) | -| [bc45fa675](https://github.com/apache/airflow/commit/bc45fa6759203b4c26b52e693dac97486a84204e) | 2020-05-03 | Add system test and docs for Facebook Ads operators (#8503) | -| [a28c66f23](https://github.com/apache/airflow/commit/a28c66f23d373cd0f8bfc765a515f21d4b66a0e9) | 2020-04-30 | [AIRFLOW-4734] Upsert functionality for PostgresHook.insert_rows() (#8625) | -| [992a24ce4](https://github.com/apache/airflow/commit/992a24ce41067d3b73f293878e71835892cbb632) | 2020-04-28 | Split and improve BigQuery example DAG (#8529) | -| [c1fb28230](https://github.com/apache/airflow/commit/c1fb28230fa0d36ef86c452c70254b253a113f9c) | 2020-04-28 | Refactor BigQueryHook dataset operations (#8477) | -| [e8d0f8fea](https://github.com/apache/airflow/commit/e8d0f8feab0ec08e248cd381359112ad6a832f5b) | 2020-04-26 | Improve idempodency in CloudDataTransferServiceCreateJobOperator (#8430) | -| [37fdfa977](https://github.com/apache/airflow/commit/37fdfa9775f43a5fa15de9c53ab33ecdf97513c5) | 2020-04-26 | [AIRFLOW-6281] Create guide for GCS to GCS transfer operators (#8442) | -| [14b22e6ff](https://github.com/apache/airflow/commit/14b22e6ffeb3af1f68e8362a1d0061b41364019c) | 2020-04-25 | Add hook and operator for Google Cloud Life Sciences (#8481) | -| [72ddc94d1](https://github.com/apache/airflow/commit/72ddc94d1ee08b414102e0b8ac197a3d8e965707) | 2020-04-23 | Pass location using parmamter in Dataflow integration (#8382) | -| [912aa4b42](https://github.com/apache/airflow/commit/912aa4b4237695275db6379cf2f0a633ea6087bc) | 2020-04-23 | Added GoogleDisplayVideo360DownloadLineItemsOperator (#8174) | -| [57c8c0583](https://github.com/apache/airflow/commit/57c8c05839f66ed2909b1bee8ff6976432db82aa) | 2020-04-22 | Use python client in BQ hook create_empty_table/dataset and table_exists (#8377) | -| [5d3a7eef3](https://github.com/apache/airflow/commit/5d3a7eef30b30fa466d8173f13abe4c356d73aef) | 2020-04-20 | Allow multiple extra_packages in Dataflow (#8394) | -| [79c99b1b6](https://github.com/apache/airflow/commit/79c99b1b6ae2ff5b0c8ab892f7f3fb1b44724121) | 2020-04-18 | Added location parameter to BigQueryCheckOperator (#8273) | -| [79d3f33c1](https://github.com/apache/airflow/commit/79d3f33c1b65c9c7e7b1a75e25d38cab9aa4517f) | 2020-04-17 | Clean up temporary files in Dataflow operators (#8313) | -| [efcffa323](https://github.com/apache/airflow/commit/efcffa323ddb5aa9f5907aa86808f3f3b4f5bd87) | 2020-04-16 | Add Dataproc SparkR Example (#8240) | -| [b198a1fa9](https://github.com/apache/airflow/commit/b198a1fa94c44228dc7358552aeb6a5371ae0da2) | 2020-04-15 | Create guide for BigQuery operators (#8276) | -| [2636cc932](https://github.com/apache/airflow/commit/2636cc932c3b156644edd46635cf9ff995c83159) | 2020-04-14 | Raise exception when GCP credential doesn't support account impersonation (#8213) | -| [eee4ebaee](https://github.com/apache/airflow/commit/eee4ebaeeb1991480ee178ddb600bc69b2a88764) | 2020-04-14 | Added Facebook Ads Operator #7887 
(#8008) | -| [8cae07ea1](https://github.com/apache/airflow/commit/8cae07ea1873a90516120d9ffbd28e7fdd2f78a4) | 2020-04-14 | fixed typo (#8294) | -| [45c898330](https://github.com/apache/airflow/commit/45c8983306ab1c54abdacd8f870e790fad25cb37) | 2020-04-13 | Less aggressive eager upgrade of requirements (#8267) | -| [1fd9ed384](https://github.com/apache/airflow/commit/1fd9ed3840361afa1e9456ccb0dfd5a60fba4e85) | 2020-04-13 | Add mypy plugin for decorators. (#8145) | -| [327b0a9f7](https://github.com/apache/airflow/commit/327b0a9f77bbcbe3f977a37de04264c2eff4bee1) | 2020-04-13 | Added GoogleDisplayVideo360UploadLineItemsOperator (#8216) | -| [bb5e403a3](https://github.com/apache/airflow/commit/bb5e403a320e7377e5040cb180f61b4f5a9ea558) | 2020-04-10 | Honor schema type for MySQL to GCS data pre-process (#8090) | -| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09 | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170) | -| [3fc89f29f](https://github.com/apache/airflow/commit/3fc89f29f5bcd1529089fa6cb9c44843614f9ec5) | 2020-04-06 | [AIRFLOW-7106] Cloud data fusion integration - Allow to pass args to start pipeline (#7849) | -| [7ef75d239](https://github.com/apache/airflow/commit/7ef75d2393f30d155de550e6d1ee8c055e2abfee) | 2020-04-03 | [AIRFLOW-7117] Honor self.schema in sql_to_gcs as schema to upload (#8049) | -| [ed2bc0057](https://github.com/apache/airflow/commit/ed2bc00576b39a88e3e1fb79092494f4bfdcbf5c) | 2020-04-02 | Add Google Ads list accounts operator (#8007) | -| [3808a6206](https://github.com/apache/airflow/commit/3808a6206e70d4af84b39ea7078df54f02c1435e) | 2020-04-01 | Unify Google class/package names (#8033) | -| [8a0240257](https://github.com/apache/airflow/commit/8a02402576f83869d5134b4bddef5d73c15a8320) | 2020-03-31 | Rename CloudBaseHook to GoogleBaseHook and move it to google.common (#8011) | -| [8e8978007](https://github.com/apache/airflow/commit/8e897800716c8ccedd1c53f2d083cb295786aa50) | 2020-03-31 | Add more refactor steps for providers.google (#8010) | -| [aae3b8fb2](https://github.com/apache/airflow/commit/aae3b8fb27870cb3cfba5ed73e35e08d520ef014) | 2020-03-31 | Individual package READMEs (#8012) | -| [779023968](https://github.com/apache/airflow/commit/779023968f983c91701f687bc823dc338934cdad) | 2020-03-30 | [AIRFLOW-7075] Operators for storing information from GCS into GA (#7743) | -| [49abce521](https://github.com/apache/airflow/commit/49abce52178c81954f8a25608f70ffe02fcf7b19) | 2020-03-30 | Improve system tests for Cloud Build (#8003) | -| [0f19a930d](https://github.com/apache/airflow/commit/0f19a930d1a7dec2a96bab0de144829f83cc0626) | 2020-03-29 | Remove GKEStartPodOperator when backporting (#7908) | -| [0e1c238b2](https://github.com/apache/airflow/commit/0e1c238b2fff3a092c93368125bc8d82abc4b308) | 2020-03-28 | Get Airflow Variables from GCP Secrets Manager (#7946) | -| [eb4af4f94](https://github.com/apache/airflow/commit/eb4af4f944c77e67e167bbb6b0a2aaf075a95b50) | 2020-03-28 | Make BaseSecretsBackend.build_path generic (#7948) | -| [01f99426f](https://github.com/apache/airflow/commit/01f99426fddd2a24552f352edcb271fa78cf3b15) | 2020-03-28 | Add download/upload operators for GCS and Google Sheets (#7866) | -| [892522f8e](https://github.com/apache/airflow/commit/892522f8e2aeedc1ad842a08aaea967b0cae077f) | 2020-03-26 | Change signature of GSheetsHook methods (#7853) | -| [bfd425157](https://github.com/apache/airflow/commit/bfd425157a746402b516f8fc9e48f4ddccd794ce) | 2020-03-26 | Improve idempotency in 
MLEngineHook.create_model (#7811) | -| [f9c226343](https://github.com/apache/airflow/commit/f9c226343d94a7732da280d1dd086bf1ba291c77) | 2020-03-26 | Fix CloudSecretsManagerBackend invalid connections_prefix (#7861) | -| [e3920f12f](https://github.com/apache/airflow/commit/e3920f12f483b53950507c50f6ab6a4318072859) | 2020-03-26 | Improve setUp/tearDown in Cloud Firestore system test (#7862) | -| [8ba8a7295](https://github.com/apache/airflow/commit/8ba8a7295a31f6b44894bfcaea36fa93b8d8c0d0) | 2020-03-26 | Improve example DAGs for Cloud Memorystore (#7855) | -| [f7d1a437c](https://github.com/apache/airflow/commit/f7d1a437c17461b5ab768b75d58f0cb026b2a818) | 2020-03-26 | Fix CloudMemorystoreCreateInstanceAndImportOperator operator (#7856) | -| [beef6c230](https://github.com/apache/airflow/commit/beef6c230e4ff266af7c16b639bfda659b2bf6c0) | 2020-03-26 | Improve authorization in GCP system tests (#7863) | -| [5f165f3e4](https://github.com/apache/airflow/commit/5f165f3e4231ebd420ce643211a93e1fecf4877e) | 2020-03-26 | [AIRFLOW-5801] Get GCP credentials from file instead of JSON blob (#7869) | -| [686d7d50b](https://github.com/apache/airflow/commit/686d7d50bd21622724d6818021355bc6885fd3de) | 2020-03-25 | Standardize SecretBackend class names (#7846) | -| [1982c3fdc](https://github.com/apache/airflow/commit/1982c3fdca1f04cfc41fc5b5e285d8f01c6b76ab) | 2020-03-24 | Run Dataflow for ML Engine summary in venv (#7809) | -| [eef87b995](https://github.com/apache/airflow/commit/eef87b9953347a65421f315a07dbef37ded9df66) | 2020-03-23 | [AIRFLOW-7105] Unify Secrets Backend method interfaces (#7830) | -| [529db07b2](https://github.com/apache/airflow/commit/529db07b2ee73d886e37e8b3415462c730187b15) | 2020-03-23 | Improve Google PubSub hook publish method (#7831) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [a001489b5](https://github.com/apache/airflow/commit/a001489b5928ebfc35f990a29d1c9c2ecb80bd61) | 2020-03-23 | Improve example DAG for ML Engine (#7810) | -| [9e5a8e7f8](https://github.com/apache/airflow/commit/9e5a8e7f83cf2368315fce62f8d81304f7ba2f04) | 2020-03-23 | Add call to Super class in 'google' providers (#7823) | -| [b86bf79bf](https://github.com/apache/airflow/commit/b86bf79bff615e61de98bead4d02eace5690d5fb) | 2020-03-23 | Fix typo in GCP credentials_provider's docstring (#7818) | -| [56c013ce9](https://github.com/apache/airflow/commit/56c013ce922eb18e5f7dd4410986afbcc6f29025) | 2020-03-23 | Add missing docstring in BigQueryHook.create_empty_table (#7817) | -| [426a79847](https://github.com/apache/airflow/commit/426a79847ced832ca3f67c135fd8830ebf1de7d2) | 2020-03-23 | Imrove support for laatest API in MLEngineStartTrainingJobOperator (#7812) | -| [cdf1809fc](https://github.com/apache/airflow/commit/cdf1809fce0e59c8379a799f1738d8d813abbf51) | 2020-03-23 | [AIRFLOW-7104] Add Secret backend for GCP Secrets Manager (#7795) | -| [27dac00e1](https://github.com/apache/airflow/commit/27dac00e125b87626a0b87074d61e6d38031bf47) | 2020-03-22 | [AIRFLOW-7099] Improve system test for cloud transfer service (#7794) | -| [0daf5d729](https://github.com/apache/airflow/commit/0daf5d729acef4e9aef5226452dff774e80430cd) | 2020-03-22 | Add ability to specify a maximum modified time for objects in GCSToGCSOperator (#7791) | -| [c8088c2bd](https://github.com/apache/airflow/commit/c8088c2bd70a16605a5d4b1a66a22309359d6712) | 2020-03-20 | [AIRFLOW-7100] Add GoogleAnalyticsGetAdsLinkOperator (#7781) | -| 
[5106a2931](https://github.com/apache/airflow/commit/5106a29314b413d168bcba7a64bf91c04fdb5dfe) | 2020-03-20 | [AIRFLOW-6752] Add GoogleAnalyticsRetrieveAdsLinksListOperator (#7748) | -| [759ce2a80](https://github.com/apache/airflow/commit/759ce2a80c95832fe4773c9f4fde23e1b03cbc6f) | 2020-03-20 | [AIRFLOW-6978] Add PubSubPullOperator (#7766) | -| [6b9b214e4](https://github.com/apache/airflow/commit/6b9b214e4c3b3afa8ea2e1a5c1e24993013d60ac) | 2020-03-20 | [AIRFLOW-6732] Add GoogleAdsHook and GoogleAdsToGcsOperator (#7692) | -| [b11891696](https://github.com/apache/airflow/commit/b11891696946d1461174b385c88d6af8abb99768) | 2020-03-19 | [AIRFLOW-7069] Fix cloudsql system tests (#7770) | -| [ae854cae5](https://github.com/apache/airflow/commit/ae854cae5a2cf8cae37edf7e0813ad01bccfbc30) | 2020-03-19 | [AIRFLOW-7082] Remove catch_http_exception decorator in GCP hooks (#7756) | -| [7e1e954d2](https://github.com/apache/airflow/commit/7e1e954d23ce272b0a71188f0f535e20d54be443) | 2020-03-19 | [AIRFLOW-7085] Cache credentials, project_id in GCP Base Hook (#7759) | -| [6e21c139b](https://github.com/apache/airflow/commit/6e21c139b3cce3f895040939f0b02e3e0ba36141) | 2020-03-19 | [AIRFLOW-XXXX] Fix reference to GCP classes in guides (#7762) | -| [ce022a3f7](https://github.com/apache/airflow/commit/ce022a3f72b7735087d4c3bbe81d293a0ab75327) | 2020-03-19 | [AIRFLOW-XXXX] Add cross-references for operators guide (#7760) | -| [029c84e55](https://github.com/apache/airflow/commit/029c84e5527b6db6bdbdbe026f455da325bedef3) | 2020-03-18 | [AIRFLOW-5421] Add Presto to GCS transfer operator (#7718) | -| [63a3102ed](https://github.com/apache/airflow/commit/63a3102ede8fb8f764d251b20cad5ee5bef84f50) | 2020-03-18 | [AIRFLOW-7064] Add CloudFirestoreExportDatabaseOperator (#7725) | -| [73305c7bd](https://github.com/apache/airflow/commit/73305c7bd57f14444804c13b8b290f479832d3db) | 2020-03-18 | [AIRFLOW-7081] Remove env variables from GCP guide (#7755) | -| [60fdbf6d9](https://github.com/apache/airflow/commit/60fdbf6d9255d34a8967400e9585b1cd5d29d3e9) | 2020-03-18 | [AIRFLOW-5610] Add ability to specify multiple objects to copy in GCSToGCSOperator (#7728) | -| [de7e934ca](https://github.com/apache/airflow/commit/de7e934ca3f21ce82f67accf92811b3ac044476f) | 2020-03-17 | [AIRFLOW-7079] Remove redundant code for storing template_fields (#7750) | -| [0de0347b2](https://github.com/apache/airflow/commit/0de0347b27a961c46ee49da6dfa9205321657749) | 2020-03-17 | [AIRFLOW-6855]: Escape project_dataset_table in SQL query in gcs to bq … (#7475) | -| [91557c6f8](https://github.com/apache/airflow/commit/91557c6f87529c010b8ad1110ece35fd7fd751e4) | 2020-03-17 | [AIRFLOW-7073] GKEStartPodOperator always use connection credentials (#7738) | -| [51161dbd9](https://github.com/apache/airflow/commit/51161dbd9de0c966016cec4d5036877890daee7c) | 2020-03-16 | [AIRFLOW-5664] Store timestamps with microseconds precision (#6354) | -| [2bc020c43](https://github.com/apache/airflow/commit/2bc020c43112dd3a769311de8d5012e8e8f399ee) | 2020-03-14 | [AIRFLOW-7055] Verbose logging option for google provider (#7711) | -| [c997cab42](https://github.com/apache/airflow/commit/c997cab42d8695ac444e63dfe4b948a7ea82ed89) | 2020-03-13 | [AIRFLOW-6724] Add Google Analytics 360 Accounts Retrieve Operator (#7630) | -| [137896f32](https://github.com/apache/airflow/commit/137896f326cd29b59902a887e4c4e58f940ff62b) | 2020-03-12 | [AIRFLOW-7034] Remove feature: Assigning Dag to task using Bitshift Op (#7685) | -| 
[1f77f943d](https://github.com/apache/airflow/commit/1f77f943d5d85f66b6a988e8ef6506525eaf4732) | 2020-03-10 | [AIRFLOW-6980] Improve system tests and building providers package (#7615) | -| [bf9b6b6d7](https://github.com/apache/airflow/commit/bf9b6b6d70455352bbf807871c8eeb6324be7e54) | 2020-03-09 | [AIRFLOW-5013] Add GCP Data Catalog Hook and operators (#7664) | -| [e5130dc9f](https://github.com/apache/airflow/commit/e5130dc9fe89187e95071e678ea3b46600866762) | 2020-03-09 | [AIRFLOW-2911] Add job cancellation capability to Dataflow service (#7659) | -| [faf0df4b9](https://github.com/apache/airflow/commit/faf0df4b9460b7f037ee390addbd2c6effcae013) | 2020-03-09 | [AIRFLOW-XXXX] Fix upsert operator in BQ example DAG (#7666) | -| [42eef3821](https://github.com/apache/airflow/commit/42eef38217e709bc7a7f71bf0286e9e61293a43e) | 2020-03-07 | [AIRFLOW-6877] Add cross-provider dependencies as extras (#7506) | -| [b5b9795f0](https://github.com/apache/airflow/commit/b5b9795f0446bb484a91ee485f49ea456f1c26c4) | 2020-03-07 | [AIRFLOW-6973] Make GCSCreateBucketOperator idempotent (fix) (#7624) | -| [6b65038fb](https://github.com/apache/airflow/commit/6b65038fb409ba1040e70305444816d8f5cfdc47) | 2020-03-06 | [AIRFLOW-6990] Improve system tests for Google Marketing Platform (#7631) | -| [755fe5224](https://github.com/apache/airflow/commit/755fe52249ba1cd965cf2f87fa7a428b8197a38a) | 2020-03-05 | [AIRFLOW-6915] Add AI Platform Console Link for MLEngineStartTrainingJobOperator (#7535) | -| [cb2f33911](https://github.com/apache/airflow/commit/cb2f339116cf2093da447748892fac68aecbb888) | 2020-03-04 | [AIRFLOW-6973] Make GCSCreateBucketOperator idempotent (#7609) | -| [09fea3ce8](https://github.com/apache/airflow/commit/09fea3ce8e4d7816281963bb8f2cb06f4de6db5c) | 2020-03-04 | [AIRFLOW-6977] Fix BigQuery DTS example DAG (#7612) | -| [8230ccc48](https://github.com/apache/airflow/commit/8230ccc48b157c89b2b893d42c6fe1523b83363a) | 2020-03-04 | [AIRFLOW-6926] Fix Google Tasks operators return types and idempotency (#7547) | -| [0d1e3088a](https://github.com/apache/airflow/commit/0d1e3088aa9f16eaeeb7b18eccec8f35c79a53df) | 2020-03-04 | [AIRFLOW-6970] Improve GCP Video Intelligence system tests (#7604) | -| [ab6bb0012](https://github.com/apache/airflow/commit/ab6bb0012c38740b76e864d42d299c5c7a9972a3) | 2020-03-03 | [AIRFLOW-6971] Fix return type in CloudSpeechToTextRecognizeSpeechOperator (#7607) | -| [3db4ade3d](https://github.com/apache/airflow/commit/3db4ade3dc9660c21c28187100a22008552f2bd3) | 2020-02-29 | [AIRFLOW-6924] Fix Google DLP operators return types (#7546) | -| [008b4bab1](https://github.com/apache/airflow/commit/008b4bab14222da068b737d6332db4963b994007) | 2020-02-27 | [AIRFLOW-6730] Use total_seconds instead of seconds (#7363) | -| [bb552b2d9](https://github.com/apache/airflow/commit/bb552b2d9fd595cc3eb1b3a2f637f29b814878d7) | 2020-02-25 | [AIRFLOW-6908] Lazy load AirflowException (#7528) | -| [d1a34246a](https://github.com/apache/airflow/commit/d1a34246ac593901f8599b102dc3d7efa4dd61e4) | 2020-02-25 | [AIRFLOW-6593] Add GCP Stackdriver Alerting Hooks and Operators (#7322) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [dcf874352](https://github.com/apache/airflow/commit/dcf87435219307d4e916a8abc2b819ad75e2b1cf) | 2020-02-24 | [AIRFLOW-6894] Prevent db query in example_dags (#7516) | -| 
[4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [35b961637](https://github.com/apache/airflow/commit/35b9616378d1cfba7c2eb3c71e20acb6734b7c77) | 2020-02-21 | [AIRFLOW-4973] Add Cloud Data Fusion Pipeline integration (#7486) | -| [aff3a361b](https://github.com/apache/airflow/commit/aff3a361b4092212c0757f9ce88fa2e40d25d1f4) | 2020-02-20 | [AIRFLOW-6558] Campaign Manager operators for conversions (#7420) | -| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18 | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) | -| [5b199cb86](https://github.com/apache/airflow/commit/5b199cb86be5b1aefbd8620185033d6f635713c1) | 2020-02-17 | [AIRFLOW-XXXX] Typo in example_bigquery DAG (#7429) | -| [2c9345a8e](https://github.com/apache/airflow/commit/2c9345a8e03d37a2676efa2f2ea7e8b7814c5345) | 2020-02-17 | [AIRFLOW-6759] Added MLEngine operator/hook to cancel MLEngine jobs (#7400) | -| [946bdc23c](https://github.com/apache/airflow/commit/946bdc23c039637b0383e1269f99bdd1b2426565) | 2020-02-16 | [AIRFLOW-6405] Add GCP BigQuery Table Upsert Operator (#7126) | -| [2381c820c](https://github.com/apache/airflow/commit/2381c820c8aaeffc1c9b4ed47832038833400eb8) | 2020-02-13 | [AIRFLOW-6505] Let emoji encoded properly for json.dumps() (#7399) | -| [04c1fefbf](https://github.com/apache/airflow/commit/04c1fefbf26a73ed13881d2ec14eada48028ff72) | 2020-02-03 | [AIRFLOW-6676] added GCSDeleteBucketOperator (#7307) | -| [a0252748f](https://github.com/apache/airflow/commit/a0252748ff312daede15c6f0a3d39e16c774461c) | 2020-02-03 | [AIRFLOW-6717] Remove non-existent field from templated_fields (#7340) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [9d8d07557](https://github.com/apache/airflow/commit/9d8d0755789d4aeadc5d3015f3cdde62901f85b8) | 2020-02-03 | [AIRFLOW-6715] Fix Google Cloud DLP Example DAG (#7337) | -| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02 | [AIRFLOW-6708] Set unique logger names (#7330) | -| [373c6aa4a](https://github.com/apache/airflow/commit/373c6aa4a208284b5ff72987e4bd8f4e2ada1a1b) | 2020-01-30 | [AIRFLOW-6682] Move GCP classes to providers package (#7295) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [057f3ae3a](https://github.com/apache/airflow/commit/057f3ae3a4afedf6d462ecf58b01dd6304d3e135) | 2020-01-29 | [AIRFLOW-6670][depends on AIRFLOW-6669] Move contrib operators to providers package (#7286) | -| [ceea293c1](https://github.com/apache/airflow/commit/ceea293c1652240e7e856c201e4341a87ef97a0f) | 2020-01-28 | [AIRFLOW-6656] Fix AIP-21 moving (#7272) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | -| 
[f4d3e5e54](https://github.com/apache/airflow/commit/f4d3e5e54507f52a00a9b95aa48eb0260e17224d) | 2020-01-13 | [AIRFLOW-6102] [AIP-21] Rename Dataproc operators (#7151) | -| [e7bf8ecb4](https://github.com/apache/airflow/commit/e7bf8ecb48f0299af8091433535ac573c2afd1cf) | 2020-01-13 | [AIRFLOW-6119] [AIP-21] Rename GCS operators, hooks and sensors (#7125) | -| [5b6772cb8](https://github.com/apache/airflow/commit/5b6772cb8391b248cb4b7be5fd3d5c035280fac1) | 2020-01-09 | [AIRFLOW-6125] [AIP-21] Rename S3 operator and SFTP operator (#7112) | -| [4f8592ae8](https://github.com/apache/airflow/commit/4f8592ae8f52ab7f42623d3b43eef0928c9aafb2) | 2020-01-08 | [AIRFLOW-6118] [AIP-21] Rename Pubsub operators and hook (#7046) | -| [20299473f](https://github.com/apache/airflow/commit/20299473f11add6531f607256ee8a0f7f9507ab8) | 2020-01-03 | [AIRFLOW-6115] [AIP-21] Rename GCP vision operators (#7020) | -| [18e8cea4e](https://github.com/apache/airflow/commit/18e8cea4e7487a7dfefc03661e5ebe54c4104ead) | 2020-01-03 | [AIRFLOW-6428] Fix import path for airflow.utils.dates.days_ago in Example DAGs (#7007) | -| [95087af14](https://github.com/apache/airflow/commit/95087af14091f28a83ced8ff1860b86dfd93f93d) | 2019-12-31 | [AIRFLOW-6110] [AIP-21] Rename natural_language service (#6968) | -| [69629a5a9](https://github.com/apache/airflow/commit/69629a5a948ab2c4ac04a4a4dca6ac86d19c11bd) | 2019-12-09 | [AIRFLOW-5807] Move SFTP from contrib to providers. (#6464) | -| [25e9047a4](https://github.com/apache/airflow/commit/25e9047a4a4da5fad4f85c366e3a6262c0a4f68e) | 2019-12-09 | [AIRFLOW-6193] Do not use asserts in Airflow main code (#6749) | -| [ed0a14f32](https://github.com/apache/airflow/commit/ed0a14f321b9dab3554ae395c11c147258536ce8) | 2019-12-09 | [AIRFLOW-6120] Rename GoogleCloudBaseHook (#6734) | -| [2f2f89c14](https://github.com/apache/airflow/commit/2f2f89c148e2b694aee9402707f68065ee7320f8) | 2019-12-01 | [AIRFLOW-6139] Consistent spaces in pylint enable/disable (#6701) | -| [03c870a61](https://github.com/apache/airflow/commit/03c870a6172ab232af6319a30ad8d46622359b10) | 2019-11-26 | [AIRFLOW-6010] Remove cyclic imports and pylint hacks (#6601) | -| [5c4cfea8c](https://github.com/apache/airflow/commit/5c4cfea8c0f488496c1cbcc4c6c5db13d8210979) | 2019-11-15 | [AIRFLOW-5718] Add SFTPToGoogleCloudStorageOperator (#6393) | -| [44a8c37a9](https://github.com/apache/airflow/commit/44a8c37a9a8668469aa825ad21057cca6ac2c186) | 2019-11-13 | [AIRFLOW-XXX] Fix the docstring for Dataproc get_job method (#6581) | -| [d633d3ac4](https://github.com/apache/airflow/commit/d633d3ac44c395e6c43cd388f98fba1ce1c435a3) | 2019-11-13 | [AIRFLOW-5691] Rewrite Dataproc operators to use python library (#6371) | -| [d985c02d9](https://github.com/apache/airflow/commit/d985c02d9fa3d9ec946abc1735b0551fd61fb9f0) | 2019-11-05 | [AIRFLOW-XXX] Add How-To-Guide to GCP PubSub (#6497) | -| [a296cdabd](https://github.com/apache/airflow/commit/a296cdabdb9c9c65cf9a48329cb776aed5c82d43) | 2019-11-04 | [AIRFLOW-5743] Move Google PubSub to providers package (#6476) | -| [470b2a779](https://github.com/apache/airflow/commit/470b2a779d031406a3d5925f2fa2ec40e5c3bccb) | 2019-10-30 | [AIRFLOW-5741] Move Cloud Natural Language to providers (#6421) | -| [f2caa451f](https://github.com/apache/airflow/commit/f2caa451fc2b8ee59163314f9ec1cc372acbadf1) | 2019-10-27 | [AIRFLOW-5742] Move Google Cloud Vision to providers package (#6424) | -| [16d7accb2](https://github.com/apache/airflow/commit/16d7accb22c866d4fbf368e4d979dc1c4a41d93c) | 2019-10-22 | [AIRFLOW-4971] Add Google Display 
& Video 360 integration (#6170) | -| [4e661f535](https://github.com/apache/airflow/commit/4e661f535dea613f9b2e0075676f9a73a97461fe) | 2019-10-22 | [AIRFLOW-5379] Add Google Search Ads 360 operators (#6228) | -| [19e32b4e2](https://github.com/apache/airflow/commit/19e32b4e2c798f662e5d8d1e7c65036c5e7ac125) | 2019-10-18 | [AIRFLOW-5656] Rename provider to providers module (#6333) |
diff --git a/airflow/providers/google/CHANGELOG.rst b/airflow/providers/google/CHANGELOG.rst
new file mode 100644
index 0000000000000..bed88c9df3a89
--- /dev/null
+++ b/airflow/providers/google/CHANGELOG.rst
@@ -0,0 +1,90 @@
+ .. Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements. See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership. The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License. You may obtain a copy of the License at
+
+ .. http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied. See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+
+Changelog
+---------
+
+2.0.0
+.....
+
+Updated ``google-cloud-*`` libraries
+````````````````````````````````````
+
+This release of the provider package contains third-party library updates, which may require updating
+your DAG files or custom hooks and operators if you were using objects from those libraries. The
+upgrades are necessary to use the new features and to obtain the bug fixes that are only available in
+the new versions of these libraries.
+
+Details are covered in the UPDATING.md file of each library, but the following changes deserve
+particular attention.
+
++----------------------------------------+----------------------+---------------------+--------------------------------------------------+
+| Library name                           | Previous constraints | Current constraints | Upgrade Documentation                            |
++========================================+======================+=====================+==================================================+
+| `google-cloud-bigquery-datatransfer `_ | ``>=0.4.0,<2.0.0``   | ``>=3.0.0,<4.0.0``  | `Upgrading google-cloud-bigquery-datatransfer `_ |
++----------------------------------------+----------------------+---------------------+--------------------------------------------------+
+| `google-cloud-datacatalog `_           | ``>=0.5.0,<0.8``     | ``>=1.0.0,<2.0.0``  | `Upgrading google-cloud-datacatalog `_           |
++----------------------------------------+----------------------+---------------------+--------------------------------------------------+
+| `google-cloud-os-login `_              | ``>=1.0.0,<2.0.0``   | ``>=2.0.0,<3.0.0``  | `Upgrading google-cloud-os-login `_              |
++----------------------------------------+----------------------+---------------------+--------------------------------------------------+
+| `google-cloud-pubsub `_                | ``>=1.0.0,<2.0.0``   | ``>=2.0.0,<3.0.0``  | `Upgrading google-cloud-pubsub `_                |
++----------------------------------------+----------------------+---------------------+--------------------------------------------------+
+| `google-cloud-kms `_                   | ``>=1.2.1,<2.0.0``   | ``>=2.0.0,<3.0.0``  | `Upgrading google-cloud-kms `_                   |
++----------------------------------------+----------------------+---------------------+--------------------------------------------------+
+| `google-cloud-tasks `_                 | ``>=1.2.1,<2.0.0``   | ``>=2.0.0,<3.0.0``  | `Upgrading google-cloud-tasks `_                 |
++----------------------------------------+----------------------+---------------------+--------------------------------------------------+
+
+The field names use the snake_case convention
+`````````````````````````````````````````````
+
+If your DAG uses an object from the above-mentioned libraries passed by XCom, it is necessary to
+update the names of the fields that are read. Previously, the fields used the camelCase convention;
+now the snake_case convention is used.
+
+**Before:**
+
+.. code-block:: python
+
+    set_acl_permission = GCSBucketCreateAclEntryOperator(
+        task_id="gcs-set-acl-permission",
+        bucket=BUCKET_NAME,
+        entity="user-{{ task_instance.xcom_pull('get-instance')['persistenceIamIdentity']"
+        ".split(':', 2)[1] }}",
+        role="OWNER",
+    )
+
+
+**After:**
+
+.. code-block:: python
+
+    set_acl_permission = GCSBucketCreateAclEntryOperator(
+        task_id="gcs-set-acl-permission",
+        bucket=BUCKET_NAME,
+        entity="user-{{ task_instance.xcom_pull('get-instance')['persistence_iam_identity']"
+        ".split(':', 2)[1] }}",
+        role="OWNER",
+    )
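+
+
+If a DAG has to work with XCom values produced by both the old and the new versions of these
+libraries during a staged upgrade, a small key-normalization helper can ease the transition. The
+snippet below is only an illustrative sketch, not part of the provider package; the
+``camel_to_snake`` and ``normalize_keys`` helpers are hypothetical names you would define yourself.
+
+.. code-block:: python
+
+    import re
+
+
+    # Hypothetical helpers -- not shipped with the provider package.
+    def camel_to_snake(name: str) -> str:
+        """Convert a camelCase key such as 'persistenceIamIdentity' to snake_case."""
+        return re.sub(r"(?<!^)(?=[A-Z])", "_", name).lower()
+
+
+    def normalize_keys(data: dict) -> dict:
+        """Return a copy of an XCom dict with every top-level key converted to snake_case."""
+        return {camel_to_snake(key): value for key, value in data.items()}
+
+
+    # normalize_keys({"persistenceIamIdentity": "x"})
+    # -> {"persistence_iam_identity": "x"}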
+
+
+1.0.0
+.....
+
+Initial version of the provider.
diff --git a/airflow/providers/google/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/google/PROVIDER_CHANGES_1.0.0.md
deleted file mode 100644
index ec28ce5f8676d..0000000000000
--- a/airflow/providers/google/PROVIDER_CHANGES_1.0.0.md
+++ /dev/null
@@ -1,377 +0,0 @@
-
-
-### Release 1.0.0
-
-| Commit | Committed | Subject |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) |
-| [1dcd3e13f](https://github.com/apache/airflow/commit/1dcd3e13fd0a078fc9440e91b77f6f87aa60dd3b) | 2020-12-05 | Add support for extra links coming from the providers (#12472) |
-| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) |
-| [02d94349b](https://github.com/apache/airflow/commit/02d94349be3d201ce9d37d7358573c937fd010df) | 2020-11-29 | Don't use time.time() or timezone.utcnow() for duration calculations (#12353) |
-| [76bcd08dc](https://github.com/apache/airflow/commit/76bcd08dcae8d62307f5e9b8c2e182b54ed22a27) | 2020-11-28 | Added `@apply_defaults` decorator.
(#12620) | -| [e1ebfa68b](https://github.com/apache/airflow/commit/e1ebfa68b109b5993c47891cfd0b9b7e46b6d770) | 2020-11-27 | Add DataflowJobMessagesSensor and DataflowAutoscalingEventsSensor (#12249) | -| [3fa51f94d](https://github.com/apache/airflow/commit/3fa51f94d7a17f170ddc31908d36c91f4456a20b) | 2020-11-24 | Add check for duplicates in provider.yaml files (#12578) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [9e3b2c554](https://github.com/apache/airflow/commit/9e3b2c554dadf58972198e4e16f15af2f15ec37a) | 2020-11-19 | GCP Secrets Optional Lookup (#12360) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [7ca0b6f12](https://github.com/apache/airflow/commit/7ca0b6f121c9cec6e25de130f86a56d7c7fbe38c) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) (#12438) | -| [8d0950646](https://github.com/apache/airflow/commit/8d09506464c8480fa42e8bfe6a36c6f631cd23f6) | 2020-11-18 | Fix download method in GCSToBigQueryOperator (#12442) | -| [2c0920fba](https://github.com/apache/airflow/commit/2c0920fba5d2f05d2e29cead91127686af277ec2) | 2020-11-17 | Adds mechanism for provider package discovery. (#12383) | -| [2cda2f2a0](https://github.com/apache/airflow/commit/2cda2f2a0a94e5aaed87f0998fa57b4f8bff5e43) | 2020-11-17 | Add missing pre-commit definition - provider-yamls (#12393) | -| [80a957f14](https://github.com/apache/airflow/commit/80a957f142f260daed262b8e93a4d02c12cfeabc) | 2020-11-17 | Add Dataflow sensors - job metrics (#12039) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [917e6c442](https://github.com/apache/airflow/commit/917e6c4424985271c53dd8c413b211896ee55726) | 2020-11-16 | Add provide_file_and_upload to GCSHook (#12310) | -| [cfa4ecfeb](https://github.com/apache/airflow/commit/cfa4ecfeb02661f40b4778733384ac085fb5f04b) | 2020-11-15 | Add DataflowJobStatusSensor and support non-blocking execution of jobs (#11726) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [32b59f835](https://github.com/apache/airflow/commit/32b59f8350f55793df6838a32de662a80483ecda) | 2020-11-12 | Fixes the sending of an empty list to BigQuery `list_rows` (#12307) | -| [250436d96](https://github.com/apache/airflow/commit/250436d962c8c950d38c1eb5e54a998891648cc9) | 2020-11-10 | Fix spelling in Python files (#12230) | -| [502ba309e](https://github.com/apache/airflow/commit/502ba309ea470943f0e99c634269e3d2d13ce6ca) | 2020-11-10 | Enable Markdownlint rule - MD022/blanks-around-headings (#12225) | -| [dd2095f4a](https://github.com/apache/airflow/commit/dd2095f4a8b07c9b1a4c279a3578cd1e23b71a1b) | 2020-11-10 | Simplify string expressions & Use f-string (#12216) | -| [f37c6e6fc](https://github.com/apache/airflow/commit/f37c6e6fce8b704f5af28caa16d0ed7d873a0e4a) | 2020-11-10 | Add Compute Engine SSH hook (#9879) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at 
pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [61feb6ec4](https://github.com/apache/airflow/commit/61feb6ec453f8dda1a0e1fe3ebcc0f1e3224b634) | 2020-11-09 | Provider's readmes generated for elasticsearch and google packages (#12194) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [fcb6b00ef](https://github.com/apache/airflow/commit/fcb6b00efef80c81272a30cfc618202a29e0c6a9) | 2020-11-08 | Add authentication to AWS with Google credentials (#12079) | -| [2ef3b7ef8](https://github.com/apache/airflow/commit/2ef3b7ef8cafe3bdc8bf8db70fbc519b98576366) | 2020-11-08 | Fix ERROR - Object of type 'bytes' is not JSON serializable when using store_to_xcom_key parameter (#12172) | -| [0caec9fd3](https://github.com/apache/airflow/commit/0caec9fd32bee2b3036b5d7bdcb56bd6a3b9dccf) | 2020-11-06 | Dataflow - add waiting for successful job cancel (#11501) | -| [cf9437d79](https://github.com/apache/airflow/commit/cf9437d79f9658d1309e4bfe847fe63d52ec7b99) | 2020-11-06 | Simplify string expressions (#12123) | -| [91a64db50](https://github.com/apache/airflow/commit/91a64db505e50712cd53928b4f2b84aece3cc1c0) | 2020-11-04 | Format all files (without excepions) by black (#12091) | -| [fd3db778e](https://github.com/apache/airflow/commit/fd3db778e715d0f164dda7ee8f672d477a323291) | 2020-11-04 | Add server side cursor support for postgres to GCS operator (#11793) | -| [f1f194026](https://github.com/apache/airflow/commit/f1f1940261744b4fdb67b0b5654488494efa9c64) | 2020-11-04 | Add DataflowStartSQLQuery operator (#8553) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [5f5244b74](https://github.com/apache/airflow/commit/5f5244b74df93cadbb99643cec76281460ca4411) | 2020-11-04 | Add template fields renderers to Biguery and Dataproc operators (#12067) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [45ae145c2](https://github.com/apache/airflow/commit/45ae145c25a19b4185c33ac0c4da920324b3987e) | 2020-11-03 | Log BigQuery job id in insert method of BigQueryHook (#12056) | -| [e324b37a6](https://github.com/apache/airflow/commit/e324b37a67e32c368df50604a00160d7766b5c33) | 2020-11-03 | Add job name and progress logs to Cloud Storage Transfer Hook (#12014) | -| [6071fdd58](https://github.com/apache/airflow/commit/6071fdd58470bb2a6c23fc16481e292b7247d0bb) | 2020-11-02 | Improve handling server errors in DataprocSubmitJobOperator (#11947) | -| [2f703df12](https://github.com/apache/airflow/commit/2f703df12dfd6511722ff9a82d5a569d092fccc2) | 2020-10-30 | Add SalesforceToGcsOperator (#10760) | -| [e5713e00b](https://github.com/apache/airflow/commit/e5713e00b3afcba6f78006ec0e360da317858e4d) | 2020-10-29 | Add drain option when canceling Dataflow pipelines (#11374) | -| [37eaac3c5](https://github.com/apache/airflow/commit/37eaac3c5dc93804413c10a6ca124fd7831befc0) | 2020-10-29 | The PRs which are not approved run subset of tests (#11828) 
| -| [79cb77199](https://github.com/apache/airflow/commit/79cb771992279d40ddd9eb6b0277382313a32898) | 2020-10-28 | Fixing re pattern and changing to use a single character class. (#11857) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [240c7d4d7](https://github.com/apache/airflow/commit/240c7d4d72aac8f6aab98f5913e8f54c4f1372ff) | 2020-10-26 | Google Memcached hooks - improve protobuf messages handling (#11743) | -| [8afdb6ac6](https://github.com/apache/airflow/commit/8afdb6ac6a7997cb14806bc2734c81c00ed8da97) | 2020-10-26 | Fix spellings (#11825) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [6ce855af1](https://github.com/apache/airflow/commit/6ce855af118daeaa4c249669079ab9d9aad23945) | 2020-10-24 | Fix spelling (#11821) | -| [483068745](https://github.com/apache/airflow/commit/48306874538eea7cfd42358d5ebb59705204bfc4) | 2020-10-24 | Use Python 3 style super classes (#11806) | -| [727c739af](https://github.com/apache/airflow/commit/727c739afb565d4d394a8faedc969334cb8e738e) | 2020-10-22 | Improve Cloud Memorystore for Redis example (#11735) | -| [1da8379c9](https://github.com/apache/airflow/commit/1da8379c913843834353b44861c62f332a461bdf) | 2020-10-22 | Fix static checks after merging #10121 (#11737) | -| [91503308c](https://github.com/apache/airflow/commit/91503308c723b186ce6f4026f2a3e2c21030f6e5) | 2020-10-22 | Add Google Cloud Memorystore Memcached Operators (#10121) | -| [950c16d0b](https://github.com/apache/airflow/commit/950c16d0b0ab67bb7af11909de751029faf0313a) | 2020-10-21 | Retry requests in case of error in Google ML Engine Hook (#11712) | -| [2bfc53b5e](https://github.com/apache/airflow/commit/2bfc53b5eb67406d418371b74dc9bc5a07be238e) | 2020-10-21 | Fix doc errors in google provider files. 
(#11713) | -| [53e606210](https://github.com/apache/airflow/commit/53e6062105be0ae1761a354e2055eb0779d12e73) | 2020-10-21 | Enforce strict rules for yamllint (#11709) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [2d854c350](https://github.com/apache/airflow/commit/2d854c3505ccad66e9a7d94267e51bed800433c2) | 2020-10-19 | Add service_account to Google ML Engine operator (#11619) | -| [46a121fb7](https://github.com/apache/airflow/commit/46a121fb7b77c0964e053b58750e2d8bc2bd0b2a) | 2020-10-18 | docs: Update Bigquery clustering docstrings (#11232) | -| [49c58147f](https://github.com/apache/airflow/commit/49c58147fed8a52869d0b0ecc00c102c11972ad0) | 2020-10-18 | Strict type checking for provider Google (#11609) | -| [0823d46a7](https://github.com/apache/airflow/commit/0823d46a7f267f2e45195a175021825367938add) | 2020-10-16 | Add type annotations for AWS operators and hooks (#11434) | -| [3c10ca650](https://github.com/apache/airflow/commit/3c10ca6504be37fabff9a10caefea3fe4df31a02) | 2020-10-16 | Add DataflowStartFlexTemplateOperator (#8550) | -| [8865d14df](https://github.com/apache/airflow/commit/8865d14df4d58dd5f1a4d2ff81c77469959f175a) | 2020-10-16 | Strict type checking for provider google cloud (#11548) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [06141d6d0](https://github.com/apache/airflow/commit/06141d6d01398115e5e54c5766a46ae5514ba2f7) | 2020-10-12 | Google cloud operator strict type check (#11450) | -| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12 | Remove redundant None provided as default to dict.get() (#11448) | -| [1845cd11b](https://github.com/apache/airflow/commit/1845cd11b77f302777ab854e84bef9c212c604a0) | 2020-10-11 | Strict type check for google ads and cloud hooks (#11390) | -| [bd204bb91](https://github.com/apache/airflow/commit/bd204bb91b4bc069284f9a44757c6baba8884140) | 2020-10-11 | Optionally set null marker in csv exports in BaseSQLToGCSOperator (#11409) | -| [75071831b](https://github.com/apache/airflow/commit/75071831baa936d292354f98aac46cd808a4b2b8) | 2020-10-10 | Remove redundant parentheses from Python files (#10967) | -| [8baf657fc](https://github.com/apache/airflow/commit/8baf657fc2b21a601b99b752e4f1176bf8a934ce) | 2020-10-09 | Fix regression in DataflowTemplatedJobStartOperator (#11167) | -| [b0fcf6755](https://github.com/apache/airflow/commit/b0fcf675595494b306800e1a516548dc0dc671f8) | 2020-10-07 | Add AzureFileShareToGCSOperator (#10991) | -| [47b05a87f](https://github.com/apache/airflow/commit/47b05a87f004dc273a4757ba49f03808a86f77e7) | 2020-10-07 | Improve handling of job_id in BigQuery operators (#11287) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [cb52fb0ae](https://github.com/apache/airflow/commit/cb52fb0ae1de1f1140babaed0e97299e4aaf96bf) | 2020-09-27 | Add example DAG and system test for 
MySQLToGCSOperator (#10990) | -| [99accec29](https://github.com/apache/airflow/commit/99accec29d71b0a57fd4e90151b9d4d10321be07) | 2020-09-25 | Fix incorrect Usage of Optional[str] & Optional[int] (#11141) | -| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24 | Fix incorrect Usage of Optional[bool] (#11138) | -| [daf8f3108](https://github.com/apache/airflow/commit/daf8f31080f06c044b4336071bd383bbbcdc6085) | 2020-09-23 | Add template fields renderers for better UI rendering (#11061) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [cb979f9f2](https://github.com/apache/airflow/commit/cb979f9f213bb3c9835a3dc924f84a07f5387378) | 2020-09-22 | Get Airflow configs with sensitive data from CloudSecretManagerBackend (#11024) | -| [76545bb3d](https://github.com/apache/airflow/commit/76545bb3d6fa82ce8eae072dbc74a3b76d8fd53c) | 2020-09-16 | Add example dag and system test for S3ToGCSOperator (#10951) | -| [22c631625](https://github.com/apache/airflow/commit/22c631625fd68abe280528f33b7cfd7603ebf66c) | 2020-09-16 | Fix more docs spellings (#10965) | -| [12a652f53](https://github.com/apache/airflow/commit/12a652f5344c7f03c3d780556ca1829b235fdb2d) | 2020-09-13 | Fix parameter name collision in AutoMLBatchPredictOperator #10723 (#10869) | -| [41a62735e](https://github.com/apache/airflow/commit/41a62735edcebbd9c39e505280646ef5d25aa1d5) | 2020-09-11 | Add on_kill method to BigQueryInsertJobOperator (#10866) | -| [3e91da56e](https://github.com/apache/airflow/commit/3e91da56e8c63a90dc859d8996a896b5d9f8cd43) | 2020-09-11 | fix typo in firebase/example_filestore DAG (#10875) | -| [68cc7273b](https://github.com/apache/airflow/commit/68cc7273bf0c0f562748b5f663da5c12d2cba6a7) | 2020-09-10 | Add on_kill method to DataprocSubmitJobOperator (#10847) | -| [f92095721](https://github.com/apache/airflow/commit/f92095721450c14605c986e165544a7bfb712a3d) | 2020-09-10 | Fix and remove some more typos from spelling_wordlist.txt (#10845) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [078bfaf60](https://github.com/apache/airflow/commit/078bfaf60adc5aebac8c347e7f6e5339ab9b56c0) | 2020-09-08 | Extract missing gcs_to_local example DAG from gcs example (#10767) | -| [10ce31127](https://github.com/apache/airflow/commit/10ce31127f1ff87176158935925afce46a989917) | 2020-09-08 | Deprecate using global as the default region in Google Dataproc operators and hooks (#10772) | -| [f14f37971](https://github.com/apache/airflow/commit/f14f3797163cc45fdcdabfb36ee7d638f70e470d) | 2020-09-07 | [AIRFLOW-10672] Refactor BigQueryToGCSOperator to use new method (#10773) | -| [c8ee45568](https://github.com/apache/airflow/commit/c8ee4556851c36b3b6e644a7746a49583dd53db1) | 2020-09-07 | Refactor DataprocCreateCluster operator to use simpler interface (#10403) | -| [ece685b5b](https://github.com/apache/airflow/commit/ece685b5b895ad1175440b49bf9e620dffd8248d) | 2020-09-05 | Asynchronous execution of Dataproc jobs with a Sensor (#10673) | -| [6e3d7b63d](https://github.com/apache/airflow/commit/6e3d7b63d3b34c34f8b38a7b41f4a5876e1f731f) | 2020-09-04 | Add masterConfig parameter to MLEngineStartTrainingJobOperator (#10578) | -| [804548d58](https://github.com/apache/airflow/commit/804548d58f2036fd4516824a38d0639ba5d5ab0e) | 2020-09-01 | Add Dataprep operators (#10304) | -| 
[11c00bc82](https://github.com/apache/airflow/commit/11c00bc820483691a87cdb16d519dce8dc57c40e) | 2020-08-30 | Fix typos: duplicated "the" (#10647) | -| [2ca615cff](https://github.com/apache/airflow/commit/2ca615cffefe97dfa38e1b7f60d9ed33c6628992) | 2020-08-29 | Update Google Cloud branding (#10642) | -| [1b533f617](https://github.com/apache/airflow/commit/1b533f617e2e0200597d114d7570f6c0d69da1a0) | 2020-08-28 | Fix broken master - DLP (#10635) | -| [5ae82a56d](https://github.com/apache/airflow/commit/5ae82a56dab599de44f1be7027cecc4ef86f7bb6) | 2020-08-28 | Fix Google DLP example and improve ops idempotency (#10608) | -| [3867f7662](https://github.com/apache/airflow/commit/3867f7662559761864ec4e7be26b776c64c2f199) | 2020-08-28 | Update Google Cloud branding (#10615) | -| [91ff31ad1](https://github.com/apache/airflow/commit/91ff31ad1021235bd21c87ad9dbc0b216a908671) | 2020-08-27 | Documentation for Google Cloud Data Loss Prevention (#8201) (#9651) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [d76026545](https://github.com/apache/airflow/commit/d7602654526fdd2876466371404784bd17cfe0d2) | 2020-08-25 | PyDocStyle: No whitespaces allowed surrounding docstring text (#10533) | -| [d1bce91bb](https://github.com/apache/airflow/commit/d1bce91bb21d5a468fa6a0207156c28fe1ca6513) | 2020-08-25 | PyDocStyle: Enable D403: Capitalized first word of docstring (#10530) | -| [866701c80](https://github.com/apache/airflow/commit/866701c8019f49dcb02c9696e4f6e9ce67d13ca6) | 2020-08-25 | Fix typo in "Cloud" (#10534) | -| [47265e7b5](https://github.com/apache/airflow/commit/47265e7b58bc28bcbbffc981442b6cc27a3af39c) | 2020-08-24 | Fix typo in PostgresHook (#10529) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25 | Remove all "noinspection" comments native to IntelliJ (#10525) | -| [3734876d9](https://github.com/apache/airflow/commit/3734876d9898067ee933b84af522d53df6160d7f) | 2020-08-24 | Implement impersonation in google operators (#10052) | -| [b0598b535](https://github.com/apache/airflow/commit/b0598b5351d2d027286e2333231b6c0c0704dba2) | 2020-08-24 | Add support for creating multiple replicated clusters in Bigtable hook and operator (#10475) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [515cc72c9](https://github.com/apache/airflow/commit/515cc72c995429c8c007f853ade385d79fcbac90) | 2020-08-22 | Fix typo in timed_out (#10459) | -| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22 | Replace assigment with Augmented assignment (#10468) | -| [88c7d2e52](https://github.com/apache/airflow/commit/88c7d2e526af4994066f65f830e2fa8edcbbce2e) | 2020-08-21 | Dataflow operators don't not always create a virtualenv (#10373) | -| [083c3c129](https://github.com/apache/airflow/commit/083c3c129bc3458d410f5ff37d7f5a9a7ad548b7) | 2020-08-18 | Simplified GCSTaskHandler configuration (#10365) | -| [1ae5bdf23](https://github.com/apache/airflow/commit/1ae5bdf23e3ac7cca05325ef8b255a7cf067e18e) | 2020-08-17 | Add test for GCSTaskHandler (#9600) (#9861) | -| 
[e195a980b](https://github.com/apache/airflow/commit/e195a980bc8e9d42f3eb4ac134950977b9e5158f) | 2020-08-16 | Add type annotations for mlengine_operator_utils (#10297) | -| [382c1011b](https://github.com/apache/airflow/commit/382c1011b6bcebd22760e2f98419281ef1a09d1b) | 2020-08-16 | Add Bigtable Update Instance Hook/Operator (#10340) | -| [bfa5a8d5f](https://github.com/apache/airflow/commit/bfa5a8d5f10458c14d380c4042ecfbac627d0639) | 2020-08-15 | CI: Fix failing docs-build (#10342) | -| [be46d20fb](https://github.com/apache/airflow/commit/be46d20fb431cc1d91c935e8894dfc7756c18993) | 2020-08-15 | Improve idempotency of BigQueryInsertJobOperator (#9590) | -| [47387a69e](https://github.com/apache/airflow/commit/47387a69e623676b57b6d42ff07e729da2d21bff) | 2020-08-14 | Catch Permission Denied exception when getting secret from GCP Secret Manager. (#10326) | -| [2f0613b0c](https://github.com/apache/airflow/commit/2f0613b0c2fdf176d9f13a8cd12162c60c64b644) | 2020-08-13 | Implement Google BigQuery Table Partition Sensor (#10218) | -| [f6734b3b8](https://github.com/apache/airflow/commit/f6734b3b850d33d3712763f93c114e80f5af9ffb) | 2020-08-12 | Enable Sphinx spellcheck for doc generation (#10280) | -| [8f8db8959](https://github.com/apache/airflow/commit/8f8db8959e526be54d700845d36ee9f315bae2ea) | 2020-08-12 | DbApiHook: Support kwargs in get_pandas_df (#9730) | -| [ef088314f](https://github.com/apache/airflow/commit/ef088314f8f1b29ac636a7584cf9dda04b1df816) | 2020-08-09 | Added DataprepGetJobsForJobGroupOperator (#10246) | -| [b43f90abf](https://github.com/apache/airflow/commit/b43f90abf4c7219d5d59cccb0514256bd3f2fdc7) | 2020-08-09 | Fix various typos in the repo (#10263) | -| [c29533888](https://github.com/apache/airflow/commit/c29533888fadd40f5e9ce63e728bd8691182e542) | 2020-08-08 | Add labels param to Google MLEngine Operators (#10222) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [eff0f0321](https://github.com/apache/airflow/commit/eff0f03210d30a4aed9ed457eaaea9c9f05d54d1) | 2020-08-06 | Update guide for Google Cloud Secret Manager Backend (#10172) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [010322692](https://github.com/apache/airflow/commit/010322692e6e3f0adc156f0beb81e267da0e97bb) | 2020-08-06 | Improve handling Dataproc cluster creation with ERROR state (#9593) | -| [1437cb749](https://github.com/apache/airflow/commit/1437cb74955f4e10af5d70ebadde1e6b163fb9b7) | 2020-08-04 | Add correct signatures for operators in google provider package (#10144) | -| [6efa1b9cb](https://github.com/apache/airflow/commit/6efa1b9cb763ae0bdbc884a54d24dbdc39d9e3a6) | 2020-08-03 | Add additional Cloud Datastore operators (#10032) | -| [27020f8e5](https://github.com/apache/airflow/commit/27020f8e588575d53e63f9f9daecd3a522656644) | 2020-08-03 | Add try clause to DataFusionHook.wait_for_pipeline_state (#10031) | -| [4e3799fec](https://github.com/apache/airflow/commit/4e3799fec4c23d0f43603a0489c5a6158aeba035) | 2020-08-02 | [AIRFLOW-4541] Replace os.mkdirs usage with pathlib.Path(path).mkdir (#10117) | -| [85c56b173](https://github.com/apache/airflow/commit/85c56b1737c2bf61751836571300445c0aebae1a) | 2020-08-02 | Add missing params to GCP Pub/Sub creation_subscription (#10106) | -| 
[b79466c12](https://github.com/apache/airflow/commit/b79466c12f3ae717c31804acc2e9ffcd60f9611c) | 2020-08-02 | Fix sensor not providing arguments for GCSHook (#10074) | -| [4ee35d027](https://github.com/apache/airflow/commit/4ee35d027988c6456767faeb108a7f686d5117f2) | 2020-08-02 | Fix hook not passing gcp_conn_id to base class (#10075) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [4c84661ad](https://github.com/apache/airflow/commit/4c84661adb5bb5c581bb4193b4c7e935cbe07758) | 2020-07-31 | Split Display Video 360 example into smaler DAGs (#10077) | -| [59cbff087](https://github.com/apache/airflow/commit/59cbff0874dd5318cda4b9ce7b7eeb1aad1dad4d) | 2020-07-29 | Fix docstrings in BigQueryGetDataOperator (#10042) | -| [81b87d48e](https://github.com/apache/airflow/commit/81b87d48ed002d7a7f7bcb72a58e82d40a176fe2) | 2020-07-27 | Add unit tests for GcpBodyFieldSanitizer in Google providers (#9996) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [8b10a4b35](https://github.com/apache/airflow/commit/8b10a4b35e45d536a6475bfe1491ee75fad50186) | 2020-07-25 | Stop using start_date in default_args in example_dags (#9982) | -| [ef98edf4d](https://github.com/apache/airflow/commit/ef98edf4da2d9b74d5cf5b21e81577b3151edb79) | 2020-07-23 | Add more information about using GoogleAdsHook (#9951) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [39a0288a4](https://github.com/apache/airflow/commit/39a0288a47536dfd9b651ecd075887d3e45fcfc4) | 2020-07-22 | Add Google Authentication for experimental API (#9848) | -| [c2db0dfeb](https://github.com/apache/airflow/commit/c2db0dfeb13ee679bf4d7b57874f0fcb39c0f0ed) | 2020-07-22 | More strict rules in mypy (#9705) (#9906) | -| [c4244e18b](https://github.com/apache/airflow/commit/c4244e18bb894eb2180b8972052e56110fe5cbc9) | 2020-07-22 | Fix calling `get_client` in BigQueryHook.table_exists (#9916) | -| [5eacc1642](https://github.com/apache/airflow/commit/5eacc164201a121cd06126aff613cbe0919d35cc) | 2020-07-22 | Add support for impersonation in GCP hooks (#9915) | -| [1cfdebf5f](https://github.com/apache/airflow/commit/1cfdebf5f8841d61a11540b88c7913686e89e085) | 2020-07-21 | Fix insert_job method of BigQueryHook (#9899) | -| [c8c52e69c](https://github.com/apache/airflow/commit/c8c52e69c8d9cc1f26f63d95aecc0a6498d40b6f) | 2020-07-21 | Remove type hint causing DeprecationWarning in Firestore operators (#9819) | -| [eb6f1d1cf](https://github.com/apache/airflow/commit/eb6f1d1cf0503fa763c0d8d34a2fe16efb390b9c) | 2020-07-16 | Fix typo in datafusion operator (#9859) | -| [b01d95ec2](https://github.com/apache/airflow/commit/b01d95ec22b01ed79123178acd74ef40d57aaa7c) | 2020-07-15 | Change DAG.clear to take dag_run_state (#9824) | -| [6d65c15d1](https://github.com/apache/airflow/commit/6d65c15d156a41d5e735e44a1170426559a17d1f) | 2020-07-15 | Add guide for AI Platform (previously Machine Learning Engine) Operators (#9798) | -| [770de53eb](https://github.com/apache/airflow/commit/770de53eb57bd57ffc555ad15b18f0c058dbebe7) | 2020-07-15 | BigQueryTableExistenceSensor needs to specify keyword arguments (#9832) | -| 
[2d8dbacdf](https://github.com/apache/airflow/commit/2d8dbacdf6c19a598a7f55bcf65e28703aed6201) | 2020-07-15 | Add CloudVisionDeleteReferenceImageOperator (#9698) | -| [9f017951b](https://github.com/apache/airflow/commit/9f017951b94d9bf52b5ee66d72aa8dd822f07269) | 2020-07-15 | Add Google Deployment Manager Hook (#9159) | -| [ed5004cca](https://github.com/apache/airflow/commit/ed5004cca753650dc222fbb8e67573938c6c16d9) | 2020-07-14 | Allow `replace` flag in gcs_to_gcs operator. (#9667) | -| [553bb7af7](https://github.com/apache/airflow/commit/553bb7af7cb7a50f7141b5b89297713cee6d19f6) | 2020-07-13 | Keep functions signatures in decorators (#9786) | -| [68925904e](https://github.com/apache/airflow/commit/68925904e49aac6968defb6834863f4e6347fe59) | 2020-07-13 | Add multiple file upload functionality to GCS hook (#8849) | -| [1de78e8f9](https://github.com/apache/airflow/commit/1de78e8f97f48f8f4abd167a0120ffab8af6127a) | 2020-07-12 | Add Google Stackdriver link (#9765) | -| [092d33f29](https://github.com/apache/airflow/commit/092d33f298a7dbb871b1e1b4c17aad3989e89b79) | 2020-07-11 | Fix StackdriverTaskHandler + add system tests (#9761) | -| [b2305660f](https://github.com/apache/airflow/commit/b2305660f0eb55ebd31fdc7fe4e8aeed8c1f8c00) | 2020-07-09 | Update example DAG for AI Platform operators (#9727) | -| [23f80f34a](https://github.com/apache/airflow/commit/23f80f34adec86da24e4896168c53d213d01a7f6) | 2020-07-08 | Move gcs & wasb task handlers to their respective provider packages (#9714) | -| [44d4ae809](https://github.com/apache/airflow/commit/44d4ae809c1e3784ff95b6a5e95113c3412e56b3) | 2020-07-06 | Upgrade to latest pre-commit checks (#9686) | -| [a79e2d4c4](https://github.com/apache/airflow/commit/a79e2d4c4aa105f3fac5ae6a28e29af9cd572407) | 2020-07-06 | Move provider's log task handlers to the provider package (#9604) | -| [cd3d9d934](https://github.com/apache/airflow/commit/cd3d9d93402f06a08f35e3586802f11a18c4f1f3) | 2020-07-02 | Fix using .json template extension in GMP operators (#9566) | -| [4799af30e](https://github.com/apache/airflow/commit/4799af30ee02c596647d1538854769124f9f4961) | 2020-06-30 | Extend BigQuery example with include clause (#9572) | -| [e33f1a12d](https://github.com/apache/airflow/commit/e33f1a12d72ac234e4897f44b326a332acf85901) | 2020-06-30 | Add template_ext to BigQueryInsertJobOperator (#9568) | -| [40add26d4](https://github.com/apache/airflow/commit/40add26d459c2511a6d9d305ae7300f0d6104211) | 2020-06-29 | Remove almost all references to airflow.contrib (#9559) | -| [c420dbd6e](https://github.com/apache/airflow/commit/c420dbd6e13e17867eb4ccc4271b37966310ac0f) | 2020-06-27 | Bump Pylint to 2.5.3 (#9294) | -| [0051c89cb](https://github.com/apache/airflow/commit/0051c89cba02d55236c913ce0110f7d5111ba436) | 2020-06-26 | nitpick fix (#9527) | -| [87fdbd070](https://github.com/apache/airflow/commit/87fdbd0708d942af98d35604fe5962962e25d246) | 2020-06-25 | Use literal syntax instead of function calls to create data structure (#9516) | -| [7256f4caa](https://github.com/apache/airflow/commit/7256f4caa226f8f8632d6e2d38d8c94cb3250a6f) | 2020-06-22 | Pylint fixes and deprecation of rare used methods in Connection (#9419) | -| [e13a14c87](https://github.com/apache/airflow/commit/e13a14c8730f4f633d996dd7d3468fe827136a84) | 2020-06-21 | Enable & Fix Whitespace related PyDocStyle Checks (#9458) | -| [5b680e27e](https://github.com/apache/airflow/commit/5b680e27e8118861ef484c00a4b87c6885b0a518) | 2020-06-19 | Don't use connection to store task handler credentials (#9381) | -| 
[d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [416334e2e](https://github.com/apache/airflow/commit/416334e2ecd21d8a532af6102f1cfa9ac921a97a) | 2020-06-19 | Properly propagated warnings in operators (#9348) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [4e09c6442](https://github.com/apache/airflow/commit/4e09c64423bfaabd02a18b5fe7757dc15451ab73) | 2020-06-18 | Adds GCP Secret Manager Hook (#9368) | -| [40bf8f28f](https://github.com/apache/airflow/commit/40bf8f28f97f17f40d993d207ea740eba54593ee) | 2020-06-18 | Detect automatically the lack of reference to the guide in the operator descriptions (#9290) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [639972d99](https://github.com/apache/airflow/commit/639972d995d848b16a3f283576efdbde28b8fdef) | 2020-06-16 | Add support for latest Apache Beam SDK in Dataflow operators (#9323) | -| [1459970b3](https://github.com/apache/airflow/commit/1459970b3b9780e139ce029ae889fd8f69a37bc7) | 2020-06-15 | Rename CloudBuildCreateBuildOperator to CloudBuildCreateOperator (#9314) | -| [431ea3291](https://github.com/apache/airflow/commit/431ea3291c9bf236bccdf8446d753c630ada2b25) | 2020-06-15 | Resolve upstream tasks when template field is XComArg (#8805) | -| [aee6ab94e](https://github.com/apache/airflow/commit/aee6ab94eb956347ad560cfe2673bc6011074513) | 2020-06-15 | Wait for pipeline state in Data Fusion operators (#8954) | -| [fb1c8b83d](https://github.com/apache/airflow/commit/fb1c8b83d400506a16c10e3d6623a913847e5cf5) | 2020-06-10 | Add test for BQ operations using location (#9206) | -| [a26afbfa5](https://github.com/apache/airflow/commit/a26afbfa51b0981ae742c6171938b57a80aace2b) | 2020-06-10 | Make generated job_id more informative in BQ insert_job (#9203) | -| [c41192fa1](https://github.com/apache/airflow/commit/c41192fa1fc5c2b3e7b8414c59f656ab67bbef28) | 2020-06-10 | Upgrade pendulum to latest major version ~2.0 (#9184) | -| [b1c8c5ed5](https://github.com/apache/airflow/commit/b1c8c5ed5bba3a852a5446f3fdd1131b4b22637a) | 2020-06-09 | Allows using private endpoints in GKEStartPodOperator (#9169) | -| [5918efc86](https://github.com/apache/airflow/commit/5918efc86a2217caa641a6ada289eee1c21407f8) | 2020-06-05 | Add 3.8 to the test matrices (#8836) | -| [9bcdadaf7](https://github.com/apache/airflow/commit/9bcdadaf7e6e73d3d2246fbbd32a9f30a1b43ca9) | 2020-06-05 | Add 'main' param to template_fields in DataprocSubmitPySparkJobOperator (#9154) | -| [f56811dff](https://github.com/apache/airflow/commit/f56811dff3af66cbceb0418f11e00507bab58674) | 2020-06-05 | [AIRFLOW-6290] Create guide for GKE operators (#8883) | -| [76962867b](https://github.com/apache/airflow/commit/76962867b5877cf5ffd1b6004453f783c0732ab1) | 2020-06-04 | Fix sql_to_gcs hook gzip of schema_file (#9140) | -| [17adcea83](https://github.com/apache/airflow/commit/17adcea835cb7b0cf2d8da0ac7dda5549cfa3e45) | 2020-06-02 | Fix handling of subprocess error handling in s3_file_transform and gcs (#9106) | -| 
[789852546](https://github.com/apache/airflow/commit/78985254683c359f7444a7eb5f6ee4967c37d61f) | 2020-06-01 | Add BigQueryInsertJobOperator (#8868) | -| [29eb68b90](https://github.com/apache/airflow/commit/29eb68b90b5df692ac322be0939af5e7fa9b71bc) | 2020-05-31 | Create guide for Dataproc Operators (#9037) | -| [886afaf62](https://github.com/apache/airflow/commit/886afaf622602aa97f925bc3ee4fc27aa995c445) | 2020-05-29 | Add example dag and system test for LocalFilesystemToGCSOperator (#9043) | -| [a779c4dfc](https://github.com/apache/airflow/commit/a779c4dfc278d6ece480b012764ea5814dc78dee) | 2020-05-29 | add separate example dags and system tests for GCSToGoogleSheetsOperator (#9066) | -| [ada26be23](https://github.com/apache/airflow/commit/ada26be23c913796c2ae77b91cb7d113dfec75a6) | 2020-05-29 | Add correct description for dst param in LocalFilesystemToGCSOperator (#9055) | -| [81b2761b8](https://github.com/apache/airflow/commit/81b2761b86dae2d21a6ee859d49c08d46fea6def) | 2020-05-29 | add example dag and system test for GoogleSheetsToGCSOperator (#9056) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [3994030ea](https://github.com/apache/airflow/commit/3994030ea678727daaf9c2bfed0ca94a096f8d2a) | 2020-05-26 | Refactor BigQuery operators (#8858) | -| [cdb3f2545](https://github.com/apache/airflow/commit/cdb3f25456e49d0199cd7ccd680626dac01c9be6) | 2020-05-26 | All classes in backport providers are now importable in Airflow 1.10 (#8991) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [cf5cf45e1](https://github.com/apache/airflow/commit/cf5cf45e1c0dff9a40e02f0dc221542f974831a7) | 2020-05-23 | Support YAML input for CloudBuildCreateOperator (#8808) | -| [499493c5c](https://github.com/apache/airflow/commit/499493c5c5cf324ab8452ead80a10b71ce0c3b14) | 2020-05-19 | [AIRFLOW-6586] Improvements to gcs sensor (#7197) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [841d81664](https://github.com/apache/airflow/commit/841d81664737c25d73d095a7dab5de80d369c87c) | 2020-05-19 | Allow setting the pooling time in DLPHook (#8824) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [15273f0ea](https://github.com/apache/airflow/commit/15273f0ea05ec579c631ce26b5d620233ebdc4d2) | 2020-05-16 | Check for same task instead of Equality to detect Duplicate Tasks (#8828) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [e1e833bb2](https://github.com/apache/airflow/commit/e1e833bb260879ecb9a1f80f28450a3656c0e598) | 2020-05-13 | Update GoogleBaseHook to not follow 308 and use 60s timeout (#8816) | -| 
[8b5491971](https://github.com/apache/airflow/commit/8b54919711a203c3f35d98c6310a55d4df5da590) | 2020-05-12 | Refactor BigQuery hook methods to use python library (#8631) | -| [6911dfe83](https://github.com/apache/airflow/commit/6911dfe8372a33df67ce1fdd3c2bca1047718f60) | 2020-05-12 | Fix template fields in Google operators (#8840) | -| [4b06fde0f](https://github.com/apache/airflow/commit/4b06fde0f10ce178b3c336c5d901e3b089f2863d) | 2020-05-12 | Fix Flake8 errors (#8841) | -| [1d12c347c](https://github.com/apache/airflow/commit/1d12c347cb258e7081804da1f9f5ffdedc003163) | 2020-05-12 | Refactor BigQuery check operators (#8813) | -| [493b685d7](https://github.com/apache/airflow/commit/493b685d7879cfee532390ba0909d4b1d8764267) | 2020-05-10 | Add separate example DAGs and system tests for google cloud speech (#8778) | -| [79ef8bed8](https://github.com/apache/airflow/commit/79ef8bed891c22eb76adf99158288d1b44426dc0) | 2020-05-10 | Added Upload Multiple Entity Read Files to specified big query dataset (#8610) | -| [280f1f0c4](https://github.com/apache/airflow/commit/280f1f0c4cc49aba1b2f8b456326795733769d18) | 2020-05-10 | Correctly restore upstream_task_ids when deserializing Operators (#8775) | -| [58aefb23b](https://github.com/apache/airflow/commit/58aefb23b1d456bbb24876a4e3ff14f25d6274b0) | 2020-05-08 | Added SDFtoGCSOperator (#8740) | -| [723c52c94](https://github.com/apache/airflow/commit/723c52c942b49b0e8c8fa8667a4a6a45fa249498) | 2020-05-07 | Add documentation for SpannerDeployInstanceOperator (#8750) | -| [25ee4211b](https://github.com/apache/airflow/commit/25ee4211b345ce7c19fb7366fd230838c34f1d47) | 2020-05-06 | Support all RuntimeEnvironment parameters in DataflowTemplatedJobStartOperator (#8531) | -| [8d6f1aa4b](https://github.com/apache/airflow/commit/8d6f1aa4b5bb8809ffc55dc0c62e6d0e89f331e5) | 2020-05-05 | Support num_retries field in env var for GCP connection (#8700) | -| [67caae0f2](https://github.com/apache/airflow/commit/67caae0f25db4eec42b8e81c85683aabdd8d6c1a) | 2020-05-04 | Add system test for gcs_to_bigquery (#8556) | -| [bc45fa675](https://github.com/apache/airflow/commit/bc45fa6759203b4c26b52e693dac97486a84204e) | 2020-05-03 | Add system test and docs for Facebook Ads operators (#8503) | -| [a28c66f23](https://github.com/apache/airflow/commit/a28c66f23d373cd0f8bfc765a515f21d4b66a0e9) | 2020-04-30 | [AIRFLOW-4734] Upsert functionality for PostgresHook.insert_rows() (#8625) | -| [992a24ce4](https://github.com/apache/airflow/commit/992a24ce41067d3b73f293878e71835892cbb632) | 2020-04-28 | Split and improve BigQuery example DAG (#8529) | -| [c1fb28230](https://github.com/apache/airflow/commit/c1fb28230fa0d36ef86c452c70254b253a113f9c) | 2020-04-28 | Refactor BigQueryHook dataset operations (#8477) | -| [e8d0f8fea](https://github.com/apache/airflow/commit/e8d0f8feab0ec08e248cd381359112ad6a832f5b) | 2020-04-26 | Improve idempodency in CloudDataTransferServiceCreateJobOperator (#8430) | -| [37fdfa977](https://github.com/apache/airflow/commit/37fdfa9775f43a5fa15de9c53ab33ecdf97513c5) | 2020-04-26 | [AIRFLOW-6281] Create guide for GCS to GCS transfer operators (#8442) | -| [14b22e6ff](https://github.com/apache/airflow/commit/14b22e6ffeb3af1f68e8362a1d0061b41364019c) | 2020-04-25 | Add hook and operator for Google Cloud Life Sciences (#8481) | -| [72ddc94d1](https://github.com/apache/airflow/commit/72ddc94d1ee08b414102e0b8ac197a3d8e965707) | 2020-04-23 | Pass location using parmamter in Dataflow integration (#8382) | -| 
[912aa4b42](https://github.com/apache/airflow/commit/912aa4b4237695275db6379cf2f0a633ea6087bc) | 2020-04-23 | Added GoogleDisplayVideo360DownloadLineItemsOperator (#8174) | -| [57c8c0583](https://github.com/apache/airflow/commit/57c8c05839f66ed2909b1bee8ff6976432db82aa) | 2020-04-22 | Use python client in BQ hook create_empty_table/dataset and table_exists (#8377) | -| [5d3a7eef3](https://github.com/apache/airflow/commit/5d3a7eef30b30fa466d8173f13abe4c356d73aef) | 2020-04-20 | Allow multiple extra_packages in Dataflow (#8394) | -| [79c99b1b6](https://github.com/apache/airflow/commit/79c99b1b6ae2ff5b0c8ab892f7f3fb1b44724121) | 2020-04-18 | Added location parameter to BigQueryCheckOperator (#8273) | -| [79d3f33c1](https://github.com/apache/airflow/commit/79d3f33c1b65c9c7e7b1a75e25d38cab9aa4517f) | 2020-04-17 | Clean up temporary files in Dataflow operators (#8313) | -| [efcffa323](https://github.com/apache/airflow/commit/efcffa323ddb5aa9f5907aa86808f3f3b4f5bd87) | 2020-04-16 | Add Dataproc SparkR Example (#8240) | -| [b198a1fa9](https://github.com/apache/airflow/commit/b198a1fa94c44228dc7358552aeb6a5371ae0da2) | 2020-04-15 | Create guide for BigQuery operators (#8276) | -| [2636cc932](https://github.com/apache/airflow/commit/2636cc932c3b156644edd46635cf9ff995c83159) | 2020-04-14 | Raise exception when GCP credential doesn't support account impersonation (#8213) | -| [eee4ebaee](https://github.com/apache/airflow/commit/eee4ebaeeb1991480ee178ddb600bc69b2a88764) | 2020-04-14 | Added Facebook Ads Operator #7887 (#8008) | -| [8cae07ea1](https://github.com/apache/airflow/commit/8cae07ea1873a90516120d9ffbd28e7fdd2f78a4) | 2020-04-14 | fixed typo (#8294) | -| [45c898330](https://github.com/apache/airflow/commit/45c8983306ab1c54abdacd8f870e790fad25cb37) | 2020-04-13 | Less aggressive eager upgrade of requirements (#8267) | -| [1fd9ed384](https://github.com/apache/airflow/commit/1fd9ed3840361afa1e9456ccb0dfd5a60fba4e85) | 2020-04-13 | Add mypy plugin for decorators. 
(#8145) | -| [327b0a9f7](https://github.com/apache/airflow/commit/327b0a9f77bbcbe3f977a37de04264c2eff4bee1) | 2020-04-13 | Added GoogleDisplayVideo360UploadLineItemsOperator (#8216) | -| [bb5e403a3](https://github.com/apache/airflow/commit/bb5e403a320e7377e5040cb180f61b4f5a9ea558) | 2020-04-10 | Honor schema type for MySQL to GCS data pre-process (#8090) | -| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09 | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170) | -| [3fc89f29f](https://github.com/apache/airflow/commit/3fc89f29f5bcd1529089fa6cb9c44843614f9ec5) | 2020-04-06 | [AIRFLOW-7106] Cloud data fusion integration - Allow to pass args to start pipeline (#7849) | -| [7ef75d239](https://github.com/apache/airflow/commit/7ef75d2393f30d155de550e6d1ee8c055e2abfee) | 2020-04-03 | [AIRFLOW-7117] Honor self.schema in sql_to_gcs as schema to upload (#8049) | -| [ed2bc0057](https://github.com/apache/airflow/commit/ed2bc00576b39a88e3e1fb79092494f4bfdcbf5c) | 2020-04-02 | Add Google Ads list accounts operator (#8007) | -| [3808a6206](https://github.com/apache/airflow/commit/3808a6206e70d4af84b39ea7078df54f02c1435e) | 2020-04-01 | Unify Google class/package names (#8033) | -| [8a0240257](https://github.com/apache/airflow/commit/8a02402576f83869d5134b4bddef5d73c15a8320) | 2020-03-31 | Rename CloudBaseHook to GoogleBaseHook and move it to google.common (#8011) | -| [8e8978007](https://github.com/apache/airflow/commit/8e897800716c8ccedd1c53f2d083cb295786aa50) | 2020-03-31 | Add more refactor steps for providers.google (#8010) | -| [aae3b8fb2](https://github.com/apache/airflow/commit/aae3b8fb27870cb3cfba5ed73e35e08d520ef014) | 2020-03-31 | Individual package READMEs (#8012) | -| [779023968](https://github.com/apache/airflow/commit/779023968f983c91701f687bc823dc338934cdad) | 2020-03-30 | [AIRFLOW-7075] Operators for storing information from GCS into GA (#7743) | -| [49abce521](https://github.com/apache/airflow/commit/49abce52178c81954f8a25608f70ffe02fcf7b19) | 2020-03-30 | Improve system tests for Cloud Build (#8003) | -| [0f19a930d](https://github.com/apache/airflow/commit/0f19a930d1a7dec2a96bab0de144829f83cc0626) | 2020-03-29 | Remove GKEStartPodOperator when backporting (#7908) | -| [0e1c238b2](https://github.com/apache/airflow/commit/0e1c238b2fff3a092c93368125bc8d82abc4b308) | 2020-03-28 | Get Airflow Variables from GCP Secrets Manager (#7946) | -| [eb4af4f94](https://github.com/apache/airflow/commit/eb4af4f944c77e67e167bbb6b0a2aaf075a95b50) | 2020-03-28 | Make BaseSecretsBackend.build_path generic (#7948) | -| [01f99426f](https://github.com/apache/airflow/commit/01f99426fddd2a24552f352edcb271fa78cf3b15) | 2020-03-28 | Add download/upload operators for GCS and Google Sheets (#7866) | -| [892522f8e](https://github.com/apache/airflow/commit/892522f8e2aeedc1ad842a08aaea967b0cae077f) | 2020-03-26 | Change signature of GSheetsHook methods (#7853) | -| [bfd425157](https://github.com/apache/airflow/commit/bfd425157a746402b516f8fc9e48f4ddccd794ce) | 2020-03-26 | Improve idempotency in MLEngineHook.create_model (#7811) | -| [f9c226343](https://github.com/apache/airflow/commit/f9c226343d94a7732da280d1dd086bf1ba291c77) | 2020-03-26 | Fix CloudSecretsManagerBackend invalid connections_prefix (#7861) | -| [e3920f12f](https://github.com/apache/airflow/commit/e3920f12f483b53950507c50f6ab6a4318072859) | 2020-03-26 | Improve setUp/tearDown in Cloud Firestore system test (#7862) | -| 
[8ba8a7295](https://github.com/apache/airflow/commit/8ba8a7295a31f6b44894bfcaea36fa93b8d8c0d0) | 2020-03-26 | Improve example DAGs for Cloud Memorystore (#7855) | -| [f7d1a437c](https://github.com/apache/airflow/commit/f7d1a437c17461b5ab768b75d58f0cb026b2a818) | 2020-03-26 | Fix CloudMemorystoreCreateInstanceAndImportOperator operator (#7856) | -| [beef6c230](https://github.com/apache/airflow/commit/beef6c230e4ff266af7c16b639bfda659b2bf6c0) | 2020-03-26 | Improve authorization in GCP system tests (#7863) | -| [5f165f3e4](https://github.com/apache/airflow/commit/5f165f3e4231ebd420ce643211a93e1fecf4877e) | 2020-03-26 | [AIRFLOW-5801] Get GCP credentials from file instead of JSON blob (#7869) | -| [686d7d50b](https://github.com/apache/airflow/commit/686d7d50bd21622724d6818021355bc6885fd3de) | 2020-03-25 | Standardize SecretBackend class names (#7846) | -| [1982c3fdc](https://github.com/apache/airflow/commit/1982c3fdca1f04cfc41fc5b5e285d8f01c6b76ab) | 2020-03-24 | Run Dataflow for ML Engine summary in venv (#7809) | -| [eef87b995](https://github.com/apache/airflow/commit/eef87b9953347a65421f315a07dbef37ded9df66) | 2020-03-23 | [AIRFLOW-7105] Unify Secrets Backend method interfaces (#7830) | -| [529db07b2](https://github.com/apache/airflow/commit/529db07b2ee73d886e37e8b3415462c730187b15) | 2020-03-23 | Improve Google PubSub hook publish method (#7831) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [a001489b5](https://github.com/apache/airflow/commit/a001489b5928ebfc35f990a29d1c9c2ecb80bd61) | 2020-03-23 | Improve example DAG for ML Engine (#7810) | -| [9e5a8e7f8](https://github.com/apache/airflow/commit/9e5a8e7f83cf2368315fce62f8d81304f7ba2f04) | 2020-03-23 | Add call to Super class in 'google' providers (#7823) | -| [b86bf79bf](https://github.com/apache/airflow/commit/b86bf79bff615e61de98bead4d02eace5690d5fb) | 2020-03-23 | Fix typo in GCP credentials_provider's docstring (#7818) | -| [56c013ce9](https://github.com/apache/airflow/commit/56c013ce922eb18e5f7dd4410986afbcc6f29025) | 2020-03-23 | Add missing docstring in BigQueryHook.create_empty_table (#7817) | -| [426a79847](https://github.com/apache/airflow/commit/426a79847ced832ca3f67c135fd8830ebf1de7d2) | 2020-03-23 | Imrove support for laatest API in MLEngineStartTrainingJobOperator (#7812) | -| [cdf1809fc](https://github.com/apache/airflow/commit/cdf1809fce0e59c8379a799f1738d8d813abbf51) | 2020-03-23 | [AIRFLOW-7104] Add Secret backend for GCP Secrets Manager (#7795) | -| [27dac00e1](https://github.com/apache/airflow/commit/27dac00e125b87626a0b87074d61e6d38031bf47) | 2020-03-22 | [AIRFLOW-7099] Improve system test for cloud transfer service (#7794) | -| [0daf5d729](https://github.com/apache/airflow/commit/0daf5d729acef4e9aef5226452dff774e80430cd) | 2020-03-22 | Add ability to specify a maximum modified time for objects in GCSToGCSOperator (#7791) | -| [c8088c2bd](https://github.com/apache/airflow/commit/c8088c2bd70a16605a5d4b1a66a22309359d6712) | 2020-03-20 | [AIRFLOW-7100] Add GoogleAnalyticsGetAdsLinkOperator (#7781) | -| [5106a2931](https://github.com/apache/airflow/commit/5106a29314b413d168bcba7a64bf91c04fdb5dfe) | 2020-03-20 | [AIRFLOW-6752] Add GoogleAnalyticsRetrieveAdsLinksListOperator (#7748) | -| [759ce2a80](https://github.com/apache/airflow/commit/759ce2a80c95832fe4773c9f4fde23e1b03cbc6f) | 2020-03-20 | [AIRFLOW-6978] Add PubSubPullOperator (#7766) | -| 
[6b9b214e4](https://github.com/apache/airflow/commit/6b9b214e4c3b3afa8ea2e1a5c1e24993013d60ac) | 2020-03-20 | [AIRFLOW-6732] Add GoogleAdsHook and GoogleAdsToGcsOperator (#7692) | -| [b11891696](https://github.com/apache/airflow/commit/b11891696946d1461174b385c88d6af8abb99768) | 2020-03-19 | [AIRFLOW-7069] Fix cloudsql system tests (#7770) | -| [ae854cae5](https://github.com/apache/airflow/commit/ae854cae5a2cf8cae37edf7e0813ad01bccfbc30) | 2020-03-19 | [AIRFLOW-7082] Remove catch_http_exception decorator in GCP hooks (#7756) | -| [7e1e954d2](https://github.com/apache/airflow/commit/7e1e954d23ce272b0a71188f0f535e20d54be443) | 2020-03-19 | [AIRFLOW-7085] Cache credentials, project_id in GCP Base Hook (#7759) | -| [6e21c139b](https://github.com/apache/airflow/commit/6e21c139b3cce3f895040939f0b02e3e0ba36141) | 2020-03-19 | [AIRFLOW-XXXX] Fix reference to GCP classes in guides (#7762) | -| [ce022a3f7](https://github.com/apache/airflow/commit/ce022a3f72b7735087d4c3bbe81d293a0ab75327) | 2020-03-19 | [AIRFLOW-XXXX] Add cross-references for operators guide (#7760) | -| [029c84e55](https://github.com/apache/airflow/commit/029c84e5527b6db6bdbdbe026f455da325bedef3) | 2020-03-18 | [AIRFLOW-5421] Add Presto to GCS transfer operator (#7718) | -| [63a3102ed](https://github.com/apache/airflow/commit/63a3102ede8fb8f764d251b20cad5ee5bef84f50) | 2020-03-18 | [AIRFLOW-7064] Add CloudFirestoreExportDatabaseOperator (#7725) | -| [73305c7bd](https://github.com/apache/airflow/commit/73305c7bd57f14444804c13b8b290f479832d3db) | 2020-03-18 | [AIRFLOW-7081] Remove env variables from GCP guide (#7755) | -| [60fdbf6d9](https://github.com/apache/airflow/commit/60fdbf6d9255d34a8967400e9585b1cd5d29d3e9) | 2020-03-18 | [AIRFLOW-5610] Add ability to specify multiple objects to copy in GCSToGCSOperator (#7728) | -| [de7e934ca](https://github.com/apache/airflow/commit/de7e934ca3f21ce82f67accf92811b3ac044476f) | 2020-03-17 | [AIRFLOW-7079] Remove redundant code for storing template_fields (#7750) | -| [0de0347b2](https://github.com/apache/airflow/commit/0de0347b27a961c46ee49da6dfa9205321657749) | 2020-03-17 | [AIRFLOW-6855]: Escape project_dataset_table in SQL query in gcs to bq … (#7475) | -| [91557c6f8](https://github.com/apache/airflow/commit/91557c6f87529c010b8ad1110ece35fd7fd751e4) | 2020-03-17 | [AIRFLOW-7073] GKEStartPodOperator always use connection credentials (#7738) | -| [51161dbd9](https://github.com/apache/airflow/commit/51161dbd9de0c966016cec4d5036877890daee7c) | 2020-03-16 | [AIRFLOW-5664] Store timestamps with microseconds precision (#6354) | -| [2bc020c43](https://github.com/apache/airflow/commit/2bc020c43112dd3a769311de8d5012e8e8f399ee) | 2020-03-14 | [AIRFLOW-7055] Verbose logging option for google provider (#7711) | -| [c997cab42](https://github.com/apache/airflow/commit/c997cab42d8695ac444e63dfe4b948a7ea82ed89) | 2020-03-13 | [AIRFLOW-6724] Add Google Analytics 360 Accounts Retrieve Operator (#7630) | -| [137896f32](https://github.com/apache/airflow/commit/137896f326cd29b59902a887e4c4e58f940ff62b) | 2020-03-12 | [AIRFLOW-7034] Remove feature: Assigning Dag to task using Bitshift Op (#7685) | -| [1f77f943d](https://github.com/apache/airflow/commit/1f77f943d5d85f66b6a988e8ef6506525eaf4732) | 2020-03-10 | [AIRFLOW-6980] Improve system tests and building providers package (#7615) | -| [bf9b6b6d7](https://github.com/apache/airflow/commit/bf9b6b6d70455352bbf807871c8eeb6324be7e54) | 2020-03-09 | [AIRFLOW-5013] Add GCP Data Catalog Hook and operators (#7664) | -| 
[e5130dc9f](https://github.com/apache/airflow/commit/e5130dc9fe89187e95071e678ea3b46600866762) | 2020-03-09 | [AIRFLOW-2911] Add job cancellation capability to Dataflow service (#7659) | -| [faf0df4b9](https://github.com/apache/airflow/commit/faf0df4b9460b7f037ee390addbd2c6effcae013) | 2020-03-09 | [AIRFLOW-XXXX] Fix upsert operator in BQ example DAG (#7666) | -| [42eef3821](https://github.com/apache/airflow/commit/42eef38217e709bc7a7f71bf0286e9e61293a43e) | 2020-03-07 | [AIRFLOW-6877] Add cross-provider dependencies as extras (#7506) | -| [b5b9795f0](https://github.com/apache/airflow/commit/b5b9795f0446bb484a91ee485f49ea456f1c26c4) | 2020-03-07 | [AIRFLOW-6973] Make GCSCreateBucketOperator idempotent (fix) (#7624) | -| [6b65038fb](https://github.com/apache/airflow/commit/6b65038fb409ba1040e70305444816d8f5cfdc47) | 2020-03-06 | [AIRFLOW-6990] Improve system tests for Google Marketing Platform (#7631) | -| [755fe5224](https://github.com/apache/airflow/commit/755fe52249ba1cd965cf2f87fa7a428b8197a38a) | 2020-03-05 | [AIRFLOW-6915] Add AI Platform Console Link for MLEngineStartTrainingJobOperator (#7535) | -| [cb2f33911](https://github.com/apache/airflow/commit/cb2f339116cf2093da447748892fac68aecbb888) | 2020-03-04 | [AIRFLOW-6973] Make GCSCreateBucketOperator idempotent (#7609) | -| [09fea3ce8](https://github.com/apache/airflow/commit/09fea3ce8e4d7816281963bb8f2cb06f4de6db5c) | 2020-03-04 | [AIRFLOW-6977] Fix BigQuery DTS example DAG (#7612) | -| [8230ccc48](https://github.com/apache/airflow/commit/8230ccc48b157c89b2b893d42c6fe1523b83363a) | 2020-03-04 | [AIRFLOW-6926] Fix Google Tasks operators return types and idempotency (#7547) | -| [0d1e3088a](https://github.com/apache/airflow/commit/0d1e3088aa9f16eaeeb7b18eccec8f35c79a53df) | 2020-03-04 | [AIRFLOW-6970] Improve GCP Video Intelligence system tests (#7604) | -| [ab6bb0012](https://github.com/apache/airflow/commit/ab6bb0012c38740b76e864d42d299c5c7a9972a3) | 2020-03-03 | [AIRFLOW-6971] Fix return type in CloudSpeechToTextRecognizeSpeechOperator (#7607) | -| [3db4ade3d](https://github.com/apache/airflow/commit/3db4ade3dc9660c21c28187100a22008552f2bd3) | 2020-02-29 | [AIRFLOW-6924] Fix Google DLP operators return types (#7546) | -| [008b4bab1](https://github.com/apache/airflow/commit/008b4bab14222da068b737d6332db4963b994007) | 2020-02-27 | [AIRFLOW-6730] Use total_seconds instead of seconds (#7363) | -| [bb552b2d9](https://github.com/apache/airflow/commit/bb552b2d9fd595cc3eb1b3a2f637f29b814878d7) | 2020-02-25 | [AIRFLOW-6908] Lazy load AirflowException (#7528) | -| [d1a34246a](https://github.com/apache/airflow/commit/d1a34246ac593901f8599b102dc3d7efa4dd61e4) | 2020-02-25 | [AIRFLOW-6593] Add GCP Stackdriver Alerting Hooks and Operators (#7322) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [dcf874352](https://github.com/apache/airflow/commit/dcf87435219307d4e916a8abc2b819ad75e2b1cf) | 2020-02-24 | [AIRFLOW-6894] Prevent db query in example_dags (#7516) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [35b961637](https://github.com/apache/airflow/commit/35b9616378d1cfba7c2eb3c71e20acb6734b7c77) | 2020-02-21 | [AIRFLOW-4973] Add Cloud Data Fusion Pipeline 
integration (#7486) | -| [aff3a361b](https://github.com/apache/airflow/commit/aff3a361b4092212c0757f9ce88fa2e40d25d1f4) | 2020-02-20 | [AIRFLOW-6558] Campaign Manager operators for conversions (#7420) | -| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18 | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) | -| [5b199cb86](https://github.com/apache/airflow/commit/5b199cb86be5b1aefbd8620185033d6f635713c1) | 2020-02-17 | [AIRFLOW-XXXX] Typo in example_bigquery DAG (#7429) | -| [2c9345a8e](https://github.com/apache/airflow/commit/2c9345a8e03d37a2676efa2f2ea7e8b7814c5345) | 2020-02-17 | [AIRFLOW-6759] Added MLEngine operator/hook to cancel MLEngine jobs (#7400) | -| [946bdc23c](https://github.com/apache/airflow/commit/946bdc23c039637b0383e1269f99bdd1b2426565) | 2020-02-16 | [AIRFLOW-6405] Add GCP BigQuery Table Upsert Operator (#7126) | -| [2381c820c](https://github.com/apache/airflow/commit/2381c820c8aaeffc1c9b4ed47832038833400eb8) | 2020-02-13 | [AIRFLOW-6505] Let emoji encoded properly for json.dumps() (#7399) | -| [04c1fefbf](https://github.com/apache/airflow/commit/04c1fefbf26a73ed13881d2ec14eada48028ff72) | 2020-02-03 | [AIRFLOW-6676] added GCSDeleteBucketOperator (#7307) | -| [a0252748f](https://github.com/apache/airflow/commit/a0252748ff312daede15c6f0a3d39e16c774461c) | 2020-02-03 | [AIRFLOW-6717] Remove non-existent field from templated_fields (#7340) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [9d8d07557](https://github.com/apache/airflow/commit/9d8d0755789d4aeadc5d3015f3cdde62901f85b8) | 2020-02-03 | [AIRFLOW-6715] Fix Google Cloud DLP Example DAG (#7337) | -| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02 | [AIRFLOW-6708] Set unique logger names (#7330) | -| [373c6aa4a](https://github.com/apache/airflow/commit/373c6aa4a208284b5ff72987e4bd8f4e2ada1a1b) | 2020-01-30 | [AIRFLOW-6682] Move GCP classes to providers package (#7295) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [057f3ae3a](https://github.com/apache/airflow/commit/057f3ae3a4afedf6d462ecf58b01dd6304d3e135) | 2020-01-29 | [AIRFLOW-6670][depends on AIRFLOW-6669] Move contrib operators to providers package (#7286) | -| [ceea293c1](https://github.com/apache/airflow/commit/ceea293c1652240e7e856c201e4341a87ef97a0f) | 2020-01-28 | [AIRFLOW-6656] Fix AIP-21 moving (#7272) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | -| [f4d3e5e54](https://github.com/apache/airflow/commit/f4d3e5e54507f52a00a9b95aa48eb0260e17224d) | 2020-01-13 | [AIRFLOW-6102] [AIP-21] Rename Dataproc operators (#7151) | -| [e7bf8ecb4](https://github.com/apache/airflow/commit/e7bf8ecb48f0299af8091433535ac573c2afd1cf) | 2020-01-13 | [AIRFLOW-6119] [AIP-21] Rename GCS operators, hooks and sensors (#7125) | -| [5b6772cb8](https://github.com/apache/airflow/commit/5b6772cb8391b248cb4b7be5fd3d5c035280fac1) | 2020-01-09 | 
[AIRFLOW-6125] [AIP-21] Rename S3 operator and SFTP operator (#7112) | -| [4f8592ae8](https://github.com/apache/airflow/commit/4f8592ae8f52ab7f42623d3b43eef0928c9aafb2) | 2020-01-08 | [AIRFLOW-6118] [AIP-21] Rename Pubsub operators and hook (#7046) | -| [20299473f](https://github.com/apache/airflow/commit/20299473f11add6531f607256ee8a0f7f9507ab8) | 2020-01-03 | [AIRFLOW-6115] [AIP-21] Rename GCP vision operators (#7020) | -| [18e8cea4e](https://github.com/apache/airflow/commit/18e8cea4e7487a7dfefc03661e5ebe54c4104ead) | 2020-01-03 | [AIRFLOW-6428] Fix import path for airflow.utils.dates.days_ago in Example DAGs (#7007) | -| [95087af14](https://github.com/apache/airflow/commit/95087af14091f28a83ced8ff1860b86dfd93f93d) | 2019-12-31 | [AIRFLOW-6110] [AIP-21] Rename natural_language service (#6968) | -| [69629a5a9](https://github.com/apache/airflow/commit/69629a5a948ab2c4ac04a4a4dca6ac86d19c11bd) | 2019-12-09 | [AIRFLOW-5807] Move SFTP from contrib to providers. (#6464) | -| [25e9047a4](https://github.com/apache/airflow/commit/25e9047a4a4da5fad4f85c366e3a6262c0a4f68e) | 2019-12-09 | [AIRFLOW-6193] Do not use asserts in Airflow main code (#6749) | -| [ed0a14f32](https://github.com/apache/airflow/commit/ed0a14f321b9dab3554ae395c11c147258536ce8) | 2019-12-09 | [AIRFLOW-6120] Rename GoogleCloudBaseHook (#6734) | -| [2f2f89c14](https://github.com/apache/airflow/commit/2f2f89c148e2b694aee9402707f68065ee7320f8) | 2019-12-01 | [AIRFLOW-6139] Consistent spaces in pylint enable/disable (#6701) | -| [03c870a61](https://github.com/apache/airflow/commit/03c870a6172ab232af6319a30ad8d46622359b10) | 2019-11-26 | [AIRFLOW-6010] Remove cyclic imports and pylint hacks (#6601) | -| [5c4cfea8c](https://github.com/apache/airflow/commit/5c4cfea8c0f488496c1cbcc4c6c5db13d8210979) | 2019-11-15 | [AIRFLOW-5718] Add SFTPToGoogleCloudStorageOperator (#6393) | -| [44a8c37a9](https://github.com/apache/airflow/commit/44a8c37a9a8668469aa825ad21057cca6ac2c186) | 2019-11-13 | [AIRFLOW-XXX] Fix the docstring for Dataproc get_job method (#6581) | -| [d633d3ac4](https://github.com/apache/airflow/commit/d633d3ac44c395e6c43cd388f98fba1ce1c435a3) | 2019-11-13 | [AIRFLOW-5691] Rewrite Dataproc operators to use python library (#6371) | -| [d985c02d9](https://github.com/apache/airflow/commit/d985c02d9fa3d9ec946abc1735b0551fd61fb9f0) | 2019-11-05 | [AIRFLOW-XXX] Add How-To-Guide to GCP PubSub (#6497) | -| [a296cdabd](https://github.com/apache/airflow/commit/a296cdabdb9c9c65cf9a48329cb776aed5c82d43) | 2019-11-04 | [AIRFLOW-5743] Move Google PubSub to providers package (#6476) | -| [470b2a779](https://github.com/apache/airflow/commit/470b2a779d031406a3d5925f2fa2ec40e5c3bccb) | 2019-10-30 | [AIRFLOW-5741] Move Cloud Natural Language to providers (#6421) | -| [f2caa451f](https://github.com/apache/airflow/commit/f2caa451fc2b8ee59163314f9ec1cc372acbadf1) | 2019-10-27 | [AIRFLOW-5742] Move Google Cloud Vision to providers package (#6424) | -| [16d7accb2](https://github.com/apache/airflow/commit/16d7accb22c866d4fbf368e4d979dc1c4a41d93c) | 2019-10-22 | [AIRFLOW-4971] Add Google Display & Video 360 integration (#6170) | -| [4e661f535](https://github.com/apache/airflow/commit/4e661f535dea613f9b2e0075676f9a73a97461fe) | 2019-10-22 | [AIRFLOW-5379] Add Google Search Ads 360 operators (#6228) | -| [19e32b4e2](https://github.com/apache/airflow/commit/19e32b4e2c798f662e5d8d1e7c65036c5e7ac125) | 2019-10-18 | [AIRFLOW-5656] Rename provider to providers module (#6333) | diff --git a/airflow/providers/google/README.md b/airflow/providers/google/README.md 
deleted file mode 100644 index 4fa48ded742fd..0000000000000 --- a/airflow/providers/google/README.md +++ /dev/null @@ -1,967 +0,0 @@ - - - -# Package apache-airflow-providers-google - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Cross provider package dependencies](#cross-provider-package-dependencies) -- [Provider classes summary](#provider-classes-summary) - - [Operators](#operators) - - [New operators](#new-operators) - - [Moved operators](#moved-operators) - - [Transfer operators](#transfer-operators) - - [New transfer operators](#new-transfer-operators) - - [Moved transfer operators](#moved-transfer-operators) - - [Sensors](#sensors) - - [New sensors](#new-sensors) - - [Moved sensors](#moved-sensors) - - [Hooks](#hooks) - - [New hooks](#new-hooks) - - [Moved hooks](#moved-hooks) - - [Secrets](#secrets) - - [Moved secrets](#moved-secrets) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for the `google` provider. All classes for this provider package -are in the `airflow.providers.google` Python package. - - - -## Installation - -NOTE! - -In November 2020, a new version of pip (20.3) was released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation, depending on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -(`pip install --upgrade pip==20.2.4`) or, in case you use pip 20.3, add the option -`--use-deprecated legacy-resolver` to your pip install command. - -You can install this package on top of an existing Airflow 2.* installation via -`pip install apache-airflow-providers-google`. - -## PIP requirements - -| PIP package                         | Version required   | |:-----------------------------------|:-------------------| -| PyOpenSSL                          |                    | -| google-ads                         | >=4.0.0,<8.0.0     | -| google-api-python-client           | >=1.6.0,<2.0.0     | -| google-auth                        | >=1.0.0,<2.0.0     | -| google-auth-httplib2               | >=0.0.1            | -| google-cloud-automl                | >=0.4.0,<2.0.0     | -| google-cloud-bigquery-datatransfer | >=0.4.0,<2.0.0     | -| google-cloud-bigtable              | >=1.0.0,<2.0.0     | -| google-cloud-container             | >=0.1.1,<2.0.0     | -| google-cloud-datacatalog           | >=0.5.0, <0.8      | -| google-cloud-dataproc              | >=1.0.1,<2.0.0     | -| google-cloud-dlp                   | >=0.11.0,<2.0.0    | -| google-cloud-kms                   | >=1.2.1,<2.0.0     | -| google-cloud-language              | >=1.1.1,<2.0.0     | -| google-cloud-logging               | >=1.14.0,<2.0.0    | -| google-cloud-memcache              | >=0.2.0            | -| google-cloud-monitoring            | >=0.34.0,<2.0.0    | -| google-cloud-os-login              | >=1.0.0,<2.0.0     | -| google-cloud-pubsub                | >=1.0.0,<2.0.0     | -| google-cloud-redis                 | >=0.3.0,<2.0.0     | -| google-cloud-secret-manager        | >=0.2.0,<2.0.0     | -| google-cloud-spanner               | >=1.10.0,<2.0.0    | -| google-cloud-speech                | >=0.36.3,<2.0.0    | -| google-cloud-storage               | >=1.16,<2.0.0      | -| google-cloud-tasks                 | >=1.2.1,<2.0.0     | -| google-cloud-texttospeech          | >=0.4.0,<2.0.0     | -| google-cloud-translate             | >=1.5.0,<2.0.0     | -| google-cloud-videointelligence     | >=1.7.0,<2.0.0     | -| google-cloud-vision                | >=0.35.2,<2.0.0    | -| grpcio-gcp                         | >=0.2.2            | -| pandas-gbq                         |                    | - -## Cross provider package dependencies - -These are dependencies that may be needed in order to use all the features of the package. -You need to install the specified provider packages in order to use them. - -You can install such cross-provider dependencies when installing from PyPI.
For example: - -```bash -pip install apache-airflow-providers-google[amazon] -``` - -| Dependent package                                                                                                 | Extra            | |:------------------------------------------------------------------------------------------------------------------|:-----------------| -| [apache-airflow-providers-amazon](https://pypi.org/project/apache-airflow-providers-amazon)                       | amazon           | -| [apache-airflow-providers-apache-cassandra](https://pypi.org/project/apache-airflow-providers-apache-cassandra)   | apache.cassandra | -| [apache-airflow-providers-cncf-kubernetes](https://pypi.org/project/apache-airflow-providers-cncf-kubernetes)     | cncf.kubernetes  | -| [apache-airflow-providers-facebook](https://pypi.org/project/apache-airflow-providers-facebook)                   | facebook         | -| [apache-airflow-providers-microsoft-azure](https://pypi.org/project/apache-airflow-providers-microsoft-azure)     | microsoft.azure  | -| [apache-airflow-providers-microsoft-mssql](https://pypi.org/project/apache-airflow-providers-microsoft-mssql)     | microsoft.mssql  | -| [apache-airflow-providers-mysql](https://pypi.org/project/apache-airflow-providers-mysql)                         | mysql            | -| [apache-airflow-providers-postgres](https://pypi.org/project/apache-airflow-providers-postgres)                   | postgres         | -| [apache-airflow-providers-presto](https://pypi.org/project/apache-airflow-providers-presto)                       | presto           | -| [apache-airflow-providers-salesforce](https://pypi.org/project/apache-airflow-providers-salesforce)               | salesforce       | -| [apache-airflow-providers-sftp](https://pypi.org/project/apache-airflow-providers-sftp)                           | sftp             | -| [apache-airflow-providers-ssh](https://pypi.org/project/apache-airflow-providers-ssh)                             | ssh              | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, and secrets for the `google` provider -are in the `airflow.providers.google` package.
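In practice, upgrading a DAG to the new package usually means changing only an import. Below is a minimal sketch of the migration for one of the moved operators; the old and new locations come from the "Moved operators" table further down, while the task id and query arguments are illustrative only:

```python
# Airflow 1.10.* import (the contrib location, removed in Airflow 2.0):
#   from airflow.contrib.operators.bigquery_operator import BigQueryOperator
# Airflow 2.0 import from the google provider package:
from airflow.providers.google.cloud.operators.bigquery import BigQueryExecuteQueryOperator

# Same operator under its new name: BigQueryOperator became
# BigQueryExecuteQueryOperator. The arguments below are illustrative.
run_query = BigQueryExecuteQueryOperator(
    task_id="run_example_query",
    sql="SELECT 1",
    use_legacy_sql=False,
)
```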
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - -### New operators - -| New Airflow 2.0 operators: `airflow.providers.google` package | -|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [ads.operators.ads.GoogleAdsListAccountsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/ads/operators/ads.py) | -| [cloud.operators.automl.AutoMLBatchPredictOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py) | -| [cloud.operators.automl.AutoMLCreateDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py) | -| [cloud.operators.automl.AutoMLDeleteDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py) | -| [cloud.operators.automl.AutoMLDeleteModelOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py) | -| [cloud.operators.automl.AutoMLDeployModelOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py) | -| [cloud.operators.automl.AutoMLGetModelOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py) | -| [cloud.operators.automl.AutoMLImportDataOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py) | -| [cloud.operators.automl.AutoMLListDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py) | -| [cloud.operators.automl.AutoMLPredictOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py) | -| [cloud.operators.automl.AutoMLTablesListColumnSpecsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py) | -| [cloud.operators.automl.AutoMLTablesListTableSpecsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py) | -| [cloud.operators.automl.AutoMLTablesUpdateDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py) | -| [cloud.operators.automl.AutoMLTrainModelOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/automl.py) | -| [cloud.operators.bigquery.BigQueryInsertJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | -| [cloud.operators.bigquery_dts.BigQueryCreateDataTransferOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery_dts.py) | -| [cloud.operators.bigquery_dts.BigQueryDataTransferServiceStartTransferRunsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery_dts.py) | -| [cloud.operators.bigquery_dts.BigQueryDeleteDataTransferConfigOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery_dts.py) | -| 
[cloud.operators.bigtable.BigtableUpdateInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigtable.py) | -| [cloud.operators.cloud_memorystore.CloudMemorystoreCreateInstanceAndImportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py) | -| [cloud.operators.cloud_memorystore.CloudMemorystoreCreateInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py) | -| [cloud.operators.cloud_memorystore.CloudMemorystoreDeleteInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py) | -| [cloud.operators.cloud_memorystore.CloudMemorystoreExportAndDeleteInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py) | -| [cloud.operators.cloud_memorystore.CloudMemorystoreExportInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py) | -| [cloud.operators.cloud_memorystore.CloudMemorystoreFailoverInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py) | -| [cloud.operators.cloud_memorystore.CloudMemorystoreGetInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py) | -| [cloud.operators.cloud_memorystore.CloudMemorystoreImportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py) | -| [cloud.operators.cloud_memorystore.CloudMemorystoreListInstancesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py) | -| [cloud.operators.cloud_memorystore.CloudMemorystoreMemcachedApplyParametersOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py) | -| [cloud.operators.cloud_memorystore.CloudMemorystoreMemcachedCreateInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py) | -| [cloud.operators.cloud_memorystore.CloudMemorystoreMemcachedDeleteInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py) | -| [cloud.operators.cloud_memorystore.CloudMemorystoreMemcachedGetInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py) | -| [cloud.operators.cloud_memorystore.CloudMemorystoreMemcachedListInstancesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py) | -| [cloud.operators.cloud_memorystore.CloudMemorystoreMemcachedUpdateInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py) | -| [cloud.operators.cloud_memorystore.CloudMemorystoreMemcachedUpdateParametersOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py) | -| [cloud.operators.cloud_memorystore.CloudMemorystoreScaleInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py) | -| 
[cloud.operators.cloud_memorystore.CloudMemorystoreUpdateInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_memorystore.py) | -| [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceGCSToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py) | -| [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceS3ToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py) | -| [cloud.operators.datacatalog.CloudDataCatalogCreateEntryGroupOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| [cloud.operators.datacatalog.CloudDataCatalogCreateEntryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| [cloud.operators.datacatalog.CloudDataCatalogCreateTagOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| [cloud.operators.datacatalog.CloudDataCatalogCreateTagTemplateFieldOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| [cloud.operators.datacatalog.CloudDataCatalogCreateTagTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| [cloud.operators.datacatalog.CloudDataCatalogDeleteEntryGroupOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| [cloud.operators.datacatalog.CloudDataCatalogDeleteEntryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| [cloud.operators.datacatalog.CloudDataCatalogDeleteTagOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| [cloud.operators.datacatalog.CloudDataCatalogDeleteTagTemplateFieldOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| [cloud.operators.datacatalog.CloudDataCatalogDeleteTagTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| [cloud.operators.datacatalog.CloudDataCatalogGetEntryGroupOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| [cloud.operators.datacatalog.CloudDataCatalogGetEntryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| [cloud.operators.datacatalog.CloudDataCatalogGetTagTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| [cloud.operators.datacatalog.CloudDataCatalogListTagsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| [cloud.operators.datacatalog.CloudDataCatalogLookupEntryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| [cloud.operators.datacatalog.CloudDataCatalogRenameTagTemplateFieldOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| 
[cloud.operators.datacatalog.CloudDataCatalogSearchCatalogOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| [cloud.operators.datacatalog.CloudDataCatalogUpdateEntryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| [cloud.operators.datacatalog.CloudDataCatalogUpdateTagOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| [cloud.operators.datacatalog.CloudDataCatalogUpdateTagTemplateFieldOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| [cloud.operators.datacatalog.CloudDataCatalogUpdateTagTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datacatalog.py) | -| [cloud.operators.dataflow.DataflowStartFlexTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataflow.py) | -| [cloud.operators.dataflow.DataflowStartSqlJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataflow.py) | -| [cloud.operators.datafusion.CloudDataFusionCreateInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datafusion.py) | -| [cloud.operators.datafusion.CloudDataFusionCreatePipelineOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datafusion.py) | -| [cloud.operators.datafusion.CloudDataFusionDeleteInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datafusion.py) | -| [cloud.operators.datafusion.CloudDataFusionDeletePipelineOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datafusion.py) | -| [cloud.operators.datafusion.CloudDataFusionGetInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datafusion.py) | -| [cloud.operators.datafusion.CloudDataFusionListPipelinesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datafusion.py) | -| [cloud.operators.datafusion.CloudDataFusionRestartInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datafusion.py) | -| [cloud.operators.datafusion.CloudDataFusionStartPipelineOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datafusion.py) | -| [cloud.operators.datafusion.CloudDataFusionStopPipelineOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datafusion.py) | -| [cloud.operators.datafusion.CloudDataFusionUpdateInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datafusion.py) | -| [cloud.operators.dataprep.DataprepGetJobGroupOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataprep.py) | -| [cloud.operators.dataprep.DataprepGetJobsForJobGroupOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataprep.py) | -| [cloud.operators.dataprep.DataprepRunJobGroupOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataprep.py) | -| 
[cloud.operators.dataproc.DataprocSubmitJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py) | -| [cloud.operators.dataproc.DataprocUpdateClusterOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py) | -| [cloud.operators.datastore.CloudDatastoreAllocateIdsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datastore.py) | -| [cloud.operators.datastore.CloudDatastoreBeginTransactionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datastore.py) | -| [cloud.operators.datastore.CloudDatastoreCommitOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datastore.py) | -| [cloud.operators.datastore.CloudDatastoreDeleteOperationOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datastore.py) | -| [cloud.operators.datastore.CloudDatastoreGetOperationOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datastore.py) | -| [cloud.operators.datastore.CloudDatastoreRollbackOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datastore.py) | -| [cloud.operators.datastore.CloudDatastoreRunQueryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datastore.py) | -| [cloud.operators.functions.CloudFunctionInvokeFunctionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/functions.py) | -| [cloud.operators.gcs.GCSDeleteBucketOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs.py) | -| [cloud.operators.gcs.GCSFileTransformOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs.py) | -| [cloud.operators.gcs.GCSSynchronizeBucketsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs.py) | -| [cloud.operators.life_sciences.LifeSciencesRunPipelineOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/life_sciences.py) | -| [cloud.operators.mlengine.MLEngineCreateModelOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py) | -| [cloud.operators.mlengine.MLEngineCreateVersionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py) | -| [cloud.operators.mlengine.MLEngineDeleteModelOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py) | -| [cloud.operators.mlengine.MLEngineDeleteVersionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py) | -| [cloud.operators.mlengine.MLEngineGetModelOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py) | -| [cloud.operators.mlengine.MLEngineListVersionsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py) | -| [cloud.operators.mlengine.MLEngineSetDefaultVersionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py) | -| 
[cloud.operators.mlengine.MLEngineTrainingCancelJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py) | -| [cloud.operators.pubsub.PubSubPullOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/pubsub.py) | -| [cloud.operators.stackdriver.StackdriverDeleteAlertOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/stackdriver.py) | -| [cloud.operators.stackdriver.StackdriverDeleteNotificationChannelOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/stackdriver.py) | -| [cloud.operators.stackdriver.StackdriverDisableAlertPoliciesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/stackdriver.py) | -| [cloud.operators.stackdriver.StackdriverDisableNotificationChannelsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/stackdriver.py) | -| [cloud.operators.stackdriver.StackdriverEnableAlertPoliciesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/stackdriver.py) | -| [cloud.operators.stackdriver.StackdriverEnableNotificationChannelsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/stackdriver.py) | -| [cloud.operators.stackdriver.StackdriverListAlertPoliciesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/stackdriver.py) | -| [cloud.operators.stackdriver.StackdriverListNotificationChannelsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/stackdriver.py) | -| [cloud.operators.stackdriver.StackdriverUpsertAlertOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/stackdriver.py) | -| [cloud.operators.stackdriver.StackdriverUpsertNotificationChannelOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/stackdriver.py) | -| [cloud.operators.tasks.CloudTasksQueueCreateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py) | -| [cloud.operators.tasks.CloudTasksQueueDeleteOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py) | -| [cloud.operators.tasks.CloudTasksQueueGetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py) | -| [cloud.operators.tasks.CloudTasksQueuePauseOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py) | -| [cloud.operators.tasks.CloudTasksQueuePurgeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py) | -| [cloud.operators.tasks.CloudTasksQueueResumeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py) | -| [cloud.operators.tasks.CloudTasksQueueUpdateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py) | -| [cloud.operators.tasks.CloudTasksQueuesListOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py) | -| [cloud.operators.tasks.CloudTasksTaskCreateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py) | -| 
[cloud.operators.tasks.CloudTasksTaskDeleteOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py) | -| [cloud.operators.tasks.CloudTasksTaskGetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py) | -| [cloud.operators.tasks.CloudTasksTaskRunOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py) | -| [cloud.operators.tasks.CloudTasksTasksListOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/tasks.py) | -| [cloud.operators.vision.CloudVisionAddProductToProductSetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | -| [cloud.operators.vision.CloudVisionDeleteReferenceImageOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | -| [firebase.operators.firestore.CloudFirestoreExportDatabaseOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/firebase/operators/firestore.py) | -| [marketing_platform.operators.analytics.GoogleAnalyticsDataImportUploadOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/analytics.py) | -| [marketing_platform.operators.analytics.GoogleAnalyticsDeletePreviousDataUploadsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/analytics.py) | -| [marketing_platform.operators.analytics.GoogleAnalyticsGetAdsLinkOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/analytics.py) | -| [marketing_platform.operators.analytics.GoogleAnalyticsListAccountsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/analytics.py) | -| [marketing_platform.operators.analytics.GoogleAnalyticsModifyFileHeadersDataImportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/analytics.py) | -| [marketing_platform.operators.analytics.GoogleAnalyticsRetrieveAdsLinksListOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/analytics.py) | -| [marketing_platform.operators.campaign_manager.GoogleCampaignManagerBatchInsertConversionsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/campaign_manager.py) | -| [marketing_platform.operators.campaign_manager.GoogleCampaignManagerBatchUpdateConversionsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/campaign_manager.py) | -| [marketing_platform.operators.campaign_manager.GoogleCampaignManagerDeleteReportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/campaign_manager.py) | -| [marketing_platform.operators.campaign_manager.GoogleCampaignManagerDownloadReportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/campaign_manager.py) | -| [marketing_platform.operators.campaign_manager.GoogleCampaignManagerInsertReportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/campaign_manager.py) | -| 
[marketing_platform.operators.campaign_manager.GoogleCampaignManagerRunReportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/campaign_manager.py) | -| [marketing_platform.operators.display_video.GoogleDisplayVideo360CreateReportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/display_video.py) | -| [marketing_platform.operators.display_video.GoogleDisplayVideo360CreateSDFDownloadTaskOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/display_video.py) | -| [marketing_platform.operators.display_video.GoogleDisplayVideo360DeleteReportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/display_video.py) | -| [marketing_platform.operators.display_video.GoogleDisplayVideo360DownloadLineItemsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/display_video.py) | -| [marketing_platform.operators.display_video.GoogleDisplayVideo360DownloadReportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/display_video.py) | -| [marketing_platform.operators.display_video.GoogleDisplayVideo360RunReportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/display_video.py) | -| [marketing_platform.operators.display_video.GoogleDisplayVideo360SDFtoGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/display_video.py) | -| [marketing_platform.operators.display_video.GoogleDisplayVideo360UploadLineItemsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/display_video.py) | -| [marketing_platform.operators.search_ads.GoogleSearchAdsDownloadReportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/search_ads.py) | -| [marketing_platform.operators.search_ads.GoogleSearchAdsInsertReportOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/operators/search_ads.py) | -| [suite.operators.sheets.GoogleSheetsCreateSpreadsheetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/suite/operators/sheets.py) | - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.google` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [cloud.operators.bigquery.BigQueryCheckOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | [contrib.operators.bigquery_check_operator.BigQueryCheckOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_check_operator.py) | -| 
[cloud.operators.bigquery.BigQueryCreateEmptyDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | [contrib.operators.bigquery_operator.BigQueryCreateEmptyDatasetOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py) | -| [cloud.operators.bigquery.BigQueryCreateEmptyTableOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | [contrib.operators.bigquery_operator.BigQueryCreateEmptyTableOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py) | -| [cloud.operators.bigquery.BigQueryCreateExternalTableOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | [contrib.operators.bigquery_operator.BigQueryCreateExternalTableOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py) | -| [cloud.operators.bigquery.BigQueryDeleteDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | [contrib.operators.bigquery_operator.BigQueryDeleteDatasetOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py) | -| [cloud.operators.bigquery.BigQueryDeleteTableOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | [contrib.operators.bigquery_table_delete_operator.BigQueryTableDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_table_delete_operator.py) | -| [cloud.operators.bigquery.BigQueryExecuteQueryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | [contrib.operators.bigquery_operator.BigQueryOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py) | -| [cloud.operators.bigquery.BigQueryGetDataOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | [contrib.operators.bigquery_get_data.BigQueryGetDataOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_get_data.py) | -| [cloud.operators.bigquery.BigQueryGetDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | [contrib.operators.bigquery_operator.BigQueryGetDatasetOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py) | -| [cloud.operators.bigquery.BigQueryGetDatasetTablesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | [contrib.operators.bigquery_operator.BigQueryGetDatasetTablesOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py) | -| [cloud.operators.bigquery.BigQueryIntervalCheckOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | [contrib.operators.bigquery_check_operator.BigQueryIntervalCheckOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_check_operator.py) | -| [cloud.operators.bigquery.BigQueryPatchDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | 
[contrib.operators.bigquery_operator.BigQueryPatchDatasetOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py) | -| [cloud.operators.bigquery.BigQueryUpdateDatasetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | [contrib.operators.bigquery_operator.BigQueryUpdateDatasetOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py) | -| [cloud.operators.bigquery.BigQueryUpsertTableOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | [contrib.operators.bigquery_operator.BigQueryUpsertTableOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_operator.py) | -| [cloud.operators.bigquery.BigQueryValueCheckOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigquery.py) | [contrib.operators.bigquery_check_operator.BigQueryValueCheckOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_check_operator.py) | -| [cloud.operators.bigtable.BigtableCreateInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigtable.py) | [contrib.operators.gcp_bigtable_operator.BigtableInstanceCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_bigtable_operator.py) | -| [cloud.operators.bigtable.BigtableCreateTableOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigtable.py) | [contrib.operators.gcp_bigtable_operator.BigtableTableCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_bigtable_operator.py) | -| [cloud.operators.bigtable.BigtableDeleteInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigtable.py) | [contrib.operators.gcp_bigtable_operator.BigtableInstanceDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_bigtable_operator.py) | -| [cloud.operators.bigtable.BigtableDeleteTableOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigtable.py) | [contrib.operators.gcp_bigtable_operator.BigtableTableDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_bigtable_operator.py) | -| [cloud.operators.bigtable.BigtableUpdateClusterOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/bigtable.py) | [contrib.operators.gcp_bigtable_operator.BigtableClusterUpdateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_bigtable_operator.py) | -| [cloud.operators.cloud_build.CloudBuildCreateBuildOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_build.py) | [contrib.operators.gcp_cloud_build_operator.CloudBuildCreateBuildOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_cloud_build_operator.py) | -| [cloud.operators.cloud_sql.CloudSQLBaseOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_sql.py) | [contrib.operators.gcp_sql_operator.CloudSqlBaseOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_sql_operator.py) | -| 
[cloud.operators.cloud_sql.CloudSQLCreateInstanceDatabaseOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_sql.py) | [contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabaseCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_sql_operator.py) | -| [cloud.operators.cloud_sql.CloudSQLCreateInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_sql.py) | [contrib.operators.gcp_sql_operator.CloudSqlInstanceCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_sql_operator.py) | -| [cloud.operators.cloud_sql.CloudSQLDeleteInstanceDatabaseOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_sql.py) | [contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabaseDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_sql_operator.py) | -| [cloud.operators.cloud_sql.CloudSQLDeleteInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_sql.py) | [contrib.operators.gcp_sql_operator.CloudSqlInstanceDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_sql_operator.py) | -| [cloud.operators.cloud_sql.CloudSQLExecuteQueryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_sql.py) | [contrib.operators.gcp_sql_operator.CloudSqlQueryOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_sql_operator.py) | -| [cloud.operators.cloud_sql.CloudSQLExportInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_sql.py) | [contrib.operators.gcp_sql_operator.CloudSqlInstanceExportOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_sql_operator.py) | -| [cloud.operators.cloud_sql.CloudSQLImportInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_sql.py) | [contrib.operators.gcp_sql_operator.CloudSqlInstanceImportOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_sql_operator.py) | -| [cloud.operators.cloud_sql.CloudSQLInstancePatchOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_sql.py) | [contrib.operators.gcp_sql_operator.CloudSqlInstancePatchOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_sql_operator.py) | -| [cloud.operators.cloud_sql.CloudSQLPatchInstanceDatabaseOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_sql.py) | [contrib.operators.gcp_sql_operator.CloudSqlInstanceDatabasePatchOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_sql_operator.py) | -| [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceCancelOperationOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py) | [contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationCancelOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py) | -| 
[cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceCreateJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py) | [contrib.operators.gcp_transfer_operator.GcpTransferServiceJobCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py) | -| [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceDeleteJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py) | [contrib.operators.gcp_transfer_operator.GcpTransferServiceJobDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py) | -| [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceGetOperationOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py) | [contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationGetOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py) | -| [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceListOperationsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py) | [contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationsListOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py) | -| [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServicePauseOperationOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py) | [contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationPauseOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py) | -| [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceResumeOperationOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py) | [contrib.operators.gcp_transfer_operator.GcpTransferServiceOperationResumeOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py) | -| [cloud.operators.cloud_storage_transfer_service.CloudDataTransferServiceUpdateJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/cloud_storage_transfer_service.py) | [contrib.operators.gcp_transfer_operator.GcpTransferServiceJobUpdateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_transfer_operator.py) | -| [cloud.operators.compute.ComputeEngineBaseOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/compute.py) | [contrib.operators.gcp_compute_operator.GceBaseOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_compute_operator.py) | -| [cloud.operators.compute.ComputeEngineCopyInstanceTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/compute.py) | [contrib.operators.gcp_compute_operator.GceInstanceTemplateCopyOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_compute_operator.py) | -| 
[cloud.operators.compute.ComputeEngineInstanceGroupUpdateManagerTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/compute.py) | [contrib.operators.gcp_compute_operator.GceInstanceGroupManagerUpdateTemplateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_compute_operator.py) | -| [cloud.operators.compute.ComputeEngineSetMachineTypeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/compute.py) | [contrib.operators.gcp_compute_operator.GceSetMachineTypeOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_compute_operator.py) | -| [cloud.operators.compute.ComputeEngineStartInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/compute.py) | [contrib.operators.gcp_compute_operator.GceInstanceStartOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_compute_operator.py) | -| [cloud.operators.compute.ComputeEngineStopInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/compute.py) | [contrib.operators.gcp_compute_operator.GceInstanceStopOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_compute_operator.py) | -| [cloud.operators.dataflow.DataflowCreateJavaJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataflow.py) | [contrib.operators.dataflow_operator.DataFlowJavaOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataflow_operator.py) | -| [cloud.operators.dataflow.DataflowCreatePythonJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataflow.py) | [contrib.operators.dataflow_operator.DataFlowPythonOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataflow_operator.py) | -| [cloud.operators.dataflow.DataflowTemplatedJobStartOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataflow.py) | [contrib.operators.dataflow_operator.DataflowTemplateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataflow_operator.py) | -| [cloud.operators.dataproc.DataprocCreateClusterOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py) | [contrib.operators.dataproc_operator.DataprocClusterCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataproc_operator.py) | -| [cloud.operators.dataproc.DataprocDeleteClusterOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py) | [contrib.operators.dataproc_operator.DataprocClusterDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataproc_operator.py) | -| [cloud.operators.dataproc.DataprocInstantiateInlineWorkflowTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py) | [contrib.operators.dataproc_operator.DataprocWorkflowTemplateInstantiateInlineOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataproc_operator.py) | -| 
[cloud.operators.dataproc.DataprocInstantiateWorkflowTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py) | [contrib.operators.dataproc_operator.DataprocWorkflowTemplateInstantiateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataproc_operator.py) | -| [cloud.operators.dataproc.DataprocJobBaseOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py) | [contrib.operators.dataproc_operator.DataProcJobBaseOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataproc_operator.py) | -| [cloud.operators.dataproc.DataprocScaleClusterOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py) | [contrib.operators.dataproc_operator.DataprocClusterScaleOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataproc_operator.py) | -| [cloud.operators.dataproc.DataprocSubmitHadoopJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py) | [contrib.operators.dataproc_operator.DataProcHadoopOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataproc_operator.py) | -| [cloud.operators.dataproc.DataprocSubmitHiveJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py) | [contrib.operators.dataproc_operator.DataProcHiveOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataproc_operator.py) | -| [cloud.operators.dataproc.DataprocSubmitPigJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py) | [contrib.operators.dataproc_operator.DataProcPigOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataproc_operator.py) | -| [cloud.operators.dataproc.DataprocSubmitPySparkJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py) | [contrib.operators.dataproc_operator.DataProcPySparkOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataproc_operator.py) | -| [cloud.operators.dataproc.DataprocSubmitSparkJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py) | [contrib.operators.dataproc_operator.DataProcSparkOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataproc_operator.py) | -| [cloud.operators.dataproc.DataprocSubmitSparkSqlJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dataproc.py) | [contrib.operators.dataproc_operator.DataProcSparkSqlOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/dataproc_operator.py) | -| [cloud.operators.datastore.CloudDatastoreExportEntitiesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datastore.py) | [contrib.operators.datastore_export_operator.DatastoreExportOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/datastore_export_operator.py) | -| [cloud.operators.datastore.CloudDatastoreImportEntitiesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/datastore.py) | 
[contrib.operators.datastore_import_operator.DatastoreImportOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/datastore_import_operator.py) | -| [cloud.operators.dlp.CloudDLPCancelDLPJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPCancelDLPJobOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) | -| [cloud.operators.dlp.CloudDLPCreateDLPJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPCreateDLPJobOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) | -| [cloud.operators.dlp.CloudDLPCreateDeidentifyTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPCreateDeidentifyTemplateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) | -| [cloud.operators.dlp.CloudDLPCreateInspectTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPCreateInspectTemplateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) | -| [cloud.operators.dlp.CloudDLPCreateJobTriggerOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPCreateJobTriggerOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) | -| [cloud.operators.dlp.CloudDLPCreateStoredInfoTypeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPCreateStoredInfoTypeOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) | -| [cloud.operators.dlp.CloudDLPDeidentifyContentOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPDeidentifyContentOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) | -| [cloud.operators.dlp.CloudDLPDeleteDLPJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPDeleteDlpJobOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) | -| [cloud.operators.dlp.CloudDLPDeleteDeidentifyTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPDeleteDeidentifyTemplateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) | -| [cloud.operators.dlp.CloudDLPDeleteInspectTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPDeleteInspectTemplateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) | -| 
[cloud.operators.dlp.CloudDLPDeleteJobTriggerOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPDeleteJobTriggerOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) | -| [cloud.operators.dlp.CloudDLPDeleteStoredInfoTypeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPDeleteStoredInfoTypeOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) | -| [cloud.operators.dlp.CloudDLPGetDLPJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPGetDlpJobOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) | -| [cloud.operators.dlp.CloudDLPGetDLPJobTriggerOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPGetJobTripperOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) | -| [cloud.operators.dlp.CloudDLPGetDeidentifyTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPGetDeidentifyTemplateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) | -| [cloud.operators.dlp.CloudDLPGetInspectTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPGetInspectTemplateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) | -| [cloud.operators.dlp.CloudDLPGetStoredInfoTypeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPGetStoredInfoTypeOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) | -| [cloud.operators.dlp.CloudDLPInspectContentOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPInspectContentOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) | -| [cloud.operators.dlp.CloudDLPListDLPJobsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPListDlpJobsOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) | -| [cloud.operators.dlp.CloudDLPListDeidentifyTemplatesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPListDeidentifyTemplatesOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) | -| [cloud.operators.dlp.CloudDLPListInfoTypesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | 
[contrib.operators.gcp_dlp_operator.CloudDLPListInfoTypesOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) | -| [cloud.operators.dlp.CloudDLPListInspectTemplatesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPListInspectTemplatesOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) | -| [cloud.operators.dlp.CloudDLPListJobTriggersOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPListJobTriggersOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) | -| [cloud.operators.dlp.CloudDLPListStoredInfoTypesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPListStoredInfoTypesOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) | -| [cloud.operators.dlp.CloudDLPRedactImageOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPRedactImageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) | -| [cloud.operators.dlp.CloudDLPReidentifyContentOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPReidentifyContentOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) | -| [cloud.operators.dlp.CloudDLPUpdateDeidentifyTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPUpdateDeidentifyTemplateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) | -| [cloud.operators.dlp.CloudDLPUpdateInspectTemplateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPUpdateInspectTemplateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) | -| [cloud.operators.dlp.CloudDLPUpdateJobTriggerOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPUpdateJobTriggerOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) | -| [cloud.operators.dlp.CloudDLPUpdateStoredInfoTypeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/dlp.py) | [contrib.operators.gcp_dlp_operator.CloudDLPUpdateStoredInfoTypeOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_dlp_operator.py) | -| [cloud.operators.functions.CloudFunctionDeleteFunctionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/functions.py) | [contrib.operators.gcp_function_operator.GcfFunctionDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_function_operator.py) | -| 
[cloud.operators.functions.CloudFunctionDeployFunctionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/functions.py) | [contrib.operators.gcp_function_operator.GcfFunctionDeployOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_function_operator.py) | -| [cloud.operators.gcs.GCSBucketCreateAclEntryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs.py) | [contrib.operators.gcs_acl_operator.GoogleCloudStorageBucketCreateAclEntryOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_acl_operator.py) | -| [cloud.operators.gcs.GCSCreateBucketOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs.py) | [contrib.operators.gcs_operator.GoogleCloudStorageCreateBucketOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_operator.py) | -| [cloud.operators.gcs.GCSDeleteObjectsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs.py) | [contrib.operators.gcs_delete_operator.GoogleCloudStorageDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_delete_operator.py) | -| [cloud.operators.gcs.GCSListObjectsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs.py) | [contrib.operators.gcs_list_operator.GoogleCloudStorageListOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_list_operator.py) | -| [cloud.operators.gcs.GCSObjectCreateAclEntryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/gcs.py) | [contrib.operators.gcs_acl_operator.GoogleCloudStorageObjectCreateAclEntryOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_acl_operator.py) | -| [cloud.operators.kubernetes_engine.GKECreateClusterOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/kubernetes_engine.py) | [contrib.operators.gcp_container_operator.GKEClusterCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_container_operator.py) | -| [cloud.operators.kubernetes_engine.GKEDeleteClusterOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/kubernetes_engine.py) | [contrib.operators.gcp_container_operator.GKEClusterDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_container_operator.py) | -| [cloud.operators.kubernetes_engine.GKEStartPodOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/kubernetes_engine.py) | [contrib.operators.gcp_container_operator.GKEPodOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_container_operator.py) | -| [cloud.operators.mlengine.MLEngineManageModelOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py) | [contrib.operators.mlengine_operator.MLEngineModelOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/mlengine_operator.py) | -| [cloud.operators.mlengine.MLEngineManageVersionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py) | 
[contrib.operators.mlengine_operator.MLEngineVersionOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/mlengine_operator.py) | -| [cloud.operators.mlengine.MLEngineStartBatchPredictionJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py) | [contrib.operators.mlengine_operator.MLEngineBatchPredictionOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/mlengine_operator.py) | -| [cloud.operators.mlengine.MLEngineStartTrainingJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/mlengine.py) | [contrib.operators.mlengine_operator.MLEngineTrainingOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/mlengine_operator.py) | -| [cloud.operators.natural_language.CloudNaturalLanguageAnalyzeEntitiesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/natural_language.py) | [contrib.operators.gcp_natural_language_operator.CloudLanguageAnalyzeEntitiesOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_natural_language_operator.py) | -| [cloud.operators.natural_language.CloudNaturalLanguageAnalyzeEntitySentimentOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/natural_language.py) | [contrib.operators.gcp_natural_language_operator.CloudLanguageAnalyzeEntitySentimentOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_natural_language_operator.py) | -| [cloud.operators.natural_language.CloudNaturalLanguageAnalyzeSentimentOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/natural_language.py) | [contrib.operators.gcp_natural_language_operator.CloudLanguageAnalyzeSentimentOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_natural_language_operator.py) | -| [cloud.operators.natural_language.CloudNaturalLanguageClassifyTextOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/natural_language.py) | [contrib.operators.gcp_natural_language_operator.CloudLanguageClassifyTextOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_natural_language_operator.py) | -| [cloud.operators.pubsub.PubSubCreateSubscriptionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/pubsub.py) | [contrib.operators.pubsub_operator.PubSubSubscriptionCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/pubsub_operator.py) | -| [cloud.operators.pubsub.PubSubCreateTopicOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/pubsub.py) | [contrib.operators.pubsub_operator.PubSubTopicCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/pubsub_operator.py) | -| [cloud.operators.pubsub.PubSubDeleteSubscriptionOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/pubsub.py) | [contrib.operators.pubsub_operator.PubSubSubscriptionDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/pubsub_operator.py) | -| [cloud.operators.pubsub.PubSubDeleteTopicOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/pubsub.py) | 
[contrib.operators.pubsub_operator.PubSubTopicDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/pubsub_operator.py) | -| [cloud.operators.pubsub.PubSubPublishMessageOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/pubsub.py) | [contrib.operators.pubsub_operator.PubSubPublishOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/pubsub_operator.py) | -| [cloud.operators.spanner.SpannerDeleteDatabaseInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/spanner.py) | [contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_spanner_operator.py) | -| [cloud.operators.spanner.SpannerDeleteInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/spanner.py) | [contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_spanner_operator.py) | -| [cloud.operators.spanner.SpannerDeployDatabaseInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/spanner.py) | [contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseDeployOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_spanner_operator.py) | -| [cloud.operators.spanner.SpannerDeployInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/spanner.py) | [contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDeployOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_spanner_operator.py) | -| [cloud.operators.spanner.SpannerQueryDatabaseInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/spanner.py) | [contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseQueryOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_spanner_operator.py) | -| [cloud.operators.spanner.SpannerUpdateDatabaseInstanceOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/spanner.py) | [contrib.operators.gcp_spanner_operator.CloudSpannerInstanceDatabaseUpdateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_spanner_operator.py) | -| [cloud.operators.speech_to_text.CloudSpeechToTextRecognizeSpeechOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/speech_to_text.py) | [contrib.operators.gcp_speech_to_text_operator.GcpSpeechToTextRecognizeSpeechOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_speech_to_text_operator.py) | -| [cloud.operators.text_to_speech.CloudTextToSpeechSynthesizeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/text_to_speech.py) | [contrib.operators.gcp_text_to_speech_operator.GcpTextToSpeechSynthesizeOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_text_to_speech_operator.py) | -| [cloud.operators.translate.CloudTranslateTextOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/translate.py) | 
[contrib.operators.gcp_translate_operator.CloudTranslateTextOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_translate_operator.py) | -| [cloud.operators.translate_speech.CloudTranslateSpeechOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/translate_speech.py) | [contrib.operators.gcp_translate_speech_operator.CloudTranslateSpeechOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_translate_speech_operator.py) | -| [cloud.operators.video_intelligence.CloudVideoIntelligenceDetectVideoExplicitContentOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/video_intelligence.py) | [contrib.operators.gcp_video_intelligence_operator.CloudVideoIntelligenceDetectVideoExplicitContentOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_video_intelligence_operator.py) | -| [cloud.operators.video_intelligence.CloudVideoIntelligenceDetectVideoLabelsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/video_intelligence.py) | [contrib.operators.gcp_video_intelligence_operator.CloudVideoIntelligenceDetectVideoLabelsOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_video_intelligence_operator.py) | -| [cloud.operators.video_intelligence.CloudVideoIntelligenceDetectVideoShotsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/video_intelligence.py) | [contrib.operators.gcp_video_intelligence_operator.CloudVideoIntelligenceDetectVideoShotsOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_video_intelligence_operator.py) | -| [cloud.operators.vision.CloudVisionCreateProductOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | [contrib.operators.gcp_vision_operator.CloudVisionProductCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py) | -| [cloud.operators.vision.CloudVisionCreateProductSetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | [contrib.operators.gcp_vision_operator.CloudVisionProductSetCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py) | -| [cloud.operators.vision.CloudVisionCreateReferenceImageOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | [contrib.operators.gcp_vision_operator.CloudVisionReferenceImageCreateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py) | -| [cloud.operators.vision.CloudVisionDeleteProductOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | [contrib.operators.gcp_vision_operator.CloudVisionProductDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py) | -| [cloud.operators.vision.CloudVisionDeleteProductSetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | [contrib.operators.gcp_vision_operator.CloudVisionProductSetDeleteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py) | -| 
[cloud.operators.vision.CloudVisionDetectImageLabelsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | [contrib.operators.gcp_vision_operator.CloudVisionDetectImageLabelsOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py) |
-| [cloud.operators.vision.CloudVisionDetectImageSafeSearchOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | [contrib.operators.gcp_vision_operator.CloudVisionDetectImageSafeSearchOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py) |
-| [cloud.operators.vision.CloudVisionDetectTextOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | [contrib.operators.gcp_vision_operator.CloudVisionDetectTextOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py) |
-| [cloud.operators.vision.CloudVisionGetProductOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | [contrib.operators.gcp_vision_operator.CloudVisionProductGetOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py) |
-| [cloud.operators.vision.CloudVisionGetProductSetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | [contrib.operators.gcp_vision_operator.CloudVisionProductSetGetOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py) |
-| [cloud.operators.vision.CloudVisionImageAnnotateOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | [contrib.operators.gcp_vision_operator.CloudVisionAnnotateImageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py) |
-| [cloud.operators.vision.CloudVisionRemoveProductFromProductSetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | [contrib.operators.gcp_vision_operator.CloudVisionRemoveProductFromProductSetOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py) |
-| [cloud.operators.vision.CloudVisionTextDetectOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | [contrib.operators.gcp_vision_operator.CloudVisionDetectDocumentTextOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py) |
-| [cloud.operators.vision.CloudVisionUpdateProductOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | [contrib.operators.gcp_vision_operator.CloudVisionProductUpdateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py) |
-| [cloud.operators.vision.CloudVisionUpdateProductSetOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/operators/vision.py) | [contrib.operators.gcp_vision_operator.CloudVisionProductSetUpdateOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_vision_operator.py) |
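For the moved operators above, migrating a DAG is usually a one-line import change, plus the new class name where it was renamed. A minimal sketch, assuming the `apache-airflow-providers-google` package is installed; the task id and bucket name are illustrative only:

```python
# Airflow 1.10.*:
# from airflow.contrib.operators.gcs_operator import GoogleCloudStorageCreateBucketOperator

# Airflow 2.0:
from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator

create_bucket = GCSCreateBucketOperator(
    task_id="create_bucket",
    bucket_name="example-bucket",  # illustrative value
)
```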
-
-
-## Transfer operators
-
-
-### New transfer operators
-
-| New Airflow 2.0 transfers: `airflow.providers.google` package |
-|:--------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [ads.transfers.ads_to_gcs.GoogleAdsToGcsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/ads/transfers/ads_to_gcs.py) |
-| [cloud.transfers.azure_fileshare_to_gcs.AzureFileShareToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/azure_fileshare_to_gcs.py) |
-| [cloud.transfers.facebook_ads_to_gcs.FacebookAdsReportToGcsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/facebook_ads_to_gcs.py) |
-| [cloud.transfers.gcs_to_local.GCSToLocalFilesystemOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/gcs_to_local.py) |
-| [cloud.transfers.gcs_to_sftp.GCSToSFTPOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/gcs_to_sftp.py) |
-| [cloud.transfers.presto_to_gcs.PrestoToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/presto_to_gcs.py) |
-| [cloud.transfers.salesforce_to_gcs.SalesforceToGcsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/salesforce_to_gcs.py) |
-| [cloud.transfers.sftp_to_gcs.SFTPToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/sftp_to_gcs.py) |
-| [cloud.transfers.sheets_to_gcs.GoogleSheetsToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/sheets_to_gcs.py) |
-| [suite.transfers.gcs_to_sheets.GCSToGoogleSheetsOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/suite/transfers/gcs_to_sheets.py) |
-
-
-### Moved transfer operators
-
-| Airflow 2.0 transfers: `airflow.providers.google` package | Airflow 1.10.* previous location (usually `airflow.contrib`) |
-|:--------------------------------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [cloud.transfers.adls_to_gcs.ADLSToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/adls_to_gcs.py) | [contrib.operators.adls_to_gcs.AdlsToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/adls_to_gcs.py) |
-| [cloud.transfers.bigquery_to_bigquery.BigQueryToBigQueryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/bigquery_to_bigquery.py) | [contrib.operators.bigquery_to_bigquery.BigQueryToBigQueryOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_to_bigquery.py) |
-| [cloud.transfers.bigquery_to_gcs.BigQueryToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/bigquery_to_gcs.py) | [contrib.operators.bigquery_to_gcs.BigQueryToCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_to_gcs.py) |
-| [cloud.transfers.bigquery_to_mysql.BigQueryToMySqlOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/bigquery_to_mysql.py) | [contrib.operators.bigquery_to_mysql_operator.BigQueryToMySqlOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/bigquery_to_mysql_operator.py) |
-| [cloud.transfers.cassandra_to_gcs.CassandraToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/cassandra_to_gcs.py) | [contrib.operators.cassandra_to_gcs.CassandraToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/cassandra_to_gcs.py) |
-| [cloud.transfers.gcs_to_bigquery.GCSToBigQueryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/gcs_to_bigquery.py) | [contrib.operators.gcs_to_bq.GoogleCloudStorageToBigQueryOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_to_bq.py) |
-| [cloud.transfers.gcs_to_gcs.GCSToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/gcs_to_gcs.py) | [contrib.operators.gcs_to_gcs.GoogleCloudStorageToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_to_gcs.py) |
-| [cloud.transfers.local_to_gcs.LocalFilesystemToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/local_to_gcs.py) | [contrib.operators.file_to_gcs.FileToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/file_to_gcs.py) |
-| [cloud.transfers.mssql_to_gcs.MSSQLToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/mssql_to_gcs.py) | [contrib.operators.mssql_to_gcs.MsSqlToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/mssql_to_gcs.py) |
-| [cloud.transfers.mysql_to_gcs.MySQLToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/mysql_to_gcs.py) | [contrib.operators.mysql_to_gcs.MySqlToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/mysql_to_gcs.py) |
-| [cloud.transfers.postgres_to_gcs.PostgresToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/postgres_to_gcs.py) | [contrib.operators.postgres_to_gcs_operator.PostgresToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/postgres_to_gcs_operator.py) |
-| [cloud.transfers.s3_to_gcs.S3ToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/s3_to_gcs.py) | [contrib.operators.s3_to_gcs_operator.S3ToGCSOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/s3_to_gcs_operator.py) |
-| [cloud.transfers.sql_to_gcs.BaseSQLToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/transfers/sql_to_gcs.py) | [contrib.operators.sql_to_gcs.BaseSQLToGoogleCloudStorageOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sql_to_gcs.py) |
-| [suite.transfers.gcs_to_gdrive.GCSToGoogleDriveOperator](https://github.com/apache/airflow/blob/master/airflow/providers/google/suite/transfers/gcs_to_gdrive.py) | [contrib.operators.gcs_to_gdrive_operator.GCSToGoogleDriveOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcs_to_gdrive_operator.py) |
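The same import swap applies to the moved transfer operators. A minimal sketch for the local-filesystem-to-GCS transfer, with illustrative paths and bucket name:

```python
# Airflow 1.10.*:
# from airflow.contrib.operators.file_to_gcs import FileToGoogleCloudStorageOperator

# Airflow 2.0:
from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator

upload_file = LocalFilesystemToGCSOperator(
    task_id="upload_file",
    src="/tmp/example.csv",   # illustrative paths
    dst="data/example.csv",
    bucket="example-bucket",
)
```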
-
-
-## Sensors
-
-
-### New sensors
-
-| New Airflow 2.0 sensors: `airflow.providers.google` package |
-|:--------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [cloud.sensors.bigquery.BigQueryTablePartitionExistenceSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/bigquery.py) |
-| [cloud.sensors.bigquery_dts.BigQueryDataTransferServiceTransferRunSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/bigquery_dts.py) |
-| [cloud.sensors.dataflow.DataflowJobAutoScalingEventsSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/dataflow.py) |
-| [cloud.sensors.dataflow.DataflowJobMessagesSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/dataflow.py) |
-| [cloud.sensors.dataflow.DataflowJobMetricsSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/dataflow.py) |
-| [cloud.sensors.dataflow.DataflowJobStatusSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/dataflow.py) |
-| [cloud.sensors.dataproc.DataprocJobSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/dataproc.py) |
-| [marketing_platform.sensors.campaign_manager.GoogleCampaignManagerReportSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/sensors/campaign_manager.py) |
-| [marketing_platform.sensors.display_video.GoogleDisplayVideo360GetSDFDownloadOperationSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/sensors/display_video.py) |
-| [marketing_platform.sensors.display_video.GoogleDisplayVideo360ReportSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/sensors/display_video.py) |
-| [marketing_platform.sensors.search_ads.GoogleSearchAdsReportSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/sensors/search_ads.py) |
-
-
-### Moved sensors
-
-| Airflow 2.0 sensors: `airflow.providers.google` package | Airflow 1.10.* previous location (usually `airflow.contrib`) |
-|:--------------------------------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [cloud.sensors.bigquery.BigQueryTableExistenceSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/bigquery.py) | [contrib.sensors.bigquery_sensor.BigQueryTableSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/bigquery_sensor.py) |
-| [cloud.sensors.bigtable.BigtableTableReplicationCompletedSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/bigtable.py) | [contrib.operators.gcp_bigtable_operator.BigtableTableWaitForReplicationSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/gcp_bigtable_operator.py) |
-| [cloud.sensors.cloud_storage_transfer_service.CloudDataTransferServiceJobStatusSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/cloud_storage_transfer_service.py) | [contrib.sensors.gcp_transfer_sensor.GCPTransferServiceWaitForJobStatusSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/gcp_transfer_sensor.py) |
-| [cloud.sensors.gcs.GCSObjectExistenceSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/gcs.py) | [contrib.sensors.gcs_sensor.GoogleCloudStorageObjectSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/gcs_sensor.py) |
-| [cloud.sensors.gcs.GCSObjectUpdateSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/gcs.py) | [contrib.sensors.gcs_sensor.GoogleCloudStorageObjectUpdatedSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/gcs_sensor.py) |
-| [cloud.sensors.gcs.GCSObjectsWtihPrefixExistenceSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/gcs.py) | [contrib.sensors.gcs_sensor.GoogleCloudStoragePrefixSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/gcs_sensor.py) |
-| [cloud.sensors.gcs.GCSUploadSessionCompleteSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/gcs.py) | [contrib.sensors.gcs_sensor.GoogleCloudStorageUploadSessionCompleteSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/gcs_sensor.py) |
-| [cloud.sensors.pubsub.PubSubPullSensor](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/sensors/pubsub.py) | [contrib.sensors.pubsub_sensor.PubSubPullSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/pubsub_sensor.py) |
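Moved sensors follow the same pattern; note that several were renamed to the `GCS*` prefix. A minimal sketch, with illustrative bucket and object values:

```python
# Airflow 1.10.*:
# from airflow.contrib.sensors.gcs_sensor import GoogleCloudStorageObjectSensor

# Airflow 2.0:
from airflow.providers.google.cloud.sensors.gcs import GCSObjectExistenceSensor

wait_for_object = GCSObjectExistenceSensor(
    task_id="wait_for_object",
    bucket="example-bucket",     # illustrative values
    object="data/example.csv",
)
```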
-
-
-## Hooks
-
-
-### New hooks
-
-| New Airflow 2.0 hooks: `airflow.providers.google` package |
-|:--------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [ads.hooks.ads.GoogleAdsHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/ads/hooks/ads.py) |
-| [cloud.hooks.automl.CloudAutoMLHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/automl.py) |
-| [cloud.hooks.bigquery_dts.BiqQueryDataTransferServiceHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/bigquery_dts.py) |
-| [cloud.hooks.cloud_memorystore.CloudMemorystoreHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/cloud_memorystore.py) |
-| [cloud.hooks.cloud_memorystore.CloudMemorystoreMemcachedHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/cloud_memorystore.py) |
-| [cloud.hooks.compute_ssh.ComputeEngineSSHHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/compute_ssh.py) |
-| [cloud.hooks.datacatalog.CloudDataCatalogHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/datacatalog.py) |
-| [cloud.hooks.datafusion.DataFusionHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/datafusion.py) |
-| [cloud.hooks.dataprep.GoogleDataprepHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/dataprep.py) |
-| [cloud.hooks.gdm.GoogleDeploymentManagerHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/gdm.py) |
-| [cloud.hooks.life_sciences.LifeSciencesHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/life_sciences.py) |
-| [cloud.hooks.os_login.OSLoginHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/os_login.py) |
-| [cloud.hooks.secret_manager.SecretsManagerHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/secret_manager.py) |
-| [cloud.hooks.stackdriver.StackdriverHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/stackdriver.py) |
-| [common.hooks.discovery_api.GoogleDiscoveryApiHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/common/hooks/discovery_api.py) |
-| [firebase.hooks.firestore.CloudFirestoreHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/firebase/hooks/firestore.py) |
-| [marketing_platform.hooks.analytics.GoogleAnalyticsHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/hooks/analytics.py) |
-| [marketing_platform.hooks.campaign_manager.GoogleCampaignManagerHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/hooks/campaign_manager.py) |
-| [marketing_platform.hooks.display_video.GoogleDisplayVideo360Hook](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/hooks/display_video.py) |
-| [marketing_platform.hooks.search_ads.GoogleSearchAdsHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/marketing_platform/hooks/search_ads.py) |
-| [suite.hooks.sheets.GSheetsHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/suite/hooks/sheets.py) |
-
-
-### Moved hooks
-
-| Airflow 2.0 hooks: `airflow.providers.google` package | Airflow 1.10.* previous location (usually `airflow.contrib`) |
-|:--------------------------------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [cloud.hooks.bigquery.BigQueryHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/bigquery.py) | [contrib.hooks.bigquery_hook.BigQueryHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/bigquery_hook.py) |
-| [cloud.hooks.bigtable.BigtableHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/bigtable.py) | [contrib.hooks.gcp_bigtable_hook.BigtableHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_bigtable_hook.py) |
-| [cloud.hooks.cloud_build.CloudBuildHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/cloud_build.py) | [contrib.hooks.gcp_cloud_build_hook.CloudBuildHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_cloud_build_hook.py) |
-| [cloud.hooks.cloud_sql.CloudSQLDatabaseHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/cloud_sql.py) | [contrib.hooks.gcp_sql_hook.CloudSqlDatabaseHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_sql_hook.py) |
-| [cloud.hooks.cloud_sql.CloudSQLHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/cloud_sql.py) | [contrib.hooks.gcp_sql_hook.CloudSqlHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_sql_hook.py) |
-| [cloud.hooks.cloud_storage_transfer_service.CloudDataTransferServiceHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/cloud_storage_transfer_service.py) | [contrib.hooks.gcp_transfer_hook.GCPTransferServiceHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_transfer_hook.py) |
-| [cloud.hooks.compute.ComputeEngineHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/compute.py) | [contrib.hooks.gcp_compute_hook.GceHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_compute_hook.py) |
-| [cloud.hooks.dataflow.DataflowHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/dataflow.py) | [contrib.hooks.gcp_dataflow_hook.DataFlowHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_dataflow_hook.py) |
-| [cloud.hooks.dataproc.DataprocHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/dataproc.py) | [contrib.hooks.gcp_dataproc_hook.DataProcHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_dataproc_hook.py) |
-| [cloud.hooks.datastore.DatastoreHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/datastore.py) | [contrib.hooks.datastore_hook.DatastoreHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/datastore_hook.py) |
-| [cloud.hooks.dlp.CloudDLPHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/dlp.py) | [contrib.hooks.gcp_dlp_hook.CloudDLPHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_dlp_hook.py) |
-| [cloud.hooks.functions.CloudFunctionsHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/functions.py) | [contrib.hooks.gcp_function_hook.GcfHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_function_hook.py) |
-| [cloud.hooks.gcs.GCSHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/gcs.py) | [contrib.hooks.gcs_hook.GoogleCloudStorageHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcs_hook.py) |
-| [cloud.hooks.kms.CloudKMSHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/kms.py) | [contrib.hooks.gcp_kms_hook.GoogleCloudKMSHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_kms_hook.py) |
-| [cloud.hooks.kubernetes_engine.GKEHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/kubernetes_engine.py) | [contrib.hooks.gcp_container_hook.GKEClusterHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_container_hook.py) |
-| [cloud.hooks.mlengine.MLEngineHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/mlengine.py) | [contrib.hooks.gcp_mlengine_hook.MLEngineHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_mlengine_hook.py) |
-| [cloud.hooks.natural_language.CloudNaturalLanguageHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/natural_language.py) | [contrib.hooks.gcp_natural_language_hook.CloudNaturalLanguageHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_natural_language_hook.py) |
-| [cloud.hooks.pubsub.PubSubHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/pubsub.py) | [contrib.hooks.gcp_pubsub_hook.PubSubHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_pubsub_hook.py) |
-| [cloud.hooks.spanner.SpannerHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/spanner.py) | [contrib.hooks.gcp_spanner_hook.CloudSpannerHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_spanner_hook.py) |
-| [cloud.hooks.speech_to_text.CloudSpeechToTextHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/speech_to_text.py) | [contrib.hooks.gcp_speech_to_text_hook.GCPSpeechToTextHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_speech_to_text_hook.py) |
-| [cloud.hooks.tasks.CloudTasksHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/tasks.py) | [contrib.hooks.gcp_tasks_hook.CloudTasksHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_tasks_hook.py) |
-| [cloud.hooks.text_to_speech.CloudTextToSpeechHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/text_to_speech.py) | [contrib.hooks.gcp_text_to_speech_hook.GCPTextToSpeechHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_text_to_speech_hook.py) |
-| [cloud.hooks.translate.CloudTranslateHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/translate.py) | [contrib.hooks.gcp_translate_hook.CloudTranslateHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_translate_hook.py) |
-| [cloud.hooks.video_intelligence.CloudVideoIntelligenceHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/video_intelligence.py) | [contrib.hooks.gcp_video_intelligence_hook.CloudVideoIntelligenceHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_video_intelligence_hook.py) |
-| [cloud.hooks.vision.CloudVisionHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/hooks/vision.py) | [contrib.hooks.gcp_vision_hook.CloudVisionHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_vision_hook.py) |
-| [common.hooks.base_google.GoogleBaseHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/common/hooks/base_google.py) | [contrib.hooks.gcp_api_base_hook.GoogleBaseHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gcp_api_base_hook.py) |
-| [suite.hooks.drive.GoogleDriveHook](https://github.com/apache/airflow/blob/master/airflow/providers/google/suite/hooks/drive.py) | [contrib.hooks.gdrive_hook.GoogleDriveHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/gdrive_hook.py) |
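Hooks migrate the same way; for example, `GoogleCloudStorageHook` is now `GCSHook`. A minimal sketch, assuming a `google_cloud_default` connection is configured; bucket, object, and file names are illustrative:

```python
# Airflow 1.10.*:
# from airflow.contrib.hooks.gcs_hook import GoogleCloudStorageHook

# Airflow 2.0:
from airflow.providers.google.cloud.hooks.gcs import GCSHook

hook = GCSHook(gcp_conn_id="google_cloud_default")
hook.upload(
    bucket_name="example-bucket",    # illustrative values
    object_name="data/example.csv",
    filename="/tmp/example.csv",
)
```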
-
-
-## Secrets
-
-
-
-### Moved secrets
-
-| Airflow 2.0 secrets: `airflow.providers.google` package | Airflow 1.10.* previous location (usually `airflow.contrib`) |
-|:--------------------------------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [cloud.secrets.secret_manager.CloudSecretManagerBackend](https://github.com/apache/airflow/blob/master/airflow/providers/google/cloud/secrets/secret_manager.py) | [contrib.secrets.gcp_secrets_manager.CloudSecretsManagerBackend](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/secrets/gcp_secrets_manager.py) |
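Note the class rename from `CloudSecretsManagerBackend` to `CloudSecretManagerBackend`. In practice the backend is usually enabled through the `[secrets]` section of `airflow.cfg` rather than instantiated directly; the snippet below is only a minimal sketch, and the `airflow-connections` prefix is an illustrative value:

```python
# Airflow 2.0; in 1.10.* this class lived at
# airflow.contrib.secrets.gcp_secrets_manager.CloudSecretsManagerBackend.
from airflow.providers.google.cloud.secrets.secret_manager import CloudSecretManagerBackend

# Resolves connections from secrets named "airflow-connections-<conn_id>"
# in Google Cloud Secret Manager (illustrative prefix).
backend = CloudSecretManagerBackend(connections_prefix="airflow-connections")
```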
-
-
-
-## Releases
-
-### Release 1.0.0
-
-| Commit | Committed | Subject |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) |
-| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) |
-| [1dcd3e13f](https://github.com/apache/airflow/commit/1dcd3e13fd0a078fc9440e91b77f6f87aa60dd3b) | 2020-12-05 | Add support for extra links coming from the providers (#12472) |
-| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) |
-| [02d94349b](https://github.com/apache/airflow/commit/02d94349be3d201ce9d37d7358573c937fd010df) | 2020-11-29 | Don't use time.time() or timezone.utcnow() for duration calculations (#12353) |
-| [76bcd08dc](https://github.com/apache/airflow/commit/76bcd08dcae8d62307f5e9b8c2e182b54ed22a27) | 2020-11-28 | Added `@apply_defaults` decorator. (#12620) |
-| [e1ebfa68b](https://github.com/apache/airflow/commit/e1ebfa68b109b5993c47891cfd0b9b7e46b6d770) | 2020-11-27 | Add DataflowJobMessagesSensor and DataflowAutoscalingEventsSensor (#12249) |
-| [3fa51f94d](https://github.com/apache/airflow/commit/3fa51f94d7a17f170ddc31908d36c91f4456a20b) | 2020-11-24 | Add check for duplicates in provider.yaml files (#12578) |
-| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) |
-| [9e3b2c554](https://github.com/apache/airflow/commit/9e3b2c554dadf58972198e4e16f15af2f15ec37a) | 2020-11-19 | GCP Secrets Optional Lookup (#12360) |
-| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) |
-| [7ca0b6f12](https://github.com/apache/airflow/commit/7ca0b6f121c9cec6e25de130f86a56d7c7fbe38c) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) (#12438) |
-| [8d0950646](https://github.com/apache/airflow/commit/8d09506464c8480fa42e8bfe6a36c6f631cd23f6) | 2020-11-18 | Fix download method in GCSToBigQueryOperator (#12442) |
-| [2c0920fba](https://github.com/apache/airflow/commit/2c0920fba5d2f05d2e29cead91127686af277ec2) | 2020-11-17 | Adds mechanism for provider package discovery. (#12383) |
-| [2cda2f2a0](https://github.com/apache/airflow/commit/2cda2f2a0a94e5aaed87f0998fa57b4f8bff5e43) | 2020-11-17 | Add missing pre-commit definition - provider-yamls (#12393) |
-| [80a957f14](https://github.com/apache/airflow/commit/80a957f142f260daed262b8e93a4d02c12cfeabc) | 2020-11-17 | Add Dataflow sensors - job metrics (#12039) |
-| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) |
-| [917e6c442](https://github.com/apache/airflow/commit/917e6c4424985271c53dd8c413b211896ee55726) | 2020-11-16 | Add provide_file_and_upload to GCSHook (#12310) |
-| [cfa4ecfeb](https://github.com/apache/airflow/commit/cfa4ecfeb02661f40b4778733384ac085fb5f04b) | 2020-11-15 | Add DataflowJobStatusSensor and support non-blocking execution of jobs (#11726) |
-| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) |
-| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) |
-| [32b59f835](https://github.com/apache/airflow/commit/32b59f8350f55793df6838a32de662a80483ecda) | 2020-11-12 | Fixes the sending of an empty list to BigQuery `list_rows` (#12307) |
-| [250436d96](https://github.com/apache/airflow/commit/250436d962c8c950d38c1eb5e54a998891648cc9) | 2020-11-10 | Fix spelling in Python files (#12230) |
-| [502ba309e](https://github.com/apache/airflow/commit/502ba309ea470943f0e99c634269e3d2d13ce6ca) | 2020-11-10 | Enable Markdownlint rule - MD022/blanks-around-headings (#12225) |
-| [dd2095f4a](https://github.com/apache/airflow/commit/dd2095f4a8b07c9b1a4c279a3578cd1e23b71a1b) | 2020-11-10 | Simplify string expressions & Use f-string (#12216) |
-| [f37c6e6fc](https://github.com/apache/airflow/commit/f37c6e6fce8b704f5af28caa16d0ed7d873a0e4a) | 2020-11-10 | Add Compute Engine SSH hook (#9879) |
-| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) |
-| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) |
-| [61feb6ec4](https://github.com/apache/airflow/commit/61feb6ec453f8dda1a0e1fe3ebcc0f1e3224b634) | 2020-11-09 | Provider's readmes generated for elasticsearch and google packages (#12194) |
-| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) |
-| [fcb6b00ef](https://github.com/apache/airflow/commit/fcb6b00efef80c81272a30cfc618202a29e0c6a9) | 2020-11-08 | Add authentication to AWS with Google credentials (#12079) |
-| [2ef3b7ef8](https://github.com/apache/airflow/commit/2ef3b7ef8cafe3bdc8bf8db70fbc519b98576366) | 2020-11-08 | Fix ERROR - Object of type 'bytes' is not JSON serializable when using store_to_xcom_key parameter (#12172) |
-| [0caec9fd3](https://github.com/apache/airflow/commit/0caec9fd32bee2b3036b5d7bdcb56bd6a3b9dccf) | 2020-11-06 | Dataflow - add waiting for successful job cancel (#11501) |
-| [cf9437d79](https://github.com/apache/airflow/commit/cf9437d79f9658d1309e4bfe847fe63d52ec7b99) | 2020-11-06 | Simplify string expressions (#12123) |
[91a64db50](https://github.com/apache/airflow/commit/91a64db505e50712cd53928b4f2b84aece3cc1c0) | 2020-11-04 | Format all files (without excepions) by black (#12091) | -| [fd3db778e](https://github.com/apache/airflow/commit/fd3db778e715d0f164dda7ee8f672d477a323291) | 2020-11-04 | Add server side cursor support for postgres to GCS operator (#11793) | -| [f1f194026](https://github.com/apache/airflow/commit/f1f1940261744b4fdb67b0b5654488494efa9c64) | 2020-11-04 | Add DataflowStartSQLQuery operator (#8553) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [5f5244b74](https://github.com/apache/airflow/commit/5f5244b74df93cadbb99643cec76281460ca4411) | 2020-11-04 | Add template fields renderers to Biguery and Dataproc operators (#12067) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [45ae145c2](https://github.com/apache/airflow/commit/45ae145c25a19b4185c33ac0c4da920324b3987e) | 2020-11-03 | Log BigQuery job id in insert method of BigQueryHook (#12056) | -| [e324b37a6](https://github.com/apache/airflow/commit/e324b37a67e32c368df50604a00160d7766b5c33) | 2020-11-03 | Add job name and progress logs to Cloud Storage Transfer Hook (#12014) | -| [6071fdd58](https://github.com/apache/airflow/commit/6071fdd58470bb2a6c23fc16481e292b7247d0bb) | 2020-11-02 | Improve handling server errors in DataprocSubmitJobOperator (#11947) | -| [2f703df12](https://github.com/apache/airflow/commit/2f703df12dfd6511722ff9a82d5a569d092fccc2) | 2020-10-30 | Add SalesforceToGcsOperator (#10760) | -| [e5713e00b](https://github.com/apache/airflow/commit/e5713e00b3afcba6f78006ec0e360da317858e4d) | 2020-10-29 | Add drain option when canceling Dataflow pipelines (#11374) | -| [37eaac3c5](https://github.com/apache/airflow/commit/37eaac3c5dc93804413c10a6ca124fd7831befc0) | 2020-10-29 | The PRs which are not approved run subset of tests (#11828) | -| [79cb77199](https://github.com/apache/airflow/commit/79cb771992279d40ddd9eb6b0277382313a32898) | 2020-10-28 | Fixing re pattern and changing to use a single character class. 
(#11857) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [240c7d4d7](https://github.com/apache/airflow/commit/240c7d4d72aac8f6aab98f5913e8f54c4f1372ff) | 2020-10-26 | Google Memcached hooks - improve protobuf messages handling (#11743) | -| [8afdb6ac6](https://github.com/apache/airflow/commit/8afdb6ac6a7997cb14806bc2734c81c00ed8da97) | 2020-10-26 | Fix spellings (#11825) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [6ce855af1](https://github.com/apache/airflow/commit/6ce855af118daeaa4c249669079ab9d9aad23945) | 2020-10-24 | Fix spelling (#11821) | -| [483068745](https://github.com/apache/airflow/commit/48306874538eea7cfd42358d5ebb59705204bfc4) | 2020-10-24 | Use Python 3 style super classes (#11806) | -| [727c739af](https://github.com/apache/airflow/commit/727c739afb565d4d394a8faedc969334cb8e738e) | 2020-10-22 | Improve Cloud Memorystore for Redis example (#11735) | -| [1da8379c9](https://github.com/apache/airflow/commit/1da8379c913843834353b44861c62f332a461bdf) | 2020-10-22 | Fix static checks after merging #10121 (#11737) | -| [91503308c](https://github.com/apache/airflow/commit/91503308c723b186ce6f4026f2a3e2c21030f6e5) | 2020-10-22 | Add Google Cloud Memorystore Memcached Operators (#10121) | -| [950c16d0b](https://github.com/apache/airflow/commit/950c16d0b0ab67bb7af11909de751029faf0313a) | 2020-10-21 | Retry requests in case of error in Google ML Engine Hook (#11712) | -| [2bfc53b5e](https://github.com/apache/airflow/commit/2bfc53b5eb67406d418371b74dc9bc5a07be238e) | 2020-10-21 | Fix doc errors in google provider files. 
(#11713) | -| [53e606210](https://github.com/apache/airflow/commit/53e6062105be0ae1761a354e2055eb0779d12e73) | 2020-10-21 | Enforce strict rules for yamllint (#11709) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [2d854c350](https://github.com/apache/airflow/commit/2d854c3505ccad66e9a7d94267e51bed800433c2) | 2020-10-19 | Add service_account to Google ML Engine operator (#11619) | -| [46a121fb7](https://github.com/apache/airflow/commit/46a121fb7b77c0964e053b58750e2d8bc2bd0b2a) | 2020-10-18 | docs: Update Bigquery clustering docstrings (#11232) | -| [49c58147f](https://github.com/apache/airflow/commit/49c58147fed8a52869d0b0ecc00c102c11972ad0) | 2020-10-18 | Strict type checking for provider Google (#11609) | -| [0823d46a7](https://github.com/apache/airflow/commit/0823d46a7f267f2e45195a175021825367938add) | 2020-10-16 | Add type annotations for AWS operators and hooks (#11434) | -| [3c10ca650](https://github.com/apache/airflow/commit/3c10ca6504be37fabff9a10caefea3fe4df31a02) | 2020-10-16 | Add DataflowStartFlexTemplateOperator (#8550) | -| [8865d14df](https://github.com/apache/airflow/commit/8865d14df4d58dd5f1a4d2ff81c77469959f175a) | 2020-10-16 | Strict type checking for provider google cloud (#11548) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [06141d6d0](https://github.com/apache/airflow/commit/06141d6d01398115e5e54c5766a46ae5514ba2f7) | 2020-10-12 | Google cloud operator strict type check (#11450) | -| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12 | Remove redundant None provided as default to dict.get() (#11448) | -| [1845cd11b](https://github.com/apache/airflow/commit/1845cd11b77f302777ab854e84bef9c212c604a0) | 2020-10-11 | Strict type check for google ads and cloud hooks (#11390) | -| [bd204bb91](https://github.com/apache/airflow/commit/bd204bb91b4bc069284f9a44757c6baba8884140) | 2020-10-11 | Optionally set null marker in csv exports in BaseSQLToGCSOperator (#11409) | -| [75071831b](https://github.com/apache/airflow/commit/75071831baa936d292354f98aac46cd808a4b2b8) | 2020-10-10 | Remove redundant parentheses from Python files (#10967) | -| [8baf657fc](https://github.com/apache/airflow/commit/8baf657fc2b21a601b99b752e4f1176bf8a934ce) | 2020-10-09 | Fix regression in DataflowTemplatedJobStartOperator (#11167) | -| [b0fcf6755](https://github.com/apache/airflow/commit/b0fcf675595494b306800e1a516548dc0dc671f8) | 2020-10-07 | Add AzureFileShareToGCSOperator (#10991) | -| [47b05a87f](https://github.com/apache/airflow/commit/47b05a87f004dc273a4757ba49f03808a86f77e7) | 2020-10-07 | Improve handling of job_id in BigQuery operators (#11287) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [cb52fb0ae](https://github.com/apache/airflow/commit/cb52fb0ae1de1f1140babaed0e97299e4aaf96bf) | 2020-09-27 | Add example DAG and system test for 
MySQLToGCSOperator (#10990) | -| [99accec29](https://github.com/apache/airflow/commit/99accec29d71b0a57fd4e90151b9d4d10321be07) | 2020-09-25 | Fix incorrect Usage of Optional[str] & Optional[int] (#11141) | -| [e3f96ce7a](https://github.com/apache/airflow/commit/e3f96ce7a8ac098aeef5e9930e6de6c428274d57) | 2020-09-24 | Fix incorrect Usage of Optional[bool] (#11138) | -| [daf8f3108](https://github.com/apache/airflow/commit/daf8f31080f06c044b4336071bd383bbbcdc6085) | 2020-09-23 | Add template fields renderers for better UI rendering (#11061) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [cb979f9f2](https://github.com/apache/airflow/commit/cb979f9f213bb3c9835a3dc924f84a07f5387378) | 2020-09-22 | Get Airflow configs with sensitive data from CloudSecretManagerBackend (#11024) | -| [76545bb3d](https://github.com/apache/airflow/commit/76545bb3d6fa82ce8eae072dbc74a3b76d8fd53c) | 2020-09-16 | Add example dag and system test for S3ToGCSOperator (#10951) | -| [22c631625](https://github.com/apache/airflow/commit/22c631625fd68abe280528f33b7cfd7603ebf66c) | 2020-09-16 | Fix more docs spellings (#10965) | -| [12a652f53](https://github.com/apache/airflow/commit/12a652f5344c7f03c3d780556ca1829b235fdb2d) | 2020-09-13 | Fix parameter name collision in AutoMLBatchPredictOperator #10723 (#10869) | -| [41a62735e](https://github.com/apache/airflow/commit/41a62735edcebbd9c39e505280646ef5d25aa1d5) | 2020-09-11 | Add on_kill method to BigQueryInsertJobOperator (#10866) | -| [3e91da56e](https://github.com/apache/airflow/commit/3e91da56e8c63a90dc859d8996a896b5d9f8cd43) | 2020-09-11 | fix typo in firebase/example_filestore DAG (#10875) | -| [68cc7273b](https://github.com/apache/airflow/commit/68cc7273bf0c0f562748b5f663da5c12d2cba6a7) | 2020-09-10 | Add on_kill method to DataprocSubmitJobOperator (#10847) | -| [f92095721](https://github.com/apache/airflow/commit/f92095721450c14605c986e165544a7bfb712a3d) | 2020-09-10 | Fix and remove some more typos from spelling_wordlist.txt (#10845) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [078bfaf60](https://github.com/apache/airflow/commit/078bfaf60adc5aebac8c347e7f6e5339ab9b56c0) | 2020-09-08 | Extract missing gcs_to_local example DAG from gcs example (#10767) | -| [10ce31127](https://github.com/apache/airflow/commit/10ce31127f1ff87176158935925afce46a989917) | 2020-09-08 | Deprecate using global as the default region in Google Dataproc operators and hooks (#10772) | -| [f14f37971](https://github.com/apache/airflow/commit/f14f3797163cc45fdcdabfb36ee7d638f70e470d) | 2020-09-07 | [AIRFLOW-10672] Refactor BigQueryToGCSOperator to use new method (#10773) | -| [c8ee45568](https://github.com/apache/airflow/commit/c8ee4556851c36b3b6e644a7746a49583dd53db1) | 2020-09-07 | Refactor DataprocCreateCluster operator to use simpler interface (#10403) | -| [ece685b5b](https://github.com/apache/airflow/commit/ece685b5b895ad1175440b49bf9e620dffd8248d) | 2020-09-05 | Asynchronous execution of Dataproc jobs with a Sensor (#10673) | -| [6e3d7b63d](https://github.com/apache/airflow/commit/6e3d7b63d3b34c34f8b38a7b41f4a5876e1f731f) | 2020-09-04 | Add masterConfig parameter to MLEngineStartTrainingJobOperator (#10578) | -| [804548d58](https://github.com/apache/airflow/commit/804548d58f2036fd4516824a38d0639ba5d5ab0e) | 2020-09-01 | Add Dataprep operators (#10304) | -| 
[11c00bc82](https://github.com/apache/airflow/commit/11c00bc820483691a87cdb16d519dce8dc57c40e) | 2020-08-30 | Fix typos: duplicated "the" (#10647) | -| [2ca615cff](https://github.com/apache/airflow/commit/2ca615cffefe97dfa38e1b7f60d9ed33c6628992) | 2020-08-29 | Update Google Cloud branding (#10642) | -| [1b533f617](https://github.com/apache/airflow/commit/1b533f617e2e0200597d114d7570f6c0d69da1a0) | 2020-08-28 | Fix broken master - DLP (#10635) | -| [5ae82a56d](https://github.com/apache/airflow/commit/5ae82a56dab599de44f1be7027cecc4ef86f7bb6) | 2020-08-28 | Fix Google DLP example and improve ops idempotency (#10608) | -| [3867f7662](https://github.com/apache/airflow/commit/3867f7662559761864ec4e7be26b776c64c2f199) | 2020-08-28 | Update Google Cloud branding (#10615) | -| [91ff31ad1](https://github.com/apache/airflow/commit/91ff31ad1021235bd21c87ad9dbc0b216a908671) | 2020-08-27 | Documentation for Google Cloud Data Loss Prevention (#8201) (#9651) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [d76026545](https://github.com/apache/airflow/commit/d7602654526fdd2876466371404784bd17cfe0d2) | 2020-08-25 | PyDocStyle: No whitespaces allowed surrounding docstring text (#10533) | -| [d1bce91bb](https://github.com/apache/airflow/commit/d1bce91bb21d5a468fa6a0207156c28fe1ca6513) | 2020-08-25 | PyDocStyle: Enable D403: Capitalized first word of docstring (#10530) | -| [866701c80](https://github.com/apache/airflow/commit/866701c8019f49dcb02c9696e4f6e9ce67d13ca6) | 2020-08-25 | Fix typo in "Cloud" (#10534) | -| [47265e7b5](https://github.com/apache/airflow/commit/47265e7b58bc28bcbbffc981442b6cc27a3af39c) | 2020-08-24 | Fix typo in PostgresHook (#10529) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25 | Remove all "noinspection" comments native to IntelliJ (#10525) | -| [3734876d9](https://github.com/apache/airflow/commit/3734876d9898067ee933b84af522d53df6160d7f) | 2020-08-24 | Implement impersonation in google operators (#10052) | -| [b0598b535](https://github.com/apache/airflow/commit/b0598b5351d2d027286e2333231b6c0c0704dba2) | 2020-08-24 | Add support for creating multiple replicated clusters in Bigtable hook and operator (#10475) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [515cc72c9](https://github.com/apache/airflow/commit/515cc72c995429c8c007f853ade385d79fcbac90) | 2020-08-22 | Fix typo in timed_out (#10459) | -| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22 | Replace assigment with Augmented assignment (#10468) | -| [88c7d2e52](https://github.com/apache/airflow/commit/88c7d2e526af4994066f65f830e2fa8edcbbce2e) | 2020-08-21 | Dataflow operators don't not always create a virtualenv (#10373) | -| [083c3c129](https://github.com/apache/airflow/commit/083c3c129bc3458d410f5ff37d7f5a9a7ad548b7) | 2020-08-18 | Simplified GCSTaskHandler configuration (#10365) | -| [1ae5bdf23](https://github.com/apache/airflow/commit/1ae5bdf23e3ac7cca05325ef8b255a7cf067e18e) | 2020-08-17 | Add test for GCSTaskHandler (#9600) (#9861) | -| 
[e195a980b](https://github.com/apache/airflow/commit/e195a980bc8e9d42f3eb4ac134950977b9e5158f) | 2020-08-16 | Add type annotations for mlengine_operator_utils (#10297) | -| [382c1011b](https://github.com/apache/airflow/commit/382c1011b6bcebd22760e2f98419281ef1a09d1b) | 2020-08-16 | Add Bigtable Update Instance Hook/Operator (#10340) | -| [bfa5a8d5f](https://github.com/apache/airflow/commit/bfa5a8d5f10458c14d380c4042ecfbac627d0639) | 2020-08-15 | CI: Fix failing docs-build (#10342) | -| [be46d20fb](https://github.com/apache/airflow/commit/be46d20fb431cc1d91c935e8894dfc7756c18993) | 2020-08-15 | Improve idempotency of BigQueryInsertJobOperator (#9590) | -| [47387a69e](https://github.com/apache/airflow/commit/47387a69e623676b57b6d42ff07e729da2d21bff) | 2020-08-14 | Catch Permission Denied exception when getting secret from GCP Secret Manager. (#10326) | -| [2f0613b0c](https://github.com/apache/airflow/commit/2f0613b0c2fdf176d9f13a8cd12162c60c64b644) | 2020-08-13 | Implement Google BigQuery Table Partition Sensor (#10218) | -| [f6734b3b8](https://github.com/apache/airflow/commit/f6734b3b850d33d3712763f93c114e80f5af9ffb) | 2020-08-12 | Enable Sphinx spellcheck for doc generation (#10280) | -| [8f8db8959](https://github.com/apache/airflow/commit/8f8db8959e526be54d700845d36ee9f315bae2ea) | 2020-08-12 | DbApiHook: Support kwargs in get_pandas_df (#9730) | -| [ef088314f](https://github.com/apache/airflow/commit/ef088314f8f1b29ac636a7584cf9dda04b1df816) | 2020-08-09 | Added DataprepGetJobsForJobGroupOperator (#10246) | -| [b43f90abf](https://github.com/apache/airflow/commit/b43f90abf4c7219d5d59cccb0514256bd3f2fdc7) | 2020-08-09 | Fix various typos in the repo (#10263) | -| [c29533888](https://github.com/apache/airflow/commit/c29533888fadd40f5e9ce63e728bd8691182e542) | 2020-08-08 | Add labels param to Google MLEngine Operators (#10222) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [eff0f0321](https://github.com/apache/airflow/commit/eff0f03210d30a4aed9ed457eaaea9c9f05d54d1) | 2020-08-06 | Update guide for Google Cloud Secret Manager Backend (#10172) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [010322692](https://github.com/apache/airflow/commit/010322692e6e3f0adc156f0beb81e267da0e97bb) | 2020-08-06 | Improve handling Dataproc cluster creation with ERROR state (#9593) | -| [1437cb749](https://github.com/apache/airflow/commit/1437cb74955f4e10af5d70ebadde1e6b163fb9b7) | 2020-08-04 | Add correct signatures for operators in google provider package (#10144) | -| [6efa1b9cb](https://github.com/apache/airflow/commit/6efa1b9cb763ae0bdbc884a54d24dbdc39d9e3a6) | 2020-08-03 | Add additional Cloud Datastore operators (#10032) | -| [27020f8e5](https://github.com/apache/airflow/commit/27020f8e588575d53e63f9f9daecd3a522656644) | 2020-08-03 | Add try clause to DataFusionHook.wait_for_pipeline_state (#10031) | -| [4e3799fec](https://github.com/apache/airflow/commit/4e3799fec4c23d0f43603a0489c5a6158aeba035) | 2020-08-02 | [AIRFLOW-4541] Replace os.mkdirs usage with pathlib.Path(path).mkdir (#10117) | -| [85c56b173](https://github.com/apache/airflow/commit/85c56b1737c2bf61751836571300445c0aebae1a) | 2020-08-02 | Add missing params to GCP Pub/Sub creation_subscription (#10106) | -| 
[b79466c12](https://github.com/apache/airflow/commit/b79466c12f3ae717c31804acc2e9ffcd60f9611c) | 2020-08-02 | Fix sensor not providing arguments for GCSHook (#10074) | -| [4ee35d027](https://github.com/apache/airflow/commit/4ee35d027988c6456767faeb108a7f686d5117f2) | 2020-08-02 | Fix hook not passing gcp_conn_id to base class (#10075) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [4c84661ad](https://github.com/apache/airflow/commit/4c84661adb5bb5c581bb4193b4c7e935cbe07758) | 2020-07-31 | Split Display Video 360 example into smaler DAGs (#10077) | -| [59cbff087](https://github.com/apache/airflow/commit/59cbff0874dd5318cda4b9ce7b7eeb1aad1dad4d) | 2020-07-29 | Fix docstrings in BigQueryGetDataOperator (#10042) | -| [81b87d48e](https://github.com/apache/airflow/commit/81b87d48ed002d7a7f7bcb72a58e82d40a176fe2) | 2020-07-27 | Add unit tests for GcpBodyFieldSanitizer in Google providers (#9996) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [8b10a4b35](https://github.com/apache/airflow/commit/8b10a4b35e45d536a6475bfe1491ee75fad50186) | 2020-07-25 | Stop using start_date in default_args in example_dags (#9982) | -| [ef98edf4d](https://github.com/apache/airflow/commit/ef98edf4da2d9b74d5cf5b21e81577b3151edb79) | 2020-07-23 | Add more information about using GoogleAdsHook (#9951) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [39a0288a4](https://github.com/apache/airflow/commit/39a0288a47536dfd9b651ecd075887d3e45fcfc4) | 2020-07-22 | Add Google Authentication for experimental API (#9848) | -| [c2db0dfeb](https://github.com/apache/airflow/commit/c2db0dfeb13ee679bf4d7b57874f0fcb39c0f0ed) | 2020-07-22 | More strict rules in mypy (#9705) (#9906) | -| [c4244e18b](https://github.com/apache/airflow/commit/c4244e18bb894eb2180b8972052e56110fe5cbc9) | 2020-07-22 | Fix calling `get_client` in BigQueryHook.table_exists (#9916) | -| [5eacc1642](https://github.com/apache/airflow/commit/5eacc164201a121cd06126aff613cbe0919d35cc) | 2020-07-22 | Add support for impersonation in GCP hooks (#9915) | -| [1cfdebf5f](https://github.com/apache/airflow/commit/1cfdebf5f8841d61a11540b88c7913686e89e085) | 2020-07-21 | Fix insert_job method of BigQueryHook (#9899) | -| [c8c52e69c](https://github.com/apache/airflow/commit/c8c52e69c8d9cc1f26f63d95aecc0a6498d40b6f) | 2020-07-21 | Remove type hint causing DeprecationWarning in Firestore operators (#9819) | -| [eb6f1d1cf](https://github.com/apache/airflow/commit/eb6f1d1cf0503fa763c0d8d34a2fe16efb390b9c) | 2020-07-16 | Fix typo in datafusion operator (#9859) | -| [b01d95ec2](https://github.com/apache/airflow/commit/b01d95ec22b01ed79123178acd74ef40d57aaa7c) | 2020-07-15 | Change DAG.clear to take dag_run_state (#9824) | -| [6d65c15d1](https://github.com/apache/airflow/commit/6d65c15d156a41d5e735e44a1170426559a17d1f) | 2020-07-15 | Add guide for AI Platform (previously Machine Learning Engine) Operators (#9798) | -| [770de53eb](https://github.com/apache/airflow/commit/770de53eb57bd57ffc555ad15b18f0c058dbebe7) | 2020-07-15 | BigQueryTableExistenceSensor needs to specify keyword arguments (#9832) | -| 
[2d8dbacdf](https://github.com/apache/airflow/commit/2d8dbacdf6c19a598a7f55bcf65e28703aed6201) | 2020-07-15 | Add CloudVisionDeleteReferenceImageOperator (#9698) | -| [9f017951b](https://github.com/apache/airflow/commit/9f017951b94d9bf52b5ee66d72aa8dd822f07269) | 2020-07-15 | Add Google Deployment Manager Hook (#9159) | -| [ed5004cca](https://github.com/apache/airflow/commit/ed5004cca753650dc222fbb8e67573938c6c16d9) | 2020-07-14 | Allow `replace` flag in gcs_to_gcs operator. (#9667) | -| [553bb7af7](https://github.com/apache/airflow/commit/553bb7af7cb7a50f7141b5b89297713cee6d19f6) | 2020-07-13 | Keep functions signatures in decorators (#9786) | -| [68925904e](https://github.com/apache/airflow/commit/68925904e49aac6968defb6834863f4e6347fe59) | 2020-07-13 | Add multiple file upload functionality to GCS hook (#8849) | -| [1de78e8f9](https://github.com/apache/airflow/commit/1de78e8f97f48f8f4abd167a0120ffab8af6127a) | 2020-07-12 | Add Google Stackdriver link (#9765) | -| [092d33f29](https://github.com/apache/airflow/commit/092d33f298a7dbb871b1e1b4c17aad3989e89b79) | 2020-07-11 | Fix StackdriverTaskHandler + add system tests (#9761) | -| [b2305660f](https://github.com/apache/airflow/commit/b2305660f0eb55ebd31fdc7fe4e8aeed8c1f8c00) | 2020-07-09 | Update example DAG for AI Platform operators (#9727) | -| [23f80f34a](https://github.com/apache/airflow/commit/23f80f34adec86da24e4896168c53d213d01a7f6) | 2020-07-08 | Move gcs & wasb task handlers to their respective provider packages (#9714) | -| [44d4ae809](https://github.com/apache/airflow/commit/44d4ae809c1e3784ff95b6a5e95113c3412e56b3) | 2020-07-06 | Upgrade to latest pre-commit checks (#9686) | -| [a79e2d4c4](https://github.com/apache/airflow/commit/a79e2d4c4aa105f3fac5ae6a28e29af9cd572407) | 2020-07-06 | Move provider's log task handlers to the provider package (#9604) | -| [cd3d9d934](https://github.com/apache/airflow/commit/cd3d9d93402f06a08f35e3586802f11a18c4f1f3) | 2020-07-02 | Fix using .json template extension in GMP operators (#9566) | -| [4799af30e](https://github.com/apache/airflow/commit/4799af30ee02c596647d1538854769124f9f4961) | 2020-06-30 | Extend BigQuery example with include clause (#9572) | -| [e33f1a12d](https://github.com/apache/airflow/commit/e33f1a12d72ac234e4897f44b326a332acf85901) | 2020-06-30 | Add template_ext to BigQueryInsertJobOperator (#9568) | -| [40add26d4](https://github.com/apache/airflow/commit/40add26d459c2511a6d9d305ae7300f0d6104211) | 2020-06-29 | Remove almost all references to airflow.contrib (#9559) | -| [c420dbd6e](https://github.com/apache/airflow/commit/c420dbd6e13e17867eb4ccc4271b37966310ac0f) | 2020-06-27 | Bump Pylint to 2.5.3 (#9294) | -| [0051c89cb](https://github.com/apache/airflow/commit/0051c89cba02d55236c913ce0110f7d5111ba436) | 2020-06-26 | nitpick fix (#9527) | -| [87fdbd070](https://github.com/apache/airflow/commit/87fdbd0708d942af98d35604fe5962962e25d246) | 2020-06-25 | Use literal syntax instead of function calls to create data structure (#9516) | -| [7256f4caa](https://github.com/apache/airflow/commit/7256f4caa226f8f8632d6e2d38d8c94cb3250a6f) | 2020-06-22 | Pylint fixes and deprecation of rare used methods in Connection (#9419) | -| [e13a14c87](https://github.com/apache/airflow/commit/e13a14c8730f4f633d996dd7d3468fe827136a84) | 2020-06-21 | Enable & Fix Whitespace related PyDocStyle Checks (#9458) | -| [5b680e27e](https://github.com/apache/airflow/commit/5b680e27e8118861ef484c00a4b87c6885b0a518) | 2020-06-19 | Don't use connection to store task handler credentials (#9381) | -| 
[d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [416334e2e](https://github.com/apache/airflow/commit/416334e2ecd21d8a532af6102f1cfa9ac921a97a) | 2020-06-19 | Properly propagated warnings in operators (#9348) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [4e09c6442](https://github.com/apache/airflow/commit/4e09c64423bfaabd02a18b5fe7757dc15451ab73) | 2020-06-18 | Adds GCP Secret Manager Hook (#9368) | -| [40bf8f28f](https://github.com/apache/airflow/commit/40bf8f28f97f17f40d993d207ea740eba54593ee) | 2020-06-18 | Detect automatically the lack of reference to the guide in the operator descriptions (#9290) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [639972d99](https://github.com/apache/airflow/commit/639972d995d848b16a3f283576efdbde28b8fdef) | 2020-06-16 | Add support for latest Apache Beam SDK in Dataflow operators (#9323) | -| [1459970b3](https://github.com/apache/airflow/commit/1459970b3b9780e139ce029ae889fd8f69a37bc7) | 2020-06-15 | Rename CloudBuildCreateBuildOperator to CloudBuildCreateOperator (#9314) | -| [431ea3291](https://github.com/apache/airflow/commit/431ea3291c9bf236bccdf8446d753c630ada2b25) | 2020-06-15 | Resolve upstream tasks when template field is XComArg (#8805) | -| [aee6ab94e](https://github.com/apache/airflow/commit/aee6ab94eb956347ad560cfe2673bc6011074513) | 2020-06-15 | Wait for pipeline state in Data Fusion operators (#8954) | -| [fb1c8b83d](https://github.com/apache/airflow/commit/fb1c8b83d400506a16c10e3d6623a913847e5cf5) | 2020-06-10 | Add test for BQ operations using location (#9206) | -| [a26afbfa5](https://github.com/apache/airflow/commit/a26afbfa51b0981ae742c6171938b57a80aace2b) | 2020-06-10 | Make generated job_id more informative in BQ insert_job (#9203) | -| [c41192fa1](https://github.com/apache/airflow/commit/c41192fa1fc5c2b3e7b8414c59f656ab67bbef28) | 2020-06-10 | Upgrade pendulum to latest major version ~2.0 (#9184) | -| [b1c8c5ed5](https://github.com/apache/airflow/commit/b1c8c5ed5bba3a852a5446f3fdd1131b4b22637a) | 2020-06-09 | Allows using private endpoints in GKEStartPodOperator (#9169) | -| [5918efc86](https://github.com/apache/airflow/commit/5918efc86a2217caa641a6ada289eee1c21407f8) | 2020-06-05 | Add 3.8 to the test matrices (#8836) | -| [9bcdadaf7](https://github.com/apache/airflow/commit/9bcdadaf7e6e73d3d2246fbbd32a9f30a1b43ca9) | 2020-06-05 | Add 'main' param to template_fields in DataprocSubmitPySparkJobOperator (#9154) | -| [f56811dff](https://github.com/apache/airflow/commit/f56811dff3af66cbceb0418f11e00507bab58674) | 2020-06-05 | [AIRFLOW-6290] Create guide for GKE operators (#8883) | -| [76962867b](https://github.com/apache/airflow/commit/76962867b5877cf5ffd1b6004453f783c0732ab1) | 2020-06-04 | Fix sql_to_gcs hook gzip of schema_file (#9140) | -| [17adcea83](https://github.com/apache/airflow/commit/17adcea835cb7b0cf2d8da0ac7dda5549cfa3e45) | 2020-06-02 | Fix handling of subprocess error handling in s3_file_transform and gcs (#9106) | -| 
[789852546](https://github.com/apache/airflow/commit/78985254683c359f7444a7eb5f6ee4967c37d61f) | 2020-06-01 | Add BigQueryInsertJobOperator (#8868) | -| [29eb68b90](https://github.com/apache/airflow/commit/29eb68b90b5df692ac322be0939af5e7fa9b71bc) | 2020-05-31 | Create guide for Dataproc Operators (#9037) | -| [886afaf62](https://github.com/apache/airflow/commit/886afaf622602aa97f925bc3ee4fc27aa995c445) | 2020-05-29 | Add example dag and system test for LocalFilesystemToGCSOperator (#9043) | -| [a779c4dfc](https://github.com/apache/airflow/commit/a779c4dfc278d6ece480b012764ea5814dc78dee) | 2020-05-29 | add separate example dags and system tests for GCSToGoogleSheetsOperator (#9066) | -| [ada26be23](https://github.com/apache/airflow/commit/ada26be23c913796c2ae77b91cb7d113dfec75a6) | 2020-05-29 | Add correct description for dst param in LocalFilesystemToGCSOperator (#9055) | -| [81b2761b8](https://github.com/apache/airflow/commit/81b2761b86dae2d21a6ee859d49c08d46fea6def) | 2020-05-29 | add example dag and system test for GoogleSheetsToGCSOperator (#9056) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [3994030ea](https://github.com/apache/airflow/commit/3994030ea678727daaf9c2bfed0ca94a096f8d2a) | 2020-05-26 | Refactor BigQuery operators (#8858) | -| [cdb3f2545](https://github.com/apache/airflow/commit/cdb3f25456e49d0199cd7ccd680626dac01c9be6) | 2020-05-26 | All classes in backport providers are now importable in Airflow 1.10 (#8991) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [cf5cf45e1](https://github.com/apache/airflow/commit/cf5cf45e1c0dff9a40e02f0dc221542f974831a7) | 2020-05-23 | Support YAML input for CloudBuildCreateOperator (#8808) | -| [499493c5c](https://github.com/apache/airflow/commit/499493c5c5cf324ab8452ead80a10b71ce0c3b14) | 2020-05-19 | [AIRFLOW-6586] Improvements to gcs sensor (#7197) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [841d81664](https://github.com/apache/airflow/commit/841d81664737c25d73d095a7dab5de80d369c87c) | 2020-05-19 | Allow setting the pooling time in DLPHook (#8824) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [15273f0ea](https://github.com/apache/airflow/commit/15273f0ea05ec579c631ce26b5d620233ebdc4d2) | 2020-05-16 | Check for same task instead of Equality to detect Duplicate Tasks (#8828) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [e1e833bb2](https://github.com/apache/airflow/commit/e1e833bb260879ecb9a1f80f28450a3656c0e598) | 2020-05-13 | Update GoogleBaseHook to not follow 308 and use 60s timeout (#8816) | -| 
[8b5491971](https://github.com/apache/airflow/commit/8b54919711a203c3f35d98c6310a55d4df5da590) | 2020-05-12 | Refactor BigQuery hook methods to use python library (#8631) | -| [6911dfe83](https://github.com/apache/airflow/commit/6911dfe8372a33df67ce1fdd3c2bca1047718f60) | 2020-05-12 | Fix template fields in Google operators (#8840) | -| [4b06fde0f](https://github.com/apache/airflow/commit/4b06fde0f10ce178b3c336c5d901e3b089f2863d) | 2020-05-12 | Fix Flake8 errors (#8841) | -| [1d12c347c](https://github.com/apache/airflow/commit/1d12c347cb258e7081804da1f9f5ffdedc003163) | 2020-05-12 | Refactor BigQuery check operators (#8813) | -| [493b685d7](https://github.com/apache/airflow/commit/493b685d7879cfee532390ba0909d4b1d8764267) | 2020-05-10 | Add separate example DAGs and system tests for google cloud speech (#8778) | -| [79ef8bed8](https://github.com/apache/airflow/commit/79ef8bed891c22eb76adf99158288d1b44426dc0) | 2020-05-10 | Added Upload Multiple Entity Read Files to specified big query dataset (#8610) | -| [280f1f0c4](https://github.com/apache/airflow/commit/280f1f0c4cc49aba1b2f8b456326795733769d18) | 2020-05-10 | Correctly restore upstream_task_ids when deserializing Operators (#8775) | -| [58aefb23b](https://github.com/apache/airflow/commit/58aefb23b1d456bbb24876a4e3ff14f25d6274b0) | 2020-05-08 | Added SDFtoGCSOperator (#8740) | -| [723c52c94](https://github.com/apache/airflow/commit/723c52c942b49b0e8c8fa8667a4a6a45fa249498) | 2020-05-07 | Add documentation for SpannerDeployInstanceOperator (#8750) | -| [25ee4211b](https://github.com/apache/airflow/commit/25ee4211b345ce7c19fb7366fd230838c34f1d47) | 2020-05-06 | Support all RuntimeEnvironment parameters in DataflowTemplatedJobStartOperator (#8531) | -| [8d6f1aa4b](https://github.com/apache/airflow/commit/8d6f1aa4b5bb8809ffc55dc0c62e6d0e89f331e5) | 2020-05-05 | Support num_retries field in env var for GCP connection (#8700) | -| [67caae0f2](https://github.com/apache/airflow/commit/67caae0f25db4eec42b8e81c85683aabdd8d6c1a) | 2020-05-04 | Add system test for gcs_to_bigquery (#8556) | -| [bc45fa675](https://github.com/apache/airflow/commit/bc45fa6759203b4c26b52e693dac97486a84204e) | 2020-05-03 | Add system test and docs for Facebook Ads operators (#8503) | -| [a28c66f23](https://github.com/apache/airflow/commit/a28c66f23d373cd0f8bfc765a515f21d4b66a0e9) | 2020-04-30 | [AIRFLOW-4734] Upsert functionality for PostgresHook.insert_rows() (#8625) | -| [992a24ce4](https://github.com/apache/airflow/commit/992a24ce41067d3b73f293878e71835892cbb632) | 2020-04-28 | Split and improve BigQuery example DAG (#8529) | -| [c1fb28230](https://github.com/apache/airflow/commit/c1fb28230fa0d36ef86c452c70254b253a113f9c) | 2020-04-28 | Refactor BigQueryHook dataset operations (#8477) | -| [e8d0f8fea](https://github.com/apache/airflow/commit/e8d0f8feab0ec08e248cd381359112ad6a832f5b) | 2020-04-26 | Improve idempodency in CloudDataTransferServiceCreateJobOperator (#8430) | -| [37fdfa977](https://github.com/apache/airflow/commit/37fdfa9775f43a5fa15de9c53ab33ecdf97513c5) | 2020-04-26 | [AIRFLOW-6281] Create guide for GCS to GCS transfer operators (#8442) | -| [14b22e6ff](https://github.com/apache/airflow/commit/14b22e6ffeb3af1f68e8362a1d0061b41364019c) | 2020-04-25 | Add hook and operator for Google Cloud Life Sciences (#8481) | -| [72ddc94d1](https://github.com/apache/airflow/commit/72ddc94d1ee08b414102e0b8ac197a3d8e965707) | 2020-04-23 | Pass location using parmamter in Dataflow integration (#8382) | -| 
[912aa4b42](https://github.com/apache/airflow/commit/912aa4b4237695275db6379cf2f0a633ea6087bc) | 2020-04-23 | Added GoogleDisplayVideo360DownloadLineItemsOperator (#8174) | -| [57c8c0583](https://github.com/apache/airflow/commit/57c8c05839f66ed2909b1bee8ff6976432db82aa) | 2020-04-22 | Use python client in BQ hook create_empty_table/dataset and table_exists (#8377) | -| [5d3a7eef3](https://github.com/apache/airflow/commit/5d3a7eef30b30fa466d8173f13abe4c356d73aef) | 2020-04-20 | Allow multiple extra_packages in Dataflow (#8394) | -| [79c99b1b6](https://github.com/apache/airflow/commit/79c99b1b6ae2ff5b0c8ab892f7f3fb1b44724121) | 2020-04-18 | Added location parameter to BigQueryCheckOperator (#8273) | -| [79d3f33c1](https://github.com/apache/airflow/commit/79d3f33c1b65c9c7e7b1a75e25d38cab9aa4517f) | 2020-04-17 | Clean up temporary files in Dataflow operators (#8313) | -| [efcffa323](https://github.com/apache/airflow/commit/efcffa323ddb5aa9f5907aa86808f3f3b4f5bd87) | 2020-04-16 | Add Dataproc SparkR Example (#8240) | -| [b198a1fa9](https://github.com/apache/airflow/commit/b198a1fa94c44228dc7358552aeb6a5371ae0da2) | 2020-04-15 | Create guide for BigQuery operators (#8276) | -| [2636cc932](https://github.com/apache/airflow/commit/2636cc932c3b156644edd46635cf9ff995c83159) | 2020-04-14 | Raise exception when GCP credential doesn't support account impersonation (#8213) | -| [eee4ebaee](https://github.com/apache/airflow/commit/eee4ebaeeb1991480ee178ddb600bc69b2a88764) | 2020-04-14 | Added Facebook Ads Operator #7887 (#8008) | -| [8cae07ea1](https://github.com/apache/airflow/commit/8cae07ea1873a90516120d9ffbd28e7fdd2f78a4) | 2020-04-14 | fixed typo (#8294) | -| [45c898330](https://github.com/apache/airflow/commit/45c8983306ab1c54abdacd8f870e790fad25cb37) | 2020-04-13 | Less aggressive eager upgrade of requirements (#8267) | -| [1fd9ed384](https://github.com/apache/airflow/commit/1fd9ed3840361afa1e9456ccb0dfd5a60fba4e85) | 2020-04-13 | Add mypy plugin for decorators. 
(#8145) | -| [327b0a9f7](https://github.com/apache/airflow/commit/327b0a9f77bbcbe3f977a37de04264c2eff4bee1) | 2020-04-13 | Added GoogleDisplayVideo360UploadLineItemsOperator (#8216) | -| [bb5e403a3](https://github.com/apache/airflow/commit/bb5e403a320e7377e5040cb180f61b4f5a9ea558) | 2020-04-10 | Honor schema type for MySQL to GCS data pre-process (#8090) | -| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09 | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170) | -| [3fc89f29f](https://github.com/apache/airflow/commit/3fc89f29f5bcd1529089fa6cb9c44843614f9ec5) | 2020-04-06 | [AIRFLOW-7106] Cloud data fusion integration - Allow to pass args to start pipeline (#7849) | -| [7ef75d239](https://github.com/apache/airflow/commit/7ef75d2393f30d155de550e6d1ee8c055e2abfee) | 2020-04-03 | [AIRFLOW-7117] Honor self.schema in sql_to_gcs as schema to upload (#8049) | -| [ed2bc0057](https://github.com/apache/airflow/commit/ed2bc00576b39a88e3e1fb79092494f4bfdcbf5c) | 2020-04-02 | Add Google Ads list accounts operator (#8007) | -| [3808a6206](https://github.com/apache/airflow/commit/3808a6206e70d4af84b39ea7078df54f02c1435e) | 2020-04-01 | Unify Google class/package names (#8033) | -| [8a0240257](https://github.com/apache/airflow/commit/8a02402576f83869d5134b4bddef5d73c15a8320) | 2020-03-31 | Rename CloudBaseHook to GoogleBaseHook and move it to google.common (#8011) | -| [8e8978007](https://github.com/apache/airflow/commit/8e897800716c8ccedd1c53f2d083cb295786aa50) | 2020-03-31 | Add more refactor steps for providers.google (#8010) | -| [aae3b8fb2](https://github.com/apache/airflow/commit/aae3b8fb27870cb3cfba5ed73e35e08d520ef014) | 2020-03-31 | Individual package READMEs (#8012) | -| [779023968](https://github.com/apache/airflow/commit/779023968f983c91701f687bc823dc338934cdad) | 2020-03-30 | [AIRFLOW-7075] Operators for storing information from GCS into GA (#7743) | -| [49abce521](https://github.com/apache/airflow/commit/49abce52178c81954f8a25608f70ffe02fcf7b19) | 2020-03-30 | Improve system tests for Cloud Build (#8003) | -| [0f19a930d](https://github.com/apache/airflow/commit/0f19a930d1a7dec2a96bab0de144829f83cc0626) | 2020-03-29 | Remove GKEStartPodOperator when backporting (#7908) | -| [0e1c238b2](https://github.com/apache/airflow/commit/0e1c238b2fff3a092c93368125bc8d82abc4b308) | 2020-03-28 | Get Airflow Variables from GCP Secrets Manager (#7946) | -| [eb4af4f94](https://github.com/apache/airflow/commit/eb4af4f944c77e67e167bbb6b0a2aaf075a95b50) | 2020-03-28 | Make BaseSecretsBackend.build_path generic (#7948) | -| [01f99426f](https://github.com/apache/airflow/commit/01f99426fddd2a24552f352edcb271fa78cf3b15) | 2020-03-28 | Add download/upload operators for GCS and Google Sheets (#7866) | -| [892522f8e](https://github.com/apache/airflow/commit/892522f8e2aeedc1ad842a08aaea967b0cae077f) | 2020-03-26 | Change signature of GSheetsHook methods (#7853) | -| [bfd425157](https://github.com/apache/airflow/commit/bfd425157a746402b516f8fc9e48f4ddccd794ce) | 2020-03-26 | Improve idempotency in MLEngineHook.create_model (#7811) | -| [f9c226343](https://github.com/apache/airflow/commit/f9c226343d94a7732da280d1dd086bf1ba291c77) | 2020-03-26 | Fix CloudSecretsManagerBackend invalid connections_prefix (#7861) | -| [e3920f12f](https://github.com/apache/airflow/commit/e3920f12f483b53950507c50f6ab6a4318072859) | 2020-03-26 | Improve setUp/tearDown in Cloud Firestore system test (#7862) | -| 
[8ba8a7295](https://github.com/apache/airflow/commit/8ba8a7295a31f6b44894bfcaea36fa93b8d8c0d0) | 2020-03-26 | Improve example DAGs for Cloud Memorystore (#7855) | -| [f7d1a437c](https://github.com/apache/airflow/commit/f7d1a437c17461b5ab768b75d58f0cb026b2a818) | 2020-03-26 | Fix CloudMemorystoreCreateInstanceAndImportOperator operator (#7856) | -| [beef6c230](https://github.com/apache/airflow/commit/beef6c230e4ff266af7c16b639bfda659b2bf6c0) | 2020-03-26 | Improve authorization in GCP system tests (#7863) | -| [5f165f3e4](https://github.com/apache/airflow/commit/5f165f3e4231ebd420ce643211a93e1fecf4877e) | 2020-03-26 | [AIRFLOW-5801] Get GCP credentials from file instead of JSON blob (#7869) | -| [686d7d50b](https://github.com/apache/airflow/commit/686d7d50bd21622724d6818021355bc6885fd3de) | 2020-03-25 | Standardize SecretBackend class names (#7846) | -| [1982c3fdc](https://github.com/apache/airflow/commit/1982c3fdca1f04cfc41fc5b5e285d8f01c6b76ab) | 2020-03-24 | Run Dataflow for ML Engine summary in venv (#7809) | -| [eef87b995](https://github.com/apache/airflow/commit/eef87b9953347a65421f315a07dbef37ded9df66) | 2020-03-23 | [AIRFLOW-7105] Unify Secrets Backend method interfaces (#7830) | -| [529db07b2](https://github.com/apache/airflow/commit/529db07b2ee73d886e37e8b3415462c730187b15) | 2020-03-23 | Improve Google PubSub hook publish method (#7831) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [a001489b5](https://github.com/apache/airflow/commit/a001489b5928ebfc35f990a29d1c9c2ecb80bd61) | 2020-03-23 | Improve example DAG for ML Engine (#7810) | -| [9e5a8e7f8](https://github.com/apache/airflow/commit/9e5a8e7f83cf2368315fce62f8d81304f7ba2f04) | 2020-03-23 | Add call to Super class in 'google' providers (#7823) | -| [b86bf79bf](https://github.com/apache/airflow/commit/b86bf79bff615e61de98bead4d02eace5690d5fb) | 2020-03-23 | Fix typo in GCP credentials_provider's docstring (#7818) | -| [56c013ce9](https://github.com/apache/airflow/commit/56c013ce922eb18e5f7dd4410986afbcc6f29025) | 2020-03-23 | Add missing docstring in BigQueryHook.create_empty_table (#7817) | -| [426a79847](https://github.com/apache/airflow/commit/426a79847ced832ca3f67c135fd8830ebf1de7d2) | 2020-03-23 | Imrove support for laatest API in MLEngineStartTrainingJobOperator (#7812) | -| [cdf1809fc](https://github.com/apache/airflow/commit/cdf1809fce0e59c8379a799f1738d8d813abbf51) | 2020-03-23 | [AIRFLOW-7104] Add Secret backend for GCP Secrets Manager (#7795) | -| [27dac00e1](https://github.com/apache/airflow/commit/27dac00e125b87626a0b87074d61e6d38031bf47) | 2020-03-22 | [AIRFLOW-7099] Improve system test for cloud transfer service (#7794) | -| [0daf5d729](https://github.com/apache/airflow/commit/0daf5d729acef4e9aef5226452dff774e80430cd) | 2020-03-22 | Add ability to specify a maximum modified time for objects in GCSToGCSOperator (#7791) | -| [c8088c2bd](https://github.com/apache/airflow/commit/c8088c2bd70a16605a5d4b1a66a22309359d6712) | 2020-03-20 | [AIRFLOW-7100] Add GoogleAnalyticsGetAdsLinkOperator (#7781) | -| [5106a2931](https://github.com/apache/airflow/commit/5106a29314b413d168bcba7a64bf91c04fdb5dfe) | 2020-03-20 | [AIRFLOW-6752] Add GoogleAnalyticsRetrieveAdsLinksListOperator (#7748) | -| [759ce2a80](https://github.com/apache/airflow/commit/759ce2a80c95832fe4773c9f4fde23e1b03cbc6f) | 2020-03-20 | [AIRFLOW-6978] Add PubSubPullOperator (#7766) | -| 
[6b9b214e4](https://github.com/apache/airflow/commit/6b9b214e4c3b3afa8ea2e1a5c1e24993013d60ac) | 2020-03-20 | [AIRFLOW-6732] Add GoogleAdsHook and GoogleAdsToGcsOperator (#7692) | -| [b11891696](https://github.com/apache/airflow/commit/b11891696946d1461174b385c88d6af8abb99768) | 2020-03-19 | [AIRFLOW-7069] Fix cloudsql system tests (#7770) | -| [ae854cae5](https://github.com/apache/airflow/commit/ae854cae5a2cf8cae37edf7e0813ad01bccfbc30) | 2020-03-19 | [AIRFLOW-7082] Remove catch_http_exception decorator in GCP hooks (#7756) | -| [7e1e954d2](https://github.com/apache/airflow/commit/7e1e954d23ce272b0a71188f0f535e20d54be443) | 2020-03-19 | [AIRFLOW-7085] Cache credentials, project_id in GCP Base Hook (#7759) | -| [6e21c139b](https://github.com/apache/airflow/commit/6e21c139b3cce3f895040939f0b02e3e0ba36141) | 2020-03-19 | [AIRFLOW-XXXX] Fix reference to GCP classes in guides (#7762) | -| [ce022a3f7](https://github.com/apache/airflow/commit/ce022a3f72b7735087d4c3bbe81d293a0ab75327) | 2020-03-19 | [AIRFLOW-XXXX] Add cross-references for operators guide (#7760) | -| [029c84e55](https://github.com/apache/airflow/commit/029c84e5527b6db6bdbdbe026f455da325bedef3) | 2020-03-18 | [AIRFLOW-5421] Add Presto to GCS transfer operator (#7718) | -| [63a3102ed](https://github.com/apache/airflow/commit/63a3102ede8fb8f764d251b20cad5ee5bef84f50) | 2020-03-18 | [AIRFLOW-7064] Add CloudFirestoreExportDatabaseOperator (#7725) | -| [73305c7bd](https://github.com/apache/airflow/commit/73305c7bd57f14444804c13b8b290f479832d3db) | 2020-03-18 | [AIRFLOW-7081] Remove env variables from GCP guide (#7755) | -| [60fdbf6d9](https://github.com/apache/airflow/commit/60fdbf6d9255d34a8967400e9585b1cd5d29d3e9) | 2020-03-18 | [AIRFLOW-5610] Add ability to specify multiple objects to copy in GCSToGCSOperator (#7728) | -| [de7e934ca](https://github.com/apache/airflow/commit/de7e934ca3f21ce82f67accf92811b3ac044476f) | 2020-03-17 | [AIRFLOW-7079] Remove redundant code for storing template_fields (#7750) | -| [0de0347b2](https://github.com/apache/airflow/commit/0de0347b27a961c46ee49da6dfa9205321657749) | 2020-03-17 | [AIRFLOW-6855]: Escape project_dataset_table in SQL query in gcs to bq … (#7475) | -| [91557c6f8](https://github.com/apache/airflow/commit/91557c6f87529c010b8ad1110ece35fd7fd751e4) | 2020-03-17 | [AIRFLOW-7073] GKEStartPodOperator always use connection credentials (#7738) | -| [51161dbd9](https://github.com/apache/airflow/commit/51161dbd9de0c966016cec4d5036877890daee7c) | 2020-03-16 | [AIRFLOW-5664] Store timestamps with microseconds precision (#6354) | -| [2bc020c43](https://github.com/apache/airflow/commit/2bc020c43112dd3a769311de8d5012e8e8f399ee) | 2020-03-14 | [AIRFLOW-7055] Verbose logging option for google provider (#7711) | -| [c997cab42](https://github.com/apache/airflow/commit/c997cab42d8695ac444e63dfe4b948a7ea82ed89) | 2020-03-13 | [AIRFLOW-6724] Add Google Analytics 360 Accounts Retrieve Operator (#7630) | -| [137896f32](https://github.com/apache/airflow/commit/137896f326cd29b59902a887e4c4e58f940ff62b) | 2020-03-12 | [AIRFLOW-7034] Remove feature: Assigning Dag to task using Bitshift Op (#7685) | -| [1f77f943d](https://github.com/apache/airflow/commit/1f77f943d5d85f66b6a988e8ef6506525eaf4732) | 2020-03-10 | [AIRFLOW-6980] Improve system tests and building providers package (#7615) | -| [bf9b6b6d7](https://github.com/apache/airflow/commit/bf9b6b6d70455352bbf807871c8eeb6324be7e54) | 2020-03-09 | [AIRFLOW-5013] Add GCP Data Catalog Hook and operators (#7664) | -| 
[e5130dc9f](https://github.com/apache/airflow/commit/e5130dc9fe89187e95071e678ea3b46600866762) | 2020-03-09 | [AIRFLOW-2911] Add job cancellation capability to Dataflow service (#7659) | -| [faf0df4b9](https://github.com/apache/airflow/commit/faf0df4b9460b7f037ee390addbd2c6effcae013) | 2020-03-09 | [AIRFLOW-XXXX] Fix upsert operator in BQ example DAG (#7666) | -| [42eef3821](https://github.com/apache/airflow/commit/42eef38217e709bc7a7f71bf0286e9e61293a43e) | 2020-03-07 | [AIRFLOW-6877] Add cross-provider dependencies as extras (#7506) | -| [b5b9795f0](https://github.com/apache/airflow/commit/b5b9795f0446bb484a91ee485f49ea456f1c26c4) | 2020-03-07 | [AIRFLOW-6973] Make GCSCreateBucketOperator idempotent (fix) (#7624) | -| [6b65038fb](https://github.com/apache/airflow/commit/6b65038fb409ba1040e70305444816d8f5cfdc47) | 2020-03-06 | [AIRFLOW-6990] Improve system tests for Google Marketing Platform (#7631) | -| [755fe5224](https://github.com/apache/airflow/commit/755fe52249ba1cd965cf2f87fa7a428b8197a38a) | 2020-03-05 | [AIRFLOW-6915] Add AI Platform Console Link for MLEngineStartTrainingJobOperator (#7535) | -| [cb2f33911](https://github.com/apache/airflow/commit/cb2f339116cf2093da447748892fac68aecbb888) | 2020-03-04 | [AIRFLOW-6973] Make GCSCreateBucketOperator idempotent (#7609) | -| [09fea3ce8](https://github.com/apache/airflow/commit/09fea3ce8e4d7816281963bb8f2cb06f4de6db5c) | 2020-03-04 | [AIRFLOW-6977] Fix BigQuery DTS example DAG (#7612) | -| [8230ccc48](https://github.com/apache/airflow/commit/8230ccc48b157c89b2b893d42c6fe1523b83363a) | 2020-03-04 | [AIRFLOW-6926] Fix Google Tasks operators return types and idempotency (#7547) | -| [0d1e3088a](https://github.com/apache/airflow/commit/0d1e3088aa9f16eaeeb7b18eccec8f35c79a53df) | 2020-03-04 | [AIRFLOW-6970] Improve GCP Video Intelligence system tests (#7604) | -| [ab6bb0012](https://github.com/apache/airflow/commit/ab6bb0012c38740b76e864d42d299c5c7a9972a3) | 2020-03-03 | [AIRFLOW-6971] Fix return type in CloudSpeechToTextRecognizeSpeechOperator (#7607) | -| [3db4ade3d](https://github.com/apache/airflow/commit/3db4ade3dc9660c21c28187100a22008552f2bd3) | 2020-02-29 | [AIRFLOW-6924] Fix Google DLP operators return types (#7546) | -| [008b4bab1](https://github.com/apache/airflow/commit/008b4bab14222da068b737d6332db4963b994007) | 2020-02-27 | [AIRFLOW-6730] Use total_seconds instead of seconds (#7363) | -| [bb552b2d9](https://github.com/apache/airflow/commit/bb552b2d9fd595cc3eb1b3a2f637f29b814878d7) | 2020-02-25 | [AIRFLOW-6908] Lazy load AirflowException (#7528) | -| [d1a34246a](https://github.com/apache/airflow/commit/d1a34246ac593901f8599b102dc3d7efa4dd61e4) | 2020-02-25 | [AIRFLOW-6593] Add GCP Stackdriver Alerting Hooks and Operators (#7322) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [dcf874352](https://github.com/apache/airflow/commit/dcf87435219307d4e916a8abc2b819ad75e2b1cf) | 2020-02-24 | [AIRFLOW-6894] Prevent db query in example_dags (#7516) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [35b961637](https://github.com/apache/airflow/commit/35b9616378d1cfba7c2eb3c71e20acb6734b7c77) | 2020-02-21 | [AIRFLOW-4973] Add Cloud Data Fusion Pipeline 
integration (#7486) | -| [aff3a361b](https://github.com/apache/airflow/commit/aff3a361b4092212c0757f9ce88fa2e40d25d1f4) | 2020-02-20 | [AIRFLOW-6558] Campaign Manager operators for conversions (#7420) | -| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18 | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) | -| [5b199cb86](https://github.com/apache/airflow/commit/5b199cb86be5b1aefbd8620185033d6f635713c1) | 2020-02-17 | [AIRFLOW-XXXX] Typo in example_bigquery DAG (#7429) | -| [2c9345a8e](https://github.com/apache/airflow/commit/2c9345a8e03d37a2676efa2f2ea7e8b7814c5345) | 2020-02-17 | [AIRFLOW-6759] Added MLEngine operator/hook to cancel MLEngine jobs (#7400) | -| [946bdc23c](https://github.com/apache/airflow/commit/946bdc23c039637b0383e1269f99bdd1b2426565) | 2020-02-16 | [AIRFLOW-6405] Add GCP BigQuery Table Upsert Operator (#7126) | -| [2381c820c](https://github.com/apache/airflow/commit/2381c820c8aaeffc1c9b4ed47832038833400eb8) | 2020-02-13 | [AIRFLOW-6505] Let emoji encoded properly for json.dumps() (#7399) | -| [04c1fefbf](https://github.com/apache/airflow/commit/04c1fefbf26a73ed13881d2ec14eada48028ff72) | 2020-02-03 | [AIRFLOW-6676] added GCSDeleteBucketOperator (#7307) | -| [a0252748f](https://github.com/apache/airflow/commit/a0252748ff312daede15c6f0a3d39e16c774461c) | 2020-02-03 | [AIRFLOW-6717] Remove non-existent field from templated_fields (#7340) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [9d8d07557](https://github.com/apache/airflow/commit/9d8d0755789d4aeadc5d3015f3cdde62901f85b8) | 2020-02-03 | [AIRFLOW-6715] Fix Google Cloud DLP Example DAG (#7337) | -| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02 | [AIRFLOW-6708] Set unique logger names (#7330) | -| [373c6aa4a](https://github.com/apache/airflow/commit/373c6aa4a208284b5ff72987e4bd8f4e2ada1a1b) | 2020-01-30 | [AIRFLOW-6682] Move GCP classes to providers package (#7295) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [057f3ae3a](https://github.com/apache/airflow/commit/057f3ae3a4afedf6d462ecf58b01dd6304d3e135) | 2020-01-29 | [AIRFLOW-6670][depends on AIRFLOW-6669] Move contrib operators to providers package (#7286) | -| [ceea293c1](https://github.com/apache/airflow/commit/ceea293c1652240e7e856c201e4341a87ef97a0f) | 2020-01-28 | [AIRFLOW-6656] Fix AIP-21 moving (#7272) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | -| [f4d3e5e54](https://github.com/apache/airflow/commit/f4d3e5e54507f52a00a9b95aa48eb0260e17224d) | 2020-01-13 | [AIRFLOW-6102] [AIP-21] Rename Dataproc operators (#7151) | -| [e7bf8ecb4](https://github.com/apache/airflow/commit/e7bf8ecb48f0299af8091433535ac573c2afd1cf) | 2020-01-13 | [AIRFLOW-6119] [AIP-21] Rename GCS operators, hooks and sensors (#7125) | -| [5b6772cb8](https://github.com/apache/airflow/commit/5b6772cb8391b248cb4b7be5fd3d5c035280fac1) | 2020-01-09 | 
[AIRFLOW-6125] [AIP-21] Rename S3 operator and SFTP operator (#7112) | -| [4f8592ae8](https://github.com/apache/airflow/commit/4f8592ae8f52ab7f42623d3b43eef0928c9aafb2) | 2020-01-08 | [AIRFLOW-6118] [AIP-21] Rename Pubsub operators and hook (#7046) | -| [20299473f](https://github.com/apache/airflow/commit/20299473f11add6531f607256ee8a0f7f9507ab8) | 2020-01-03 | [AIRFLOW-6115] [AIP-21] Rename GCP vision operators (#7020) | -| [18e8cea4e](https://github.com/apache/airflow/commit/18e8cea4e7487a7dfefc03661e5ebe54c4104ead) | 2020-01-03 | [AIRFLOW-6428] Fix import path for airflow.utils.dates.days_ago in Example DAGs (#7007) | -| [95087af14](https://github.com/apache/airflow/commit/95087af14091f28a83ced8ff1860b86dfd93f93d) | 2019-12-31 | [AIRFLOW-6110] [AIP-21] Rename natural_language service (#6968) | -| [69629a5a9](https://github.com/apache/airflow/commit/69629a5a948ab2c4ac04a4a4dca6ac86d19c11bd) | 2019-12-09 | [AIRFLOW-5807] Move SFTP from contrib to providers. (#6464) | -| [25e9047a4](https://github.com/apache/airflow/commit/25e9047a4a4da5fad4f85c366e3a6262c0a4f68e) | 2019-12-09 | [AIRFLOW-6193] Do not use asserts in Airflow main code (#6749) | -| [ed0a14f32](https://github.com/apache/airflow/commit/ed0a14f321b9dab3554ae395c11c147258536ce8) | 2019-12-09 | [AIRFLOW-6120] Rename GoogleCloudBaseHook (#6734) | -| [2f2f89c14](https://github.com/apache/airflow/commit/2f2f89c148e2b694aee9402707f68065ee7320f8) | 2019-12-01 | [AIRFLOW-6139] Consistent spaces in pylint enable/disable (#6701) | -| [03c870a61](https://github.com/apache/airflow/commit/03c870a6172ab232af6319a30ad8d46622359b10) | 2019-11-26 | [AIRFLOW-6010] Remove cyclic imports and pylint hacks (#6601) | -| [5c4cfea8c](https://github.com/apache/airflow/commit/5c4cfea8c0f488496c1cbcc4c6c5db13d8210979) | 2019-11-15 | [AIRFLOW-5718] Add SFTPToGoogleCloudStorageOperator (#6393) | -| [44a8c37a9](https://github.com/apache/airflow/commit/44a8c37a9a8668469aa825ad21057cca6ac2c186) | 2019-11-13 | [AIRFLOW-XXX] Fix the docstring for Dataproc get_job method (#6581) | -| [d633d3ac4](https://github.com/apache/airflow/commit/d633d3ac44c395e6c43cd388f98fba1ce1c435a3) | 2019-11-13 | [AIRFLOW-5691] Rewrite Dataproc operators to use python library (#6371) | -| [d985c02d9](https://github.com/apache/airflow/commit/d985c02d9fa3d9ec946abc1735b0551fd61fb9f0) | 2019-11-05 | [AIRFLOW-XXX] Add How-To-Guide to GCP PubSub (#6497) | -| [a296cdabd](https://github.com/apache/airflow/commit/a296cdabdb9c9c65cf9a48329cb776aed5c82d43) | 2019-11-04 | [AIRFLOW-5743] Move Google PubSub to providers package (#6476) | -| [470b2a779](https://github.com/apache/airflow/commit/470b2a779d031406a3d5925f2fa2ec40e5c3bccb) | 2019-10-30 | [AIRFLOW-5741] Move Cloud Natural Language to providers (#6421) | -| [f2caa451f](https://github.com/apache/airflow/commit/f2caa451fc2b8ee59163314f9ec1cc372acbadf1) | 2019-10-27 | [AIRFLOW-5742] Move Google Cloud Vision to providers package (#6424) | -| [16d7accb2](https://github.com/apache/airflow/commit/16d7accb22c866d4fbf368e4d979dc1c4a41d93c) | 2019-10-22 | [AIRFLOW-4971] Add Google Display & Video 360 integration (#6170) | -| [4e661f535](https://github.com/apache/airflow/commit/4e661f535dea613f9b2e0075676f9a73a97461fe) | 2019-10-22 | [AIRFLOW-5379] Add Google Search Ads 360 operators (#6228) | -| [19e32b4e2](https://github.com/apache/airflow/commit/19e32b4e2c798f662e5d8d1e7c65036c5e7ac125) | 2019-10-18 | [AIRFLOW-5656] Rename provider to providers module (#6333) | diff --git a/airflow/providers/google/cloud/ADDITIONAL_INFO.md 
b/airflow/providers/google/cloud/ADDITIONAL_INFO.md deleted file mode 100644 index ab4ca6ffd7c6b..0000000000000 --- a/airflow/providers/google/cloud/ADDITIONAL_INFO.md +++ /dev/null @@ -1,28 +0,0 @@ - - -## Additional info - -### Breaking change in `AutoMLBatchPredictOperator` - -Class `AutoMLBatchPredictOperator` property `params` is renamed to `prediction_params`. -To keep old behaviour, please rename `params` to `prediction_params` when initializing an instance of `AutoMLBatchPredictOperator`. - -Property `params` still exists, but as a property inherited from parent's class `BaseOperator`. -Property `params` has nothing to do with prediction, use `prediction_params` instead. diff --git a/airflow/providers/google/cloud/example_dags/example_automl_tables.py b/airflow/providers/google/cloud/example_dags/example_automl_tables.py index 4ff92b3d1df7f..117bd34c3e8db 100644 --- a/airflow/providers/google/cloud/example_dags/example_automl_tables.py +++ b/airflow/providers/google/cloud/example_dags/example_automl_tables.py @@ -47,7 +47,7 @@ GCP_AUTOML_DATASET_BUCKET = os.environ.get( "GCP_AUTOML_DATASET_BUCKET", "gs://cloud-ml-tables-data/bank-marketing.csv" ) -TARGET = os.environ.get("GCP_AUTOML_TARGET", "Class") +TARGET = os.environ.get("GCP_AUTOML_TARGET", "Deposit") # Example values MODEL_ID = "TBL123456" @@ -76,9 +76,9 @@ def get_target_column_spec(columns_specs: List[Dict], column_name: str) -> str: Using column name returns spec of the column. """ for column in columns_specs: - if column["displayName"] == column_name: + if column["display_name"] == column_name: return extract_object_id(column) - return "" + raise Exception(f"Unknown target column: {column_name}") # Example DAG to create dataset, train model_id and deploy it. diff --git a/airflow/providers/google/cloud/example_dags/example_bigquery_dts.py b/airflow/providers/google/cloud/example_dags/example_bigquery_dts.py index 260dc5d8ee19e..da13c9da703d1 100644 --- a/airflow/providers/google/cloud/example_dags/example_bigquery_dts.py +++ b/airflow/providers/google/cloud/example_dags/example_bigquery_dts.py @@ -22,9 +22,6 @@ import os import time -from google.cloud.bigquery_datatransfer_v1.types import TransferConfig -from google.protobuf.json_format import ParseDict - from airflow import models from airflow.providers.google.cloud.operators.bigquery_dts import ( BigQueryCreateDataTransferOperator, @@ -55,16 +52,13 @@ "file_format": "CSV", } -TRANSFER_CONFIG = ParseDict( - { - "destination_dataset_id": GCP_DTS_BQ_DATASET, - "display_name": "GCS Test Config", - "data_source_id": "google_cloud_storage", - "schedule_options": schedule_options, - "params": PARAMS, - }, - TransferConfig(), -) +TRANSFER_CONFIG = { + "destination_dataset_id": GCP_DTS_BQ_DATASET, + "display_name": "GCS Test Config", + "data_source_id": "google_cloud_storage", + "schedule_options": schedule_options, + "params": PARAMS, +} # [END howto_bigquery_dts_create_args] diff --git a/airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py b/airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py index 441c165981cca..acb50b4058da3 100644 --- a/airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py +++ b/airflow/providers/google/cloud/example_dags/example_cloud_memorystore.py @@ -22,7 +22,7 @@ from urllib.parse import urlparse from google.cloud.memcache_v1beta2.types import cloud_memcache -from google.cloud.redis_v1.gapic.enums import FailoverInstanceRequest, Instance +from google.cloud.redis_v1 import 
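# A minimal sketch of the `params` -> `prediction_params` rename described in the
# removed ADDITIONAL_INFO.md above. The model ID, bucket URIs and the
# `feature_importance` setting are illustrative placeholders, not values from this change:
from airflow.providers.google.cloud.operators.automl import AutoMLBatchPredictOperator

batch_predict = AutoMLBatchPredictOperator(
    task_id="batch_predict",
    model_id="TBL123456",
    location="us-central1",
    input_config={"gcs_source": {"input_uris": ["gs://my-bucket/input.csv"]}},
    output_config={"gcs_destination": {"output_uri_prefix": "gs://my-bucket/output/"}},
    # Previously: params={"feature_importance": "true"}. `params` now refers only
    # to the templating dict inherited from BaseOperator.
    prediction_params={"feature_importance": "true"},
)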
FailoverInstanceRequest, Instance from airflow import models from airflow.operators.bash import BashOperator @@ -161,7 +161,7 @@ set_acl_permission = GCSBucketCreateAclEntryOperator( task_id="gcs-set-acl-permission", bucket=BUCKET_NAME, - entity="user-{{ task_instance.xcom_pull('get-instance')['persistenceIamIdentity']" + entity="user-{{ task_instance.xcom_pull('get-instance')['persistence_iam_identity']" ".split(':', 2)[1] }}", role="OWNER", ) diff --git a/airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_aws.py b/airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_aws.py index 353aa335f3d93..c1bc8c0b9cd93 100644 --- a/airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_aws.py +++ b/airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_aws.py @@ -28,7 +28,7 @@ .. warning:: You need to provide a large enough set of data so that operations do not execute too quickly. Otherwise, DAG will fail. -* GCP_TRANSFER_SECOND_TARGET_BUCKET - Google Cloud Storage bucket bucket to which files are copied +* GCP_TRANSFER_SECOND_TARGET_BUCKET - Google Cloud Storage bucket to which files are copied * WAIT_FOR_OPERATION_POKE_INTERVAL - interval of what to check the status of the operation A smaller value than the default value accelerates the system test and ensures its correct execution with smaller quantities of files in the source bucket diff --git a/airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_gcp.py b/airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_gcp.py index c4cfa2e31efc3..8e851df9a9394 100644 --- a/airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_gcp.py +++ b/airflow/providers/google/cloud/example_dags/example_cloud_storage_transfer_service_gcp.py @@ -25,7 +25,7 @@ * GCP_PROJECT_ID - Google Cloud Project to use for the Google Cloud Transfer Service. * GCP_TRANSFER_FIRST_TARGET_BUCKET - Google Cloud Storage bucket to which files are copied from AWS. 
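# Why the Jinja template above switches from 'persistenceIamIdentity' to
# 'persistence_iam_identity': the new proto-plus based clients serialize messages
# with snake_case field names, so XCom consumers must use snake_case keys too.
# A minimal sketch, assuming the proto-plus Message.to_dict helper and a
# placeholder instance name:
from google.cloud.redis_v1 import Instance

instance = Instance(name="projects/my-project/locations/us-central1/instances/redis-1")
instance_dict = Instance.to_dict(instance)
print(instance_dict["name"])  # keys such as persistence_iam_identity come out snake_case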
It is also a source bucket in next step -* GCP_TRANSFER_SECOND_TARGET_BUCKET - Google Cloud Storage bucket bucket to which files are copied +* GCP_TRANSFER_SECOND_TARGET_BUCKET - Google Cloud Storage bucket to which files are copied """ import os diff --git a/airflow/providers/google/cloud/example_dags/example_datacatalog.py b/airflow/providers/google/cloud/example_dags/example_datacatalog.py index c8597a6e4f0ab..cc4b73ae41ec6 100644 --- a/airflow/providers/google/cloud/example_dags/example_datacatalog.py +++ b/airflow/providers/google/cloud/example_dags/example_datacatalog.py @@ -19,7 +19,7 @@ """ Example Airflow DAG that interacts with Google Data Catalog service """ -from google.cloud.datacatalog_v1beta1.proto.tags_pb2 import FieldType, TagField, TagTemplateField +from google.cloud.datacatalog_v1beta1 import FieldType, TagField, TagTemplateField from airflow import models from airflow.operators.bash_operator import BashOperator @@ -91,7 +91,7 @@ entry_id=ENTRY_ID, entry={ "display_name": "Wizard", - "type": "FILESET", + "type_": "FILESET", "gcs_fileset_spec": {"file_patterns": ["gs://test-datacatalog/**"]}, }, ) @@ -144,7 +144,7 @@ "display_name": "Awesome Tag Template", "fields": { FIELD_NAME_1: TagTemplateField( - display_name="first-field", type=FieldType(primitive_type="STRING") + display_name="first-field", type_=dict(primitive_type="STRING") ) }, }, @@ -172,7 +172,7 @@ tag_template=TEMPLATE_ID, tag_template_field_id=FIELD_NAME_2, tag_template_field=TagTemplateField( - display_name="second-field", type=FieldType(primitive_type="STRING") + display_name="second-field", type_=FieldType(primitive_type="STRING") ), ) # [END howto_operator_gcp_datacatalog_create_tag_template_field] @@ -305,7 +305,7 @@ # [START howto_operator_gcp_datacatalog_lookup_entry_result] lookup_entry_result = BashOperator( task_id="lookup_entry_result", - bash_command="echo \"{{ task_instance.xcom_pull('lookup_entry')['displayName'] }}\"", + bash_command="echo \"{{ task_instance.xcom_pull('lookup_entry')['display_name'] }}\"", ) # [END howto_operator_gcp_datacatalog_lookup_entry_result] diff --git a/airflow/providers/google/cloud/example_dags/example_stackdriver.py b/airflow/providers/google/cloud/example_dags/example_stackdriver.py index 68ac978e3da32..9c418b7dca06d 100644 --- a/airflow/providers/google/cloud/example_dags/example_stackdriver.py +++ b/airflow/providers/google/cloud/example_dags/example_stackdriver.py @@ -21,6 +21,7 @@ """ import json +import os from airflow import models from airflow.providers.google.cloud.operators.stackdriver import ( @@ -37,56 +38,80 @@ ) from airflow.utils.dates import days_ago +PROJECT_ID = os.environ.get("GCP_PROJECT_ID", "example-project") + TEST_ALERT_POLICY_1 = { "combiner": "OR", - "name": "projects/sd-project/alertPolicies/12345", - "creationRecord": {"mutatedBy": "user123", "mutateTime": "2020-01-01T00:00:00.000000Z"}, "enabled": True, - "displayName": "test alert 1", + "display_name": "test alert 1", "conditions": [ { - "conditionThreshold": { + "condition_threshold": { + "filter": ( + 'metric.label.state="blocked" AND ' + 'metric.type="agent.googleapis.com/processes/count_by_state" ' + 'AND resource.type="gce_instance"' + ), "comparison": "COMPARISON_GT", - "aggregations": [{"alignmentPeriod": "60s", "perSeriesAligner": "ALIGN_RATE"}], + "threshold_value": 100, + "duration": {'seconds': 900}, + "trigger": {"percent": 0}, + "aggregations": [ + { + "alignment_period": {'seconds': 60}, + "per_series_aligner": "ALIGN_MEAN", + "cross_series_reducer": "REDUCE_MEAN", + 
"group_by_fields": ["project", "resource.label.instance_id", "resource.label.zone"], + } + ], }, - "displayName": "Condition display", - "name": "projects/sd-project/alertPolicies/123/conditions/456", + "display_name": "test_alert_policy_1", } ], } TEST_ALERT_POLICY_2 = { "combiner": "OR", - "name": "projects/sd-project/alertPolicies/6789", - "creationRecord": {"mutatedBy": "user123", "mutateTime": "2020-01-01T00:00:00.000000Z"}, "enabled": False, - "displayName": "test alert 2", + "display_name": "test alert 2", "conditions": [ { - "conditionThreshold": { + "condition_threshold": { + "filter": ( + 'metric.label.state="blocked" AND ' + 'metric.type="agent.googleapis.com/processes/count_by_state" AND ' + 'resource.type="gce_instance"' + ), "comparison": "COMPARISON_GT", - "aggregations": [{"alignmentPeriod": "60s", "perSeriesAligner": "ALIGN_RATE"}], + "threshold_value": 100, + "duration": {'seconds': 900}, + "trigger": {"percent": 0}, + "aggregations": [ + { + "alignment_period": {'seconds': 60}, + "per_series_aligner": "ALIGN_MEAN", + "cross_series_reducer": "REDUCE_MEAN", + "group_by_fields": ["project", "resource.label.instance_id", "resource.label.zone"], + } + ], }, - "displayName": "Condition display", - "name": "projects/sd-project/alertPolicies/456/conditions/789", + "display_name": "test_alert_policy_2", } ], } TEST_NOTIFICATION_CHANNEL_1 = { - "displayName": "channel1", + "display_name": "channel1", "enabled": True, "labels": {"auth_token": "top-secret", "channel_name": "#channel"}, - "name": "projects/sd-project/notificationChannels/12345", - "type": "slack", + "type_": "slack", } TEST_NOTIFICATION_CHANNEL_2 = { - "displayName": "channel2", + "display_name": "channel2", "enabled": False, "labels": {"auth_token": "top-secret", "channel_name": "#channel"}, - "name": "projects/sd-project/notificationChannels/6789", - "type": "slack", + "type_": "slack", } with models.DAG( @@ -150,18 +175,29 @@ # [START howto_operator_gcp_stackdriver_delete_notification_channel] delete_notification_channel = StackdriverDeleteNotificationChannelOperator( task_id='delete-notification-channel', - name='test-channel', + name="{{ task_instance.xcom_pull('list-notification-channel')[0]['name'] }}", ) # [END howto_operator_gcp_stackdriver_delete_notification_channel] + delete_notification_channel_2 = StackdriverDeleteNotificationChannelOperator( + task_id='delete-notification-channel-2', + name="{{ task_instance.xcom_pull('list-notification-channel')[1]['name'] }}", + ) + # [START howto_operator_gcp_stackdriver_delete_alert_policy] delete_alert_policy = StackdriverDeleteAlertOperator( task_id='delete-alert-policy', - name='test-alert', + name="{{ task_instance.xcom_pull('list-alert-policies')[0]['name'] }}", ) # [END howto_operator_gcp_stackdriver_delete_alert_policy] + delete_alert_policy_2 = StackdriverDeleteAlertOperator( + task_id='delete-alert-policy-2', + name="{{ task_instance.xcom_pull('list-alert-policies')[1]['name'] }}", + ) + create_notification_channel >> enable_notification_channel >> disable_notification_channel disable_notification_channel >> list_notification_channel >> create_alert_policy create_alert_policy >> enable_alert_policy >> disable_alert_policy >> list_alert_policies - list_alert_policies >> delete_notification_channel >> delete_alert_policy + list_alert_policies >> delete_notification_channel >> delete_notification_channel_2 + delete_notification_channel_2 >> delete_alert_policy >> delete_alert_policy_2 diff --git 
a/airflow/providers/google/cloud/example_dags/example_trino_to_gcs.py b/airflow/providers/google/cloud/example_dags/example_trino_to_gcs.py new file mode 100644 index 0000000000000..32dc8a004b79f --- /dev/null +++ b/airflow/providers/google/cloud/example_dags/example_trino_to_gcs.py @@ -0,0 +1,150 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +""" +Example DAG using TrinoToGCSOperator. +""" +import os +import re + +from airflow import models +from airflow.providers.google.cloud.operators.bigquery import ( + BigQueryCreateEmptyDatasetOperator, + BigQueryCreateExternalTableOperator, + BigQueryDeleteDatasetOperator, + BigQueryExecuteQueryOperator, +) +from airflow.providers.google.cloud.transfers.trino_to_gcs import TrinoToGCSOperator +from airflow.utils.dates import days_ago + +GCP_PROJECT_ID = os.environ.get("GCP_PROJECT_ID", 'example-project') +GCS_BUCKET = os.environ.get("GCP_TRINO_TO_GCS_BUCKET_NAME", "test-trino-to-gcs-bucket") +DATASET_NAME = os.environ.get("GCP_TRINO_TO_GCS_DATASET_NAME", "test_trino_to_gcs_dataset") + +SOURCE_MULTIPLE_TYPES = "memory.default.test_multiple_types" +SOURCE_CUSTOMER_TABLE = "tpch.sf1.customer" + + +def safe_name(s: str) -> str: + """ + Remove invalid characters for filename + """ + return re.sub("[^0-9a-zA-Z_]+", "_", s) + + +with models.DAG( + dag_id="example_trino_to_gcs", + schedule_interval=None, # Override to match your needs + start_date=days_ago(1), + tags=["example"], +) as dag: + + create_dataset = BigQueryCreateEmptyDatasetOperator(task_id="create-dataset", dataset_id=DATASET_NAME) + + delete_dataset = BigQueryDeleteDatasetOperator( + task_id="delete_dataset", dataset_id=DATASET_NAME, delete_contents=True + ) + + # [START howto_operator_trino_to_gcs_basic] + trino_to_gcs_basic = TrinoToGCSOperator( + task_id="trino_to_gcs_basic", + sql=f"select * from {SOURCE_MULTIPLE_TYPES}", + bucket=GCS_BUCKET, + filename=f"{safe_name(SOURCE_MULTIPLE_TYPES)}.{{}}.json", + ) + # [END howto_operator_trino_to_gcs_basic] + + # [START howto_operator_trino_to_gcs_multiple_types] + trino_to_gcs_multiple_types = TrinoToGCSOperator( + task_id="trino_to_gcs_multiple_types", + sql=f"select * from {SOURCE_MULTIPLE_TYPES}", + bucket=GCS_BUCKET, + filename=f"{safe_name(SOURCE_MULTIPLE_TYPES)}.{{}}.json", + schema_filename=f"{safe_name(SOURCE_MULTIPLE_TYPES)}-schema.json", + gzip=False, + ) + # [END howto_operator_trino_to_gcs_multiple_types] + + # [START howto_operator_create_external_table_multiple_types] + create_external_table_multiple_types = BigQueryCreateExternalTableOperator( + task_id="create_external_table_multiple_types", + bucket=GCS_BUCKET, + source_objects=[f"{safe_name(SOURCE_MULTIPLE_TYPES)}.*.json"], + source_format="NEWLINE_DELIMITED_JSON", + 
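# Note on the `{{}}` in the filename templates above: TrinoToGCSOperator, like the
# other BaseSQLToGCSOperator subclasses, substitutes a chunk index into the `{}`
# placeholder when the export is split across files, e.g. once
# approx_max_file_size_bytes is exceeded. A sketch of the resulting object names
# (table name assumed for illustration):
template = "tpch_sf1_customer.{}.json"
print([template.format(i) for i in range(3)])
# ['tpch_sf1_customer.0.json', 'tpch_sf1_customer.1.json', 'tpch_sf1_customer.2.json']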
destination_project_dataset_table=f"{DATASET_NAME}.{safe_name(SOURCE_MULTIPLE_TYPES)}", + schema_object=f"{safe_name(SOURCE_MULTIPLE_TYPES)}-schema.json", + ) + # [END howto_operator_create_external_table_multiple_types] + + read_data_from_gcs_multiple_types = BigQueryExecuteQueryOperator( + task_id="read_data_from_gcs_multiple_types", + sql=f"SELECT COUNT(*) FROM `{GCP_PROJECT_ID}.{DATASET_NAME}.{safe_name(SOURCE_MULTIPLE_TYPES)}`", + use_legacy_sql=False, + ) + + # [START howto_operator_trino_to_gcs_many_chunks] + trino_to_gcs_many_chunks = TrinoToGCSOperator( + task_id="trino_to_gcs_many_chunks", + sql=f"select * from {SOURCE_CUSTOMER_TABLE}", + bucket=GCS_BUCKET, + filename=f"{safe_name(SOURCE_CUSTOMER_TABLE)}.{{}}.json", + schema_filename=f"{safe_name(SOURCE_CUSTOMER_TABLE)}-schema.json", + approx_max_file_size_bytes=10_000_000, + gzip=False, + ) + # [END howto_operator_trino_to_gcs_many_chunks] + + create_external_table_many_chunks = BigQueryCreateExternalTableOperator( + task_id="create_external_table_many_chunks", + bucket=GCS_BUCKET, + source_objects=[f"{safe_name(SOURCE_CUSTOMER_TABLE)}.*.json"], + source_format="NEWLINE_DELIMITED_JSON", + destination_project_dataset_table=f"{DATASET_NAME}.{safe_name(SOURCE_CUSTOMER_TABLE)}", + schema_object=f"{safe_name(SOURCE_CUSTOMER_TABLE)}-schema.json", + ) + + # [START howto_operator_read_data_from_gcs_many_chunks] + read_data_from_gcs_many_chunks = BigQueryExecuteQueryOperator( + task_id="read_data_from_gcs_many_chunks", + sql=f"SELECT COUNT(*) FROM `{GCP_PROJECT_ID}.{DATASET_NAME}.{safe_name(SOURCE_CUSTOMER_TABLE)}`", + use_legacy_sql=False, + ) + # [END howto_operator_read_data_from_gcs_many_chunks] + + # [START howto_operator_trino_to_gcs_csv] + trino_to_gcs_csv = TrinoToGCSOperator( + task_id="trino_to_gcs_csv", + sql=f"select * from {SOURCE_MULTIPLE_TYPES}", + bucket=GCS_BUCKET, + filename=f"{safe_name(SOURCE_MULTIPLE_TYPES)}.{{}}.csv", + schema_filename=f"{safe_name(SOURCE_MULTIPLE_TYPES)}-schema.json", + export_format="csv", + ) + # [END howto_operator_trino_to_gcs_csv] + + create_dataset >> trino_to_gcs_basic + create_dataset >> trino_to_gcs_multiple_types + create_dataset >> trino_to_gcs_many_chunks + create_dataset >> trino_to_gcs_csv + + trino_to_gcs_multiple_types >> create_external_table_multiple_types >> read_data_from_gcs_multiple_types + trino_to_gcs_many_chunks >> create_external_table_many_chunks >> read_data_from_gcs_many_chunks + + trino_to_gcs_basic >> delete_dataset + trino_to_gcs_csv >> delete_dataset + read_data_from_gcs_multiple_types >> delete_dataset + read_data_from_gcs_many_chunks >> delete_dataset diff --git a/airflow/providers/google/cloud/example_dags/example_workflows.py b/airflow/providers/google/cloud/example_dags/example_workflows.py new file mode 100644 index 0000000000000..0fab435959201 --- /dev/null +++ b/airflow/providers/google/cloud/example_dags/example_workflows.py @@ -0,0 +1,197 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +import os + +from airflow import DAG +from airflow.providers.google.cloud.operators.workflows import ( + WorkflowsCancelExecutionOperator, + WorkflowsCreateExecutionOperator, + WorkflowsCreateWorkflowOperator, + WorkflowsDeleteWorkflowOperator, + WorkflowsGetExecutionOperator, + WorkflowsGetWorkflowOperator, + WorkflowsListExecutionsOperator, + WorkflowsListWorkflowsOperator, + WorkflowsUpdateWorkflowOperator, +) +from airflow.providers.google.cloud.sensors.workflows import WorkflowExecutionSensor +from airflow.utils.dates import days_ago + +LOCATION = os.environ.get("GCP_WORKFLOWS_LOCATION", "us-central1") +PROJECT_ID = os.environ.get("GCP_PROJECT_ID", "an-id") + +WORKFLOW_ID = os.getenv("GCP_WORKFLOWS_WORKFLOW_ID", "airflow-test-workflow") + +# [START how_to_define_workflow] +WORKFLOW_CONTENT = """ +- getCurrentTime: + call: http.get + args: + url: https://us-central1-workflowsample.cloudfunctions.net/datetime + result: currentTime +- readWikipedia: + call: http.get + args: + url: https://en.wikipedia.org/w/api.php + query: + action: opensearch + search: ${currentTime.body.dayOfTheWeek} + result: wikiResult +- returnResult: + return: ${wikiResult.body[1]} +""" + +WORKFLOW = { + "description": "Test workflow", + "labels": {"airflow-version": "dev"}, + "source_contents": WORKFLOW_CONTENT, +} +# [END how_to_define_workflow] + +EXECUTION = {"argument": ""} + +SLEEP_WORKFLOW_ID = os.getenv("GCP_WORKFLOWS_SLEEP_WORKFLOW_ID", "sleep_workflow") +SLEEP_WORKFLOW_CONTENT = """ +- someSleep: + call: sys.sleep + args: + seconds: 120 +""" + +SLEEP_WORKFLOW = { + "description": "Test workflow", + "labels": {"airflow-version": "dev"}, + "source_contents": SLEEP_WORKFLOW_CONTENT, +} + + +with DAG("example_cloud_workflows", start_date=days_ago(1), schedule_interval=None) as dag: + # [START how_to_create_workflow] + create_workflow = WorkflowsCreateWorkflowOperator( + task_id="create_workflow", + location=LOCATION, + project_id=PROJECT_ID, + workflow=WORKFLOW, + workflow_id=WORKFLOW_ID, + ) + # [END how_to_create_workflow] + + # [START how_to_update_workflow] + update_workflows = WorkflowsUpdateWorkflowOperator( + task_id="update_workflows", + location=LOCATION, + project_id=PROJECT_ID, + workflow_id=WORKFLOW_ID, + update_mask={"paths": ["name", "description"]}, + ) + # [END how_to_update_workflow] + + # [START how_to_get_workflow] + get_workflow = WorkflowsGetWorkflowOperator( + task_id="get_workflow", location=LOCATION, project_id=PROJECT_ID, workflow_id=WORKFLOW_ID + ) + # [END how_to_get_workflow] + + # [START how_to_list_workflows] + list_workflows = WorkflowsListWorkflowsOperator( + task_id="list_workflows", + location=LOCATION, + project_id=PROJECT_ID, + ) + # [END how_to_list_workflows] + + # [START how_to_delete_workflow] + delete_workflow = WorkflowsDeleteWorkflowOperator( + task_id="delete_workflow", location=LOCATION, project_id=PROJECT_ID, workflow_id=WORKFLOW_ID + ) + # [END how_to_delete_workflow] + + # [START how_to_create_execution] + create_execution = WorkflowsCreateExecutionOperator( + task_id="create_execution", + location=LOCATION, + project_id=PROJECT_ID, + 
execution=EXECUTION, + workflow_id=WORKFLOW_ID, + ) + # [END how_to_create_execution] + + # [START how_to_wait_for_execution] + wait_for_execution = WorkflowExecutionSensor( + task_id="wait_for_execution", + location=LOCATION, + project_id=PROJECT_ID, + workflow_id=WORKFLOW_ID, + execution_id='{{ task_instance.xcom_pull("create_execution", key="execution_id") }}', + ) + # [END how_to_wait_for_execution] + + # [START how_to_get_execution] + get_execution = WorkflowsGetExecutionOperator( + task_id="get_execution", + location=LOCATION, + project_id=PROJECT_ID, + workflow_id=WORKFLOW_ID, + execution_id='{{ task_instance.xcom_pull("create_execution", key="execution_id") }}', + ) + # [END how_to_get_execution] + + # [START how_to_list_executions] + list_executions = WorkflowsListExecutionsOperator( + task_id="list_executions", location=LOCATION, project_id=PROJECT_ID, workflow_id=WORKFLOW_ID + ) + # [END how_to_list_executions] + + create_workflow_for_cancel = WorkflowsCreateWorkflowOperator( + task_id="create_workflow_for_cancel", + location=LOCATION, + project_id=PROJECT_ID, + workflow=SLEEP_WORKFLOW, + workflow_id=SLEEP_WORKFLOW_ID, + ) + + create_execution_for_cancel = WorkflowsCreateExecutionOperator( + task_id="create_execution_for_cancel", + location=LOCATION, + project_id=PROJECT_ID, + execution=EXECUTION, + workflow_id=SLEEP_WORKFLOW_ID, + ) + + # [START how_to_cancel_execution] + cancel_execution = WorkflowsCancelExecutionOperator( + task_id="cancel_execution", + location=LOCATION, + project_id=PROJECT_ID, + workflow_id=SLEEP_WORKFLOW_ID, + execution_id='{{ task_instance.xcom_pull("create_execution_for_cancel", key="execution_id") }}', + ) + # [END how_to_cancel_execution] + + create_workflow >> update_workflows >> [get_workflow, list_workflows] + update_workflows >> [create_execution, create_execution_for_cancel] + + create_execution >> wait_for_execution >> [get_execution, list_executions] + create_workflow_for_cancel >> create_execution_for_cancel >> cancel_execution + + [cancel_execution, list_executions] >> delete_workflow + + +if __name__ == '__main__': + dag.clear(dag_run_state=None) + dag.run() diff --git a/airflow/providers/google/cloud/hooks/automl.py b/airflow/providers/google/cloud/hooks/automl.py index 78ec4fbde54e0..75d70379a6789 100644 --- a/airflow/providers/google/cloud/hooks/automl.py +++ b/airflow/providers/google/cloud/hooks/automl.py @@ -20,22 +20,23 @@ from typing import Dict, List, Optional, Sequence, Tuple, Union from cached_property import cached_property +from google.api_core.operation import Operation from google.api_core.retry import Retry -from google.cloud.automl_v1beta1 import AutoMlClient, PredictionServiceClient -from google.cloud.automl_v1beta1.types import ( +from google.cloud.automl_v1beta1 import ( + AutoMlClient, BatchPredictInputConfig, BatchPredictOutputConfig, ColumnSpec, Dataset, ExamplePayload, - FieldMask, ImageObjectDetectionModelDeploymentMetadata, InputConfig, Model, - Operation, + PredictionServiceClient, PredictResponse, TableSpec, ) +from google.protobuf.field_mask_pb2 import FieldMask from airflow.providers.google.common.hooks.base_google import GoogleBaseHook @@ -123,9 +124,9 @@ def create_model( :return: `google.cloud.automl_v1beta1.types._OperationFuture` instance """ client = self.get_conn() - parent = client.location_path(project_id, location) + parent = f"projects/{project_id}/locations/{location}" return client.create_model( - parent=parent, model=model, retry=retry, timeout=timeout, metadata=metadata + request={'parent': 
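# The AutoML hook calls around this point all follow the same migration pattern
# for the v2 google-cloud clients: keyword arguments are bundled into a single
# `request` mapping, resource paths become plain f-strings instead of client path
# helpers, and `metadata` must be an iterable (hence `metadata or ()`).
# A minimal before/after sketch, assuming an already-authenticated client:
from google.cloud.automl_v1beta1 import AutoMlClient

def list_datasets(client: AutoMlClient, project_id: str, location: str, metadata=None):
    parent = f"projects/{project_id}/locations/{location}"
    # Old style: client.list_datasets(parent=parent, metadata=metadata)
    return client.list_datasets(request={"parent": parent}, metadata=metadata or ())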
parent, 'model': model}, retry=retry, timeout=timeout, metadata=metadata or () ) @GoogleBaseHook.fallback_to_default_project_id @@ -176,15 +177,17 @@ def batch_predict( :return: `google.cloud.automl_v1beta1.types._OperationFuture` instance """ client = self.prediction_client - name = client.model_path(project=project_id, location=location, model=model_id) + name = f"projects/{project_id}/locations/{location}/models/{model_id}" result = client.batch_predict( - name=name, - input_config=input_config, - output_config=output_config, - params=params, + request={ + 'name': name, + 'input_config': input_config, + 'output_config': output_config, + 'params': params, + }, retry=retry, timeout=timeout, - metadata=metadata, + metadata=metadata or (), ) return result @@ -229,14 +232,12 @@ def predict( :return: `google.cloud.automl_v1beta1.types.PredictResponse` instance """ client = self.prediction_client - name = client.model_path(project=project_id, location=location, model=model_id) + name = f"projects/{project_id}/locations/{location}/models/{model_id}" result = client.predict( - name=name, - payload=payload, - params=params, + request={'name': name, 'payload': payload, 'params': params}, retry=retry, timeout=timeout, - metadata=metadata, + metadata=metadata or (), ) return result @@ -273,13 +274,12 @@ def create_dataset( :return: `google.cloud.automl_v1beta1.types.Dataset` instance. """ client = self.get_conn() - parent = client.location_path(project=project_id, location=location) + parent = f"projects/{project_id}/locations/{location}" result = client.create_dataset( - parent=parent, - dataset=dataset, + request={'parent': parent, 'dataset': dataset}, retry=retry, timeout=timeout, - metadata=metadata, + metadata=metadata or (), ) return result @@ -319,13 +319,12 @@ def import_data( :return: `google.cloud.automl_v1beta1.types._OperationFuture` instance """ client = self.get_conn() - name = client.dataset_path(project=project_id, location=location, dataset=dataset_id) + name = f"projects/{project_id}/locations/{location}/datasets/{dataset_id}" result = client.import_data( - name=name, - input_config=input_config, + request={'name': name, 'input_config': input_config}, retry=retry, timeout=timeout, - metadata=metadata, + metadata=metadata or (), ) return result @@ -385,13 +384,10 @@ def list_column_specs( # pylint: disable=too-many-arguments table_spec=table_spec_id, ) result = client.list_column_specs( - parent=parent, - field_mask=field_mask, - filter_=filter_, - page_size=page_size, + request={'parent': parent, 'field_mask': field_mask, 'filter': filter_, 'page_size': page_size}, retry=retry, timeout=timeout, - metadata=metadata, + metadata=metadata or (), ) return result @@ -427,8 +423,10 @@ def get_model( :return: `google.cloud.automl_v1beta1.types.Model` instance. """ client = self.get_conn() - name = client.model_path(project=project_id, location=location, model=model_id) - result = client.get_model(name=name, retry=retry, timeout=timeout, metadata=metadata) + name = f"projects/{project_id}/locations/{location}/models/{model_id}" + result = client.get_model( + request={'name': name}, retry=retry, timeout=timeout, metadata=metadata or () + ) return result @GoogleBaseHook.fallback_to_default_project_id @@ -463,8 +461,10 @@ def delete_model( :return: `google.cloud.automl_v1beta1.types._OperationFuture` instance. 
""" client = self.get_conn() - name = client.model_path(project=project_id, location=location, model=model_id) - result = client.delete_model(name=name, retry=retry, timeout=timeout, metadata=metadata) + name = f"projects/{project_id}/locations/{location}/models/{model_id}" + result = client.delete_model( + request={'name': name}, retry=retry, timeout=timeout, metadata=metadata or () + ) return result def update_dataset( @@ -497,11 +497,10 @@ def update_dataset( """ client = self.get_conn() result = client.update_dataset( - dataset=dataset, - update_mask=update_mask, + request={'dataset': dataset, 'update_mask': update_mask}, retry=retry, timeout=timeout, - metadata=metadata, + metadata=metadata or (), ) return result @@ -547,13 +546,15 @@ def deploy_model( :return: `google.cloud.automl_v1beta1.types._OperationFuture` instance. """ client = self.get_conn() - name = client.model_path(project=project_id, location=location, model=model_id) + name = f"projects/{project_id}/locations/{location}/models/{model_id}" result = client.deploy_model( - name=name, + request={ + 'name': name, + 'image_object_detection_model_deployment_metadata': image_detection_metadata, + }, retry=retry, timeout=timeout, - metadata=metadata, - image_object_detection_model_deployment_metadata=image_detection_metadata, + metadata=metadata or (), ) return result @@ -601,14 +602,12 @@ def list_table_specs( of the response through the `options` parameter. """ client = self.get_conn() - parent = client.dataset_path(project=project_id, location=location, dataset=dataset_id) + parent = f"projects/{project_id}/locations/{location}/datasets/{dataset_id}" result = client.list_table_specs( - parent=parent, - filter_=filter_, - page_size=page_size, + request={'parent': parent, 'filter': filter_, 'page_size': page_size}, retry=retry, timeout=timeout, - metadata=metadata, + metadata=metadata or (), ) return result @@ -644,8 +643,10 @@ def list_datasets( of the response through the `options` parameter. 
""" client = self.get_conn() - parent = client.location_path(project=project_id, location=location) - result = client.list_datasets(parent=parent, retry=retry, timeout=timeout, metadata=metadata) + parent = f"projects/{project_id}/locations/{location}" + result = client.list_datasets( + request={'parent': parent}, retry=retry, timeout=timeout, metadata=metadata or () + ) return result @GoogleBaseHook.fallback_to_default_project_id @@ -680,6 +681,8 @@ def delete_dataset( :return: `google.cloud.automl_v1beta1.types._OperationFuture` instance """ client = self.get_conn() - name = client.dataset_path(project=project_id, location=location, dataset=dataset_id) - result = client.delete_dataset(name=name, retry=retry, timeout=timeout, metadata=metadata) + name = f"projects/{project_id}/locations/{location}/datasets/{dataset_id}" + result = client.delete_dataset( + request={'name': name}, retry=retry, timeout=timeout, metadata=metadata or () + ) return result diff --git a/airflow/providers/google/cloud/hooks/bigquery_dts.py b/airflow/providers/google/cloud/hooks/bigquery_dts.py index 2d8d12bc039bd..37d42ef6ca901 100644 --- a/airflow/providers/google/cloud/hooks/bigquery_dts.py +++ b/airflow/providers/google/cloud/hooks/bigquery_dts.py @@ -27,7 +27,6 @@ TransferConfig, TransferRun, ) -from google.protobuf.json_format import MessageToDict, ParseDict from googleapiclient.discovery import Resource from airflow.providers.google.common.hooks.base_google import GoogleBaseHook @@ -71,7 +70,7 @@ def _disable_auto_scheduling(config: Union[dict, TransferConfig]) -> TransferCon :param config: Data transfer configuration to create. :type config: Union[dict, google.cloud.bigquery_datatransfer_v1.types.TransferConfig] """ - config = MessageToDict(config) if isinstance(config, TransferConfig) else config + config = TransferConfig.to_dict(config) if isinstance(config, TransferConfig) else config new_config = copy(config) schedule_options = new_config.get("schedule_options") if schedule_options: @@ -80,7 +79,11 @@ def _disable_auto_scheduling(config: Union[dict, TransferConfig]) -> TransferCon schedule_options["disable_auto_scheduling"] = True else: new_config["schedule_options"] = {"disable_auto_scheduling": True} - return ParseDict(new_config, TransferConfig()) + # HACK: TransferConfig.to_dict returns invalid representation + # See: https://github.com/googleapis/python-bigquery-datatransfer/issues/90 + if isinstance(new_config.get('user_id'), str): + new_config['user_id'] = int(new_config['user_id']) + return TransferConfig(**new_config) def get_conn(self) -> DataTransferServiceClient: """ @@ -129,14 +132,16 @@ def create_transfer_config( :return: A ``google.cloud.bigquery_datatransfer_v1.types.TransferConfig`` instance. 
""" client = self.get_conn() - parent = client.project_path(project_id) + parent = f"projects/{project_id}" return client.create_transfer_config( - parent=parent, - transfer_config=self._disable_auto_scheduling(transfer_config), - authorization_code=authorization_code, + request={ + 'parent': parent, + 'transfer_config': self._disable_auto_scheduling(transfer_config), + 'authorization_code': authorization_code, + }, retry=retry, timeout=timeout, - metadata=metadata, + metadata=metadata or (), ) @GoogleBaseHook.fallback_to_default_project_id @@ -169,8 +174,10 @@ def delete_transfer_config( :return: None """ client = self.get_conn() - name = client.project_transfer_config_path(project=project_id, transfer_config=transfer_config_id) - return client.delete_transfer_config(name=name, retry=retry, timeout=timeout, metadata=metadata) + name = f"projects/{project_id}/transferConfigs/{transfer_config_id}" + return client.delete_transfer_config( + request={'name': name}, retry=retry, timeout=timeout, metadata=metadata or () + ) @GoogleBaseHook.fallback_to_default_project_id def start_manual_transfer_runs( @@ -216,14 +223,16 @@ def start_manual_transfer_runs( :return: An ``google.cloud.bigquery_datatransfer_v1.types.StartManualTransferRunsResponse`` instance. """ client = self.get_conn() - parent = client.project_transfer_config_path(project=project_id, transfer_config=transfer_config_id) + parent = f"projects/{project_id}/transferConfigs/{transfer_config_id}" return client.start_manual_transfer_runs( - parent=parent, - requested_time_range=requested_time_range, - requested_run_time=requested_run_time, + request={ + 'parent': parent, + 'requested_time_range': requested_time_range, + 'requested_run_time': requested_run_time, + }, retry=retry, timeout=timeout, - metadata=metadata, + metadata=metadata or (), ) @GoogleBaseHook.fallback_to_default_project_id @@ -259,5 +268,7 @@ def get_transfer_run( :return: An ``google.cloud.bigquery_datatransfer_v1.types.TransferRun`` instance. 
""" client = self.get_conn() - name = client.project_run_path(project=project_id, transfer_config=transfer_config_id, run=run_id) - return client.get_transfer_run(name=name, retry=retry, timeout=timeout, metadata=metadata) + name = f"projects/{project_id}/transferConfigs/{transfer_config_id}/runs/{run_id}" + return client.get_transfer_run( + request={'name': name}, retry=retry, timeout=timeout, metadata=metadata or () + ) diff --git a/airflow/providers/google/cloud/hooks/cloud_memorystore.py b/airflow/providers/google/cloud/hooks/cloud_memorystore.py index bfc01f94285df..caf1cd6cf6e2c 100644 --- a/airflow/providers/google/cloud/hooks/cloud_memorystore.py +++ b/airflow/providers/google/cloud/hooks/cloud_memorystore.py @@ -23,10 +23,14 @@ from google.api_core.retry import Retry from google.cloud.memcache_v1beta2 import CloudMemcacheClient from google.cloud.memcache_v1beta2.types import cloud_memcache -from google.cloud.redis_v1 import CloudRedisClient -from google.cloud.redis_v1.gapic.enums import FailoverInstanceRequest -from google.cloud.redis_v1.types import FieldMask, InputConfig, Instance, OutputConfig -from google.protobuf.json_format import ParseDict +from google.cloud.redis_v1 import ( + CloudRedisClient, + FailoverInstanceRequest, + InputConfig, + Instance, + OutputConfig, +) +from google.protobuf.field_mask_pb2 import FieldMask from airflow import version from airflow.exceptions import AirflowException @@ -70,7 +74,7 @@ def __init__( ) self._client: Optional[CloudRedisClient] = None - def get_conn(self): + def get_conn(self) -> CloudRedisClient: """Retrieves client library object that allow access to Cloud Memorystore service.""" if not self._client: self._client = CloudRedisClient(credentials=self._get_credentials()) @@ -143,35 +147,36 @@ def create_instance( :type metadata: Sequence[Tuple[str, str]] """ client = self.get_conn() - parent = CloudRedisClient.location_path(project_id, location) - instance_name = CloudRedisClient.instance_path(project_id, location, instance_id) + if isinstance(instance, dict): + instance = Instance(**instance) + elif not isinstance(instance, Instance): + raise AirflowException("instance is not instance of Instance type or python dict") + + parent = f"projects/{project_id}/locations/{location}" + instance_name = f"projects/{project_id}/locations/{location}/instances/{instance_id}" try: + self.log.info("Fetching instance: %s", instance_name) instance = client.get_instance( - name=instance_name, retry=retry, timeout=timeout, metadata=metadata + request={'name': instance_name}, retry=retry, timeout=timeout, metadata=metadata or () ) self.log.info("Instance exists. 
Skipping creation.") return instance except NotFound: self.log.info("Instance not exists.") - if isinstance(instance, dict): - instance = ParseDict(instance, Instance()) - elif not isinstance(instance, Instance): - raise AirflowException("instance is not instance of Instance type or python dict") - self._append_label(instance, "airflow-version", "v" + version.version) result = client.create_instance( - parent=parent, - instance_id=instance_id, - instance=instance, + request={'parent': parent, 'instance_id': instance_id, 'instance': instance}, retry=retry, timeout=timeout, - metadata=metadata, + metadata=metadata or (), ) result.result() self.log.info("Instance created.") - return client.get_instance(name=instance_name, retry=retry, timeout=timeout, metadata=metadata) + return client.get_instance( + request={'name': instance_name}, retry=retry, timeout=timeout, metadata=metadata or () + ) @GoogleBaseHook.fallback_to_default_project_id def delete_instance( @@ -203,15 +208,25 @@ def delete_instance( :type metadata: Sequence[Tuple[str, str]] """ client = self.get_conn() - name = CloudRedisClient.instance_path(project_id, location, instance) + name = f"projects/{project_id}/locations/{location}/instances/{instance}" self.log.info("Fetching Instance: %s", name) - instance = client.get_instance(name=name, retry=retry, timeout=timeout, metadata=metadata) + instance = client.get_instance( + request={'name': name}, + retry=retry, + timeout=timeout, + metadata=metadata or (), + ) if not instance: return self.log.info("Deleting Instance: %s", name) - result = client.delete_instance(name=name, retry=retry, timeout=timeout, metadata=metadata) + result = client.delete_instance( + request={'name': name}, + retry=retry, + timeout=timeout, + metadata=metadata or (), + ) result.result() self.log.info("Instance deleted: %s", name) @@ -253,10 +268,13 @@ def export_instance( :type metadata: Sequence[Tuple[str, str]] """ client = self.get_conn() - name = CloudRedisClient.instance_path(project_id, location, instance) + name = f"projects/{project_id}/locations/{location}/instances/{instance}" self.log.info("Exporting Instance: %s", name) result = client.export_instance( - name=name, output_config=output_config, retry=retry, timeout=timeout, metadata=metadata + request={'name': name, 'output_config': output_config}, + retry=retry, + timeout=timeout, + metadata=metadata or (), ) result.result() self.log.info("Instance exported: %s", name) @@ -297,15 +315,14 @@ def failover_instance( :type metadata: Sequence[Tuple[str, str]] """ client = self.get_conn() - name = CloudRedisClient.instance_path(project_id, location, instance) + name = f"projects/{project_id}/locations/{location}/instances/{instance}" self.log.info("Failovering Instance: %s", name) result = client.failover_instance( - name=name, - data_protection_mode=data_protection_mode, + request={'name': name, 'data_protection_mode': data_protection_mode}, retry=retry, timeout=timeout, - metadata=metadata, + metadata=metadata or (), ) result.result() self.log.info("Instance failovered: %s", name) @@ -340,8 +357,13 @@ def get_instance( :type metadata: Sequence[Tuple[str, str]] """ client = self.get_conn() - name = CloudRedisClient.instance_path(project_id, location, instance) - result = client.get_instance(name=name, retry=retry, timeout=timeout, metadata=metadata) + name = f"projects/{project_id}/locations/{location}/instances/{instance}" + result = client.get_instance( + request={'name': name}, + retry=retry, + timeout=timeout, + metadata=metadata or (), + ) 
self.log.info("Fetched Instance: %s", name) return result @@ -384,10 +406,13 @@ def import_instance( :type metadata: Sequence[Tuple[str, str]] """ client = self.get_conn() - name = CloudRedisClient.instance_path(project_id, location, instance) + name = f"projects/{project_id}/locations/{location}/instances/{instance}" self.log.info("Importing Instance: %s", name) result = client.import_instance( - name=name, input_config=input_config, retry=retry, timeout=timeout, metadata=metadata + request={'name': name, 'input_config': input_config}, + retry=retry, + timeout=timeout, + metadata=metadata or (), ) result.result() self.log.info("Instance imported: %s", name) @@ -428,9 +453,12 @@ def list_instances( :type metadata: Sequence[Tuple[str, str]] """ client = self.get_conn() - parent = CloudRedisClient.location_path(project_id, location) + parent = f"projects/{project_id}/locations/{location}" result = client.list_instances( - parent=parent, page_size=page_size, retry=retry, timeout=timeout, metadata=metadata + request={'parent': parent, 'page_size': page_size}, + retry=retry, + timeout=timeout, + metadata=metadata or (), ) self.log.info("Fetched instances") return result @@ -485,17 +513,20 @@ def update_instance( client = self.get_conn() if isinstance(instance, dict): - instance = ParseDict(instance, Instance()) + instance = Instance(**instance) elif not isinstance(instance, Instance): raise AirflowException("instance is not instance of Instance type or python dict") if location and instance_id: - name = CloudRedisClient.instance_path(project_id, location, instance_id) + name = f"projects/{project_id}/locations/{location}/instances/{instance_id}" instance.name = name self.log.info("Updating instances: %s", instance.name) result = client.update_instance( - update_mask=update_mask, instance=instance, retry=retry, timeout=timeout, metadata=metadata + request={'update_mask': update_mask, 'instance': instance}, + retry=retry, + timeout=timeout, + metadata=metadata or (), ) result.result() self.log.info("Instance updated: %s", instance.name) @@ -610,7 +641,12 @@ def apply_parameters( self.log.info("Applying update to instance: %s", instance_id) result = client.apply_parameters( - name=name, node_ids=node_ids, apply_all=apply_all, retry=retry, timeout=timeout, metadata=metadata + name=name, + node_ids=node_ids, + apply_all=apply_all, + retry=retry, + timeout=timeout, + metadata=metadata or (), ) result.result() self.log.info("Instance updated: %s", instance_id) @@ -688,11 +724,16 @@ def create_instance( resource=instance, retry=retry, timeout=timeout, - metadata=metadata, + metadata=metadata or (), ) result.result() self.log.info("Instance created.") - return client.get_instance(name=instance_name, retry=retry, timeout=timeout, metadata=metadata) + return client.get_instance( + name=instance_name, + retry=retry, + timeout=timeout, + metadata=metadata or (), + ) @GoogleBaseHook.fallback_to_default_project_id def delete_instance( @@ -727,13 +768,23 @@ def delete_instance( metadata = metadata or () name = CloudMemcacheClient.instance_path(project_id, location, instance) self.log.info("Fetching Instance: %s", name) - instance = client.get_instance(name=name, retry=retry, timeout=timeout, metadata=metadata) + instance = client.get_instance( + name=name, + retry=retry, + timeout=timeout, + metadata=metadata or (), + ) if not instance: return self.log.info("Deleting Instance: %s", name) - result = client.delete_instance(name=name, retry=retry, timeout=timeout, metadata=metadata) + result = 
client.delete_instance( + name=name, + retry=retry, + timeout=timeout, + metadata=metadata or (), + ) result.result() self.log.info("Instance deleted: %s", name) @@ -808,7 +859,12 @@ def list_instances( parent = path_template.expand( "projects/{project}/locations/{location}", project=project_id, location=location ) - result = client.list_instances(parent=parent, retry=retry, timeout=timeout, metadata=metadata) + result = client.list_instances( + parent=parent, + retry=retry, + timeout=timeout, + metadata=metadata or (), + ) self.log.info("Fetched instances") return result @@ -871,7 +927,7 @@ def update_instance( self.log.info("Updating instances: %s", instance.name) result = client.update_instance( - update_mask=update_mask, resource=instance, retry=retry, timeout=timeout, metadata=metadata + update_mask=update_mask, resource=instance, retry=retry, timeout=timeout, metadata=metadata or () ) result.result() self.log.info("Instance updated: %s", instance.name) @@ -934,7 +990,7 @@ def update_parameters( parameters=parameters, retry=retry, timeout=timeout, - metadata=metadata, + metadata=metadata or (), ) result.result() self.log.info("Update staged for instance: %s", instance_id) diff --git a/airflow/providers/google/cloud/hooks/datacatalog.py b/airflow/providers/google/cloud/hooks/datacatalog.py index 9c689c3ca9d9e..0d6cc7505fd86 100644 --- a/airflow/providers/google/cloud/hooks/datacatalog.py +++ b/airflow/providers/google/cloud/hooks/datacatalog.py @@ -18,16 +18,18 @@ from typing import Dict, Optional, Sequence, Tuple, Union from google.api_core.retry import Retry -from google.cloud.datacatalog_v1beta1 import DataCatalogClient -from google.cloud.datacatalog_v1beta1.types import ( +from google.cloud import datacatalog +from google.cloud.datacatalog_v1beta1 import ( + CreateTagRequest, + DataCatalogClient, Entry, EntryGroup, - FieldMask, SearchCatalogRequest, Tag, TagTemplate, TagTemplateField, ) +from google.protobuf.field_mask_pb2 import FieldMask from airflow import AirflowException from airflow.providers.google.common.hooks.base_google import GoogleBaseHook @@ -115,10 +117,13 @@ def create_entry( :type metadata: Sequence[Tuple[str, str]] """ client = self.get_conn() - parent = DataCatalogClient.entry_group_path(project_id, location, entry_group) + parent = f"projects/{project_id}/locations/{location}/entryGroups/{entry_group}" self.log.info('Creating a new entry: parent=%s', parent) result = client.create_entry( - parent=parent, entry_id=entry_id, entry=entry, retry=retry, timeout=timeout, metadata=metadata + request={'parent': parent, 'entry_id': entry_id, 'entry': entry}, + retry=retry, + timeout=timeout, + metadata=metadata or (), ) self.log.info('Created a entry: name=%s', result.name) return result @@ -161,16 +166,14 @@ def create_entry_group( :type metadata: Sequence[Tuple[str, str]] """ client = self.get_conn() - parent = DataCatalogClient.location_path(project_id, location) + parent = f"projects/{project_id}/locations/{location}" self.log.info('Creating a new entry group: parent=%s', parent) result = client.create_entry_group( - parent=parent, - entry_group_id=entry_group_id, - entry_group=entry_group, + request={'parent': parent, 'entry_group_id': entry_group_id, 'entry_group': entry_group}, retry=retry, timeout=timeout, - metadata=metadata, + metadata=metadata or (), ) self.log.info('Created a entry group: name=%s', result.name) @@ -218,15 +221,34 @@ def create_tag( """ client = self.get_conn() if template_id: - template_path = DataCatalogClient.tag_template_path(project_id, 
location, template_id) + template_path = f"projects/{project_id}/locations/{location}/tagTemplates/{template_id}" if isinstance(tag, Tag): tag.template = template_path else: tag["template"] = template_path - parent = DataCatalogClient.entry_path(project_id, location, entry_group, entry) + parent = f"projects/{project_id}/locations/{location}/entryGroups/{entry_group}/entries/{entry}" self.log.info('Creating a new tag: parent=%s', parent) - result = client.create_tag(parent=parent, tag=tag, retry=retry, timeout=timeout, metadata=metadata) + # HACK: google-cloud-datacatalog has problems with mapping messages where the value is not a + # primitive type, so we need to convert it manually. + # See: https://github.com/googleapis/python-datacatalog/issues/84 + if isinstance(tag, dict): + tag = Tag( + name=tag.get('name'), + template=tag.get('template'), + template_display_name=tag.get('template_display_name'), + column=tag.get('column'), + fields={ + k: datacatalog.TagField(**v) if isinstance(v, dict) else v + for k, v in tag.get("fields", {}).items() + }, + ) + request = CreateTagRequest( + parent=parent, + tag=tag, + ) + + result = client.create_tag(request=request, retry=retry, timeout=timeout, metadata=metadata or ()) self.log.info('Created a tag: name=%s', result.name) return result @@ -267,17 +289,30 @@ def create_tag_template( :type metadata: Sequence[Tuple[str, str]] """ client = self.get_conn() - parent = DataCatalogClient.location_path(project_id, location) + parent = f"projects/{project_id}/locations/{location}" self.log.info('Creating a new tag template: parent=%s', parent) + # HACK: google-cloud-datacatalog has problems with mapping messages where the value is not a + # primitive type, so we need to convert it manually. + # See: https://github.com/googleapis/python-datacatalog/issues/84 + if isinstance(tag_template, dict): + tag_template = datacatalog.TagTemplate( + name=tag_template.get("name"), + display_name=tag_template.get("display_name"), + fields={ + k: datacatalog.TagTemplateField(**v) if isinstance(v, dict) else v + for k, v in tag_template.get("fields", {}).items() + }, + ) + request = datacatalog.CreateTagTemplateRequest( + parent=parent, tag_template_id=tag_template_id, tag_template=tag_template + ) result = client.create_tag_template( - parent=parent, - tag_template_id=tag_template_id, - tag_template=tag_template, + request=request, retry=retry, timeout=timeout, - metadata=metadata, + metadata=metadata or (), ) self.log.info('Created a tag template: name=%s', result.name) @@ -325,17 +360,19 @@ def create_tag_template_field( :type metadata: Sequence[Tuple[str, str]] """ client = self.get_conn() - parent = DataCatalogClient.tag_template_path(project_id, location, tag_template) + parent = f"projects/{project_id}/locations/{location}/tagTemplates/{tag_template}" self.log.info('Creating a new tag template field: parent=%s', parent) result = client.create_tag_template_field( - parent=parent, - tag_template_field_id=tag_template_field_id, - tag_template_field=tag_template_field, + request={ + 'parent': parent, + 'tag_template_field_id': tag_template_field_id, + 'tag_template_field': tag_template_field, + }, retry=retry, timeout=timeout, - metadata=metadata, + metadata=metadata or (), ) self.log.info('Created a tag template field: name=%s', result.name) @@ -375,9 +412,9 @@ def delete_entry( :type metadata: Sequence[Tuple[str, str]] """ client = self.get_conn() - name = DataCatalogClient.entry_path(project_id, location, entry_group, entry) + name = 
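# The manual dict -> Tag conversion in create_tag above exists because the
# generated client cannot coerce map values that are themselves messages
# (TagField), see googleapis/python-datacatalog#84. A sketch of the shape the
# conversion produces; the template path and field values are placeholders:
from google.cloud import datacatalog

tag = datacatalog.Tag(
    template="projects/my-project/locations/us-central1/tagTemplates/my-template",
    fields={
        # map values must already be TagField messages; plain dicts would fail
        "first-field": datacatalog.TagField(string_value="example"),
    },
)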
f"projects/{project_id}/locations/{location}/entryGroups/{entry_group}/entries/{entry}" self.log.info('Deleting a entry: name=%s', name) - client.delete_entry(name=name, retry=retry, timeout=timeout, metadata=metadata) + client.delete_entry(request={'name': name}, retry=retry, timeout=timeout, metadata=metadata or ()) self.log.info('Deleted a entry: name=%s', name) @GoogleBaseHook.fallback_to_default_project_id @@ -412,10 +449,12 @@ def delete_entry_group( :type metadata: Sequence[Tuple[str, str]] """ client = self.get_conn() - name = DataCatalogClient.entry_group_path(project_id, location, entry_group) + name = f"projects/{project_id}/locations/{location}/entryGroups/{entry_group}" self.log.info('Deleting a entry group: name=%s', name) - client.delete_entry_group(name=name, retry=retry, timeout=timeout, metadata=metadata) + client.delete_entry_group( + request={'name': name}, retry=retry, timeout=timeout, metadata=metadata or () + ) self.log.info('Deleted a entry group: name=%s', name) @GoogleBaseHook.fallback_to_default_project_id @@ -454,10 +493,12 @@ def delete_tag( :type metadata: Sequence[Tuple[str, str]] """ client = self.get_conn() - name = DataCatalogClient.tag_path(project_id, location, entry_group, entry, tag) + name = ( + f"projects/{project_id}/locations/{location}/entryGroups/{entry_group}/entries/{entry}/tags/{tag}" + ) self.log.info('Deleting a tag: name=%s', name) - client.delete_tag(name=name, retry=retry, timeout=timeout, metadata=metadata) + client.delete_tag(request={'name': name}, retry=retry, timeout=timeout, metadata=metadata or ()) self.log.info('Deleted a tag: name=%s', name) @GoogleBaseHook.fallback_to_default_project_id @@ -495,10 +536,12 @@ def delete_tag_template( :type metadata: Sequence[Tuple[str, str]] """ client = self.get_conn() - name = DataCatalogClient.tag_template_path(project_id, location, tag_template) + name = f"projects/{project_id}/locations/{location}/tagTemplates/{tag_template}" self.log.info('Deleting a tag template: name=%s', name) - client.delete_tag_template(name=name, force=force, retry=retry, timeout=timeout, metadata=metadata) + client.delete_tag_template( + request={'name': name, 'force': force}, retry=retry, timeout=timeout, metadata=metadata or () + ) self.log.info('Deleted a tag template: name=%s', name) @GoogleBaseHook.fallback_to_default_project_id @@ -537,11 +580,11 @@ def delete_tag_template_field( :type metadata: Sequence[Tuple[str, str]] """ client = self.get_conn() - name = DataCatalogClient.field_path(project_id, location, tag_template, field) + name = f"projects/{project_id}/locations/{location}/tagTemplates/{tag_template}/fields/{field}" self.log.info('Deleting a tag template field: name=%s', name) client.delete_tag_template_field( - name=name, force=force, retry=retry, timeout=timeout, metadata=metadata + request={'name': name, 'force': force}, retry=retry, timeout=timeout, metadata=metadata or () ) self.log.info('Deleted a tag template field: name=%s', name) @@ -578,10 +621,12 @@ def get_entry( :type metadata: Sequence[Tuple[str, str]] """ client = self.get_conn() - name = DataCatalogClient.entry_path(project_id, location, entry_group, entry) + name = f"projects/{project_id}/locations/{location}/entryGroups/{entry_group}/entries/{entry}" self.log.info('Getting a entry: name=%s', name) - result = client.get_entry(name=name, retry=retry, timeout=timeout, metadata=metadata) + result = client.get_entry( + request={'name': name}, retry=retry, timeout=timeout, metadata=metadata or () + ) self.log.info('Received a entry: 
name=%s', result.name) return result @@ -607,8 +652,8 @@ def get_entry_group( :param read_mask: The fields to return. If not set or empty, all fields are returned. If a dict is provided, it must be of the same form as the protobuf message - :class:`~google.cloud.datacatalog_v1beta1.types.FieldMask` - :type read_mask: Union[Dict, google.cloud.datacatalog_v1beta1.types.FieldMask] + :class:`~google.protobuf.field_mask_pb2.FieldMask` + :type read_mask: Union[Dict, google.protobuf.field_mask_pb2.FieldMask] :param project_id: The ID of the Google Cloud project that owns the entry group. If set to ``None`` or missing, the default project_id from the Google Cloud connection is used. :type project_id: str @@ -622,12 +667,15 @@ def get_entry_group( :type metadata: Sequence[Tuple[str, str]] """ client = self.get_conn() - name = DataCatalogClient.entry_group_path(project_id, location, entry_group) + name = f"projects/{project_id}/locations/{location}/entryGroups/{entry_group}" self.log.info('Getting a entry group: name=%s', name) result = client.get_entry_group( - name=name, read_mask=read_mask, retry=retry, timeout=timeout, metadata=metadata + request={'name': name, 'read_mask': read_mask}, + retry=retry, + timeout=timeout, + metadata=metadata or (), ) self.log.info('Received a entry group: name=%s', result.name) @@ -664,11 +712,13 @@ def get_tag_template( :type metadata: Sequence[Tuple[str, str]] """ client = self.get_conn() - name = DataCatalogClient.tag_template_path(project_id, location, tag_template) + name = f"projects/{project_id}/locations/{location}/tagTemplates/{tag_template}" self.log.info('Getting a tag template: name=%s', name) - result = client.get_tag_template(name=name, retry=retry, timeout=timeout, metadata=metadata) + result = client.get_tag_template( + request={'name': name}, retry=retry, timeout=timeout, metadata=metadata or () + ) self.log.info('Received a tag template: name=%s', result.name) @@ -712,12 +762,15 @@ def list_tags( :type metadata: Sequence[Tuple[str, str]] """ client = self.get_conn() - parent = DataCatalogClient.entry_path(project_id, location, entry_group, entry) + parent = f"projects/{project_id}/locations/{location}/entryGroups/{entry_group}/entries/{entry}" self.log.info('Listing tag on entry: entry_name=%s', parent) result = client.list_tags( - parent=parent, page_size=page_size, retry=retry, timeout=timeout, metadata=metadata + request={'parent': parent, 'page_size': page_size}, + retry=retry, + timeout=timeout, + metadata=metadata or (), ) self.log.info('Received tags.') @@ -811,12 +864,18 @@ def lookup_entry( if linked_resource: self.log.info('Getting entry: linked_resource=%s', linked_resource) result = client.lookup_entry( - linked_resource=linked_resource, retry=retry, timeout=timeout, metadata=metadata + request={'linked_resource': linked_resource}, + retry=retry, + timeout=timeout, + metadata=metadata or (), ) else: self.log.info('Getting entry: sql_resource=%s', sql_resource) result = client.lookup_entry( - sql_resource=sql_resource, retry=retry, timeout=timeout, metadata=metadata + request={'sql_resource': sql_resource}, + retry=retry, + timeout=timeout, + metadata=metadata or (), ) self.log.info('Received entry. 
name=%s', result.name) @@ -860,18 +919,17 @@ def rename_tag_template_field( :type metadata: Sequence[Tuple[str, str]] """ client = self.get_conn() - name = DataCatalogClient.field_path(project_id, location, tag_template, field) + name = f"projects/{project_id}/locations/{location}/tagTemplates/{tag_template}/fields/{field}" self.log.info( 'Renaming field: old_name=%s, new_tag_template_field_id=%s', name, new_tag_template_field_id ) result = client.rename_tag_template_field( - name=name, - new_tag_template_field_id=new_tag_template_field_id, + request={'name': name, 'new_tag_template_field_id': new_tag_template_field_id}, retry=retry, timeout=timeout, - metadata=metadata, + metadata=metadata or (), ) self.log.info('Renamed tag template field.') @@ -946,13 +1004,10 @@ def search_catalog( order_by, ) result = client.search_catalog( - scope=scope, - query=query, - page_size=page_size, - order_by=order_by, + request={'scope': scope, 'query': query, 'page_size': page_size, 'order_by': order_by}, retry=retry, timeout=timeout, - metadata=metadata, + metadata=metadata or (), ) self.log.info('Received items.') @@ -984,8 +1039,8 @@ def update_entry( updated. If a dict is provided, it must be of the same form as the protobuf message - :class:`~google.cloud.datacatalog_v1beta1.types.FieldMask` - :type update_mask: Union[Dict, google.cloud.datacatalog_v1beta1.types.FieldMask] + :class:`~google.protobuf.field_mask_pb2.FieldMask` + :type update_mask: Union[Dict, google.protobuf.field_mask_pb2.FieldMask] :param location: Required. The location of the entry to update. :type location: str :param entry_group: The entry group ID for the entry that is being updated. @@ -1006,7 +1061,9 @@ def update_entry( """ client = self.get_conn() if project_id and location and entry_group and entry_id: - full_entry_name = DataCatalogClient.entry_path(project_id, location, entry_group, entry_id) + full_entry_name = ( + f"projects/{project_id}/locations/{location}/entryGroups/{entry_group}/entries/{entry_id}" + ) if isinstance(entry, Entry): entry.name = full_entry_name elif isinstance(entry, dict): @@ -1025,7 +1082,10 @@ def update_entry( if isinstance(entry, dict): entry = Entry(**entry) result = client.update_entry( - entry=entry, update_mask=update_mask, retry=retry, timeout=timeout, metadata=metadata + request={'entry': entry, 'update_mask': update_mask}, + retry=retry, + timeout=timeout, + metadata=metadata or (), ) self.log.info('Updated entry.') @@ -1059,7 +1119,7 @@ def update_tag( # pylint: disable=too-many-arguments If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.datacatalog_v1beta1.types.FieldMask` - :type update_mask: Union[Dict, google.cloud.datacatalog_v1beta1.types.FieldMask] + :type update_mask: Union[Dict, google.protobuf.field_mask_pb2.FieldMask] :param location: Required. The location of the tag to rename. :type location: str :param entry_group: The entry group ID for the tag that is being updated. 
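The datacatalog hunks in this file all follow the same migration pattern: the regenerated client takes a single request mapping plus retry/timeout/metadata keyword arguments, with metadata normalized to an empty tuple. A minimal, illustrative sketch of a call site after this change (the connection id and field values are placeholders, not taken from this diff):

from google.protobuf.field_mask_pb2 import FieldMask

from airflow.providers.google.cloud.hooks.datacatalog import CloudDataCatalogHook

hook = CloudDataCatalogHook(gcp_conn_id="google_cloud_default")  # assumed connection id
hook.update_entry(
    entry={"display_name": "sales_table"},  # plain dicts are converted to Entry by the hook
    update_mask=FieldMask(paths=["display_name"]),
    project_id="example-project",
    location="us-central1",
    entry_group="example_group",
    entry_id="example_entry",
)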
@@ -1082,7 +1142,10 @@ def update_tag( # pylint: disable=too-many-arguments """ client = self.get_conn() if project_id and location and entry_group and entry and tag_id: - full_tag_name = DataCatalogClient.tag_path(project_id, location, entry_group, entry, tag_id) + full_tag_name = ( + f"projects/{project_id}/locations/{location}/entryGroups/{entry_group}/entries/{entry}" + f"/tags/{tag_id}" + ) if isinstance(tag, Tag): tag.name = full_tag_name elif isinstance(tag, dict): @@ -1102,7 +1165,10 @@ def update_tag( # pylint: disable=too-many-arguments if isinstance(tag, dict): tag = Tag(**tag) result = client.update_tag( - tag=tag, update_mask=update_mask, retry=retry, timeout=timeout, metadata=metadata + request={'tag': tag, 'update_mask': update_mask}, + retry=retry, + timeout=timeout, + metadata=metadata or (), ) self.log.info('Updated tag.') @@ -1137,8 +1203,8 @@ def update_tag_template( If absent or empty, all of the allowed fields above will be updated. If a dict is provided, it must be of the same form as the protobuf message - :class:`~google.cloud.datacatalog_v1beta1.types.FieldMask` - :type update_mask: Union[Dict, google.cloud.datacatalog_v1beta1.types.FieldMask] + :class:`~google.protobuf.field_mask_pb2.FieldMask` + :type update_mask: Union[Dict, google.protobuf.field_mask_pb2.FieldMask] :param location: Required. The location of the tag template to rename. :type location: str :param tag_template_id: Optional. The tag template ID for the entry that is being updated. @@ -1157,8 +1223,8 @@ def update_tag_template( """ client = self.get_conn() if project_id and location and tag_template: - full_tag_template_name = DataCatalogClient.tag_template_path( - project_id, location, tag_template_id + full_tag_template_name = ( + f"projects/{project_id}/locations/{location}/tagTemplates/{tag_template_id}" ) if isinstance(tag_template, TagTemplate): tag_template.name = full_tag_template_name @@ -1179,11 +1245,10 @@ def update_tag_template( if isinstance(tag_template, dict): tag_template = TagTemplate(**tag_template) result = client.update_tag_template( - tag_template=tag_template, - update_mask=update_mask, + request={'tag_template': tag_template, 'update_mask': update_mask}, retry=retry, timeout=timeout, - metadata=metadata, + metadata=metadata or (), ) self.log.info('Updated tag template.') @@ -1222,8 +1287,8 @@ def update_tag_template_field( # pylint: disable=too-many-arguments Therefore, enum values can only be added, existing enum values cannot be deleted nor renamed. If a dict is provided, it must be of the same form as the protobuf message - :class:`~google.cloud.datacatalog_v1beta1.types.FieldMask` - :type update_mask: Union[Dict, google.cloud.datacatalog_v1beta1.types.FieldMask] + :class:`~google.protobuf.field_mask_pb2.FieldMask` + :type update_mask: Union[Dict, google.protobuf.field_mask_pb2.FieldMask] :param tag_template_field_name: Optional. The name of the tag template field to rename. :type tag_template_field_name: str :param location: Optional. The location of the tag to rename. 
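Throughout these hooks the removed *_path() helper calls are replaced with hand-built resource names; both spellings yield the same string. A quick sanity check with placeholder values:

project_id, location, tag_template = "example-project", "us-central1", "example_template"
name = f"projects/{project_id}/locations/{location}/tagTemplates/{tag_template}"
assert name == "projects/example-project/locations/us-central1/tagTemplates/example_template"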
@@ -1246,19 +1311,22 @@ def update_tag_template_field( # pylint: disable=too-many-arguments """ client = self.get_conn() if project_id and location and tag_template and tag_template_field_id: - tag_template_field_name = DataCatalogClient.field_path( - project_id, location, tag_template, tag_template_field_id + tag_template_field_name = ( + f"projects/{project_id}/locations/{location}/tagTemplates/{tag_template}" + f"/fields/{tag_template_field_id}" ) self.log.info("Updating tag template field: name=%s", tag_template_field_name) result = client.update_tag_template_field( - name=tag_template_field_name, - tag_template_field=tag_template_field, - update_mask=update_mask, + request={ + 'name': tag_template_field_name, + 'tag_template_field': tag_template_field, + 'update_mask': update_mask, + }, retry=retry, timeout=timeout, - metadata=metadata, + metadata=metadata or (), ) self.log.info('Updated tag template field.') diff --git a/airflow/providers/google/cloud/hooks/dataflow.py b/airflow/providers/google/cloud/hooks/dataflow.py index 0a665d465f237..0ad02621e893e 100644 --- a/airflow/providers/google/cloud/hooks/dataflow.py +++ b/airflow/providers/google/cloud/hooks/dataflow.py @@ -19,23 +19,20 @@ import functools import json import re -import select import shlex import subprocess -import textwrap import time import uuid import warnings from copy import deepcopy -from tempfile import TemporaryDirectory from typing import Any, Callable, Dict, Generator, List, Optional, Sequence, Set, TypeVar, Union, cast from googleapiclient.discovery import build from airflow.exceptions import AirflowException +from airflow.providers.apache.beam.hooks.beam import BeamHook, BeamRunnerType, beam_options_to_args from airflow.providers.google.common.hooks.base_google import GoogleBaseHook from airflow.utils.log.logging_mixin import LoggingMixin -from airflow.utils.python_virtualenv import prepare_virtualenv from airflow.utils.timeout import timeout # This is the default location @@ -50,6 +47,35 @@ T = TypeVar("T", bound=Callable) # pylint: disable=invalid-name +def process_line_and_extract_dataflow_job_id_callback( + on_new_job_id_callback: Optional[Callable[[str], None]] +) -> Callable[[str], None]: + """ + Returns a callback which triggers the function passed as `on_new_job_id_callback` when a Dataflow job ID is found. + To be used for `process_line_callback` in + :py:class:`~airflow.providers.apache.beam.hooks.beam.BeamCommandRunner`. + + :param on_new_job_id_callback: Callback called when the job ID is known + :type on_new_job_id_callback: Optional[Callable[[str], None]] + """ + + def _process_line_and_extract_job_id(line: str) -> None: + # Job id info: https://goo.gl/SE29y9.
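+ # JOB_ID_PATTERN is the module-level regex applied just below; it exposes the
+ # matched ID via the "job_id_java" or "job_id_python" named group, depending
+ # on which Beam SDK produced the log line.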
+ matched_job = JOB_ID_PATTERN.search(line) + if matched_job: + job_id = matched_job.group("job_id_java") or matched_job.group("job_id_python") + if on_new_job_id_callback: + on_new_job_id_callback(job_id) + + def wrap(line: str): + return _process_line_and_extract_job_id(line) + + return wrap + + def _fallback_variable_parameter(parameter_name: str, variable_key_name: str) -> Callable[[T], T]: def _wrapper(func: T) -> T: """ @@ -484,98 +510,6 @@ def cancel(self) -> None: self.log.info("No jobs to cancel") -class _DataflowRunner(LoggingMixin): - def __init__( - self, - cmd: List[str], - on_new_job_id_callback: Optional[Callable[[str], None]] = None, - ) -> None: - super().__init__() - self.log.info("Running command: %s", " ".join(shlex.quote(c) for c in cmd)) - self.on_new_job_id_callback = on_new_job_id_callback - self.job_id: Optional[str] = None - self._proc = subprocess.Popen( - cmd, - shell=False, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - close_fds=True, - ) - - def _process_fd(self, fd): - """ - Prints output to logs and lookup for job ID in each line. - - :param fd: File descriptor. - """ - if fd == self._proc.stderr: - while True: - line = self._proc.stderr.readline().decode() - if not line: - return - self._process_line_and_extract_job_id(line) - self.log.warning(line.rstrip("\n")) - - if fd == self._proc.stdout: - while True: - line = self._proc.stdout.readline().decode() - if not line: - return - self._process_line_and_extract_job_id(line) - self.log.info(line.rstrip("\n")) - - raise Exception("No data in stderr or in stdout.") - - def _process_line_and_extract_job_id(self, line: str) -> None: - """ - Extracts job_id. - - :param line: URL from which job_id has to be extracted - :type line: str - """ - # Job id info: https://goo.gl/SE29y9. - matched_job = JOB_ID_PATTERN.search(line) - if matched_job: - job_id = matched_job.group("job_id_java") or matched_job.group("job_id_python") - self.log.info("Found Job ID: %s", job_id) - self.job_id = job_id - if self.on_new_job_id_callback: - self.on_new_job_id_callback(job_id) - - def wait_for_done(self) -> Optional[str]: - """ - Waits for Dataflow job to complete. - - :return: Job id - :rtype: Optional[str] - """ - self.log.info("Start waiting for DataFlow process to complete.") - self.job_id = None - reads = [self._proc.stderr, self._proc.stdout] - while True: - # Wait for at least one available fd. - readable_fds, _, _ = select.select(reads, [], [], 5) - if readable_fds is None: - self.log.info("Waiting for DataFlow process to complete.") - continue - - for readable_fd in readable_fds: - self._process_fd(readable_fd) - - if self._proc.poll() is not None: - break - - # Corner case: check if more output was created between the last read and the process termination - for readable_fd in reads: - self._process_fd(readable_fd) - - self.log.info("Process exited with return code: %s", self._proc.returncode) - - if self._proc.returncode != 0: - raise Exception(f"DataFlow failed with return code {self._proc.returncode}") - return self.job_id - - class DataflowHook(GoogleBaseHook): """ Hook for Google Dataflow. 
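The removed _DataflowRunner is superseded by BeamHook together with the callback factory added above; the hunks below give DataflowHook a beam_hook attribute, a job_id attribute, and a public wait_for_done(). A condensed, illustrative sketch of the resulting control flow (argument values are placeholders; the attribute and method names come from this diff):

from airflow.providers.google.cloud.hooks.dataflow import (
    DataflowHook,
    process_line_and_extract_dataflow_job_id_callback,
)


def run_pipeline_and_wait(hook: DataflowHook, name: str, location: str, py_file: str) -> None:
    """Roughly what start_python_dataflow does after this change."""

    def on_new_job_id(job_id: str) -> None:
        hook.job_id = job_id  # remembered so wait_for_done() can poll this job

    hook.beam_hook.start_python_pipeline(
        variables={"job_name": name, "region": location},
        py_file=py_file,
        py_options=[],
        py_interpreter="python3",
        py_requirements=None,
        py_system_site_packages=False,
        process_line_callback=process_line_and_extract_dataflow_job_id_callback(on_new_job_id),
    )
    # project_id is filled in by the fallback_to_default_project_id decorator.
    hook.wait_for_done(job_name=name, location=location, job_id=hook.job_id)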
@@ -598,6 +532,8 @@ def __init__( self.drain_pipeline = drain_pipeline self.cancel_timeout = cancel_timeout self.wait_until_finished = wait_until_finished + self.job_id: Optional[str] = None + self.beam_hook = BeamHook(BeamRunnerType.DataflowRunner) super().__init__( gcp_conn_id=gcp_conn_id, delegate_to=delegate_to, @@ -609,40 +545,6 @@ def get_conn(self) -> build: http_authorized = self._authorize() return build("dataflow", "v1b3", http=http_authorized, cache_discovery=False) - @GoogleBaseHook.provide_gcp_credential_file - def _start_dataflow( - self, - variables: dict, - name: str, - command_prefix: List[str], - project_id: str, - multiple_jobs: bool = False, - on_new_job_id_callback: Optional[Callable[[str], None]] = None, - location: str = DEFAULT_DATAFLOW_LOCATION, - ) -> None: - cmd = command_prefix + [ - "--runner=DataflowRunner", - f"--project={project_id}", - ] - if variables: - cmd.extend(self._options_to_args(variables)) - runner = _DataflowRunner(cmd=cmd, on_new_job_id_callback=on_new_job_id_callback) - job_id = runner.wait_for_done() - job_controller = _DataflowJobsController( - dataflow=self.get_conn(), - project_number=project_id, - name=name, - location=location, - poll_sleep=self.poll_sleep, - job_id=job_id, - num_retries=self.num_retries, - multiple_jobs=multiple_jobs, - drain_pipeline=self.drain_pipeline, - cancel_timeout=self.cancel_timeout, - wait_until_finished=self.wait_until_finished, - ) - job_controller.wait_for_done() - @_fallback_to_location_from_variables @_fallback_to_project_id_from_variables @GoogleBaseHook.fallback_to_default_project_id @@ -680,22 +582,36 @@ def start_java_dataflow( :param location: Job location. :type location: str """ - name = self._build_dataflow_job_name(job_name, append_job_name) + warnings.warn( + """This method is deprecated. + Please use `airflow.providers.apache.beam.hooks.beam.BeamHook.start_java_pipeline` + to start the pipeline and `airflow.providers.google.cloud.hooks.dataflow.DataflowHook.wait_for_done` + to wait for the required pipeline state. + """, + DeprecationWarning, + stacklevel=3, + ) + + name = self.build_dataflow_job_name(job_name, append_job_name) + variables["jobName"] = name variables["region"] = location + variables["project"] = project_id if "labels" in variables: variables["labels"] = json.dumps(variables["labels"], separators=(",", ":")) - command_prefix = ["java", "-cp", jar, job_class] if job_class else ["java", "-jar", jar] - self._start_dataflow( + self.beam_hook.start_java_pipeline( variables=variables, - name=name, - command_prefix=command_prefix, - project_id=project_id, - multiple_jobs=multiple_jobs, - on_new_job_id_callback=on_new_job_id_callback, + jar=jar, + job_class=job_class, + process_line_callback=process_line_and_extract_dataflow_job_id_callback(on_new_job_id_callback), + ) + self.wait_for_done( # pylint: disable=no-value-for-parameter + job_name=name, location=location, + job_id=self.job_id, + multiple_jobs=multiple_jobs, ) @_fallback_to_location_from_variables @_fallback_to_project_id_from_variables @GoogleBaseHook.fallback_to_default_project_id @@ -748,7 +664,7 @@ def start_template_dataflow( :type environment: Optional[dict] """ - name = self._build_dataflow_job_name(job_name, append_job_name) + name = self.build_dataflow_job_name(job_name, append_job_name) environment = environment or {} # available keys for runtime environment are listed here: @@ -921,58 +837,40 @@ def start_python_dataflow( # pylint: disable=too-many-arguments :param location: Job location.
:type location: str """ - name = self._build_dataflow_job_name(job_name, append_job_name) + warnings.warn( + """This method is deprecated. + Please use `airflow.providers.apache.beam.hooks.beam.BeamHook.start_python_pipeline` + to start the pipeline and `airflow.providers.google.cloud.hooks.dataflow.DataflowHook.wait_for_done` + to wait for the required pipeline state. + """, + DeprecationWarning, + stacklevel=3, + ) + + name = self.build_dataflow_job_name(job_name, append_job_name) variables["job_name"] = name variables["region"] = location + variables["project"] = project_id - if "labels" in variables: - variables["labels"] = [f"{key}={value}" for key, value in variables["labels"].items()] - - if py_requirements is not None: - if not py_requirements and not py_system_site_packages: - warning_invalid_environment = textwrap.dedent( - """\ - Invalid method invocation. You have disabled inclusion of system packages and empty list - required for installation, so it is not possible to create a valid virtual environment. - In the virtual environment, apache-beam package must be installed for your job to be \ - executed. To fix this problem: - * install apache-beam on the system, then set parameter py_system_site_packages to True, - * add apache-beam to the list of required packages in parameter py_requirements. - """ - ) - raise AirflowException(warning_invalid_environment) - - with TemporaryDirectory(prefix="dataflow-venv") as tmp_dir: - py_interpreter = prepare_virtualenv( - venv_directory=tmp_dir, - python_bin=py_interpreter, - system_site_packages=py_system_site_packages, - requirements=py_requirements, - ) - command_prefix = [py_interpreter] + py_options + [dataflow] - - self._start_dataflow( - variables=variables, - name=name, - command_prefix=command_prefix, - project_id=project_id, - on_new_job_id_callback=on_new_job_id_callback, - location=location, - ) - else: - command_prefix = [py_interpreter] + py_options + [dataflow] - - self._start_dataflow( - variables=variables, - name=name, - command_prefix=command_prefix, - project_id=project_id, - on_new_job_id_callback=on_new_job_id_callback, - location=location, - ) + self.beam_hook.start_python_pipeline( + variables=variables, + py_file=dataflow, + py_options=py_options, + py_interpreter=py_interpreter, + py_requirements=py_requirements, + py_system_site_packages=py_system_site_packages, + process_line_callback=process_line_and_extract_dataflow_job_id_callback(on_new_job_id_callback), + ) + + self.wait_for_done( # pylint: disable=no-value-for-parameter + job_name=name, + location=location, + job_id=self.job_id, + ) @staticmethod - def _build_dataflow_job_name(job_name: str, append_job_name: bool = True) -> str: + def build_dataflow_job_name(job_name: str, append_job_name: bool = True) -> str: + """Builds Dataflow job name.""" base_job_name = str(job_name).replace("_", "-") if not re.match(r"^[a-z]([-a-z0-9]*[a-z0-9])?$", base_job_name): @@ -989,23 +887,6 @@ def _build_dataflow_job_name(job_name: str, append_job_name: bool = True) -> str return safe_job_name - @staticmethod - def _options_to_args(variables: dict) -> List[str]: - if not variables: - return [] - # The logic of this method should be compatible with Apache Beam: - # https://github.com/apache/beam/blob/b56740f0e8cd80c2873412847d0b336837429fb9/sdks/python/ - # apache_beam/options/pipeline_options.py#L230-L251 - args: List[str] = [] - for attr, value in variables.items(): - if value is None or (isinstance(value, bool) and value): - args.append(f"--{attr}") - elif isinstance(value, list): -
args.extend([f"--{attr}={v}" for v in value]) - else: - args.append(f"--{attr}={value}") - return args - @_fallback_to_location_from_variables @_fallback_to_project_id_from_variables @GoogleBaseHook.fallback_to_default_project_id @@ -1125,7 +1006,7 @@ def start_sql_job( "--format=value(job.id)", f"--job-name={job_name}", f"--region={location}", - *(self._options_to_args(options)), + *(beam_options_to_args(options)), ] self.log.info("Executing command: %s", " ".join([shlex.quote(c) for c in cmd])) with self.provide_authorized_gcloud(): @@ -1266,3 +1147,44 @@ def fetch_job_autoscaling_events_by_id( location=location, ) return jobs_controller.fetch_job_autoscaling_events_by_id(job_id) + + @GoogleBaseHook.fallback_to_default_project_id + def wait_for_done( + self, + job_name: str, + location: str, + project_id: str, + job_id: Optional[str] = None, + multiple_jobs: bool = False, + ) -> None: + """ + Wait for Dataflow job. + + :param job_name: The 'jobName' to use when executing the DataFlow job + (templated). This ends up being set in the pipeline options, so any entry + with key ``'jobName'`` in ``options`` will be overwritten. + :type job_name: str + :param location: location the job is running + :type location: str + :param project_id: Optional, the Google Cloud project ID in which to start a job. + If set to None or missing, the default project_id from the Google Cloud connection is used. + :type project_id: + :param job_id: a Dataflow job ID + :type job_id: str + :param multiple_jobs: If pipeline creates multiple jobs then monitor all jobs + :type multiple_jobs: boolean + """ + job_controller = _DataflowJobsController( + dataflow=self.get_conn(), + project_number=project_id, + name=job_name, + location=location, + poll_sleep=self.poll_sleep, + job_id=job_id or self.job_id, + num_retries=self.num_retries, + multiple_jobs=multiple_jobs, + drain_pipeline=self.drain_pipeline, + cancel_timeout=self.cancel_timeout, + wait_until_finished=self.wait_until_finished, + ) + job_controller.wait_for_done() diff --git a/airflow/providers/google/cloud/hooks/dataproc.py b/airflow/providers/google/cloud/hooks/dataproc.py index 12d5941cdd164..35d4786012477 100644 --- a/airflow/providers/google/cloud/hooks/dataproc.py +++ b/airflow/providers/google/cloud/hooks/dataproc.py @@ -26,18 +26,16 @@ from google.api_core.exceptions import ServerError from google.api_core.retry import Retry from google.cloud.dataproc_v1beta2 import ( # pylint: disable=no-name-in-module - ClusterControllerClient, - JobControllerClient, - WorkflowTemplateServiceClient, -) -from google.cloud.dataproc_v1beta2.types import ( # pylint: disable=no-name-in-module Cluster, - Duration, - FieldMask, + ClusterControllerClient, Job, + JobControllerClient, JobStatus, WorkflowTemplate, + WorkflowTemplateServiceClient, ) +from google.protobuf.duration_pb2 import Duration +from google.protobuf.field_mask_pb2 import FieldMask from airflow.exceptions import AirflowException from airflow.providers.google.common.hooks.base_google import GoogleBaseHook @@ -291,10 +289,12 @@ def create_cluster( client = self.get_cluster_client(location=region) result = client.create_cluster( - project_id=project_id, - region=region, - cluster=cluster, - request_id=request_id, + request={ + 'project_id': project_id, + 'region': region, + 'cluster': cluster, + 'request_id': request_id, + }, retry=retry, timeout=timeout, metadata=metadata, @@ -340,11 +340,13 @@ def delete_cluster( """ client = self.get_cluster_client(location=region) result = client.delete_cluster( - 
project_id=project_id, - region=region, - cluster_name=cluster_name, - cluster_uuid=cluster_uuid, - request_id=request_id, + request={ + 'project_id': project_id, + 'region': region, + 'cluster_name': cluster_name, + 'cluster_uuid': cluster_uuid, + 'request_id': request_id, + }, retry=retry, timeout=timeout, metadata=metadata, @@ -382,9 +384,7 @@ def diagnose_cluster( """ client = self.get_cluster_client(location=region) operation = client.diagnose_cluster( - project_id=project_id, - region=region, - cluster_name=cluster_name, + request={'project_id': project_id, 'region': region, 'cluster_name': cluster_name}, retry=retry, timeout=timeout, metadata=metadata, @@ -423,9 +423,7 @@ def get_cluster( """ client = self.get_cluster_client(location=region) result = client.get_cluster( - project_id=project_id, - region=region, - cluster_name=cluster_name, + request={'project_id': project_id, 'region': region, 'cluster_name': cluster_name}, retry=retry, timeout=timeout, metadata=metadata, @@ -467,10 +465,7 @@ def list_clusters( """ client = self.get_cluster_client(location=region) result = client.list_clusters( - project_id=project_id, - region=region, - filter_=filter_, - page_size=page_size, + request={'project_id': project_id, 'region': region, 'filter': filter_, 'page_size': page_size}, retry=retry, timeout=timeout, metadata=metadata, @@ -551,13 +546,15 @@ def update_cluster( # pylint: disable=too-many-arguments """ client = self.get_cluster_client(location=location) operation = client.update_cluster( - project_id=project_id, - region=location, - cluster_name=cluster_name, - cluster=cluster, - update_mask=update_mask, - graceful_decommission_timeout=graceful_decommission_timeout, - request_id=request_id, + request={ + 'project_id': project_id, + 'region': location, + 'cluster_name': cluster_name, + 'cluster': cluster, + 'update_mask': update_mask, + 'graceful_decommission_timeout': graceful_decommission_timeout, + 'request_id': request_id, + }, retry=retry, timeout=timeout, metadata=metadata, @@ -593,10 +590,11 @@ def create_workflow_template( :param metadata: Additional metadata that is provided to the method. :type metadata: Sequence[Tuple[str, str]] """ + metadata = metadata or () client = self.get_template_client(location) - parent = client.region_path(project_id, location) + parent = f'projects/{project_id}/regions/{location}' return client.create_workflow_template( - parent=parent, template=template, retry=retry, timeout=timeout, metadata=metadata + request={'parent': parent, 'template': template}, retry=retry, timeout=timeout, metadata=metadata ) @GoogleBaseHook.fallback_to_default_project_id @@ -643,13 +641,11 @@ def instantiate_workflow_template( :param metadata: Additional metadata that is provided to the method. :type metadata: Sequence[Tuple[str, str]] """ + metadata = metadata or () client = self.get_template_client(location) - name = client.workflow_template_path(project_id, location, template_name) + name = f'projects/{project_id}/regions/{location}/workflowTemplates/{template_name}' operation = client.instantiate_workflow_template( - name=name, - version=version, - parameters=parameters, - request_id=request_id, + request={'name': name, 'version': version, 'request_id': request_id, 'parameters': parameters}, retry=retry, timeout=timeout, metadata=metadata, @@ -690,12 +686,11 @@ def instantiate_inline_workflow_template( :param metadata: Additional metadata that is provided to the method. 
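+ A ``None`` value is normalized to an empty tuple before the call (the regenerated client expects an iterable here).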
:type metadata: Sequence[Tuple[str, str]] """ + metadata = metadata or () client = self.get_template_client(location) - parent = client.region_path(project_id, location) + parent = f'projects/{project_id}/regions/{location}' operation = client.instantiate_inline_workflow_template( - parent=parent, - template=template, - request_id=request_id, + request={'parent': parent, 'template': template, 'request_id': request_id}, retry=retry, timeout=timeout, metadata=metadata, @@ -722,19 +717,19 @@ def wait_for_job( """ state = None start = time.monotonic() - while state not in (JobStatus.ERROR, JobStatus.DONE, JobStatus.CANCELLED): + while state not in (JobStatus.State.ERROR, JobStatus.State.DONE, JobStatus.State.CANCELLED): if timeout and start + timeout < time.monotonic(): raise AirflowException(f"Timeout: dataproc job {job_id} is not ready after {timeout}s") time.sleep(wait_time) try: - job = self.get_job(location=location, job_id=job_id, project_id=project_id) + job = self.get_job(project_id=project_id, location=location, job_id=job_id) state = job.status.state except ServerError as err: self.log.info("Retrying. Dataproc API returned server error when waiting for job: %s", err) - if state == JobStatus.ERROR: + if state == JobStatus.State.ERROR: raise AirflowException(f'Job failed:\n{job}') - if state == JobStatus.CANCELLED: + if state == JobStatus.State.CANCELLED: raise AirflowException(f'Job was cancelled:\n{job}') @GoogleBaseHook.fallback_to_default_project_id @@ -767,9 +762,7 @@ def get_job( """ client = self.get_job_client(location=location) job = client.get_job( - project_id=project_id, - region=location, - job_id=job_id, + request={'project_id': project_id, 'region': location, 'job_id': job_id}, retry=retry, timeout=timeout, metadata=metadata, @@ -812,10 +805,7 @@ def submit_job( """ client = self.get_job_client(location=location) return client.submit_job( - project_id=project_id, - region=location, - job=job, - request_id=request_id, + request={'project_id': project_id, 'region': location, 'job': job, 'request_id': request_id}, retry=retry, timeout=timeout, metadata=metadata, @@ -884,9 +874,7 @@ def cancel_job( client = self.get_job_client(location=location) job = client.cancel_job( - project_id=project_id, - region=location, - job_id=job_id, + request={'project_id': project_id, 'region': location, 'job_id': job_id}, retry=retry, timeout=timeout, metadata=metadata, diff --git a/airflow/providers/google/cloud/hooks/gcs.py b/airflow/providers/google/cloud/hooks/gcs.py index 0ca39616699b7..72a23eacadd26 100644 --- a/airflow/providers/google/cloud/hooks/gcs.py +++ b/airflow/providers/google/cloud/hooks/gcs.py @@ -40,6 +40,9 @@ RT = TypeVar('RT') # pylint: disable=invalid-name T = TypeVar("T", bound=Callable) # pylint: disable=invalid-name +# Use default timeout from google-cloud-storage +DEFAULT_TIMEOUT = 60 + def _fallback_object_url_to_object_name_and_bucket_name( object_url_keyword_arg_name='object_url', @@ -257,7 +260,12 @@ def rewrite( ) def download( - self, object_name: str, bucket_name: Optional[str], filename: Optional[str] = None + self, + object_name: str, + bucket_name: Optional[str], + filename: Optional[str] = None, + chunk_size: Optional[int] = None, + timeout: Optional[int] = DEFAULT_TIMEOUT, ) -> Union[str, bytes]: """ Downloads a file from Google Cloud Storage. @@ -273,16 +281,20 @@ def download( :type object_name: str :param filename: If set, a local file path where the file should be written to. :type filename: str + :param chunk_size: Blob chunk size. 
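+ If set, it must be a multiple of 256 KB (262144 bytes); google-cloud-storage enforces this for chunked media operations.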
+ :type chunk_size: int + :param timeout: Request timeout in seconds. + :type timeout: int """ # TODO: future improvement check file size before downloading, # to check for local space availability client = self.get_conn() bucket = client.bucket(bucket_name) - blob = bucket.blob(blob_name=object_name) + blob = bucket.blob(blob_name=object_name, chunk_size=chunk_size) if filename: - blob.download_to_filename(filename) + blob.download_to_filename(filename, timeout=timeout) self.log.info('File downloaded to %s', filename) return filename else: @@ -359,6 +371,8 @@ def upload( mime_type: Optional[str] = None, gzip: bool = False, encoding: str = 'utf-8', + chunk_size: Optional[int] = None, + timeout: Optional[int] = DEFAULT_TIMEOUT, ) -> None: """ Uploads a local file or file data as string or bytes to Google Cloud Storage. @@ -377,10 +391,14 @@ def upload( :type gzip: bool :param encoding: bytes encoding for file data if provided as string :type encoding: str + :param chunk_size: Blob chunk size. + :type chunk_size: int + :param timeout: Request timeout in seconds. + :type timeout: int """ client = self.get_conn() bucket = client.bucket(bucket_name) - blob = bucket.blob(blob_name=object_name) + blob = bucket.blob(blob_name=object_name, chunk_size=chunk_size) if filename and data: raise ValueError( "'filename' and 'data' parameter provided. Please " @@ -398,7 +416,7 @@ def upload( shutil.copyfileobj(f_in, f_out) filename = filename_gz - blob.upload_from_filename(filename=filename, content_type=mime_type) + blob.upload_from_filename(filename=filename, content_type=mime_type, timeout=timeout) if gzip: os.remove(filename) self.log.info('File %s uploaded to %s in %s bucket', filename, object_name, bucket_name) @@ -412,7 +430,7 @@ def upload( with gz.GzipFile(fileobj=out, mode="w") as f: f.write(data) data = out.getvalue() - blob.upload_from_string(data, content_type=mime_type) + blob.upload_from_string(data, content_type=mime_type, timeout=timeout) self.log.info('Data stream uploaded to %s in %s bucket', object_name, bucket_name) else: raise ValueError("'filename' and 'data' parameter missing. 
One is required to upload to gcs.") diff --git a/airflow/providers/google/cloud/hooks/kms.py b/airflow/providers/google/cloud/hooks/kms.py index e63c2f1cb79f6..3fd14336d23d7 100644 --- a/airflow/providers/google/cloud/hooks/kms.py +++ b/airflow/providers/google/cloud/hooks/kms.py @@ -118,12 +118,14 @@ def encrypt( :rtype: str """ response = self.get_conn().encrypt( - name=key_name, - plaintext=plaintext, - additional_authenticated_data=authenticated_data, + request={ + 'name': key_name, + 'plaintext': plaintext, + 'additional_authenticated_data': authenticated_data, + }, retry=retry, timeout=timeout, - metadata=metadata, + metadata=metadata or (), ) ciphertext = _b64encode(response.ciphertext) @@ -161,12 +163,14 @@ def decrypt( :rtype: bytes """ response = self.get_conn().decrypt( - name=key_name, - ciphertext=_b64decode(ciphertext), - additional_authenticated_data=authenticated_data, + request={ + 'name': key_name, + 'ciphertext': _b64decode(ciphertext), + 'additional_authenticated_data': authenticated_data, + }, retry=retry, timeout=timeout, - metadata=metadata, + metadata=metadata or (), ) return response.plaintext diff --git a/airflow/providers/google/cloud/hooks/os_login.py b/airflow/providers/google/cloud/hooks/os_login.py index c7a4234055f6b..361ea60637b7f 100644 --- a/airflow/providers/google/cloud/hooks/os_login.py +++ b/airflow/providers/google/cloud/hooks/os_login.py @@ -17,7 +17,7 @@ from typing import Dict, Optional, Sequence, Union -from google.cloud.oslogin_v1 import OsLoginServiceClient +from google.cloud.oslogin_v1 import ImportSshPublicKeyResponse, OsLoginServiceClient from airflow.providers.google.common.hooks.base_google import GoogleBaseHook @@ -54,7 +54,7 @@ def get_conn(self) -> OsLoginServiceClient: @GoogleBaseHook.fallback_to_default_project_id def import_ssh_public_key( self, user: str, ssh_public_key: Dict, project_id: str, retry=None, timeout=None, metadata=None - ): + ) -> ImportSshPublicKeyResponse: """ Adds an SSH public key and returns the profile information. Default POSIX account information is set when no username and UID exist as part of the @@ -74,14 +74,16 @@ def import_ssh_public_key( :type timeout: Optional[float] :param metadata: Additional metadata that is provided to the method. :type metadata: Optional[Sequence[Tuple[str, str]]] - :return: A :class:`~google.cloud.oslogin_v1.types.ImportSshPublicKeyResponse` instance. + :return: A :class:`~google.cloud.oslogin_v1.ImportSshPublicKeyResponse` instance. 
""" conn = self.get_conn() return conn.import_ssh_public_key( - parent=OsLoginServiceClient.user_path(user=user), - ssh_public_key=ssh_public_key, - project_id=project_id, + request=dict( + parent=f"users/{user}", + ssh_public_key=ssh_public_key, + project_id=project_id, + ), retry=retry, timeout=timeout, - metadata=metadata, + metadata=metadata or (), ) diff --git a/airflow/providers/google/cloud/hooks/pubsub.py b/airflow/providers/google/cloud/hooks/pubsub.py index f2ae19089a149..37240a2a668b7 100644 --- a/airflow/providers/google/cloud/hooks/pubsub.py +++ b/airflow/providers/google/cloud/hooks/pubsub.py @@ -111,7 +111,7 @@ def publish( self._validate_messages(messages) publisher = self.get_conn() - topic_path = PublisherClient.topic_path(project_id, topic) # pylint: disable=no-member + topic_path = f"projects/{project_id}/topics/{topic}" self.log.info("Publish %d messages to topic (path) %s", len(messages), topic_path) try: @@ -206,7 +206,7 @@ def create_topic( :type metadata: Sequence[Tuple[str, str]]] """ publisher = self.get_conn() - topic_path = PublisherClient.topic_path(project_id, topic) # pylint: disable=no-member + topic_path = f"projects/{project_id}/topics/{topic}" # Add airflow-version label to the topic labels = labels or {} @@ -216,13 +216,15 @@ def create_topic( try: # pylint: disable=no-member publisher.create_topic( - name=topic_path, - labels=labels, - message_storage_policy=message_storage_policy, - kms_key_name=kms_key_name, + request={ + "name": topic_path, + "labels": labels, + "message_storage_policy": message_storage_policy, + "kms_key_name": kms_key_name, + }, retry=retry, timeout=timeout, - metadata=metadata, + metadata=metadata or (), ) except AlreadyExists: self.log.warning('Topic already exists: %s', topic) @@ -266,16 +268,13 @@ def delete_topic( :type metadata: Sequence[Tuple[str, str]]] """ publisher = self.get_conn() - topic_path = PublisherClient.topic_path(project_id, topic) # pylint: disable=no-member + topic_path = f"projects/{project_id}/topics/{topic}" self.log.info("Deleting topic (path) %s", topic_path) try: # pylint: disable=no-member publisher.delete_topic( - topic=topic_path, - retry=retry, - timeout=timeout, - metadata=metadata, + request={"topic": topic_path}, retry=retry, timeout=timeout, metadata=metadata or () ) except NotFound: self.log.warning('Topic does not exist: %s', topic_path) @@ -401,27 +400,29 @@ def create_subscription( labels['airflow-version'] = 'v' + version.replace('.', '-').replace('+', '-') # pylint: disable=no-member - subscription_path = SubscriberClient.subscription_path(subscription_project_id, subscription) - topic_path = SubscriberClient.topic_path(project_id, topic) + subscription_path = f"projects/{subscription_project_id}/subscriptions/{subscription}" + topic_path = f"projects/{project_id}/topics/{topic}" self.log.info("Creating subscription (path) %s for topic (path) %a", subscription_path, topic_path) try: subscriber.create_subscription( - name=subscription_path, - topic=topic_path, - push_config=push_config, - ack_deadline_seconds=ack_deadline_secs, - retain_acked_messages=retain_acked_messages, - message_retention_duration=message_retention_duration, - labels=labels, - enable_message_ordering=enable_message_ordering, - expiration_policy=expiration_policy, - filter_=filter_, - dead_letter_policy=dead_letter_policy, - retry_policy=retry_policy, + request={ + "name": subscription_path, + "topic": topic_path, + "push_config": push_config, + "ack_deadline_seconds": ack_deadline_secs, + "retain_acked_messages": 
retain_acked_messages, + "message_retention_duration": message_retention_duration, + "labels": labels, + "enable_message_ordering": enable_message_ordering, + "expiration_policy": expiration_policy, + "filter": filter_, + "dead_letter_policy": dead_letter_policy, + "retry_policy": retry_policy, + }, retry=retry, timeout=timeout, - metadata=metadata, + metadata=metadata or (), ) except AlreadyExists: self.log.warning('Subscription already exists: %s', subscription_path) @@ -466,13 +467,16 @@ def delete_subscription( """ subscriber = self.subscriber_client # noqa E501 # pylint: disable=no-member - subscription_path = SubscriberClient.subscription_path(project_id, subscription) + subscription_path = f"projects/{project_id}/subscriptions/{subscription}" self.log.info("Deleting subscription (path) %s", subscription_path) try: # pylint: disable=no-member subscriber.delete_subscription( - subscription=subscription_path, retry=retry, timeout=timeout, metadata=metadata + request={"subscription": subscription_path}, + retry=retry, + timeout=timeout, + metadata=metadata or (), ) except NotFound: @@ -527,18 +531,20 @@ def pull( """ subscriber = self.subscriber_client # noqa E501 # pylint: disable=no-member,line-too-long - subscription_path = SubscriberClient.subscription_path(project_id, subscription) + subscription_path = f"projects/{project_id}/subscriptions/{subscription}" self.log.info("Pulling max %d messages from subscription (path) %s", max_messages, subscription_path) try: # pylint: disable=no-member response = subscriber.pull( - subscription=subscription_path, - max_messages=max_messages, - return_immediately=return_immediately, + request={ + "subscription": subscription_path, + "max_messages": max_messages, + "return_immediately": return_immediately, + }, retry=retry, timeout=timeout, - metadata=metadata, + metadata=metadata or (), ) result = getattr(response, 'received_messages', []) self.log.info("Pulled %d messages from subscription (path) %s", len(result), subscription_path) @@ -591,17 +597,16 @@ def acknowledge( subscriber = self.subscriber_client # noqa E501 # pylint: disable=no-member - subscription_path = SubscriberClient.subscription_path(project_id, subscription) + subscription_path = f"projects/{project_id}/subscriptions/{subscription}" self.log.info("Acknowledging %d ack_ids from subscription (path) %s", len(ack_ids), subscription_path) try: # pylint: disable=no-member subscriber.acknowledge( - subscription=subscription_path, - ack_ids=ack_ids, + request={"subscription": subscription_path, "ack_ids": ack_ids}, retry=retry, timeout=timeout, - metadata=metadata, + metadata=metadata or (), ) except (HttpError, GoogleAPICallError) as e: raise PubSubException( diff --git a/airflow/providers/google/cloud/hooks/stackdriver.py b/airflow/providers/google/cloud/hooks/stackdriver.py index 9da1afa409a36..04dc329e4f69b 100644 --- a/airflow/providers/google/cloud/hooks/stackdriver.py +++ b/airflow/providers/google/cloud/hooks/stackdriver.py @@ -24,7 +24,8 @@ from google.api_core.exceptions import InvalidArgument from google.api_core.gapic_v1.method import DEFAULT from google.cloud import monitoring_v3 -from google.protobuf.json_format import MessageToDict, MessageToJson, Parse +from google.cloud.monitoring_v3 import AlertPolicy, NotificationChannel +from google.protobuf.field_mask_pb2 import FieldMask from googleapiclient.errors import HttpError from airflow.exceptions import AirflowException @@ -110,18 +111,20 @@ def list_alert_policies( """ client = self._get_policy_client() policies_ = 
client.list_alert_policies( - name=f'projects/{project_id}', - filter_=filter_, - order_by=order_by, - page_size=page_size, + request={ + 'name': f'projects/{project_id}', + 'filter': filter_, + 'order_by': order_by, + 'page_size': page_size, + }, retry=retry, timeout=timeout, - metadata=metadata, + metadata=metadata or (), ) if format_ == "dict": - return [MessageToDict(policy) for policy in policies_] + return [AlertPolicy.to_dict(policy) for policy in policies_] elif format_ == "json": - return [MessageToJson(policy) for policy in policies_] + return [AlertPolicy.to_json(policy) for policy in policies_] else: return policies_ @@ -138,12 +141,14 @@ def _toggle_policy_status( client = self._get_policy_client() policies_ = self.list_alert_policies(project_id=project_id, filter_=filter_) for policy in policies_: - if policy.enabled.value != bool(new_state): - policy.enabled.value = bool(new_state) - mask = monitoring_v3.types.field_mask_pb2.FieldMask() - mask.paths.append('enabled') # pylint: disable=no-member + if policy.enabled != bool(new_state): + policy.enabled = bool(new_state) + mask = FieldMask(paths=['enabled']) client.update_alert_policy( - alert_policy=policy, update_mask=mask, retry=retry, timeout=timeout, metadata=metadata + request={'alert_policy': policy, 'update_mask': mask}, + retry=retry, + timeout=timeout, + metadata=metadata or (), ) @GoogleBaseHook.fallback_to_default_project_id @@ -265,40 +270,39 @@ def upsert_alert( ] policies_ = [] channels = [] - - for channel in record["channels"]: - channel_json = json.dumps(channel) - channels.append(Parse(channel_json, monitoring_v3.types.notification_pb2.NotificationChannel())) - for policy in record["policies"]: - policy_json = json.dumps(policy) - policies_.append(Parse(policy_json, monitoring_v3.types.alert_pb2.AlertPolicy())) + for channel in record.get("channels", []): + channels.append(NotificationChannel(**channel)) + for policy in record.get("policies", []): + policies_.append(AlertPolicy(**policy)) channel_name_map = {} for channel in channels: channel.verification_status = ( - monitoring_v3.enums.NotificationChannel.VerificationStatus.VERIFICATION_STATUS_UNSPECIFIED + monitoring_v3.NotificationChannel.VerificationStatus.VERIFICATION_STATUS_UNSPECIFIED ) if channel.name in existing_channels: channel_client.update_notification_channel( - notification_channel=channel, retry=retry, timeout=timeout, metadata=metadata + request={'notification_channel': channel}, + retry=retry, + timeout=timeout, + metadata=metadata or (), ) else: old_name = channel.name - channel.ClearField('name') + channel.name = None new_channel = channel_client.create_notification_channel( - name=f'projects/{project_id}', - notification_channel=channel, + request={'name': f'projects/{project_id}', 'notification_channel': channel}, retry=retry, timeout=timeout, - metadata=metadata, + metadata=metadata or (), ) channel_name_map[old_name] = new_channel.name for policy in policies_: - policy.ClearField('creation_record') - policy.ClearField('mutation_record') + policy.creation_record = None + policy.mutation_record = None for i, channel in enumerate(policy.notification_channels): new_channel = channel_name_map.get(channel) @@ -308,20 +312,22 @@ def upsert_alert( if policy.name in existing_policies: try: policy_client.update_alert_policy( - alert_policy=policy, retry=retry, timeout=timeout, metadata=metadata + request={'alert_policy': policy}, + retry=retry, + timeout=timeout, + metadata=metadata or (), ) except InvalidArgument: pass else: -
policy.ClearField('name') + policy.name = None for condition in policy.conditions: - condition.ClearField('name') + condition.name = None policy_client.create_alert_policy( - name=f'projects/{project_id}', - alert_policy=policy, + request={'name': f'projects/{project_id}', 'alert_policy': policy}, retry=retry, timeout=timeout, - metadata=None, + metadata=metadata or (), ) def delete_alert_policy( @@ -349,7 +355,9 @@ def delete_alert_policy( """ policy_client = self._get_policy_client() try: - policy_client.delete_alert_policy(name=name, retry=retry, timeout=timeout, metadata=metadata) + policy_client.delete_alert_policy( + request={'name': name}, retry=retry, timeout=timeout, metadata=metadata or () + ) except HttpError as err: raise AirflowException(f'Delete alerting policy failed. Error was {err.content}') @@ -405,18 +413,20 @@ def list_notification_channels( """ client = self._get_channel_client() channels = client.list_notification_channels( - name=f'projects/{project_id}', - filter_=filter_, - order_by=order_by, - page_size=page_size, + request={ + 'name': f'projects/{project_id}', + 'filter': filter_, + 'order_by': order_by, + 'page_size': page_size, + }, retry=retry, timeout=timeout, - metadata=metadata, + metadata=metadata or (), ) if format_ == "dict": - return [MessageToDict(channel) for channel in channels] + return [NotificationChannel.to_dict(channel) for channel in channels] elif format_ == "json": - return [MessageToJson(channel) for channel in channels] + return [NotificationChannel.to_json(channel) for channel in channels] else: return channels @@ -431,18 +441,18 @@ def _toggle_channel_status( metadata: Optional[str] = None, ) -> None: client = self._get_channel_client() - channels = client.list_notification_channels(name=f'projects/{project_id}', filter_=filter_) + channels = client.list_notification_channels( + request={'name': f'projects/{project_id}', 'filter': filter_} + ) for channel in channels: - if channel.enabled.value != bool(new_state): - channel.enabled.value = bool(new_state) - mask = monitoring_v3.types.field_mask_pb2.FieldMask() - mask.paths.append('enabled') # pylint: disable=no-member + if channel.enabled != bool(new_state): + channel.enabled = bool(new_state) + mask = FieldMask(paths=['enabled']) client.update_notification_channel( - notification_channel=channel, - update_mask=mask, + request={'notification_channel': channel, 'update_mask': mask}, retry=retry, timeout=timeout, - metadata=metadata, + metadata=metadata or (), ) @GoogleBaseHook.fallback_to_default_project_id @@ -518,7 +528,7 @@ def disable_notification_channels( new_state=False, retry=retry, timeout=timeout, - metadata=metadata, + metadata=metadata or (), ) @GoogleBaseHook.fallback_to_default_project_id @@ -562,29 +572,28 @@ def upsert_channel( channel_name_map = {} for channel in record["channels"]: - channel_json = json.dumps(channel) - channels_list.append( - Parse(channel_json, monitoring_v3.types.notification_pb2.NotificationChannel()) - ) + channels_list.append(NotificationChannel(**channel)) for channel in channels_list: channel.verification_status = ( - monitoring_v3.enums.NotificationChannel.VerificationStatus.VERIFICATION_STATUS_UNSPECIFIED + monitoring_v3.NotificationChannel.VerificationStatus.VERIFICATION_STATUS_UNSPECIFIED ) if channel.name in existing_channels: channel_client.update_notification_channel( - notification_channel=channel, retry=retry, timeout=timeout, metadata=metadata + request={'notification_channel': channel}, + retry=retry, + timeout=timeout, + 
metadata=metadata or (), ) else: old_name = channel.name - channel.ClearField('name') + channel.name = None new_channel = channel_client.create_notification_channel( - name=f'projects/{project_id}', - notification_channel=channel, + request={'name': f'projects/{project_id}', 'notification_channel': channel}, retry=retry, timeout=timeout, - metadata=metadata, + metadata=metadata or (), ) channel_name_map[old_name] = new_channel.name @@ -616,7 +625,7 @@ def delete_notification_channel( channel_client = self._get_channel_client() try: channel_client.delete_notification_channel( - name=name, retry=retry, timeout=timeout, metadata=metadata + request={'name': name}, retry=retry, timeout=timeout, metadata=metadata or () ) except HttpError as err: raise AirflowException(f'Delete notification channel failed. Error was {err.content}') diff --git a/airflow/providers/google/cloud/hooks/tasks.py b/airflow/providers/google/cloud/hooks/tasks.py index 1c3223d8a0512..633f227d7875d 100644 --- a/airflow/providers/google/cloud/hooks/tasks.py +++ b/airflow/providers/google/cloud/hooks/tasks.py @@ -21,11 +21,13 @@ which allows you to connect to Google Cloud Tasks service, performing actions to queues or tasks. """ + from typing import Dict, List, Optional, Sequence, Tuple, Union from google.api_core.retry import Retry -from google.cloud.tasks_v2 import CloudTasksClient, enums -from google.cloud.tasks_v2.types import FieldMask, Queue, Task +from google.cloud.tasks_v2 import CloudTasksClient +from google.cloud.tasks_v2.types import Queue, Task +from google.protobuf.field_mask_pb2 import FieldMask from airflow.exceptions import AirflowException from airflow.providers.google.common.hooks.base_google import GoogleBaseHook @@ -120,20 +122,19 @@ def create_queue( client = self.get_conn() if queue_name: - full_queue_name = CloudTasksClient.queue_path(project_id, location, queue_name) + full_queue_name = f"projects/{project_id}/locations/{location}/queues/{queue_name}" if isinstance(task_queue, Queue): task_queue.name = full_queue_name elif isinstance(task_queue, dict): task_queue['name'] = full_queue_name else: raise AirflowException('Unable to set queue_name.') - full_location_path = CloudTasksClient.location_path(project_id, location) + full_location_path = f"projects/{project_id}/locations/{location}" return client.create_queue( - parent=full_location_path, - queue=task_queue, + request={'parent': full_location_path, 'queue': task_queue}, retry=retry, timeout=timeout, - metadata=metadata, + metadata=metadata or (), ) @GoogleBaseHook.fallback_to_default_project_id @@ -167,7 +168,7 @@ def update_queue( :param update_mask: A mask used to specify which fields of the queue are being updated. If empty, then all fields will be updated. If a dict is provided, it must be of the same form as the protobuf message. - :type update_mask: dict or google.cloud.tasks_v2.types.FieldMask + :type update_mask: dict or google.protobuf.field_mask_pb2.FieldMask :param retry: (Optional) A retry object used to retry requests. If None is specified, requests will not be retried.
:type retry: google.api_core.retry.Retry @@ -182,7 +183,7 @@ def update_queue( client = self.get_conn() if queue_name and location: - full_queue_name = CloudTasksClient.queue_path(project_id, location, queue_name) + full_queue_name = f"projects/{project_id}/locations/{location}/queues/{queue_name}" if isinstance(task_queue, Queue): task_queue.name = full_queue_name elif isinstance(task_queue, dict): @@ -190,11 +191,10 @@ def update_queue( else: raise AirflowException('Unable to set queue_name.') return client.update_queue( - queue=task_queue, - update_mask=update_mask, + request={'queue': task_queue, 'update_mask': update_mask}, retry=retry, timeout=timeout, - metadata=metadata, + metadata=metadata or (), ) @GoogleBaseHook.fallback_to_default_project_id @@ -230,8 +230,10 @@ def get_queue( """ client = self.get_conn() - full_queue_name = CloudTasksClient.queue_path(project_id, location, queue_name) - return client.get_queue(name=full_queue_name, retry=retry, timeout=timeout, metadata=metadata) + full_queue_name = f"projects/{project_id}/locations/{location}/queues/{queue_name}" + return client.get_queue( + request={'name': full_queue_name}, retry=retry, timeout=timeout, metadata=metadata or () + ) @GoogleBaseHook.fallback_to_default_project_id def list_queues( @@ -270,14 +272,12 @@ def list_queues( """ client = self.get_conn() - full_location_path = CloudTasksClient.location_path(project_id, location) + full_location_path = f"projects/{project_id}/locations/{location}" queues = client.list_queues( - parent=full_location_path, - filter_=results_filter, - page_size=page_size, + request={'parent': full_location_path, 'filter': results_filter, 'page_size': page_size}, retry=retry, timeout=timeout, - metadata=metadata, + metadata=metadata or (), ) return list(queues) @@ -313,8 +313,10 @@ def delete_queue( """ client = self.get_conn() - full_queue_name = CloudTasksClient.queue_path(project_id, location, queue_name) - client.delete_queue(name=full_queue_name, retry=retry, timeout=timeout, metadata=metadata) + full_queue_name = f"projects/{project_id}/locations/{location}/queues/{queue_name}" + client.delete_queue( + request={'name': full_queue_name}, retry=retry, timeout=timeout, metadata=metadata or () + ) @GoogleBaseHook.fallback_to_default_project_id def purge_queue( @@ -349,8 +351,10 @@ def purge_queue( """ client = self.get_conn() - full_queue_name = CloudTasksClient.queue_path(project_id, location, queue_name) - return client.purge_queue(name=full_queue_name, retry=retry, timeout=timeout, metadata=metadata) + full_queue_name = f"projects/{project_id}/locations/{location}/queues/{queue_name}" + return client.purge_queue( + request={'name': full_queue_name}, retry=retry, timeout=timeout, metadata=metadata or () + ) @GoogleBaseHook.fallback_to_default_project_id def pause_queue( @@ -385,8 +389,10 @@ def pause_queue( """ client = self.get_conn() - full_queue_name = CloudTasksClient.queue_path(project_id, location, queue_name) - return client.pause_queue(name=full_queue_name, retry=retry, timeout=timeout, metadata=metadata) + full_queue_name = f"projects/{project_id}/locations/{location}/queues/{queue_name}" + return client.pause_queue( + request={'name': full_queue_name}, retry=retry, timeout=timeout, metadata=metadata or () + ) @GoogleBaseHook.fallback_to_default_project_id def resume_queue( @@ -421,8 +427,10 @@ def resume_queue( """ client = self.get_conn() - full_queue_name = CloudTasksClient.queue_path(project_id, location, queue_name) - return client.resume_queue(name=full_queue_name, 
retry=retry, timeout=timeout, metadata=metadata) + full_queue_name = f"projects/{project_id}/locations/{location}/queues/{queue_name}" + return client.resume_queue( + request={'name': full_queue_name}, retry=retry, timeout=timeout, metadata=metadata or () + ) @GoogleBaseHook.fallback_to_default_project_id def create_task( @@ -432,7 +440,7 @@ def create_task( task: Union[Dict, Task], project_id: str, task_name: Optional[str] = None, - response_view: Optional[enums.Task.View] = None, + response_view: Optional = None, retry: Optional[Retry] = None, timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, @@ -455,7 +463,7 @@ def create_task( :type task_name: str :param response_view: (Optional) This field specifies which subset of the Task will be returned. - :type response_view: google.cloud.tasks_v2.enums.Task.View + :type response_view: google.cloud.tasks_v2.Task.View :param retry: (Optional) A retry object used to retry requests. If None is specified, requests will not be retried. :type retry: google.api_core.retry.Retry @@ -470,21 +478,21 @@ def create_task( client = self.get_conn() if task_name: - full_task_name = CloudTasksClient.task_path(project_id, location, queue_name, task_name) + full_task_name = ( + f"projects/{project_id}/locations/{location}/queues/{queue_name}/tasks/{task_name}" + ) if isinstance(task, Task): task.name = full_task_name elif isinstance(task, dict): task['name'] = full_task_name else: raise AirflowException('Unable to set task_name.') - full_queue_name = CloudTasksClient.queue_path(project_id, location, queue_name) + full_queue_name = f"projects/{project_id}/locations/{location}/queues/{queue_name}" return client.create_task( - parent=full_queue_name, - task=task, - response_view=response_view, + request={'parent': full_queue_name, 'task': task, 'response_view': response_view}, retry=retry, timeout=timeout, - metadata=metadata, + metadata=metadata or (), ) @GoogleBaseHook.fallback_to_default_project_id @@ -494,7 +502,7 @@ def get_task( queue_name: str, task_name: str, project_id: str, - response_view: Optional[enums.Task.View] = None, + response_view: Optional = None, retry: Optional[Retry] = None, timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, @@ -513,7 +521,7 @@ def get_task( :type project_id: str :param response_view: (Optional) This field specifies which subset of the Task will be returned. - :type response_view: google.cloud.tasks_v2.enums.Task.View + :type response_view: google.cloud.tasks_v2.Task.View :param retry: (Optional) A retry object used to retry requests. If None is specified, requests will not be retried. 
:type retry: google.api_core.retry.Retry @@ -527,13 +535,12 @@ def get_task( """ client = self.get_conn() - full_task_name = CloudTasksClient.task_path(project_id, location, queue_name, task_name) + full_task_name = f"projects/{project_id}/locations/{location}/queues/{queue_name}/tasks/{task_name}" return client.get_task( - name=full_task_name, - response_view=response_view, + request={'name': full_task_name, 'response_view': response_view}, retry=retry, timeout=timeout, - metadata=metadata, + metadata=metadata or (), ) @GoogleBaseHook.fallback_to_default_project_id @@ -542,7 +549,7 @@ def list_tasks( location: str, queue_name: str, project_id: str, - response_view: Optional[enums.Task.View] = None, + response_view: Optional = None, page_size: Optional[int] = None, retry: Optional[Retry] = None, timeout: Optional[float] = None, @@ -560,7 +567,7 @@ def list_tasks( :type project_id: str :param response_view: (Optional) This field specifies which subset of the Task will be returned. - :type response_view: google.cloud.tasks_v2.enums.Task.View + :type response_view: google.cloud.tasks_v2.Task.View :param page_size: (Optional) The maximum number of resources contained in the underlying API response. :type page_size: int @@ -576,14 +583,12 @@ def list_tasks( :rtype: list[google.cloud.tasks_v2.types.Task] """ client = self.get_conn() - full_queue_name = CloudTasksClient.queue_path(project_id, location, queue_name) + full_queue_name = f"projects/{project_id}/locations/{location}/queues/{queue_name}" tasks = client.list_tasks( - parent=full_queue_name, - response_view=response_view, - page_size=page_size, + request={'parent': full_queue_name, 'response_view': response_view, 'page_size': page_size}, retry=retry, timeout=timeout, - metadata=metadata, + metadata=metadata or (), ) return list(tasks) @@ -622,8 +627,10 @@ def delete_task( """ client = self.get_conn() - full_task_name = CloudTasksClient.task_path(project_id, location, queue_name, task_name) - client.delete_task(name=full_task_name, retry=retry, timeout=timeout, metadata=metadata) + full_task_name = f"projects/{project_id}/locations/{location}/queues/{queue_name}/tasks/{task_name}" + client.delete_task( + request={'name': full_task_name}, retry=retry, timeout=timeout, metadata=metadata or () + ) @GoogleBaseHook.fallback_to_default_project_id def run_task( @@ -632,7 +639,7 @@ def run_task( queue_name: str, task_name: str, project_id: str, - response_view: Optional[enums.Task.View] = None, + response_view: Optional = None, retry: Optional[Retry] = None, timeout: Optional[float] = None, metadata: Optional[Sequence[Tuple[str, str]]] = None, @@ -651,7 +658,7 @@ def run_task( :type project_id: str :param response_view: (Optional) This field specifies which subset of the Task will be returned. - :type response_view: google.cloud.tasks_v2.enums.Task.View + :type response_view: google.cloud.tasks_v2.Task.View :param retry: (Optional) A retry object used to retry requests. If None is specified, requests will not be retried. 
:type retry: google.api_core.retry.Retry @@ -665,11 +672,10 @@ def run_task( """ client = self.get_conn() - full_task_name = CloudTasksClient.task_path(project_id, location, queue_name, task_name) + full_task_name = f"projects/{project_id}/locations/{location}/queues/{queue_name}/tasks/{task_name}" return client.run_task( - name=full_task_name, - response_view=response_view, + request={'name': full_task_name, 'response_view': response_view}, retry=retry, timeout=timeout, - metadata=metadata, + metadata=metadata or (), )
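The tasks.py hunks above all apply the same google-cloud-tasks 2.x migration: the `queue_path`/`task_path` helpers give way to plain resource-name strings, call arguments move into a single `request` dict, and `metadata` defaults to an empty tuple. A minimal sketch of the new convention, not part of the patch, with placeholder project, location, and queue names:

from google.cloud.tasks_v2 import CloudTasksClient
from google.protobuf.field_mask_pb2 import FieldMask

client = CloudTasksClient()
# Resource names are now plain strings; 1.x built them with CloudTasksClient.queue_path(...).
queue_name = "projects/my-project/locations/us-central1/queues/my-queue"

# 1.x style was client.get_queue(name=queue_name, ...); 2.x wraps the arguments in `request`.
queue = client.get_queue(request={"name": queue_name})

# update_queue pairs the modified Queue with a protobuf FieldMask naming the changed fields.
queue.rate_limits.max_dispatches_per_second = 10
client.update_queue(
    request={
        "queue": queue,
        "update_mask": FieldMask(paths=["rate_limits.max_dispatches_per_second"]),
    }
)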
diff --git a/airflow/providers/google/cloud/hooks/workflows.py b/airflow/providers/google/cloud/hooks/workflows.py new file mode 100644 index 0000000000000..6c7835032892e --- /dev/null +++ b/airflow/providers/google/cloud/hooks/workflows.py @@ -0,0 +1,401 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements.  See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership.  The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License.  You may obtain a copy of the License at +# +#   http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied.  See the License for the +# specific language governing permissions and limitations +# under the License. + +from typing import Dict, Optional, Sequence, Tuple, Union + +from google.api_core.operation import Operation +from google.api_core.retry import Retry + +# pylint: disable=no-name-in-module +from google.cloud.workflows.executions_v1beta import Execution, ExecutionsClient +from google.cloud.workflows.executions_v1beta.services.executions.pagers import ListExecutionsPager +from google.cloud.workflows_v1beta import Workflow, WorkflowsClient +from google.cloud.workflows_v1beta.services.workflows.pagers import ListWorkflowsPager +from google.protobuf.field_mask_pb2 import FieldMask + +from airflow.providers.google.common.hooks.base_google import GoogleBaseHook + +# pylint: enable=no-name-in-module + + +class WorkflowsHook(GoogleBaseHook): +    """ +    Hook for Google Cloud Workflows APIs. + +    All the methods in the hook where project_id is used must be called with +    keyword arguments rather than positional. +    """ + +    def get_workflows_client(self) -> WorkflowsClient: +        """Returns WorkflowsClient.""" +        return WorkflowsClient(credentials=self._get_credentials(), client_info=self.client_info) + +    def get_executions_client(self) -> ExecutionsClient: +        """Returns ExecutionsClient.""" +        return ExecutionsClient(credentials=self._get_credentials(), client_info=self.client_info) + +    @GoogleBaseHook.fallback_to_default_project_id +    def create_workflow( +        self, +        workflow: Dict, +        workflow_id: str, +        location: str, +        project_id: str, +        retry: Optional[Retry] = None, +        timeout: Optional[float] = None, +        metadata: Optional[Sequence[Tuple[str, str]]] = None, +    ) -> Operation: +        """ +        Creates a new workflow. If a workflow with the specified name +        already exists in the specified project and location, the long +        running operation will return +        [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS] error. + +        :param workflow: Required. Workflow to be created. +        :type workflow: Dict +        :param workflow_id: Required. The ID of the workflow to be created. + :type workflow_id: str + :param project_id: Required. The ID of the Google Cloud project the workflow belongs to. + :type project_id: str + :param location: Required. The GCP region in which to handle the request. + :type location: str + :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be + retried. + :type retry: google.api_core.retry.Retry + :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if + ``retry`` is specified, the timeout applies to each individual attempt. + :type timeout: float + :param metadata: Additional metadata that is provided to the method. + :type metadata: Sequence[Tuple[str, str]] + """ + metadata = metadata or () + client = self.get_workflows_client() + parent = f"projects/{project_id}/locations/{location}" + return client.create_workflow( + request={"parent": parent, "workflow": workflow, "workflow_id": workflow_id}, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + @GoogleBaseHook.fallback_to_default_project_id + def get_workflow( + self, + workflow_id: str, + location: str, + project_id: str, + retry: Optional[Retry] = None, + timeout: Optional[float] = None, + metadata: Optional[Sequence[Tuple[str, str]]] = None, + ) -> Workflow: + """ + Gets details of a single Workflow. + + :param workflow_id: Required. The ID of the workflow to be retrieved. + :type workflow_id: str + :param project_id: Required. The ID of the Google Cloud project the workflow belongs to. + :type project_id: str + :param location: Required. The GCP region in which to handle the request. + :type location: str + :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be + retried. + :type retry: google.api_core.retry.Retry + :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if + ``retry`` is specified, the timeout applies to each individual attempt. + :type timeout: float + :param metadata: Additional metadata that is provided to the method. + :type metadata: Sequence[Tuple[str, str]] + """ + metadata = metadata or () + client = self.get_workflows_client() + name = f"projects/{project_id}/locations/{location}/workflows/{workflow_id}" + return client.get_workflow(request={"name": name}, retry=retry, timeout=timeout, metadata=metadata) + + def update_workflow( + self, + workflow: Union[Dict, Workflow], + update_mask: Optional[FieldMask] = None, + retry: Optional[Retry] = None, + timeout: Optional[float] = None, + metadata: Optional[Sequence[Tuple[str, str]]] = None, + ) -> Operation: + """ + Updates an existing workflow. + Running this method has no impact on already running + executions of the workflow. A new revision of the + workflow may be created as a result of a successful + update operation. In that case, such revision will be + used in new workflow executions. + + :param workflow: Required. Workflow to be updated. + :type workflow: Dict + :param update_mask: List of fields to be updated. If not present, + the entire workflow will be updated. + :type update_mask: FieldMask + :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be + retried. + :type retry: google.api_core.retry.Retry + :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if + ``retry`` is specified, the timeout applies to each individual attempt. + :type timeout: float + :param metadata: Additional metadata that is provided to the method.
+ :type metadata: Sequence[Tuple[str, str]] + """ + metadata = metadata or () + client = self.get_workflows_client() + return client.update_workflow( + request={"workflow": workflow, "update_mask": update_mask}, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + @GoogleBaseHook.fallback_to_default_project_id + def delete_workflow( + self, + workflow_id: str, + location: str, + project_id: str, + retry: Optional[Retry] = None, + timeout: Optional[float] = None, + metadata: Optional[Sequence[Tuple[str, str]]] = None, + ) -> Operation: + """ + Deletes a workflow with the specified name. + This method also cancels and deletes all running + executions of the workflow. + + :param workflow_id: Required. The ID of the workflow to be deleted. + :type workflow_id: str + :param project_id: Required. The ID of the Google Cloud project the workflow belongs to. + :type project_id: str + :param location: Required. The GCP region in which to handle the request. + :type location: str + :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be + retried. + :type retry: google.api_core.retry.Retry + :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if + ``retry`` is specified, the timeout applies to each individual attempt. + :type timeout: float + :param metadata: Additional metadata that is provided to the method. + :type metadata: Sequence[Tuple[str, str]] + """ + metadata = metadata or () + client = self.get_workflows_client() + name = f"projects/{project_id}/locations/{location}/workflows/{workflow_id}" + return client.delete_workflow(request={"name": name}, retry=retry, timeout=timeout, metadata=metadata) + + @GoogleBaseHook.fallback_to_default_project_id + def list_workflows( + self, + location: str, + project_id: str, + filter_: Optional[str] = None, + order_by: Optional[str] = None, + retry: Optional[Retry] = None, + timeout: Optional[float] = None, + metadata: Optional[Sequence[Tuple[str, str]]] = None, + ) -> ListWorkflowsPager: + """ + Lists Workflows in a given project and location. + The default order is not specified. + + :param filter_: Filter to restrict results to specific workflows. + :type filter_: str + :param order_by: Comma-separated list of fields that + specify the order of the results. Default sorting order for a field is ascending. + To specify descending order for a field, append a "desc" suffix. + If not specified, the results will be returned in an unspecified order. + :type order_by: str + :param project_id: Required. The ID of the Google Cloud project the workflows belong to. + :type project_id: str + :param location: Required. The GCP region in which to handle the request. + :type location: str + :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be + retried. + :type retry: google.api_core.retry.Retry + :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if + ``retry`` is specified, the timeout applies to each individual attempt. + :type timeout: float + :param metadata: Additional metadata that is provided to the method.
+ :type metadata: Sequence[Tuple[str, str]] + """ + metadata = metadata or () + client = self.get_workflows_client() + parent = f"projects/{project_id}/locations/{location}" + + return client.list_workflows( + request={"parent": parent, "filter": filter_, "order_by": order_by}, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + @GoogleBaseHook.fallback_to_default_project_id + def create_execution( + self, + workflow_id: str, + location: str, + project_id: str, + execution: Dict, + retry: Optional[Retry] = None, + timeout: Optional[float] = None, + metadata: Optional[Sequence[Tuple[str, str]]] = None, + ) -> Execution: + """ + Creates a new execution using the latest revision of + the given workflow. + + :param execution: Required. Input parameters of the execution represented as a dictionary. + :type execution: Dict + :param workflow_id: Required. The ID of the workflow. + :type workflow_id: str + :param project_id: Required. The ID of the Google Cloud project the workflow belongs to. + :type project_id: str + :param location: Required. The GCP region in which to handle the request. + :type location: str + :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be + retried. + :type retry: google.api_core.retry.Retry + :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if + ``retry`` is specified, the timeout applies to each individual attempt. + :type timeout: float + :param metadata: Additional metadata that is provided to the method. + :type metadata: Sequence[Tuple[str, str]] + """ + metadata = metadata or () + client = self.get_executions_client() + parent = f"projects/{project_id}/locations/{location}/workflows/{workflow_id}" + return client.create_execution( + request={"parent": parent, "execution": execution}, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + @GoogleBaseHook.fallback_to_default_project_id + def get_execution( + self, + workflow_id: str, + execution_id: str, + location: str, + project_id: str, + retry: Optional[Retry] = None, + timeout: Optional[float] = None, + metadata: Optional[Sequence[Tuple[str, str]]] = None, + ) -> Execution: + """ + Returns an execution for the given ``workflow_id`` and ``execution_id``. + + :param workflow_id: Required. The ID of the workflow. + :type workflow_id: str + :param execution_id: Required. The ID of the execution. + :type execution_id: str + :param project_id: Required. The ID of the Google Cloud project the workflow belongs to. + :type project_id: str + :param location: Required. The GCP region in which to handle the request. + :type location: str + :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be + retried. + :type retry: google.api_core.retry.Retry + :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if + ``retry`` is specified, the timeout applies to each individual attempt. + :type timeout: float + :param metadata: Additional metadata that is provided to the method.
+ :type metadata: Sequence[Tuple[str, str]] + """ + metadata = metadata or () + client = self.get_executions_client() + name = f"projects/{project_id}/locations/{location}/workflows/{workflow_id}/executions/{execution_id}" + return client.get_execution(request={"name": name}, retry=retry, timeout=timeout, metadata=metadata) + + @GoogleBaseHook.fallback_to_default_project_id + def cancel_execution( + self, + workflow_id: str, + execution_id: str, + location: str, + project_id: str, + retry: Optional[Retry] = None, + timeout: Optional[float] = None, + metadata: Optional[Sequence[Tuple[str, str]]] = None, + ) -> Execution: + """ + Cancels an execution using the given ``workflow_id`` and ``execution_id``. + + :param workflow_id: Required. The ID of the workflow. + :type workflow_id: str + :param execution_id: Required. The ID of the execution. + :type execution_id: str + :param project_id: Required. The ID of the Google Cloud project the workflow belongs to. + :type project_id: str + :param location: Required. The GCP region in which to handle the request. + :type location: str + :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be + retried. + :type retry: google.api_core.retry.Retry + :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if + ``retry`` is specified, the timeout applies to each individual attempt. + :type timeout: float + :param metadata: Additional metadata that is provided to the method. + :type metadata: Sequence[Tuple[str, str]] + """ + metadata = metadata or () + client = self.get_executions_client() + name = f"projects/{project_id}/locations/{location}/workflows/{workflow_id}/executions/{execution_id}" + return client.cancel_execution( + request={"name": name}, retry=retry, timeout=timeout, metadata=metadata + ) + + @GoogleBaseHook.fallback_to_default_project_id + def list_executions( + self, + workflow_id: str, + location: str, + project_id: str, + retry: Optional[Retry] = None, + timeout: Optional[float] = None, + metadata: Optional[Sequence[Tuple[str, str]]] = None, + ) -> ListExecutionsPager: + """ + Returns a list of executions which belong to the + workflow with the given name. The method returns + executions of all workflow revisions. Returned + executions are ordered by their start time (newest + first). + + :param workflow_id: Required. The ID of the workflow whose executions should be listed. + :type workflow_id: str + :param project_id: Required. The ID of the Google Cloud project the workflow belongs to. + :type project_id: str + :param location: Required. The GCP region in which to handle the request. + :type location: str + :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be + retried. + :type retry: google.api_core.retry.Retry + :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if + ``retry`` is specified, the timeout applies to each individual attempt. + :type timeout: float + :param metadata: Additional metadata that is provided to the method. + :type metadata: Sequence[Tuple[str, str]] + """ + metadata = metadata or () + client = self.get_executions_client() + parent = f"projects/{project_id}/locations/{location}/workflows/{workflow_id}" + return client.list_executions( + request={"parent": parent}, retry=retry, timeout=timeout, metadata=metadata + )
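Taken together, the methods of the new WorkflowsHook wrap the v1beta Workflows and Executions clients behind the usual provider-hook surface: resource names are assembled from project_id, location, and resource IDs, and every call accepts retry/timeout/metadata. A minimal usage sketch, not part of the patch, assuming placeholder project, location, and workflow IDs:

from airflow.providers.google.cloud.hooks.workflows import WorkflowsHook

hook = WorkflowsHook(gcp_conn_id="google_cloud_default")

# create_workflow returns a long-running Operation; block until the deployment finishes.
operation = hook.create_workflow(
    workflow={"source_contents": "main:\n  steps:\n    - done:\n        return: 'hello'"},
    workflow_id="my-workflow",
    location="us-central1",
    project_id="my-project",
)
operation.result()

# Run the deployed workflow and inspect the resulting Execution.
execution = hook.create_execution(
    workflow_id="my-workflow",
    location="us-central1",
    project_id="my-project",
    execution={"argument": "{}"},
)
print(execution.name, execution.state)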
diff --git a/airflow/providers/google/cloud/log/stackdriver_task_handler.py b/airflow/providers/google/cloud/log/stackdriver_task_handler.py index be75fcd973e7f..5479185c5eb22 100644 --- a/airflow/providers/google/cloud/log/stackdriver_task_handler.py +++ b/airflow/providers/google/cloud/log/stackdriver_task_handler.py @@ -21,9 +21,12 @@ from cached_property import cached_property from google.api_core.gapic_v1.client_info import ClientInfo +from google.auth.credentials import Credentials from google.cloud import logging as gcp_logging +from google.cloud.logging import Resource from google.cloud.logging.handlers.transports import BackgroundThreadTransport, Transport -from google.cloud.logging.resource import Resource +from google.cloud.logging_v2.services.logging_service_v2 import LoggingServiceV2Client +from google.cloud.logging_v2.types import ListLogEntriesRequest, ListLogEntriesResponse from airflow import version from airflow.models import TaskInstance @@ -99,13 +102,19 @@ def __init__( self.resource: Resource = resource self.labels: Optional[Dict[str, str]] = labels self.task_instance_labels: Optional[Dict[str, str]] = {} + self.task_instance_hostname = 'default-hostname' @cached_property - def _client(self) -> gcp_logging.Client: - """Google Cloud Library API client""" + def _credentials_and_project(self) -> Tuple[Credentials, str]: credentials, project = get_credentials_and_project_id( key_path=self.gcp_key_path, scopes=self.scopes, disable_logging=True ) + return credentials, project + + @property + def _client(self) -> gcp_logging.Client: + """The Cloud Library API client""" + credentials, project = self._credentials_and_project client = gcp_logging.Client( credentials=credentials, project=project, @@ -113,6 +122,16 @@ def _client(self) -> gcp_logging.Client: ) return client + @property + def _logging_service_client(self) -> LoggingServiceV2Client: + """The Cloud logging service v2 client.""" + credentials, _ = self._credentials_and_project + client = LoggingServiceV2Client( + credentials=credentials, + client_info=ClientInfo(client_library_version='airflow_v' + version.version), + ) + return client + @cached_property def _transport(self) -> Transport: """Object responsible for sending data to Stackdriver""" @@ -146,10 +165,11 @@ def set_context(self, task_instance: TaskInstance) -> None: :type task_instance: :class:`airflow.models.TaskInstance` """ self.task_instance_labels = self._task_instance_to_labels(task_instance) + self.task_instance_hostname = task_instance.hostname def read( self, task_instance: TaskInstance, try_number: Optional[int] = None, metadata: Optional[Dict] = None - ) -> Tuple[List[str], List[Dict]]: + ) -> Tuple[List[Tuple[Tuple[str, str]]], List[Dict[str, str]]]: """ Read logs of given task instance from Stackdriver logging. @@ -160,12 +180,14 @@ def read( :type try_number: Optional[int] :param metadata: log metadata. It is used for streaming log reading and auto-tailing.
:type metadata: Dict - :return: a tuple of list of logs and list of metadata - :rtype: Tuple[List[str], List[Dict]] + :return: a tuple of ( + a list of one-element tuples, each holding a (hostname, logs) pair, + and a list of metadata dicts) + :rtype: Tuple[List[Tuple[Tuple[str, str]]], List[Dict[str, str]]] """ if try_number is not None and try_number < 1: - logs = [f"Error fetching the logs. Try number {try_number} is invalid."] - return logs, [{"end_of_log": "true"}] + logs = f"Error fetching the logs. Try number {try_number} is invalid." + return [((self.task_instance_hostname, logs),)], [{"end_of_log": "true"}] if not metadata: metadata = {} @@ -188,7 +210,7 @@ def read( if next_page_token: new_metadata['next_page_token'] = next_page_token - return [messages], [new_metadata] + return [((self.task_instance_hostname, messages),)], [new_metadata] def _prepare_log_filter(self, ti_labels: Dict[str, str]) -> str: """ @@ -210,9 +232,10 @@ def escale_label_value(value: str) -> str: escaped_value = value.replace("\\", "\\\\").replace('"', '\\"') return f'"{escaped_value}"' + _, project = self._credentials_and_project log_filters = [ f'resource.type={escale_label_value(self.resource.type)}', - f'logName="projects/{self._client.project}/logs/{self.name}"', + f'logName="projects/{project}/logs/{self.name}"', ] for key, value in self.resource.labels.items(): @@ -252,6 +275,8 @@ def _read_logs( log_filter=log_filter, page_token=next_page_token ) messages.append(new_messages) + if not messages: + break end_of_log = True next_page_token = None @@ -271,15 +296,21 @@ def _read_single_logs_page(self, log_filter: str, page_token: Optional[str] = No :return: Downloaded logs and next page token :rtype: Tuple[str, str] """ - entries = self._client.list_entries(filter_=log_filter, page_token=page_token) - page = next(entries.pages) - next_page_token = entries.next_page_token + _, project = self._credentials_and_project + request = ListLogEntriesRequest( + resource_names=[f'projects/{project}'], + filter=log_filter, + page_token=page_token, + order_by='timestamp asc', + page_size=1000, + ) + response = self._logging_service_client.list_log_entries(request=request) + page: ListLogEntriesResponse = next(response.pages) messages = [] - for entry in page: - if "message" in entry.payload: - messages.append(entry.payload["message"]) - - return "\n".join(messages), next_page_token + for entry in page.entries: + if "message" in entry.json_payload: + messages.append(entry.json_payload["message"]) + return "\n".join(messages), page.next_page_token @classmethod def _task_instance_to_labels(cls, ti: TaskInstance) -> Dict[str, str]: @@ -315,7 +346,7 @@ def get_external_log_url(self, task_instance: TaskInstance, try_number: int) -> :return: URL to the external log collection service :rtype: str """ - project_id = self._client.project + _, project_id = self._credentials_and_project ti_labels = self._task_instance_to_labels(task_instance) ti_labels[self.LABEL_TRY_NUMBER] = str(try_number) @@ -331,3 +362,6 @@ def get_external_log_url(self, task_instance: TaskInstance, try_number: int) -> url = f"{self.LOG_VIEWER_BASE_URL}?{urlencode(url_query_string)}" return url + + def close(self) -> None: + self._transport.flush()
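The operator hunks that follow largely swap `google.protobuf.json_format.MessageToDict` for the `to_dict` helper that proto-plus message classes expose, since the migrated clients return proto-plus objects rather than raw protobuf messages. A minimal sketch of the difference, not part of the patch, using a locally constructed Model as a stand-in for an API response:

from google.cloud.automl_v1beta1 import Model

# 1.x style: from google.protobuf.json_format import MessageToDict; MessageToDict(response)
# 2.x style: the proto-plus class converts its own instances; keys keep their snake_case names.
model = Model(display_name="my-model")  # stand-in for e.g. operation.result()
model_dict = Model.to_dict(model)
assert model_dict["display_name"] == "my-model"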
diff --git a/airflow/providers/google/cloud/operators/automl.py b/airflow/providers/google/cloud/operators/automl.py index a1823cdd754a8..cdf79b0ecde37 100644 --- a/airflow/providers/google/cloud/operators/automl.py +++ b/airflow/providers/google/cloud/operators/automl.py @@ -22,7 +22,14 @@ from typing import Dict, List, Optional, Sequence, Tuple, Union from google.api_core.retry import Retry -from google.protobuf.json_format import MessageToDict +from google.cloud.automl_v1beta1 import ( + BatchPredictResult, + ColumnSpec, + Dataset, + Model, + PredictResponse, + TableSpec, +) from airflow.models import BaseOperator from airflow.providers.google.cloud.hooks.automl import CloudAutoMLHook @@ -113,7 +120,7 @@ def execute(self, context): timeout=self.timeout, metadata=self.metadata, ) - result = MessageToDict(operation.result()) + result = Model.to_dict(operation.result()) model_id = hook.extract_object_id(result) self.log.info("Model created: %s", model_id) @@ -212,7 +219,7 @@ def execute(self, context): timeout=self.timeout, metadata=self.metadata, ) - return MessageToDict(result) + return PredictResponse.to_dict(result) class AutoMLBatchPredictOperator(BaseOperator): @@ -324,7 +331,7 @@ def execute(self, context): timeout=self.timeout, metadata=self.metadata, ) - result = MessageToDict(operation.result()) + result = BatchPredictResult.to_dict(operation.result()) self.log.info("Batch prediction ready.") return result @@ -414,7 +421,7 @@ def execute(self, context): timeout=self.timeout, metadata=self.metadata, ) - result = MessageToDict(result) + result = Dataset.to_dict(result) dataset_id = hook.extract_object_id(result) self.log.info("Creating completed. Dataset id: %s", dataset_id) @@ -513,9 +520,8 @@ def execute(self, context): timeout=self.timeout, metadata=self.metadata, ) - result = MessageToDict(operation.result()) + operation.result() self.log.info("Import completed") - return result class AutoMLTablesListColumnSpecsOperator(BaseOperator): @@ -627,7 +633,7 @@ def execute(self, context): timeout=self.timeout, metadata=self.metadata, ) - result = [MessageToDict(spec) for spec in page_iterator] + result = [ColumnSpec.to_dict(spec) for spec in page_iterator] self.log.info("Columns specs obtained.") return result @@ -718,7 +724,7 @@ def execute(self, context): metadata=self.metadata, ) self.log.info("Dataset updated.") - return MessageToDict(result) + return Dataset.to_dict(result) class AutoMLGetModelOperator(BaseOperator): @@ -804,7 +810,7 @@ def execute(self, context): timeout=self.timeout, metadata=self.metadata, ) - return MessageToDict(result) + return Model.to_dict(result) class AutoMLDeleteModelOperator(BaseOperator): @@ -890,8 +896,7 @@ def execute(self, context): timeout=self.timeout, metadata=self.metadata, ) - result = MessageToDict(operation.result()) - return result + operation.result() class AutoMLDeployModelOperator(BaseOperator): @@ -991,9 +996,8 @@ def execute(self, context): timeout=self.timeout, metadata=self.metadata, ) - result = MessageToDict(operation.result()) + operation.result() self.log.info("Model deployed.") - return result class AutoMLTablesListTableSpecsOperator(BaseOperator): @@ -1092,7 +1096,7 @@ def execute(self, context): timeout=self.timeout, metadata=self.metadata, ) - result = [MessageToDict(spec) for spec in page_iterator] + result = [TableSpec.to_dict(spec) for spec in page_iterator] self.log.info(result) self.log.info("Table specs obtained.") return result @@ -1173,7 +1177,7 @@ def execute(self, context): timeout=self.timeout, metadata=self.metadata, ) - result = [MessageToDict(dataset) for dataset in page_iterator] + result = [Dataset.to_dict(dataset) for dataset in page_iterator] self.log.info("Datasets obtained.") self.xcom_push( diff --git a/airflow/providers/google/cloud/operators/bigquery_dts.py
b/airflow/providers/google/cloud/operators/bigquery_dts.py index e941bd4c4be53..656fc775bb003 100644 --- a/airflow/providers/google/cloud/operators/bigquery_dts.py +++ b/airflow/providers/google/cloud/operators/bigquery_dts.py @@ -19,7 +19,7 @@ from typing import Optional, Sequence, Tuple, Union from google.api_core.retry import Retry -from google.protobuf.json_format import MessageToDict +from google.cloud.bigquery_datatransfer_v1 import StartManualTransferRunsResponse, TransferConfig from airflow.models import BaseOperator from airflow.providers.google.cloud.hooks.bigquery_dts import BiqQueryDataTransferServiceHook, get_object_id @@ -110,7 +110,7 @@ def execute(self, context): timeout=self.timeout, metadata=self.metadata, ) - result = MessageToDict(response) + result = TransferConfig.to_dict(response) self.log.info("Created DTS transfer config %s", get_object_id(result)) self.xcom_push(context, key="transfer_config_id", value=get_object_id(result)) return result @@ -289,10 +289,8 @@ def execute(self, context): timeout=self.timeout, metadata=self.metadata, ) - result = MessageToDict(response) - run_id = None - if 'runs' in result: - run_id = get_object_id(result['runs'][0]) - self.xcom_push(context, key="run_id", value=run_id) + result = StartManualTransferRunsResponse.to_dict(response) + run_id = get_object_id(result['runs'][0]) + self.xcom_push(context, key="run_id", value=run_id) self.log.info('Transfer run %s submitted successfully.', run_id) return result diff --git a/airflow/providers/google/cloud/operators/cloud_build.py b/airflow/providers/google/cloud/operators/cloud_build.py index 101c04dbf2dc5..b4c0cf72bf7f9 100644 --- a/airflow/providers/google/cloud/operators/cloud_build.py +++ b/airflow/providers/google/cloud/operators/cloud_build.py @@ -22,7 +22,10 @@ from typing import Any, Dict, Optional, Sequence, Union from urllib.parse import unquote, urlparse -import yaml +try: + import airflow.utils.yaml as yaml +except ImportError: + import yaml from airflow.exceptions import AirflowException from airflow.models import BaseOperator diff --git a/airflow/providers/google/cloud/operators/cloud_memorystore.py b/airflow/providers/google/cloud/operators/cloud_memorystore.py index 0ac264049cf41..64a6251992048 100644 --- a/airflow/providers/google/cloud/operators/cloud_memorystore.py +++ b/airflow/providers/google/cloud/operators/cloud_memorystore.py @@ -20,9 +20,8 @@ from google.api_core.retry import Retry from google.cloud.memcache_v1beta2.types import cloud_memcache -from google.cloud.redis_v1.gapic.enums import FailoverInstanceRequest -from google.cloud.redis_v1.types import FieldMask, InputConfig, Instance, OutputConfig -from google.protobuf.json_format import MessageToDict +from google.cloud.redis_v1 import FailoverInstanceRequest, InputConfig, Instance, OutputConfig +from google.protobuf.field_mask_pb2 import FieldMask from airflow.models import BaseOperator from airflow.providers.google.cloud.hooks.cloud_memorystore import ( @@ -134,7 +133,7 @@ def execute(self, context: dict): timeout=self.timeout, metadata=self.metadata, ) - return MessageToDict(result) + return Instance.to_dict(result) class CloudMemorystoreDeleteInstanceOperator(BaseOperator): @@ -492,7 +491,7 @@ def execute(self, context: dict): timeout=self.timeout, metadata=self.metadata, ) - return MessageToDict(result) + return Instance.to_dict(result) class CloudMemorystoreImportOperator(BaseOperator): @@ -677,7 +676,7 @@ def execute(self, context: dict): timeout=self.timeout, metadata=self.metadata, ) - instances = 
[MessageToDict(a) for a in result] + instances = [Instance.to_dict(a) for a in result] return instances diff --git a/airflow/providers/google/cloud/operators/datacatalog.py b/airflow/providers/google/cloud/operators/datacatalog.py index 00b276556fe39..4b0da05577bca 100644 --- a/airflow/providers/google/cloud/operators/datacatalog.py +++ b/airflow/providers/google/cloud/operators/datacatalog.py @@ -19,17 +19,16 @@ from google.api_core.exceptions import AlreadyExists, NotFound from google.api_core.retry import Retry -from google.cloud.datacatalog_v1beta1 import DataCatalogClient +from google.cloud.datacatalog_v1beta1 import DataCatalogClient, SearchCatalogResult from google.cloud.datacatalog_v1beta1.types import ( Entry, EntryGroup, - FieldMask, SearchCatalogRequest, Tag, TagTemplate, TagTemplateField, ) -from google.protobuf.json_format import MessageToDict +from google.protobuf.field_mask_pb2 import FieldMask from airflow.models import BaseOperator from airflow.providers.google.cloud.hooks.datacatalog import CloudDataCatalogHook @@ -153,7 +152,7 @@ def execute(self, context: dict): _, _, entry_id = result.name.rpartition("/") self.log.info("Current entry_id ID: %s", entry_id) context["task_instance"].xcom_push(key="entry_id", value=entry_id) - return MessageToDict(result) + return Entry.to_dict(result) class CloudDataCatalogCreateEntryGroupOperator(BaseOperator): @@ -268,7 +267,7 @@ def execute(self, context: dict): _, _, entry_group_id = result.name.rpartition("/") self.log.info("Current entry group ID: %s", entry_group_id) context["task_instance"].xcom_push(key="entry_group_id", value=entry_group_id) - return MessageToDict(result) + return EntryGroup.to_dict(result) class CloudDataCatalogCreateTagOperator(BaseOperator): @@ -404,7 +403,7 @@ def execute(self, context: dict): _, _, tag_id = tag.name.rpartition("/") self.log.info("Current Tag ID: %s", tag_id) context["task_instance"].xcom_push(key="tag_id", value=tag_id) - return MessageToDict(tag) + return Tag.to_dict(tag) class CloudDataCatalogCreateTagTemplateOperator(BaseOperator): @@ -516,7 +515,7 @@ def execute(self, context: dict): _, _, tag_template = result.name.rpartition("/") self.log.info("Current Tag ID: %s", tag_template) context["task_instance"].xcom_push(key="tag_template_id", value=tag_template) - return MessageToDict(result) + return TagTemplate.to_dict(result) class CloudDataCatalogCreateTagTemplateFieldOperator(BaseOperator): @@ -638,7 +637,7 @@ def execute(self, context: dict): self.log.info("Current Tag ID: %s", self.tag_template_field_id) context["task_instance"].xcom_push(key="tag_template_field_id", value=self.tag_template_field_id) - return MessageToDict(result) + return TagTemplateField.to_dict(result) class CloudDataCatalogDeleteEntryOperator(BaseOperator): @@ -1216,7 +1215,7 @@ def execute(self, context: dict) -> dict: timeout=self.timeout, metadata=self.metadata, ) - return MessageToDict(result) + return Entry.to_dict(result) class CloudDataCatalogGetEntryGroupOperator(BaseOperator): @@ -1234,8 +1233,8 @@ class CloudDataCatalogGetEntryGroupOperator(BaseOperator): :param read_mask: The fields to return. If not set or empty, all fields are returned. 
If a dict is provided, it must be of the same form as the protobuf message - :class:`~google.cloud.datacatalog_v1beta1.types.FieldMask` - :type read_mask: Union[Dict, google.cloud.datacatalog_v1beta1.types.FieldMask] + :class:`~google.protobuf.field_mask_pb2.FieldMask` + :type read_mask: Union[Dict, google.protobuf.field_mask_pb2.FieldMask] :param project_id: The ID of the Google Cloud project that owns the entry group. If set to ``None`` or missing, the default project_id from the Google Cloud connection is used. :type project_id: Optional[str] @@ -1312,7 +1311,7 @@ def execute(self, context: dict) -> dict: timeout=self.timeout, metadata=self.metadata, ) - return MessageToDict(result) + return EntryGroup.to_dict(result) class CloudDataCatalogGetTagTemplateOperator(BaseOperator): @@ -1399,7 +1398,7 @@ def execute(self, context: dict) -> dict: timeout=self.timeout, metadata=self.metadata, ) - return MessageToDict(result) + return TagTemplate.to_dict(result) class CloudDataCatalogListTagsOperator(BaseOperator): @@ -1501,7 +1500,7 @@ def execute(self, context: dict) -> list: timeout=self.timeout, metadata=self.metadata, ) - return [MessageToDict(item) for item in result] + return [Tag.to_dict(item) for item in result] class CloudDataCatalogLookupEntryOperator(BaseOperator): @@ -1589,7 +1588,7 @@ def execute(self, context: dict) -> dict: timeout=self.timeout, metadata=self.metadata, ) - return MessageToDict(result) + return Entry.to_dict(result) class CloudDataCatalogRenameTagTemplateFieldOperator(BaseOperator): @@ -1809,7 +1808,7 @@ def execute(self, context: dict) -> list: timeout=self.timeout, metadata=self.metadata, ) - return [MessageToDict(item) for item in result] + return [SearchCatalogResult.to_dict(item) for item in result] class CloudDataCatalogUpdateEntryOperator(BaseOperator): @@ -1829,8 +1828,8 @@ class CloudDataCatalogUpdateEntryOperator(BaseOperator): updated. If a dict is provided, it must be of the same form as the protobuf message - :class:`~google.cloud.datacatalog_v1beta1.types.FieldMask` - :type update_mask: Union[Dict, google.cloud.datacatalog_v1beta1.types.FieldMask] + :class:`~google.protobuf.field_mask_pb2.FieldMask` + :type update_mask: Union[Dict, google.protobuf.field_mask_pb2.FieldMask] :param location: Required. The location of the entry to update. :type location: str :param entry_group: The entry group ID for the entry that is being updated. @@ -1940,8 +1939,8 @@ class CloudDataCatalogUpdateTagOperator(BaseOperator): updated. Currently the only modifiable field is the field ``fields``. If a dict is provided, it must be of the same form as the protobuf message - :class:`~google.cloud.datacatalog_v1beta1.types.FieldMask` - :type update_mask: Union[Dict, google.cloud.datacatalog_v1beta1.types.FieldMask] + :class:`~google.protobuf.field_mask_pb2.FieldMask` + :type update_mask: Union[Dict, google.protobuf.field_mask_pb2.FieldMask] :param location: Required. The location of the tag to rename. :type location: str :param entry_group: The entry group ID for the tag that is being updated. @@ -2060,8 +2059,8 @@ class CloudDataCatalogUpdateTagTemplateOperator(BaseOperator): If absent or empty, all of the allowed fields above will be updated. 
If a dict is provided, it must be of the same form as the protobuf message - :class:`~google.cloud.datacatalog_v1beta1.types.FieldMask` - :type update_mask: Union[Dict, google.cloud.datacatalog_v1beta1.types.FieldMask] + :class:`~google.protobuf.field_mask_pb2.FieldMask` + :type update_mask: Union[Dict, google.protobuf.field_mask_pb2.FieldMask] :param location: Required. The location of the tag template to rename. :type location: str :param tag_template_id: Optional. The tag template ID for the entry that is being updated. @@ -2172,8 +2171,8 @@ class CloudDataCatalogUpdateTagTemplateFieldOperator(BaseOperator): Therefore, enum values can only be added, existing enum values cannot be deleted nor renamed. If a dict is provided, it must be of the same form as the protobuf message - :class:`~google.cloud.datacatalog_v1beta1.types.FieldMask` - :type update_mask: Union[Dict, google.cloud.datacatalog_v1beta1.types.FieldMask] + :class:`~google.protobuf.field_mask_pb2.FieldMask` + :type update_mask: Union[Dict, google.protobuf.field_mask_pb2.FieldMask] :param tag_template_field_name: Optional. The name of the tag template field to rename. :type tag_template_field_name: str :param location: Optional. The location of the tag to rename. diff --git a/airflow/providers/google/cloud/operators/dataflow.py b/airflow/providers/google/cloud/operators/dataflow.py index 49863dcf4bce5..513fea36a7965 100644 --- a/airflow/providers/google/cloud/operators/dataflow.py +++ b/airflow/providers/google/cloud/operators/dataflow.py @@ -16,15 +16,20 @@ # specific language governing permissions and limitations # under the License. """This module contains Google Dataflow operators.""" - import copy import re +import warnings from contextlib import ExitStack from enum import Enum from typing import Any, Dict, List, Optional, Sequence, Union from airflow.models import BaseOperator -from airflow.providers.google.cloud.hooks.dataflow import DEFAULT_DATAFLOW_LOCATION, DataflowHook +from airflow.providers.apache.beam.hooks.beam import BeamHook, BeamRunnerType +from airflow.providers.google.cloud.hooks.dataflow import ( + DEFAULT_DATAFLOW_LOCATION, + DataflowHook, + process_line_and_extract_dataflow_job_id_callback, +) from airflow.providers.google.cloud.hooks.gcs import GCSHook from airflow.utils.decorators import apply_defaults from airflow.version import version @@ -38,9 +43,131 @@ class CheckJobRunning(Enum): WaitForRun - wait for job to finish and then continue with new job """ - IgnoreJob = 1 - FinishIfRunning = 2 - WaitForRun = 3 + IgnoreJob = 1 # pylint: disable=invalid-name + FinishIfRunning = 2 # pylint: disable=invalid-name + WaitForRun = 3 # pylint: disable=invalid-name + + +class DataflowConfiguration: + """Dataflow configuration that can be passed to + :py:class:`~airflow.providers.apache.beam.operators.beam.BeamRunJavaPipelineOperator` and + :py:class:`~airflow.providers.apache.beam.operators.beam.BeamRunPythonPipelineOperator`. + + :param job_name: The 'jobName' to use when executing the DataFlow job + (templated). This ends up being set in the pipeline options, so any entry + with key ``'jobName'`` or ``'job_name'``in ``options`` will be overwritten. + :type job_name: str + :param append_job_name: True if unique suffix has to be appended to job name. + :type append_job_name: bool + :param project_id: Optional, the Google Cloud project ID in which to start a job. + If set to None or missing, the default project_id from the Google Cloud connection is used. + :type project_id: str + :param location: Job location. 
+ :type location: str + :param gcp_conn_id: The connection ID to use connecting to Google Cloud. + :type gcp_conn_id: str + :param delegate_to: The account to impersonate using domain-wide delegation of authority, + if any. For this to work, the service account making the request must have + domain-wide delegation enabled. + :type delegate_to: str + :param poll_sleep: The time in seconds to sleep between polling Google + Cloud Platform for the dataflow job status while the job is in the + JOB_STATE_RUNNING state. + :type poll_sleep: int + :param impersonation_chain: Optional service account to impersonate using short-term + credentials, or chained list of accounts required to get the access_token + of the last account in the list, which will be impersonated in the request. + If set as a string, the account must grant the originating account + the Service Account Token Creator IAM role. + If set as a sequence, the identities from the list must grant + Service Account Token Creator IAM role to the directly preceding identity, with first + account from the list granting this role to the originating account (templated). + :type impersonation_chain: Union[str, Sequence[str]] + :param drain_pipeline: Optional, set to True if you want to stop streaming job by draining it + instead of canceling during killing task instance. See: + https://cloud.google.com/dataflow/docs/guides/stopping-a-pipeline + :type drain_pipeline: bool + :param cancel_timeout: How long (in seconds) operator should wait for the pipeline to be + successfully cancelled when task is being killed. + :type cancel_timeout: Optional[int] + :param wait_until_finished: (Optional) + If True, wait for the end of pipeline execution before exiting. + If False, only submits job. + If None, default behavior. + + The default behavior depends on the type of pipeline: + + * for the streaming pipeline, wait for jobs to start, + * for the batch pipeline, wait for the jobs to complete. + + .. warning:: + + You cannot call the ``PipelineResult.wait_until_finish`` method in your pipeline code for the operator + to work properly, i.e. you must use asynchronous execution. Otherwise, your pipeline will + always wait until finished. For more information, look at: + `Asynchronous execution + `__ + + The process of starting the Dataflow job in Airflow consists of two steps: + + * running a subprocess and reading the stdout/stderr logs for the job id. + * a loop waiting for the end of the job with the ID from the previous step. + This loop checks the status of the job. + + Step two is started just after step one has finished, so if you have wait_until_finished in your + pipeline code, step two will not start until the process stops. When this process stops, + step two will run, but it will only execute one iteration as the job will be in a terminal state. + + If you do not call the wait_for_pipeline method in your pipeline but pass wait_until_finish=True + to the operator, the second loop will wait for the job's terminal state. + + If you do not call the wait_for_pipeline method in your pipeline and pass wait_until_finish=False + to the operator, the second loop will check once whether the job is in a terminal state and exit the loop. + :type wait_until_finished: Optional[bool] + :param multiple_jobs: If pipeline creates multiple jobs then monitor all jobs. Supported only by + :py:class:`~airflow.providers.apache.beam.operators.beam.BeamRunJavaPipelineOperator` + :type multiple_jobs: boolean + :param check_if_running: Before running job, validate that a previous run is not in progress. + IgnoreJob = do not check if running. + FinishIfRunning = if job is running, finish with nothing. + WaitForRun = wait until job finished and then run job. + Supported only by: + :py:class:`~airflow.providers.apache.beam.operators.beam.BeamRunJavaPipelineOperator` + :type check_if_running: CheckJobRunning + """ + + template_fields = ["job_name", "location"] + + def __init__( + self, + *, + job_name: Optional[str] = "{{task.task_id}}", + append_job_name: bool = True, + project_id: Optional[str] = None, + location: Optional[str] = DEFAULT_DATAFLOW_LOCATION, + gcp_conn_id: str = "google_cloud_default", + delegate_to: Optional[str] = None, + poll_sleep: int = 10, + impersonation_chain: Optional[Union[str, Sequence[str]]] = None, + drain_pipeline: bool = False, + cancel_timeout: Optional[int] = 5 * 60, + wait_until_finished: Optional[bool] = None, + multiple_jobs: Optional[bool] = None, + check_if_running: CheckJobRunning = CheckJobRunning.WaitForRun, + ) -> None: + self.job_name = job_name + self.append_job_name = append_job_name + self.project_id = project_id + self.location = location + self.gcp_conn_id = gcp_conn_id + self.delegate_to = delegate_to + self.poll_sleep = poll_sleep + self.impersonation_chain = impersonation_chain + self.drain_pipeline = drain_pipeline + self.cancel_timeout = cancel_timeout + self.wait_until_finished = wait_until_finished + self.multiple_jobs = multiple_jobs + self.check_if_running = check_if_running
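A short sketch of how this configuration object is meant to be consumed, not part of the patch and assuming the BeamRunPythonPipelineOperator interface introduced alongside this change; the bucket, project, and pipeline file names are placeholders:

from airflow.providers.apache.beam.operators.beam import BeamRunPythonPipelineOperator
from airflow.providers.google.cloud.operators.dataflow import DataflowConfiguration

# The Beam operator runs the pipeline; the DataflowConfiguration only takes effect
# when the runner is DataflowRunner.
start_python_pipeline = BeamRunPythonPipelineOperator(
    task_id="start_python_pipeline",
    runner="DataflowRunner",
    py_file="gs://my-bucket/pipelines/wordcount.py",
    pipeline_options={"tempLocation": "gs://my-bucket/temp/"},
    py_requirements=["apache-beam[gcp]"],
    py_interpreter="python3",
    dataflow_config=DataflowConfiguration(
        job_name="{{task.task_id}}",
        project_id="my-project",
        location="us-central1",
        wait_until_finished=False,
    ),
)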
# pylint: disable=too-many-instance-attributes @@ -49,6 +176,9 @@ class DataflowCreateJavaJobOperator(BaseOperator): Start a Java Cloud DataFlow batch job. The parameters of the operation will be passed to the job. + This class is deprecated. + Please use `providers.apache.beam.operators.beam.BeamRunJavaPipelineOperator`. + **Example**: :: default_args = { @@ -235,6 +365,14 @@ def __init__( wait_until_finished: Optional[bool] = None, **kwargs, ) -> None: + # TODO: Remove one day + warnings.warn( + "The `{cls}` operator is deprecated, please use " + "`providers.apache.beam.operators.beam.BeamRunJavaPipelineOperator` instead."
+ "".format(cls=self.__class__.__name__), + DeprecationWarning, + stacklevel=2, + ) super().__init__(**kwargs) dataflow_default_options = dataflow_default_options or {} @@ -257,62 +395,83 @@ def __init__( self.cancel_timeout = cancel_timeout self.wait_until_finished = wait_until_finished self.job_id = None - self.hook = None + self.beam_hook: Optional[BeamHook] = None + self.dataflow_hook: Optional[DataflowHook] = None def execute(self, context): - self.hook = DataflowHook( + """Execute the Apache Beam Pipeline.""" + self.beam_hook = BeamHook(runner=BeamRunnerType.DataflowRunner) + self.dataflow_hook = DataflowHook( gcp_conn_id=self.gcp_conn_id, delegate_to=self.delegate_to, poll_sleep=self.poll_sleep, cancel_timeout=self.cancel_timeout, wait_until_finished=self.wait_until_finished, ) - dataflow_options = copy.copy(self.dataflow_default_options) - dataflow_options.update(self.options) - is_running = False - if self.check_if_running != CheckJobRunning.IgnoreJob: - is_running = self.hook.is_job_dataflow_running( # type: ignore[attr-defined] - name=self.job_name, - variables=dataflow_options, - project_id=self.project_id, - location=self.location, - ) - while is_running and self.check_if_running == CheckJobRunning.WaitForRun: - is_running = self.hook.is_job_dataflow_running( # type: ignore[attr-defined] - name=self.job_name, - variables=dataflow_options, - project_id=self.project_id, - location=self.location, - ) + job_name = self.dataflow_hook.build_dataflow_job_name(job_name=self.job_name) + pipeline_options = copy.deepcopy(self.dataflow_default_options) + + pipeline_options["jobName"] = self.job_name + pipeline_options["project"] = self.project_id or self.dataflow_hook.project_id + pipeline_options["region"] = self.location + pipeline_options.update(self.options) + pipeline_options.setdefault("labels", {}).update( + {"airflow-version": "v" + version.replace(".", "-").replace("+", "-")} + ) + pipeline_options.update(self.options) - if not is_running: - with ExitStack() as exit_stack: - if self.jar.lower().startswith("gs://"): - gcs_hook = GCSHook(self.gcp_conn_id, self.delegate_to) - tmp_gcs_file = exit_stack.enter_context( # pylint: disable=no-member - gcs_hook.provide_file(object_url=self.jar) - ) - self.jar = tmp_gcs_file.name - - def set_current_job_id(job_id): - self.job_id = job_id - - self.hook.start_java_dataflow( # type: ignore[attr-defined] - job_name=self.job_name, - variables=dataflow_options, - jar=self.jar, - job_class=self.job_class, - append_job_name=True, - multiple_jobs=self.multiple_jobs, - on_new_job_id_callback=set_current_job_id, - project_id=self.project_id, - location=self.location, + def set_current_job_id(job_id): + self.job_id = job_id + + process_line_callback = process_line_and_extract_dataflow_job_id_callback( + on_new_job_id_callback=set_current_job_id + ) + + with ExitStack() as exit_stack: + if self.jar.lower().startswith("gs://"): + gcs_hook = GCSHook(self.gcp_conn_id, self.delegate_to) + tmp_gcs_file = exit_stack.enter_context( # pylint: disable=no-member + gcs_hook.provide_file(object_url=self.jar) ) + self.jar = tmp_gcs_file.name + + is_running = False + if self.check_if_running != CheckJobRunning.IgnoreJob: + is_running = ( + self.dataflow_hook.is_job_dataflow_running( # pylint: disable=no-value-for-parameter + name=self.job_name, + variables=pipeline_options, + ) + ) + while is_running and self.check_if_running == CheckJobRunning.WaitForRun: + # pylint: disable=no-value-for-parameter + is_running = self.dataflow_hook.is_job_dataflow_running( + 
name=self.job_name, + variables=pipeline_options, + ) + if not is_running: + pipeline_options["jobName"] = job_name + self.beam_hook.start_java_pipeline( + variables=pipeline_options, + jar=self.jar, + job_class=self.job_class, + process_line_callback=process_line_callback, + ) + self.dataflow_hook.wait_for_done( # pylint: disable=no-value-for-parameter + job_name=job_name, + location=self.location, + job_id=self.job_id, + multiple_jobs=self.multiple_jobs, + ) + + return {"job_id": self.job_id} def on_kill(self) -> None: self.log.info("On kill.") if self.job_id: - self.hook.cancel_job(job_id=self.job_id, project_id=self.project_id) + self.dataflow_hook.cancel_job( + job_id=self.job_id, project_id=self.project_id or self.dataflow_hook.project_id + ) # pylint: disable=too-many-instance-attributes @@ -558,7 +717,7 @@ class DataflowStartFlexTemplateOperator(BaseOperator): domain-wide delegation enabled. :type delegate_to: str :param drain_pipeline: Optional, set to True if want to stop streaming job by draining it - instead of canceling during during killing task instance. See: + instead of canceling during killing task instance. See: https://cloud.google.com/dataflow/docs/guides/stopping-a-pipeline :type drain_pipeline: bool :param cancel_timeout: How long (in seconds) operator should wait for the pipeline to be @@ -684,7 +843,7 @@ class DataflowStartSqlJobOperator(BaseOperator): domain-wide delegation enabled. :type delegate_to: str :param drain_pipeline: Optional, set to True if want to stop streaming job by draining it - instead of canceling during during killing task instance. See: + instead of canceling during killing task instance. See: https://cloud.google.com/dataflow/docs/guides/stopping-a-pipeline :type drain_pipeline: bool """ @@ -760,6 +919,9 @@ class DataflowCreatePythonJobOperator(BaseOperator): high-level options, for instances, project and zone information, which apply to all dataflow operators in the DAG. + This class is deprecated. + Please use `providers.apache.beam.operators.beam.BeamRunPythonPipelineOperator`. + .. seealso:: For more detail on job submission have a look at the reference: https://cloud.google.com/dataflow/pipelines/specifying-exec-params @@ -820,7 +982,7 @@ class DataflowCreatePythonJobOperator(BaseOperator): JOB_STATE_RUNNING state. :type poll_sleep: int :param drain_pipeline: Optional, set to True if want to stop streaming job by draining it - instead of canceling during during killing task instance. See: + instead of canceling during killing task instance. See: https://cloud.google.com/dataflow/docs/guides/stopping-a-pipeline :type drain_pipeline: bool :param cancel_timeout: How long (in seconds) operator should wait for the pipeline to be @@ -886,7 +1048,14 @@ def __init__( # pylint: disable=too-many-arguments wait_until_finished: Optional[bool] = None, **kwargs, ) -> None: - + # TODO: Remove one day + warnings.warn( + "The `{cls}` operator is deprecated, please use " + "`providers.apache.beam.operators.beam.BeamRunPythonPipelineOperator` instead." 
+ "".format(cls=self.__class__.__name__), + DeprecationWarning, + stacklevel=2, + ) super().__init__(**kwargs) self.py_file = py_file @@ -909,10 +1078,40 @@ def __init__( # pylint: disable=too-many-arguments self.cancel_timeout = cancel_timeout self.wait_until_finished = wait_until_finished self.job_id = None - self.hook: Optional[DataflowHook] = None + self.beam_hook: Optional[BeamHook] = None + self.dataflow_hook: Optional[DataflowHook] = None def execute(self, context): """Execute the python dataflow job.""" + self.beam_hook = BeamHook(runner=BeamRunnerType.DataflowRunner) + self.dataflow_hook = DataflowHook( + gcp_conn_id=self.gcp_conn_id, + delegate_to=self.delegate_to, + poll_sleep=self.poll_sleep, + impersonation_chain=None, + drain_pipeline=self.drain_pipeline, + cancel_timeout=self.cancel_timeout, + wait_until_finished=self.wait_until_finished, + ) + + job_name = self.dataflow_hook.build_dataflow_job_name(job_name=self.job_name) + pipeline_options = self.dataflow_default_options.copy() + pipeline_options["job_name"] = job_name + pipeline_options["project"] = self.project_id or self.dataflow_hook.project_id + pipeline_options["region"] = self.location + pipeline_options.update(self.options) + + # Convert argument names from lowerCamelCase to snake case. + camel_to_snake = lambda name: re.sub(r"[A-Z]", lambda x: "_" + x.group(0).lower(), name) + formatted_pipeline_options = {camel_to_snake(key): pipeline_options[key] for key in pipeline_options} + + def set_current_job_id(job_id): + self.job_id = job_id + + process_line_callback = process_line_and_extract_dataflow_job_id_callback( + on_new_job_id_callback=set_current_job_id + ) + with ExitStack() as exit_stack: if self.py_file.lower().startswith("gs://"): gcs_hook = GCSHook(self.gcp_conn_id, self.delegate_to) @@ -921,38 +1120,28 @@ def execute(self, context): ) self.py_file = tmp_gcs_file.name - self.hook = DataflowHook( - gcp_conn_id=self.gcp_conn_id, - delegate_to=self.delegate_to, - poll_sleep=self.poll_sleep, - drain_pipeline=self.drain_pipeline, - cancel_timeout=self.cancel_timeout, - wait_until_finished=self.wait_until_finished, - ) - dataflow_options = self.dataflow_default_options.copy() - dataflow_options.update(self.options) - # Convert argument names from lowerCamelCase to snake case. 
- camel_to_snake = lambda name: re.sub(r"[A-Z]", lambda x: "_" + x.group(0).lower(), name) - formatted_options = {camel_to_snake(key): dataflow_options[key] for key in dataflow_options} - - def set_current_job_id(job_id): - self.job_id = job_id - - self.hook.start_python_dataflow( # type: ignore[attr-defined] - job_name=self.job_name, - variables=formatted_options, - dataflow=self.py_file, + self.beam_hook.start_python_pipeline( + variables=formatted_pipeline_options, + py_file=self.py_file, py_options=self.py_options, py_interpreter=self.py_interpreter, py_requirements=self.py_requirements, py_system_site_packages=self.py_system_site_packages, - on_new_job_id_callback=set_current_job_id, - project_id=self.project_id, + process_line_callback=process_line_callback, + ) + + self.dataflow_hook.wait_for_done( # pylint: disable=no-value-for-parameter + job_name=job_name, location=self.location, + job_id=self.job_id, + multiple_jobs=False, ) - return {"job_id": self.job_id} + + return {"job_id": self.job_id} def on_kill(self) -> None: self.log.info("On kill.") if self.job_id: - self.hook.cancel_job(job_id=self.job_id, project_id=self.project_id) + self.dataflow_hook.cancel_job( + job_id=self.job_id, project_id=self.project_id or self.dataflow_hook.project_id + ) diff --git a/airflow/providers/google/cloud/operators/dataproc.py b/airflow/providers/google/cloud/operators/dataproc.py index ac93915ea9a46..7843164c11422 100644 --- a/airflow/providers/google/cloud/operators/dataproc.py +++ b/airflow/providers/google/cloud/operators/dataproc.py @@ -17,7 +17,6 @@ # under the License. # """This module contains Google Dataproc operators.""" -# pylint: disable=C0302 import inspect import ntpath @@ -31,12 +30,9 @@ from google.api_core.exceptions import AlreadyExists, NotFound from google.api_core.retry import Retry, exponential_sleep_generator -from google.cloud.dataproc_v1beta2.types import ( # pylint: disable=no-name-in-module - Cluster, - Duration, - FieldMask, -) -from google.protobuf.json_format import MessageToDict +from google.cloud.dataproc_v1beta2 import Cluster # pylint: disable=no-name-in-module +from google.protobuf.duration_pb2 import Duration +from google.protobuf.field_mask_pb2 import FieldMask from airflow.exceptions import AirflowException from airflow.models import BaseOperator @@ -562,7 +558,7 @@ def _get_cluster(self, hook: DataprocHook) -> Cluster: ) def _handle_error_state(self, hook: DataprocHook, cluster: Cluster) -> None: - if cluster.status.state != cluster.status.ERROR: + if cluster.status.state != cluster.status.State.ERROR: return self.log.info("Cluster is in ERROR state") gcs_uri = hook.diagnose_cluster( @@ -590,7 +586,7 @@ def _wait_for_cluster_in_creating_state(self, hook: DataprocHook) -> Cluster: time_left = self.timeout cluster = self._get_cluster(hook) for time_to_sleep in exponential_sleep_generator(initial=10, maximum=120): - if cluster.status.state != cluster.status.CREATING: + if cluster.status.state != cluster.status.State.CREATING: break if time_left < 0: raise AirflowException(f"Cluster {self.cluster_name} is still CREATING state, aborting") @@ -613,18 +609,18 @@ def execute(self, context) -> dict: # Check if cluster is not in ERROR state self._handle_error_state(hook, cluster) - if cluster.status.state == cluster.status.CREATING: - # Wait for cluster to be be created + if cluster.status.state == cluster.status.State.CREATING: + # Wait for cluster to be created cluster = self._wait_for_cluster_in_creating_state(hook) self._handle_error_state(hook, cluster) - elif 
cluster.status.state == cluster.status.DELETING: + elif cluster.status.state == cluster.status.State.DELETING: # Wait for cluster to be deleted self._wait_for_cluster_in_deleting_state(hook) # Create new cluster cluster = self._create_cluster(hook) self._handle_error_state(hook, cluster) - return MessageToDict(cluster) + return Cluster.to_dict(cluster) class DataprocScaleClusterOperator(BaseOperator): @@ -1790,7 +1786,7 @@ class DataprocSubmitJobOperator(BaseOperator): :type wait_timeout: int """ - template_fields = ('project_id', 'location', 'job', 'impersonation_chain') + template_fields = ('project_id', 'location', 'job', 'impersonation_chain', 'request_id') template_fields_renderers = {"job": "json"} @apply_defaults @@ -1876,14 +1872,14 @@ class DataprocUpdateClusterOperator(BaseOperator): example, to change the number of workers in a cluster to 5, the ``update_mask`` parameter would be specified as ``config.worker_config.num_instances``, and the ``PATCH`` request body would specify the new value. If a dict is provided, it must be of the same form as the protobuf message - :class:`~google.cloud.dataproc_v1beta2.types.FieldMask` - :type update_mask: Union[Dict, google.cloud.dataproc_v1beta2.types.FieldMask] + :class:`~google.protobuf.field_mask_pb2.FieldMask` + :type update_mask: Union[Dict, google.protobuf.field_mask_pb2.FieldMask] :param graceful_decommission_timeout: Optional. Timeout for graceful YARN decommissioning. Graceful decommissioning allows removing nodes from the cluster without interrupting jobs in progress. Timeout specifies how long to wait for jobs in progress to finish before forcefully removing nodes (and potentially interrupting jobs). Default timeout is 0 (for forceful decommission), and the maximum allowed timeout is 1 day. - :type graceful_decommission_timeout: Union[Dict, google.cloud.dataproc_v1beta2.types.Duration] + :type graceful_decommission_timeout: Union[Dict, google.protobuf.duration_pb2.Duration] :param request_id: Optional. A unique id used to identify the request. If the server receives two ``UpdateClusterRequest`` requests with the same id, then the second request will be ignored and the first ``google.longrunning.Operation`` created and stored in the backend is returned. @@ -1909,7 +1905,7 @@ class DataprocUpdateClusterOperator(BaseOperator): :type impersonation_chain: Union[str, Sequence[str]] """ - template_fields = ('impersonation_chain',) + template_fields = ('impersonation_chain', 'cluster_name') @apply_defaults def __init__( # pylint: disable=too-many-arguments diff --git a/airflow/providers/google/cloud/operators/functions.py b/airflow/providers/google/cloud/operators/functions.py index c2ea87d789ac8..451748433271d 100644 --- a/airflow/providers/google/cloud/operators/functions.py +++ b/airflow/providers/google/cloud/operators/functions.py @@ -397,6 +397,7 @@ def execute(self, context): status = e.resp.status if status == 404: self.log.info('The function does not exist in this project') + return None else: self.log.error('An error occurred. 
Exiting.') raise e diff --git a/airflow/providers/google/cloud/operators/pubsub.py b/airflow/providers/google/cloud/operators/pubsub.py index e8cf735146b49..23b545f8fb3ce 100644 --- a/airflow/providers/google/cloud/operators/pubsub.py +++ b/airflow/providers/google/cloud/operators/pubsub.py @@ -29,7 +29,6 @@ ReceivedMessage, RetryPolicy, ) -from google.protobuf.json_format import MessageToDict from airflow.models import BaseOperator from airflow.providers.google.cloud.hooks.pubsub import PubSubHook @@ -958,6 +957,6 @@ def _default_message_callback( :param context: same as in `execute` :return: value to be saved to XCom. """ - messages_json = [MessageToDict(m) for m in pulled_messages] + messages_json = [ReceivedMessage.to_dict(m) for m in pulled_messages] return messages_json diff --git a/airflow/providers/google/cloud/operators/stackdriver.py b/airflow/providers/google/cloud/operators/stackdriver.py index dc864665c2663..7289b1227bc5e 100644 --- a/airflow/providers/google/cloud/operators/stackdriver.py +++ b/airflow/providers/google/cloud/operators/stackdriver.py @@ -19,6 +19,7 @@ from typing import Optional, Sequence, Union from google.api_core.gapic_v1.method import DEFAULT +from google.cloud.monitoring_v3 import AlertPolicy, NotificationChannel from airflow.models import BaseOperator from airflow.providers.google.cloud.hooks.stackdriver import StackdriverHook @@ -125,7 +126,7 @@ def __init__( def execute(self, context): self.log.info( - 'List Alert Policies: Project id: %s Format: %s Filter: %s Order By: %s Page Size: %d', + 'List Alert Policies: Project id: %s Format: %s Filter: %s Order By: %s Page Size: %s', self.project_id, self.format_, self.filter_, @@ -139,7 +140,7 @@ def execute(self, context): impersonation_chain=self.impersonation_chain, ) - return self.hook.list_alert_policies( + result = self.hook.list_alert_policies( project_id=self.project_id, format_=self.format_, filter_=self.filter_, @@ -149,6 +150,7 @@ def execute(self, context): timeout=self.timeout, metadata=self.metadata, ) + return [AlertPolicy.to_dict(policy) for policy in result] class StackdriverEnableAlertPoliciesOperator(BaseOperator): @@ -614,7 +616,7 @@ def __init__( def execute(self, context): self.log.info( - 'List Notification Channels: Project id: %s Format: %s Filter: %s Order By: %s Page Size: %d', + 'List Notification Channels: Project id: %s Format: %s Filter: %s Order By: %s Page Size: %s', self.project_id, self.format_, self.filter_, @@ -627,7 +629,7 @@ def execute(self, context): delegate_to=self.delegate_to, impersonation_chain=self.impersonation_chain, ) - return self.hook.list_notification_channels( + channels = self.hook.list_notification_channels( format_=self.format_, project_id=self.project_id, filter_=self.filter_, @@ -637,6 +639,8 @@ def execute(self, context): timeout=self.timeout, metadata=self.metadata, ) + result = [NotificationChannel.to_dict(channel) for channel in channels] + return result class StackdriverEnableNotificationChannelsOperator(BaseOperator): diff --git a/airflow/providers/google/cloud/operators/tasks.py b/airflow/providers/google/cloud/operators/tasks.py index 6598d6664b9b5..2834b324c8aeb 100644 --- a/airflow/providers/google/cloud/operators/tasks.py +++ b/airflow/providers/google/cloud/operators/tasks.py @@ -25,9 +25,8 @@ from google.api_core.exceptions import AlreadyExists from google.api_core.retry import Retry -from google.cloud.tasks_v2 import enums -from google.cloud.tasks_v2.types import FieldMask, Queue, Task -from google.protobuf.json_format import 
MessageToDict +from google.cloud.tasks_v2.types import Queue, Task +from google.protobuf.field_mask_pb2 import FieldMask from airflow.models import BaseOperator from airflow.providers.google.cloud.hooks.tasks import CloudTasksHook @@ -136,7 +135,7 @@ def execute(self, context): metadata=self.metadata, ) - return MessageToDict(queue) + return Queue.to_dict(queue) class CloudTasksQueueUpdateOperator(BaseOperator): @@ -159,7 +158,7 @@ class CloudTasksQueueUpdateOperator(BaseOperator): :param update_mask: A mast used to specify which fields of the queue are being updated. If empty, then all fields will be updated. If a dict is provided, it must be of the same form as the protobuf message. - :type update_mask: dict or google.cloud.tasks_v2.types.FieldMask + :type update_mask: dict or google.protobuf.field_mask_pb2.FieldMask :param retry: (Optional) A retry object used to retry requests. If None is specified, requests will not be retried. :type retry: google.api_core.retry.Retry @@ -237,7 +236,7 @@ def execute(self, context): timeout=self.timeout, metadata=self.metadata, ) - return MessageToDict(queue) + return Queue.to_dict(queue) class CloudTasksQueueGetOperator(BaseOperator): @@ -320,7 +319,7 @@ def execute(self, context): timeout=self.timeout, metadata=self.metadata, ) - return MessageToDict(queue) + return Queue.to_dict(queue) class CloudTasksQueuesListOperator(BaseOperator): @@ -408,7 +407,7 @@ def execute(self, context): timeout=self.timeout, metadata=self.metadata, ) - return [MessageToDict(q) for q in queues] + return [Queue.to_dict(q) for q in queues] class CloudTasksQueueDeleteOperator(BaseOperator): @@ -571,7 +570,7 @@ def execute(self, context): timeout=self.timeout, metadata=self.metadata, ) - return MessageToDict(queue) + return Queue.to_dict(queue) class CloudTasksQueuePauseOperator(BaseOperator): @@ -646,7 +645,7 @@ def execute(self, context): gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain, ) - queues = hook.pause_queue( + queue = hook.pause_queue( location=self.location, queue_name=self.queue_name, project_id=self.project_id, @@ -654,7 +653,7 @@ def execute(self, context): timeout=self.timeout, metadata=self.metadata, ) - return [MessageToDict(q) for q in queues] + return Queue.to_dict(queue) class CloudTasksQueueResumeOperator(BaseOperator): @@ -737,7 +736,7 @@ def execute(self, context): timeout=self.timeout, metadata=self.metadata, ) - return MessageToDict(queue) + return Queue.to_dict(queue) class CloudTasksTaskCreateOperator(BaseOperator): @@ -803,7 +802,7 @@ def __init__( # pylint: disable=too-many-arguments task: Union[Dict, Task], project_id: Optional[str] = None, task_name: Optional[str] = None, - response_view: Optional[enums.Task.View] = None, + response_view: Optional = None, retry: Optional[Retry] = None, timeout: Optional[float] = None, metadata: Optional[MetaData] = None, @@ -840,7 +839,7 @@ def execute(self, context): timeout=self.timeout, metadata=self.metadata, ) - return MessageToDict(task) + return Task.to_dict(task) class CloudTasksTaskGetOperator(BaseOperator): @@ -900,7 +899,7 @@ def __init__( queue_name: str, task_name: str, project_id: Optional[str] = None, - response_view: Optional[enums.Task.View] = None, + response_view: Optional = None, retry: Optional[Retry] = None, timeout: Optional[float] = None, metadata: Optional[MetaData] = None, @@ -935,7 +934,7 @@ def execute(self, context): timeout=self.timeout, metadata=self.metadata, ) - return MessageToDict(task) + return Task.to_dict(task) class 
CloudTasksTasksListOperator(BaseOperator): @@ -994,7 +993,7 @@ def __init__( location: str, queue_name: str, project_id: Optional[str] = None, - response_view: Optional[enums.Task.View] = None, + response_view: Optional = None, page_size: Optional[int] = None, retry: Optional[Retry] = None, timeout: Optional[float] = None, @@ -1030,7 +1029,7 @@ def execute(self, context): timeout=self.timeout, metadata=self.metadata, ) - return [MessageToDict(t) for t in tasks] + return [Task.to_dict(t) for t in tasks] class CloudTasksTaskDeleteOperator(BaseOperator): @@ -1134,7 +1133,7 @@ class CloudTasksTaskRunOperator(BaseOperator): :type project_id: str :param response_view: (Optional) This field specifies which subset of the Task will be returned. - :type response_view: google.cloud.tasks_v2.enums.Task.View + :type response_view: google.cloud.tasks_v2.Task.View :param retry: (Optional) A retry object used to retry requests. If None is specified, requests will not be retried. :type retry: google.api_core.retry.Retry @@ -1176,7 +1175,7 @@ def __init__( queue_name: str, task_name: str, project_id: Optional[str] = None, - response_view: Optional[enums.Task.View] = None, + response_view: Optional = None, retry: Optional[Retry] = None, timeout: Optional[float] = None, metadata: Optional[MetaData] = None, @@ -1211,4 +1210,4 @@ def execute(self, context): timeout=self.timeout, metadata=self.metadata, ) - return MessageToDict(task) + return Task.to_dict(task) diff --git a/airflow/providers/google/cloud/operators/workflows.py b/airflow/providers/google/cloud/operators/workflows.py new file mode 100644 index 0000000000000..c7fc96d844d8f --- /dev/null +++ b/airflow/providers/google/cloud/operators/workflows.py @@ -0,0 +1,714 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import hashlib +import json +import re +import uuid +from datetime import datetime, timedelta +from typing import Dict, Optional, Sequence, Tuple, Union + +import pytz +from google.api_core.exceptions import AlreadyExists +from google.api_core.retry import Retry + +# pylint: disable=no-name-in-module +from google.cloud.workflows.executions_v1beta import Execution +from google.cloud.workflows_v1beta import Workflow + +# pylint: enable=no-name-in-module +from google.protobuf.field_mask_pb2 import FieldMask + +from airflow.models import BaseOperator +from airflow.providers.google.cloud.hooks.workflows import WorkflowsHook + + +class WorkflowsCreateWorkflowOperator(BaseOperator): + """ + Creates a new workflow. If a workflow with the specified name + already exists in the specified project and location, the long + running operation will return + [ALREADY_EXISTS][google.rpc.Code.ALREADY_EXISTS] error. + + .. 
seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:WorkflowsCreateWorkflowOperator` + + :param workflow: Required. Workflow to be created. + :type workflow: Dict + :param workflow_id: Required. The ID of the workflow to be created. + :type workflow_id: str + :param project_id: Required. The ID of the Google Cloud project the cluster belongs to. + :type project_id: str + :param location: Required. The GCP region in which to handle the request. + :type location: str + :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be + retried. + :type retry: google.api_core.retry.Retry + :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if + ``retry`` is specified, the timeout applies to each individual attempt. + :type timeout: float + :param metadata: Additional metadata that is provided to the method. + :type metadata: Sequence[Tuple[str, str]] + """ + + template_fields = ("location", "workflow", "workflow_id") + template_fields_renderers = {"workflow": "json"} + + def __init__( + self, + *, + workflow: Dict, + workflow_id: str, + location: str, + project_id: Optional[str] = None, + retry: Optional[Retry] = None, + timeout: Optional[float] = None, + metadata: Optional[Sequence[Tuple[str, str]]] = None, + gcp_conn_id: str = "google_cloud_default", + force_rerun: bool = False, + impersonation_chain: Optional[Union[str, Sequence[str]]] = None, + **kwargs, + ): + super().__init__(**kwargs) + + self.workflow = workflow + self.workflow_id = workflow_id + self.location = location + self.project_id = project_id + self.retry = retry + self.timeout = timeout + self.metadata = metadata + self.gcp_conn_id = gcp_conn_id + self.impersonation_chain = impersonation_chain + self.force_rerun = force_rerun + + def _workflow_id(self, context): + if self.workflow_id and not self.force_rerun: + # If users provide workflow id then assuring the idempotency + # is on their side + return self.workflow_id + + if self.force_rerun: + hash_base = str(uuid.uuid4()) + else: + hash_base = json.dumps(self.workflow, sort_keys=True) + + # We are limited by allowed length of workflow_id so + # we use hash of whole information + exec_date = context['execution_date'].isoformat() + base = f"airflow_{self.dag_id}_{self.task_id}_{exec_date}_{hash_base}" + workflow_id = hashlib.md5(base.encode()).hexdigest() + return re.sub(r"[:\-+.]", "_", workflow_id) + + def execute(self, context): + hook = WorkflowsHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain) + workflow_id = self._workflow_id(context) + + self.log.info("Creating workflow") + try: + operation = hook.create_workflow( + workflow=self.workflow, + workflow_id=workflow_id, + location=self.location, + project_id=self.project_id, + retry=self.retry, + timeout=self.timeout, + metadata=self.metadata, + ) + workflow = operation.result() + except AlreadyExists: + workflow = hook.get_workflow( + workflow_id=workflow_id, + location=self.location, + project_id=self.project_id, + retry=self.retry, + timeout=self.timeout, + metadata=self.metadata, + ) + return Workflow.to_dict(workflow) + + +class WorkflowsUpdateWorkflowOperator(BaseOperator): + """ + Updates an existing workflow. + Running this method has no impact on already running + executions of the workflow. A new revision of the + workflow may be created as a result of a successful + update operation. 
In that case, such revision will be used in new workflow executions.
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:WorkflowsUpdateWorkflowOperator`
+
+    :param workflow_id: Required. The ID of the workflow to be updated.
+    :type workflow_id: str
+    :param location: Required. The GCP region in which to handle the request.
+    :type location: str
+    :param project_id: Required. The ID of the Google Cloud project the cluster belongs to.
+    :type project_id: str
+    :param update_mask: List of fields to be updated. If not present,
+        the entire workflow will be updated.
+    :type update_mask: FieldMask
+    :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be
+        retried.
+    :type retry: google.api_core.retry.Retry
+    :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
+        ``retry`` is specified, the timeout applies to each individual attempt.
+    :type timeout: float
+    :param metadata: Additional metadata that is provided to the method.
+    :type metadata: Sequence[Tuple[str, str]]
+    """
+
+    template_fields = ("workflow_id", "update_mask")
+    template_fields_renderers = {"update_mask": "json"}
+
+    def __init__(
+        self,
+        *,
+        workflow_id: str,
+        location: str,
+        project_id: Optional[str] = None,
+        update_mask: Optional[FieldMask] = None,
+        retry: Optional[Retry] = None,
+        timeout: Optional[float] = None,
+        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        gcp_conn_id: str = "google_cloud_default",
+        impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
+        **kwargs,
+    ):
+        super().__init__(**kwargs)
+
+        self.workflow_id = workflow_id
+        self.location = location
+        self.project_id = project_id
+        self.update_mask = update_mask
+        self.retry = retry
+        self.timeout = timeout
+        self.metadata = metadata
+        self.gcp_conn_id = gcp_conn_id
+        self.impersonation_chain = impersonation_chain
+
+    def execute(self, context):
+        hook = WorkflowsHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)
+
+        workflow = hook.get_workflow(
+            workflow_id=self.workflow_id,
+            project_id=self.project_id,
+            location=self.location,
+            retry=self.retry,
+            timeout=self.timeout,
+            metadata=self.metadata,
+        )
+        self.log.info("Updating workflow")
+        operation = hook.update_workflow(
+            workflow=workflow,
+            update_mask=self.update_mask,
+            retry=self.retry,
+            timeout=self.timeout,
+            metadata=self.metadata,
+        )
+        workflow = operation.result()
+        return Workflow.to_dict(workflow)
+
+
+class WorkflowsDeleteWorkflowOperator(BaseOperator):
+    """
+    Deletes a workflow with the specified name.
+    This method also cancels and deletes all running
+    executions of the workflow.
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:WorkflowsDeleteWorkflowOperator`
+
+    :param workflow_id: Required. The ID of the workflow to be deleted.
+    :type workflow_id: str
+    :param project_id: Required. The ID of the Google Cloud project the cluster belongs to.
+    :type project_id: str
+    :param location: Required. The GCP region in which to handle the request.
+    :type location: str
+    :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be
+        retried.
+    :type retry: google.api_core.retry.Retry
+    :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
+        ``retry`` is specified, the timeout applies to each individual attempt.
+    :type timeout: float
+    :param metadata: Additional metadata that is provided to the method.
+    :type metadata: Sequence[Tuple[str, str]]
+    """
+
+    template_fields = ("location", "workflow_id")
+
+    def __init__(
+        self,
+        *,
+        workflow_id: str,
+        location: str,
+        project_id: Optional[str] = None,
+        retry: Optional[Retry] = None,
+        timeout: Optional[float] = None,
+        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        gcp_conn_id: str = "google_cloud_default",
+        impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
+        **kwargs,
+    ):
+        super().__init__(**kwargs)
+
+        self.workflow_id = workflow_id
+        self.location = location
+        self.project_id = project_id
+        self.retry = retry
+        self.timeout = timeout
+        self.metadata = metadata
+        self.gcp_conn_id = gcp_conn_id
+        self.impersonation_chain = impersonation_chain
+
+    def execute(self, context):
+        hook = WorkflowsHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)
+        self.log.info("Deleting workflow %s", self.workflow_id)
+        operation = hook.delete_workflow(
+            workflow_id=self.workflow_id,
+            location=self.location,
+            project_id=self.project_id,
+            retry=self.retry,
+            timeout=self.timeout,
+            metadata=self.metadata,
+        )
+        operation.result()
+
+
+class WorkflowsListWorkflowsOperator(BaseOperator):
+    """
+    Lists Workflows in a given project and location.
+    The default order is not specified.
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:WorkflowsListWorkflowsOperator`
+
+    :param filter_: Filter to restrict results to specific workflows.
+    :type filter_: str
+    :param order_by: Comma-separated list of fields that
+        specify the order of the results. Default sorting order for a field is ascending.
+        To specify descending order for a field, append a "desc" suffix.
+        If not specified, the results will be returned in an unspecified order.
+    :type order_by: str
+    :param project_id: Required. The ID of the Google Cloud project the cluster belongs to.
+    :type project_id: str
+    :param location: Required. The GCP region in which to handle the request.
+    :type location: str
+    :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be
+        retried.
+    :type retry: google.api_core.retry.Retry
+    :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
+        ``retry`` is specified, the timeout applies to each individual attempt.
+    :type timeout: float
+    :param metadata: Additional metadata that is provided to the method.
+    :type metadata: Sequence[Tuple[str, str]]
+    """
+
+    template_fields = ("location", "order_by", "filter_")
+
+    def __init__(
+        self,
+        *,
+        location: str,
+        project_id: Optional[str] = None,
+        filter_: Optional[str] = None,
+        order_by: Optional[str] = None,
+        retry: Optional[Retry] = None,
+        timeout: Optional[float] = None,
+        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        gcp_conn_id: str = "google_cloud_default",
+        impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
+        **kwargs,
+    ):
+        super().__init__(**kwargs)
+
+        self.filter_ = filter_
+        self.order_by = order_by
+        self.location = location
+        self.project_id = project_id
+        self.retry = retry
+        self.timeout = timeout
+        self.metadata = metadata
+        self.gcp_conn_id = gcp_conn_id
+        self.impersonation_chain = impersonation_chain
+
+    def execute(self, context):
+        hook = WorkflowsHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)
+        self.log.info("Retrieving workflows")
+        workflows_iter = hook.list_workflows(
+            filter_=self.filter_,
+            order_by=self.order_by,
+            location=self.location,
+            project_id=self.project_id,
+            retry=self.retry,
+            timeout=self.timeout,
+            metadata=self.metadata,
+        )
+        return [Workflow.to_dict(w) for w in workflows_iter]
+
+
+class WorkflowsGetWorkflowOperator(BaseOperator):
+    """
+    Gets details of a single Workflow.
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:WorkflowsGetWorkflowOperator`
+
+    :param workflow_id: Required. The ID of the workflow to be retrieved.
+    :type workflow_id: str
+    :param project_id: Required. The ID of the Google Cloud project the cluster belongs to.
+    :type project_id: str
+    :param location: Required. The GCP region in which to handle the request.
+    :type location: str
+    :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be
+        retried.
+    :type retry: google.api_core.retry.Retry
+    :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
+        ``retry`` is specified, the timeout applies to each individual attempt.
+    :type timeout: float
+    :param metadata: Additional metadata that is provided to the method.
seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:WorkflowsCreateExecutionOperator` + + :param execution: Required. Execution to be created. + :type execution: Dict + :param workflow_id: Required. The ID of the workflow. + :type workflow_id: str + :param project_id: Required. The ID of the Google Cloud project the cluster belongs to. + :type project_id: str + :param location: Required. The GCP region in which to handle the request. + :type location: str + :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be + retried. + :type retry: google.api_core.retry.Retry + :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if + ``retry`` is specified, the timeout applies to each individual attempt. + :type timeout: float + :param metadata: Additional metadata that is provided to the method. + :type metadata: Sequence[Tuple[str, str]] + """ + + template_fields = ("location", "workflow_id", "execution") + template_fields_renderers = {"execution": "json"} + + def __init__( + self, + *, + workflow_id: str, + execution: Dict, + location: str, + project_id: Optional[str] = None, + retry: Optional[Retry] = None, + timeout: Optional[float] = None, + metadata: Optional[Sequence[Tuple[str, str]]] = None, + gcp_conn_id: str = "google_cloud_default", + impersonation_chain: Optional[Union[str, Sequence[str]]] = None, + **kwargs, + ): + super().__init__(**kwargs) + + self.workflow_id = workflow_id + self.execution = execution + self.location = location + self.project_id = project_id + self.retry = retry + self.timeout = timeout + self.metadata = metadata + self.gcp_conn_id = gcp_conn_id + self.impersonation_chain = impersonation_chain + + def execute(self, context): + hook = WorkflowsHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain) + self.log.info("Creating execution") + execution = hook.create_execution( + workflow_id=self.workflow_id, + execution=self.execution, + location=self.location, + project_id=self.project_id, + retry=self.retry, + timeout=self.timeout, + metadata=self.metadata, + ) + execution_id = execution.name.split("/")[-1] + self.xcom_push(context, key="execution_id", value=execution_id) + return Execution.to_dict(execution) + + +class WorkflowsCancelExecutionOperator(BaseOperator): + """ + Cancels an execution using the given ``workflow_id`` and ``execution_id``. + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:WorkflowsCancelExecutionOperator` + + :param workflow_id: Required. The ID of the workflow. + :type workflow_id: str + :param execution_id: Required. The ID of the execution. + :type execution_id: str + :param project_id: Required. The ID of the Google Cloud project the cluster belongs to. + :type project_id: str + :param location: Required. The GCP region in which to handle the request. + :type location: str + :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be + retried. + :type retry: google.api_core.retry.Retry + :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if + ``retry`` is specified, the timeout applies to each individual attempt. + :type timeout: float + :param metadata: Additional metadata that is provided to the method. 
+    :type metadata: Sequence[Tuple[str, str]]
+    """
+
+    template_fields = ("location", "workflow_id", "execution_id")
+
+    def __init__(
+        self,
+        *,
+        workflow_id: str,
+        execution_id: str,
+        location: str,
+        project_id: Optional[str] = None,
+        retry: Optional[Retry] = None,
+        timeout: Optional[float] = None,
+        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        gcp_conn_id: str = "google_cloud_default",
+        impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
+        **kwargs,
+    ):
+        super().__init__(**kwargs)
+
+        self.workflow_id = workflow_id
+        self.execution_id = execution_id
+        self.location = location
+        self.project_id = project_id
+        self.retry = retry
+        self.timeout = timeout
+        self.metadata = metadata
+        self.gcp_conn_id = gcp_conn_id
+        self.impersonation_chain = impersonation_chain
+
+    def execute(self, context):
+        hook = WorkflowsHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)
+        self.log.info("Canceling execution %s", self.execution_id)
+        execution = hook.cancel_execution(
+            workflow_id=self.workflow_id,
+            execution_id=self.execution_id,
+            location=self.location,
+            project_id=self.project_id,
+            retry=self.retry,
+            timeout=self.timeout,
+            metadata=self.metadata,
+        )
+        return Execution.to_dict(execution)
+
+
+class WorkflowsListExecutionsOperator(BaseOperator):
+    """
+    Returns a list of executions which belong to the
+    workflow with the given name. The method returns
+    executions of all workflow revisions. Returned
+    executions are ordered by their start time (newest
+    first).
+
+    .. seealso::
+        For more information on how to use this operator, take a look at the guide:
+        :ref:`howto/operator:WorkflowsListExecutionsOperator`
+
+    :param workflow_id: Required. The ID of the workflow whose executions should be listed.
+    :type workflow_id: str
+    :param start_date_filter: If passed, only executions newer than this date will be returned.
+        By default the operator returns executions from the last 60 minutes.
+    :type start_date_filter: datetime
+    :param project_id: Required. The ID of the Google Cloud project the cluster belongs to.
+    :type project_id: str
+    :param location: Required. The GCP region in which to handle the request.
+    :type location: str
+    :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be
+        retried.
+    :type retry: google.api_core.retry.Retry
+    :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
+        ``retry`` is specified, the timeout applies to each individual attempt.
+    :type timeout: float
+    :param metadata: Additional metadata that is provided to the method.
+ :type metadata: Sequence[Tuple[str, str]] + """ + + template_fields = ("location", "workflow_id") + + def __init__( + self, + *, + workflow_id: str, + location: str, + start_date_filter: Optional[datetime] = None, + project_id: Optional[str] = None, + retry: Optional[Retry] = None, + timeout: Optional[float] = None, + metadata: Optional[Sequence[Tuple[str, str]]] = None, + gcp_conn_id: str = "google_cloud_default", + impersonation_chain: Optional[Union[str, Sequence[str]]] = None, + **kwargs, + ): + super().__init__(**kwargs) + + self.workflow_id = workflow_id + self.location = location + self.start_date_filter = start_date_filter or datetime.now(tz=pytz.UTC) - timedelta(minutes=60) + self.project_id = project_id + self.retry = retry + self.timeout = timeout + self.metadata = metadata + self.gcp_conn_id = gcp_conn_id + self.impersonation_chain = impersonation_chain + + def execute(self, context): + hook = WorkflowsHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain) + self.log.info("Retrieving executions for workflow %s", self.workflow_id) + execution_iter = hook.list_executions( + workflow_id=self.workflow_id, + location=self.location, + project_id=self.project_id, + retry=self.retry, + timeout=self.timeout, + metadata=self.metadata, + ) + + return [Execution.to_dict(e) for e in execution_iter if e.start_time > self.start_date_filter] + + +class WorkflowsGetExecutionOperator(BaseOperator): + """ + Returns an execution for the given ``workflow_id`` and ``execution_id``. + + .. seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:WorkflowsGetExecutionOperator` + + :param workflow_id: Required. The ID of the workflow. + :type workflow_id: str + :param execution_id: Required. The ID of the execution. + :type execution_id: str + :param project_id: Required. The ID of the Google Cloud project the cluster belongs to. + :type project_id: str + :param location: Required. The GCP region in which to handle the request. + :type location: str + :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be + retried. + :type retry: google.api_core.retry.Retry + :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if + ``retry`` is specified, the timeout applies to each individual attempt. + :type timeout: float + :param metadata: Additional metadata that is provided to the method. 
+ :type metadata: Sequence[Tuple[str, str]] + """ + + template_fields = ("location", "workflow_id", "execution_id") + + def __init__( + self, + *, + workflow_id: str, + execution_id: str, + location: str, + project_id: Optional[str] = None, + retry: Optional[Retry] = None, + timeout: Optional[float] = None, + metadata: Optional[Sequence[Tuple[str, str]]] = None, + gcp_conn_id: str = "google_cloud_default", + impersonation_chain: Optional[Union[str, Sequence[str]]] = None, + **kwargs, + ): + super().__init__(**kwargs) + + self.workflow_id = workflow_id + self.execution_id = execution_id + self.location = location + self.project_id = project_id + self.retry = retry + self.timeout = timeout + self.metadata = metadata + self.gcp_conn_id = gcp_conn_id + self.impersonation_chain = impersonation_chain + + def execute(self, context): + hook = WorkflowsHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain) + self.log.info("Retrieving execution %s for workflow %s", self.execution_id, self.workflow_id) + execution = hook.get_execution( + workflow_id=self.workflow_id, + execution_id=self.execution_id, + location=self.location, + project_id=self.project_id, + retry=self.retry, + timeout=self.timeout, + metadata=self.metadata, + ) + return Execution.to_dict(execution) diff --git a/airflow/providers/google/cloud/sensors/bigquery_dts.py b/airflow/providers/google/cloud/sensors/bigquery_dts.py index 5b851ed5907db..49e124c842f26 100644 --- a/airflow/providers/google/cloud/sensors/bigquery_dts.py +++ b/airflow/providers/google/cloud/sensors/bigquery_dts.py @@ -19,7 +19,7 @@ from typing import Optional, Sequence, Set, Tuple, Union from google.api_core.retry import Retry -from google.protobuf.json_format import MessageToDict +from google.cloud.bigquery_datatransfer_v1 import TransferState from airflow.providers.google.cloud.hooks.bigquery_dts import BiqQueryDataTransferServiceHook from airflow.sensors.base import BaseSensorOperator @@ -81,7 +81,9 @@ def __init__( *, run_id: str, transfer_config_id: str, - expected_statuses: Union[Set[str], str] = 'SUCCEEDED', + expected_statuses: Union[ + Set[Union[str, TransferState, int]], str, TransferState, int + ] = TransferState.SUCCEEDED, project_id: Optional[str] = None, gcp_conn_id: str = "google_cloud_default", retry: Optional[Retry] = None, @@ -96,13 +98,29 @@ def __init__( self.retry = retry self.request_timeout = request_timeout self.metadata = metadata - self.expected_statuses = ( - {expected_statuses} if isinstance(expected_statuses, str) else expected_statuses - ) + self.expected_statuses = self._normalize_state_list(expected_statuses) self.project_id = project_id self.gcp_cloud_conn_id = gcp_conn_id self.impersonation_chain = impersonation_chain + def _normalize_state_list(self, states) -> Set[TransferState]: + states = {states} if isinstance(states, (str, TransferState, int)) else states + result = set() + for state in states: + if isinstance(state, str): + result.add(TransferState[state.upper()]) + elif isinstance(state, int): + result.add(TransferState(state)) + elif isinstance(state, TransferState): + result.add(state) + else: + raise TypeError( + f"Unsupported type. " + f"Expected: str, int, google.cloud.bigquery_datatransfer_v1.TransferState." 
+ f"Current type: {type(state)}" + ) + return result + def poke(self, context: dict) -> bool: hook = BiqQueryDataTransferServiceHook( gcp_conn_id=self.gcp_cloud_conn_id, @@ -116,8 +134,5 @@ def poke(self, context: dict) -> bool: timeout=self.request_timeout, metadata=self.metadata, ) - result = MessageToDict(run) - state = result["state"] - self.log.info("Status of %s run: %s", self.run_id, state) - - return state in self.expected_statuses + self.log.info("Status of %s run: %s", self.run_id, str(run.state)) + return run.state in self.expected_statuses diff --git a/airflow/providers/google/cloud/sensors/dataproc.py b/airflow/providers/google/cloud/sensors/dataproc.py index 1777a227113ec..93656df257173 100644 --- a/airflow/providers/google/cloud/sensors/dataproc.py +++ b/airflow/providers/google/cloud/sensors/dataproc.py @@ -65,14 +65,18 @@ def poke(self, context: dict) -> bool: job = hook.get_job(job_id=self.dataproc_job_id, location=self.location, project_id=self.project_id) state = job.status.state - if state == JobStatus.ERROR: + if state == JobStatus.State.ERROR: raise AirflowException(f'Job failed:\n{job}') - elif state in {JobStatus.CANCELLED, JobStatus.CANCEL_PENDING, JobStatus.CANCEL_STARTED}: + elif state in { + JobStatus.State.CANCELLED, + JobStatus.State.CANCEL_PENDING, + JobStatus.State.CANCEL_STARTED, + }: raise AirflowException(f'Job was cancelled:\n{job}') - elif JobStatus.DONE == state: + elif JobStatus.State.DONE == state: self.log.debug("Job %s completed successfully.", self.dataproc_job_id) return True - elif JobStatus.ATTEMPT_FAILURE == state: + elif JobStatus.State.ATTEMPT_FAILURE == state: self.log.debug("Job %s attempt has failed.", self.dataproc_job_id) self.log.info("Waiting for job %s to complete.", self.dataproc_job_id) diff --git a/airflow/providers/google/cloud/sensors/pubsub.py b/airflow/providers/google/cloud/sensors/pubsub.py index d6e0be51e79f9..ff1f811c5cf26 100644 --- a/airflow/providers/google/cloud/sensors/pubsub.py +++ b/airflow/providers/google/cloud/sensors/pubsub.py @@ -20,7 +20,6 @@ from typing import Any, Callable, Dict, List, Optional, Sequence, Union from google.cloud.pubsub_v1.types import ReceivedMessage -from google.protobuf.json_format import MessageToDict from airflow.providers.google.cloud.hooks.pubsub import PubSubHook from airflow.sensors.base import BaseSensorOperator @@ -200,6 +199,6 @@ def _default_message_callback( :param context: same as in `execute` :return: value to be saved to XCom. """ - messages_json = [MessageToDict(m) for m in pulled_messages] + messages_json = [ReceivedMessage.to_dict(m) for m in pulled_messages] return messages_json diff --git a/airflow/providers/google/cloud/sensors/workflows.py b/airflow/providers/google/cloud/sensors/workflows.py new file mode 100644 index 0000000000000..5950458b71f13 --- /dev/null +++ b/airflow/providers/google/cloud/sensors/workflows.py @@ -0,0 +1,123 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from typing import Optional, Sequence, Set, Tuple, Union
+
+from google.api_core.retry import Retry
+from google.cloud.workflows.executions_v1beta import Execution
+
+from airflow.exceptions import AirflowException
+from airflow.providers.google.cloud.hooks.workflows import WorkflowsHook
+from airflow.sensors.base import BaseSensorOperator
+
+
+class WorkflowExecutionSensor(BaseSensorOperator):
+    """
+    Checks the state of an execution for the given ``workflow_id`` and ``execution_id``.
+
+    :param workflow_id: Required. The ID of the workflow.
+    :type workflow_id: str
+    :param execution_id: Required. The ID of the execution.
+    :type execution_id: str
+    :param project_id: Required. The ID of the Google Cloud project the cluster belongs to.
+    :type project_id: str
+    :param location: Required. The GCP region in which to handle the request.
+    :type location: str
+    :param success_states: Execution states to be considered as successful; by default
+        only the ``SUCCEEDED`` state.
+    :type success_states: Set[Execution.State]
+    :param failure_states: Execution states to be considered as failures; by default
+        the ``FAILED`` and ``CANCELLED`` states.
+    :type failure_states: Set[Execution.State]
+    :param retry: A retry object used to retry requests. If ``None`` is specified, requests will not be
+        retried.
+    :type retry: google.api_core.retry.Retry
+    :param request_timeout: The amount of time, in seconds, to wait for the request to complete. Note that if
+        ``retry`` is specified, the timeout applies to each individual attempt.
+    :type request_timeout: float
+    :param metadata: Additional metadata that is provided to the method.
+    :type metadata: Sequence[Tuple[str, str]]
+    """
+
+    template_fields = ("location", "workflow_id", "execution_id")
+
+    def __init__(
+        self,
+        *,
+        workflow_id: str,
+        execution_id: str,
+        location: str,
+        project_id: str,
+        success_states: Optional[Set[Execution.State]] = None,
+        failure_states: Optional[Set[Execution.State]] = None,
+        retry: Optional[Retry] = None,
+        request_timeout: Optional[float] = None,
+        metadata: Optional[Sequence[Tuple[str, str]]] = None,
+        gcp_conn_id: str = "google_cloud_default",
+        impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
+        **kwargs,
+    ):
+        super().__init__(**kwargs)
+
+        self.success_states = success_states or {Execution.State.SUCCEEDED}
+        self.failure_states = failure_states or {Execution.State.FAILED, Execution.State.CANCELLED}
+        self.workflow_id = workflow_id
+        self.execution_id = execution_id
+        self.location = location
+        self.project_id = project_id
+        self.retry = retry
+        self.request_timeout = request_timeout
+        self.metadata = metadata
+        self.gcp_conn_id = gcp_conn_id
+        self.impersonation_chain = impersonation_chain
+
+    def poke(self, context):
+        hook = WorkflowsHook(gcp_conn_id=self.gcp_conn_id, impersonation_chain=self.impersonation_chain)
+        self.log.info("Checking state of execution %s for workflow %s", self.execution_id, self.workflow_id)
+        execution: Execution = hook.get_execution(
+            workflow_id=self.workflow_id,
+            execution_id=self.execution_id,
+            location=self.location,
+            project_id=self.project_id,
+            retry=self.retry,
+            timeout=self.request_timeout,
+            metadata=self.metadata,
+        )
+
+        state = execution.state
+        if state in self.failure_states:
+            raise AirflowException(
+                f"Execution {self.execution_id} for workflow {self.workflow_id} "
+                f"failed and is in `{state}` state",
+            )
+
+        if state in self.success_states:
+            self.log.info(
+                "Execution %s for workflow %s completed with state: %s",
+                self.execution_id,
+                self.workflow_id,
+                state,
+            )
+            return True
+
+        self.log.info(
+            "Execution %s for workflow %s has not completed yet, current state: %s",
+            self.execution_id,
+            self.workflow_id,
+            state,
+        )
+        return False
diff --git a/airflow/providers/google/cloud/transfers/trino_to_gcs.py b/airflow/providers/google/cloud/transfers/trino_to_gcs.py
new file mode 100644
index 0000000000000..e2f2306bc80cc
--- /dev/null
+++ b/airflow/providers/google/cloud/transfers/trino_to_gcs.py
@@ -0,0 +1,210 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
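
The new Workflows operators and sensor above are designed to chain: ``WorkflowsCreateExecutionOperator`` pushes the new execution id to XCom under the key ``execution_id``, which the sensor can then pull. A minimal usage sketch, assuming a hypothetical DAG, project, region and workflow id:

    from airflow import DAG
    from airflow.providers.google.cloud.operators.workflows import WorkflowsCreateExecutionOperator
    from airflow.providers.google.cloud.sensors.workflows import WorkflowExecutionSensor
    from airflow.utils.dates import days_ago

    with DAG("example_workflows", start_date=days_ago(1), schedule_interval=None) as dag:
        create_execution = WorkflowsCreateExecutionOperator(
            task_id="create_execution",
            workflow_id="my-workflow",  # hypothetical workflow id
            execution={},  # run the latest revision with no arguments
            location="us-central1",  # hypothetical region
            project_id="my-project",  # hypothetical project
        )
        wait_for_execution = WorkflowExecutionSensor(
            task_id="wait_for_execution",
            # The create operator pushed the execution id to XCom under "execution_id".
            execution_id="{{ task_instance.xcom_pull('create_execution', key='execution_id') }}",
            workflow_id="my-workflow",
            location="us-central1",
            project_id="my-project",
        )
        create_execution >> wait_for_execution
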
+from typing import Any, Dict, List, Tuple
+
+from trino.client import TrinoResult
+from trino.dbapi import Cursor as TrinoCursor
+
+from airflow.providers.google.cloud.transfers.sql_to_gcs import BaseSQLToGCSOperator
+from airflow.providers.trino.hooks.trino import TrinoHook
+from airflow.utils.decorators import apply_defaults
+
+
+class _TrinoToGCSTrinoCursorAdapter:
+    """
+    An adapter that adds additional features to the Trino cursor.
+
+    The implementation of the cursor in the trino library is not sufficient.
+    The following changes have been made:
+
+    * A peek mechanism for rows: you can look at the next row without consuming it.
+    * The description attribute is available before reading the first row, thanks to the peek mechanism.
+    * The iterator interface has been implemented.
+
+    A detailed description of the class methods is available in
+    `PEP-249 <https://www.python.org/dev/peps/pep-0249/>`__.
+    """
+
+    def __init__(self, cursor: TrinoCursor):
+        self.cursor: TrinoCursor = cursor
+        self.rows: List[Any] = []
+        self.initialized: bool = False
+
+    @property
+    def description(self) -> List[Tuple]:
+        """
+        This read-only attribute is a sequence of 7-item sequences.
+
+        Each of these sequences contains information describing one result column:
+
+        * ``name``
+        * ``type_code``
+        * ``display_size``
+        * ``internal_size``
+        * ``precision``
+        * ``scale``
+        * ``null_ok``
+
+        The first two items (``name`` and ``type_code``) are mandatory, the other
+        five are optional and are set to None if no meaningful values can be provided.
+        """
+        if not self.initialized:
+            # Peek at the first row to load the description.
+            self.peekone()
+        return self.cursor.description
+
+    @property
+    def rowcount(self) -> int:
+        """The read-only attribute specifies the number of rows"""
+        return self.cursor.rowcount
+
+    def close(self) -> None:
+        """Close the cursor now"""
+        self.cursor.close()
+
+    def execute(self, *args, **kwargs) -> TrinoResult:
+        """Prepare and execute a database operation (query or command)."""
+        self.initialized = False
+        self.rows = []
+        return self.cursor.execute(*args, **kwargs)
+
+    def executemany(self, *args, **kwargs):
+        """
+        Prepare a database operation (query or command) and then execute it against all parameter
+        sequences or mappings found in the sequence seq_of_parameters.
+        """
+        self.initialized = False
+        self.rows = []
+        return self.cursor.executemany(*args, **kwargs)
+
+    def peekone(self) -> Any:
+        """Return the next row without consuming it."""
+        self.initialized = True
+        element = self.cursor.fetchone()
+        self.rows.insert(0, element)
+        return element
+
+    def fetchone(self) -> Any:
+        """
+        Fetch the next row of a query result set, returning a single sequence, or
+        ``None`` when no more data is available.
+        """
+        if self.rows:
+            return self.rows.pop(0)
+        return self.cursor.fetchone()
+
+    def fetchmany(self, size=None) -> list:
+        """
+        Fetch the next set of rows of a query result, returning a sequence of sequences
+        (e.g. a list of tuples). An empty sequence is returned when no more rows are available.
+        """
+        if size is None:
+            size = self.cursor.arraysize
+
+        result = []
+        for _ in range(size):
+            row = self.fetchone()
+            if row is None:
+                break
+            result.append(row)
+
+        return result
+
+    def __next__(self) -> Any:
+        """
+        Return the next row from the currently executing SQL statement using the same semantics as
+        ``.fetchone()``. A ``StopIteration`` exception is raised when the result set is exhausted.
+        """
+        result = self.fetchone()
+        if result is None:
+            raise StopIteration()
+        return result
+
+    def __iter__(self) -> "_TrinoToGCSTrinoCursorAdapter":
+        """Return self to make cursors compatible with the iteration protocol"""
+        return self
+
+
+class TrinoToGCSOperator(BaseSQLToGCSOperator):
+    """Copy data from Trino to Google Cloud Storage in JSON or CSV format.
+
+    :param trino_conn_id: Reference to a specific Trino hook.
+    :type trino_conn_id: str
+    """
+
+    ui_color = "#a0e08c"
+
+    type_map = {
+        "BOOLEAN": "BOOL",
+        "TINYINT": "INT64",
+        "SMALLINT": "INT64",
+        "INTEGER": "INT64",
+        "BIGINT": "INT64",
+        "REAL": "FLOAT64",
+        "DOUBLE": "FLOAT64",
+        "DECIMAL": "NUMERIC",
+        "VARCHAR": "STRING",
+        "CHAR": "STRING",
+        "VARBINARY": "BYTES",
+        "JSON": "STRING",
+        "DATE": "DATE",
+        "TIME": "TIME",
+        # BigQuery has no native TIME WITH TIME ZONE type.
+        "TIME WITH TIME ZONE": "STRING",
+        "TIMESTAMP": "TIMESTAMP",
+        # BigQuery supports a narrow range of time zones during import.
+        # Use the TIMESTAMP function if you want the TIMESTAMP type.
+        "TIMESTAMP WITH TIME ZONE": "STRING",
+        "IPADDRESS": "STRING",
+        "UUID": "STRING",
+    }
+
+    @apply_defaults
+    def __init__(self, *, trino_conn_id: str = "trino_default", **kwargs):
+        super().__init__(**kwargs)
+        self.trino_conn_id = trino_conn_id
+
+    def query(self):
+        """Queries Trino and returns a cursor to the results."""
+        trino = TrinoHook(trino_conn_id=self.trino_conn_id)
+        conn = trino.get_conn()
+        cursor = conn.cursor()
+        self.log.info("Executing: %s", self.sql)
+        cursor.execute(self.sql)
+        return _TrinoToGCSTrinoCursorAdapter(cursor)
+
+    def field_to_bigquery(self, field) -> Dict[str, str]:
+        """Convert a Trino field type to a BigQuery field type."""
+        clear_field_type = field[1].upper()
+        # Remove the type argument, e.g. DECIMAL(2, 10) => DECIMAL
+        clear_field_type, _, _ = clear_field_type.partition("(")
+        new_field_type = self.type_map.get(clear_field_type, "STRING")
+
+        return {"name": field[0], "type": new_field_type}
+
+    def convert_type(self, value, schema_type):
+        """
+        Do nothing. Trino uses JSON on the transport layer, so types are simple.
+ + :param value: Trino column value + :type value: Any + :param schema_type: BigQuery data type + :type schema_type: str + """ + return value diff --git a/airflow/providers/google/provider.yaml b/airflow/providers/google/provider.yaml index 1917f44556647..d1f5b5f9cb435 100644 --- a/airflow/providers/google/provider.yaml +++ b/airflow/providers/google/provider.yaml @@ -28,6 +28,7 @@ description: | - `Google Workspace `__ (formerly Google Suite) versions: + - 2.0.0 - 1.0.0 integrations: @@ -45,6 +46,7 @@ integrations: external-doc-url: https://cloud.google.com/automl/ how-to-guide: - /docs/apache-airflow-providers-google/operators/cloud/automl.rst + logo: /integration-logos/gcp/Cloud-AutoML.png tags: [gcp] - integration-name: Google BigQuery Data Transfer Service external-doc-url: https://cloud.google.com/bigquery/transfer/ @@ -55,16 +57,19 @@ integrations: how-to-guide: - /docs/apache-airflow-providers-google/operators/cloud/bigquery.rst external-doc-url: https://cloud.google.com/bigquery/ + logo: /integration-logos/gcp/BigQuery.png tags: [gcp] - integration-name: Google Bigtable how-to-guide: - /docs/apache-airflow-providers-google/operators/cloud/bigtable.rst external-doc-url: https://cloud.google.com/bigtable/ + logo: /integration-logos/gcp/Cloud-Bigtable.png tags: [gcp] - integration-name: Google Cloud Build external-doc-url: https://cloud.google.com/cloud-build/ how-to-guide: - /docs/apache-airflow-providers-google/operators/cloud/cloud_build.rst + logo: /integration-logos/gcp/Cloud-Build.png tags: [gcp] - integration-name: Google Cloud Data Loss Prevention (DLP) external-doc-url: https://cloud.google.com/dlp/ @@ -80,9 +85,11 @@ integrations: external-doc-url: https://cloud.google.com/functions/ how-to-guide: - /docs/apache-airflow-providers-google/operators/cloud/functions.rst + logo: /integration-logos/gcp/Cloud-Functions.png tags: [gcp] - integration-name: Google Cloud Key Management Service (KMS) external-doc-url: https://cloud.google.com/kms/ + logo: /integration-logos/gcp/Key-Management-Service.png tags: [gcp] - integration-name: Google Cloud Life Sciences external-doc-url: https://cloud.google.com/life-sciences/ @@ -94,6 +101,7 @@ integrations: how-to-guide: - /docs/apache-airflow-providers-google/operators/cloud/cloud_memorystore.rst - /docs/apache-airflow-providers-google/operators/cloud/cloud_memorystore_memcached.rst + logo: /integration-logos/gcp/Cloud-Memorystore.png tags: [gcp] - integration-name: Google Cloud OS Login external-doc-url: https://cloud.google.com/compute/docs/oslogin/ @@ -102,6 +110,7 @@ integrations: external-doc-url: https://cloud.google.com/pubsub/ how-to-guide: - /docs/apache-airflow-providers-google/operators/cloud/pubsub.rst + logo: /integration-logos/gcp/Cloud-PubSub.png tags: [gcp] - integration-name: Google Cloud Secret Manager external-doc-url: https://cloud.google.com/secret-manager/ @@ -110,17 +119,20 @@ integrations: external-doc-url: https://cloud.google.com/spanner/ how-to-guide: - /docs/apache-airflow-providers-google/operators/cloud/spanner.rst + logo: /integration-logos/gcp/Cloud-Spanner.png tags: [gcp] - integration-name: Google Cloud Speech-to-Text external-doc-url: https://cloud.google.com/speech-to-text/ how-to-guide: - /docs/apache-airflow-providers-google/operators/cloud/speech_to_text.rst - /docs/apache-airflow-providers-google/operators/cloud/translate_speech.rst + logo: /integration-logos/gcp/Cloud-Speech-to-Text.png tags: [gcp] - integration-name: Google Cloud SQL external-doc-url: https://cloud.google.com/sql/ how-to-guide: - 
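Editor's note: the adapter's peek mechanism is easiest to see in isolation. A minimal sketch, assuming the adapter class above is in scope and using a hypothetical ``FakeCursor`` as a stand-in for a real ``trino.dbapi.Cursor``, showing that reading ``description`` does not consume the first row:

```python
# FakeCursor is a hypothetical stand-in for trino.dbapi.Cursor,
# providing just enough surface for the adapter's peek logic.
class FakeCursor:
    description = [("value", "integer", None, None, None, None, None)]
    arraysize = 1

    def __init__(self, rows):
        self._rows = list(rows)

    def fetchone(self):
        return self._rows.pop(0) if self._rows else None


adapter = _TrinoToGCSTrinoCursorAdapter(FakeCursor([(1,), (2,)]))
print(adapter.description)  # peeks one row internally; no row is lost
print(adapter.fetchone())   # (1,) - the peeked row is returned first
print(list(adapter))        # [(2,)] - iteration drains the remainder
```

And a sketch of how the new operator might be wired into a DAG; the connection IDs, bucket name, and query are hypothetical, while ``sql``, ``bucket``, ``filename``, and ``schema_filename`` are inherited from ``BaseSQLToGCSOperator``:

```python
from airflow import DAG
from airflow.providers.google.cloud.transfers.trino_to_gcs import TrinoToGCSOperator
from airflow.utils.dates import days_ago

with DAG(dag_id="example_trino_to_gcs", start_date=days_ago(1), schedule_interval=None) as dag:
    trino_to_gcs = TrinoToGCSOperator(
        task_id="trino_to_gcs",
        trino_conn_id="trino_default",
        sql="SELECT * FROM memory.default.test_table",  # hypothetical table
        bucket="my-test-bucket",                        # hypothetical bucket
        filename="test_table/{}.json",                  # {} is replaced with the file chunk number
        schema_filename="test_table/schema.json",
    )
```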
diff --git a/airflow/providers/google/provider.yaml b/airflow/providers/google/provider.yaml
index 1917f44556647..d1f5b5f9cb435 100644
--- a/airflow/providers/google/provider.yaml
+++ b/airflow/providers/google/provider.yaml
@@ -28,6 +28,7 @@ description: |
   - `Google Workspace <https://workspace.google.com/>`__ (formerly Google Suite)

 versions:
+  - 2.0.0
   - 1.0.0

 integrations:
@@ -45,6 +46,7 @@ integrations:
     external-doc-url: https://cloud.google.com/automl/
     how-to-guide:
       - /docs/apache-airflow-providers-google/operators/cloud/automl.rst
+    logo: /integration-logos/gcp/Cloud-AutoML.png
     tags: [gcp]
   - integration-name: Google BigQuery Data Transfer Service
     external-doc-url: https://cloud.google.com/bigquery/transfer/
@@ -55,16 +57,19 @@ integrations:
     how-to-guide:
       - /docs/apache-airflow-providers-google/operators/cloud/bigquery.rst
     external-doc-url: https://cloud.google.com/bigquery/
+    logo: /integration-logos/gcp/BigQuery.png
     tags: [gcp]
   - integration-name: Google Bigtable
     how-to-guide:
       - /docs/apache-airflow-providers-google/operators/cloud/bigtable.rst
     external-doc-url: https://cloud.google.com/bigtable/
+    logo: /integration-logos/gcp/Cloud-Bigtable.png
     tags: [gcp]
   - integration-name: Google Cloud Build
     external-doc-url: https://cloud.google.com/cloud-build/
     how-to-guide:
       - /docs/apache-airflow-providers-google/operators/cloud/cloud_build.rst
+    logo: /integration-logos/gcp/Cloud-Build.png
     tags: [gcp]
   - integration-name: Google Cloud Data Loss Prevention (DLP)
     external-doc-url: https://cloud.google.com/dlp/
@@ -80,9 +85,11 @@ integrations:
     external-doc-url: https://cloud.google.com/functions/
     how-to-guide:
       - /docs/apache-airflow-providers-google/operators/cloud/functions.rst
+    logo: /integration-logos/gcp/Cloud-Functions.png
     tags: [gcp]
   - integration-name: Google Cloud Key Management Service (KMS)
     external-doc-url: https://cloud.google.com/kms/
+    logo: /integration-logos/gcp/Key-Management-Service.png
     tags: [gcp]
   - integration-name: Google Cloud Life Sciences
     external-doc-url: https://cloud.google.com/life-sciences/
@@ -94,6 +101,7 @@ integrations:
     how-to-guide:
       - /docs/apache-airflow-providers-google/operators/cloud/cloud_memorystore.rst
       - /docs/apache-airflow-providers-google/operators/cloud/cloud_memorystore_memcached.rst
+    logo: /integration-logos/gcp/Cloud-Memorystore.png
     tags: [gcp]
   - integration-name: Google Cloud OS Login
     external-doc-url: https://cloud.google.com/compute/docs/oslogin/
@@ -102,6 +110,7 @@ integrations:
     external-doc-url: https://cloud.google.com/pubsub/
     how-to-guide:
       - /docs/apache-airflow-providers-google/operators/cloud/pubsub.rst
+    logo: /integration-logos/gcp/Cloud-PubSub.png
     tags: [gcp]
   - integration-name: Google Cloud Secret Manager
     external-doc-url: https://cloud.google.com/secret-manager/
@@ -110,17 +119,20 @@ integrations:
     external-doc-url: https://cloud.google.com/spanner/
     how-to-guide:
       - /docs/apache-airflow-providers-google/operators/cloud/spanner.rst
+    logo: /integration-logos/gcp/Cloud-Spanner.png
     tags: [gcp]
   - integration-name: Google Cloud Speech-to-Text
     external-doc-url: https://cloud.google.com/speech-to-text/
     how-to-guide:
       - /docs/apache-airflow-providers-google/operators/cloud/speech_to_text.rst
       - /docs/apache-airflow-providers-google/operators/cloud/translate_speech.rst
+    logo: /integration-logos/gcp/Cloud-Speech-to-Text.png
     tags: [gcp]
   - integration-name: Google Cloud SQL
     external-doc-url: https://cloud.google.com/sql/
     how-to-guide:
       - /docs/apache-airflow-providers-google/operators/cloud/cloud_sql.rst
+    logo: /integration-logos/gcp/Cloud-SQL.png
     tags: [gcp]
   - integration-name: Google Cloud Stackdriver
     external-doc-url: https://cloud.google.com/stackdriver
@@ -131,36 +143,43 @@ integrations:
     external-doc-url: https://cloud.google.com/gcs/
     how-to-guide:
       - /docs/apache-airflow-providers-google/operators/cloud/gcs.rst
+    logo: /integration-logos/gcp/Cloud-Storage.png
     tags: [gcp]
   - integration-name: Google Cloud Tasks
     external-doc-url: https://cloud.google.com/tasks/
+    logo: /integration-logos/gcp/Cloud-Tasks.png
     tags: [gcp]
   - integration-name: Google Cloud Text-to-Speech
     external-doc-url: https://cloud.google.com/text-to-speech/
     how-to-guide:
       - /docs/apache-airflow-providers-google/operators/cloud/text_to_speech.rst
+    logo: /integration-logos/gcp/Cloud-Text-to-Speech.png
     tags: [gcp]
   - integration-name: Google Cloud Translation
     external-doc-url: https://cloud.google.com/translate/
     how-to-guide:
       - /docs/apache-airflow-providers-google/operators/cloud/translate.rst
       - /docs/apache-airflow-providers-google/operators/cloud/translate_speech.rst
+    logo: /integration-logos/gcp/Cloud-Translation-API.png
     tags: [gcp]
   - integration-name: Google Cloud Video Intelligence
     external-doc-url: https://cloud.google.com/video_intelligence/
     how-to-guide:
       - /docs/apache-airflow-providers-google/operators/cloud/video_intelligence.rst
+    logo: /integration-logos/gcp/Cloud-Video-Intelligence-API.png
     tags: [gcp]
   - integration-name: Google Cloud Vision
     external-doc-url: https://cloud.google.com/vision/
     how-to-guide:
       - /docs/apache-airflow-providers-google/operators/cloud/vision.rst
+    logo: /integration-logos/gcp/Cloud-Vision-API.png
     tags: [gcp]
   - integration-name: Google Compute Engine
     external-doc-url: https://cloud.google.com/compute/
     how-to-guide:
       - /docs/apache-airflow-providers-google/operators/cloud/compute.rst
       - /docs/apache-airflow-providers-google/operators/cloud/compute_ssh.rst
+    logo: /integration-logos/gcp/Compute-Engine.png
     tags: [gcp]
   - integration-name: Google Data Proc
     external-doc-url: https://cloud.yandex.com/services/data-proc
@@ -172,6 +191,7 @@ integrations:
     tags: [gcp]
   - integration-name: Google Dataflow
     external-doc-url: https://cloud.google.com/dataflow/
+    logo: /integration-logos/gcp/Cloud-Dataflow.png
     tags: [gcp]
   - integration-name: Google Data Fusion
     external-doc-url: https://cloud.google.com/data-fusion/
@@ -187,11 +207,13 @@ integrations:
     external-doc-url: https://cloud.google.com/dataproc/
     how-to-guide:
       - /docs/apache-airflow-providers-google/operators/cloud/dataproc.rst
+    logo: /integration-logos/gcp/Cloud-Dataproc.png
     tags: [gcp]
   - integration-name: Google Datastore
     external-doc-url: https://cloud.google.com/datastore/
     how-to-guide:
       - /docs/apache-airflow-providers-google/operators/cloud/datastore.rst
+    logo: /integration-logos/gcp/Cloud-Datastore.png
     tags: [gcp]
   - integration-name: Google Deployment Manager
     external-doc-url: https://cloud.google.com/deployment-manager/
@@ -235,21 +257,30 @@ integrations:
     external-doc-url: https://cloud.google.com/storage/transfer/
     how-to-guide:
       - /docs/apache-airflow-providers-google/operators/cloud/cloud_storage_transfer_service.rst
+    logo: /integration-logos/gcp/Cloud-Storage.png
     tags: [gcp]
   - integration-name: Google Kubernetes Engine
     external-doc-url: https://cloud.google.com/kubernetes_engine/
     how-to-guide:
       - /docs/apache-airflow-providers-google/operators/cloud/kubernetes_engine.rst
+    logo: /integration-logos/gcp/Kubernetes-Engine.png
     tags: [gcp]
   - integration-name: Google Machine Learning Engine
     external-doc-url: https://cloud.google.com/ai-platform/
     how-to-guide:
       - /docs/apache-airflow-providers-google/operators/cloud/mlengine.rst
+    logo: /integration-logos/gcp/AI-Platform.png
     tags: [gcp]
   - integration-name: Google Cloud Natural Language
     external-doc-url: https://cloud.google.com/natural-language/
     how-to-guide:
       - /docs/apache-airflow-providers-google/operators/cloud/natural_language.rst
+    logo: /integration-logos/gcp/Cloud-NLP.png
+    tags: [gcp]
+  - integration-name: Google Cloud Workflows
+    external-doc-url: https://cloud.google.com/workflows/
+    how-to-guide:
+      - /docs/apache-airflow-providers-google/operators/cloud/workflows.rst
     tags: [gcp]

 operators:
@@ -351,6 +382,9 @@ operators:
   - integration-name: Google Cloud Vision
     python-modules:
       - airflow.providers.google.cloud.operators.vision
+  - integration-name: Google Cloud Workflows
+    python-modules:
+      - airflow.providers.google.cloud.operators.workflows
   - integration-name: Google Cloud Firestore
     python-modules:
       - airflow.providers.google.firebase.operators.firestore
@@ -395,6 +429,9 @@ sensors:
   - integration-name: Google Cloud Pub/Sub
     python-modules:
       - airflow.providers.google.cloud.sensors.pubsub
+  - integration-name: Google Cloud Workflows
+    python-modules:
+      - airflow.providers.google.cloud.sensors.workflows
   - integration-name: Google Campaign Manager
     python-modules:
       - airflow.providers.google.marketing_platform.sensors.campaign_manager
@@ -515,6 +552,9 @@ hooks:
   - integration-name: Google Cloud Vision
     python-modules:
       - airflow.providers.google.cloud.hooks.vision
+  - integration-name: Google Cloud Workflows
+    python-modules:
+      - airflow.providers.google.cloud.hooks.workflows
   - integration-name: Google
     python-modules:
       - airflow.providers.google.common.hooks.base_google
@@ -548,6 +588,10 @@ transfers:
   - source-integration-name: Presto
     target-integration-name: Google Cloud Storage (GCS)
     how-to-guide: /docs/apache-airflow-providers-google/operators/transfer/presto_to_gcs.rst
    python-module: airflow.providers.google.cloud.transfers.presto_to_gcs
+  - source-integration-name: Trino
+    target-integration-name: Google Cloud Storage (GCS)
+    how-to-guide: /docs/apache-airflow-providers-google/operators/transfer/trino_to_gcs.rst
+    python-module: airflow.providers.google.cloud.transfers.trino_to_gcs
   - source-integration-name: SQL
     target-integration-name: Google Cloud Storage (GCS)
     python-module: airflow.providers.google.cloud.transfers.sql_to_gcs
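Editor's note: the new ``transfers`` entries are plain metadata, so they can be sanity-checked without running Airflow. A small sketch (assuming PyYAML is installed and the script is run from the repository root) that verifies every declared transfer module is importable:

```python
import importlib

import yaml

with open("airflow/providers/google/provider.yaml") as file:
    provider_meta = yaml.safe_load(file)

for transfer in provider_meta["transfers"]:
    # import_module raises ModuleNotFoundError if the declared module is missing.
    module = importlib.import_module(transfer["python-module"])
    print(transfer["source-integration-name"], "->", transfer["target-integration-name"], module.__name__)
```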
diff --git a/airflow/providers/google/suite/hooks/sheets.py b/airflow/providers/google/suite/hooks/sheets.py
index 3e4b62f1e5612..2c5723189a77f 100644
--- a/airflow/providers/google/suite/hooks/sheets.py
+++ b/airflow/providers/google/suite/hooks/sheets.py
@@ -271,7 +271,7 @@ def batch_update_values(
         """
         if len(ranges) != len(values):
             raise AirflowException(
-                "'Ranges' and and 'Lists' must be of equal length. \n \
+                "'Ranges' and 'Lists' must be of equal length. \n \
                 'Ranges' is of length: {} and \n \
                 'Values' is of length: {}.".format(
                     str(len(ranges)), str(len(values))
diff --git a/airflow/providers/google/suite/transfers/gcs_to_gdrive.py b/airflow/providers/google/suite/transfers/gcs_to_gdrive.py
index 7427c36e9e293..06419b9e40252 100644
--- a/airflow/providers/google/suite/transfers/gcs_to_gdrive.py
+++ b/airflow/providers/google/suite/transfers/gcs_to_gdrive.py
@@ -30,7 +30,7 @@
 class GCSToGoogleDriveOperator(BaseOperator):
     """
-    Copies objects from a Google Cloud Storage service service to Google Drive service, with renaming
+    Copies objects from a Google Cloud Storage service to a Google Drive service, with renaming
     if requested.

     Using this operator requires the following OAuth 2.0 scope:
diff --git a/airflow/providers/grpc/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/grpc/BACKPORT_PROVIDER_CHANGES_2020.06.24.md
deleted file mode 100644
index 406b4b4254265..0000000000000
--- a/airflow/providers/grpc/BACKPORT_PROVIDER_CHANGES_2020.06.24.md
+++ /dev/null
@@ -1,20 +0,0 @@
-
-
-### Release 2020.6.24
-
-| Commit | Committed | Subject |
-|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------------|
-| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) |
-| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) |
-| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) |
-| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) |
-| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [cb0bf4a14](https://github.com/apache/airflow/commit/cb0bf4a142656ee40b43a01660b6f6b08a9840fa) | 2020-03-30 | Remove sql like function in base_hook (#7901) | -| [be2b2baa7](https://github.com/apache/airflow/commit/be2b2baa7c5f53c2d73646e4623cdb6731551b70) | 2020-03-23 | Add missing call to Super class in 'http', 'grpc' & 'slack' providers (#7826) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02 | [AIRFLOW-6708] Set unique logger names (#7330) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | diff --git a/airflow/providers/grpc/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/grpc/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 719cb447b0f23..0000000000000 --- a/airflow/providers/grpc/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,16 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25 | Remove all "noinspection" comments native to IntelliJ (#10525) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| 
[4eddce22a](https://github.com/apache/airflow/commit/4eddce22a3e0eb605f5661204a005262bbaa54cd) | 2020-07-21 | Add typing for grpc provider (#9884) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/grpc/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/grpc/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 528a7e085f91f..0000000000000 --- a/airflow/providers/grpc/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,10 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/grpc/BACKPORT_PROVIDER_README.md b/airflow/providers/grpc/BACKPORT_PROVIDER_README.md deleted file mode 100644 index 4b7b5a67e2636..0000000000000 --- a/airflow/providers/grpc/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,140 +0,0 @@ - - - -# Package apache-airflow-backport-providers-grpc - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Hooks](#hooks) - - [New hooks](#new-hooks) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for `grpc` provider. All classes for this provider package -are in `airflow.providers.grpc` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-grpc` - -## PIP requirements - -| PIP package | Version required | -|:---------------------|:-------------------| -| google-auth | >=1.0.0, <2.0.0dev | -| google-auth-httplib2 | >=0.0.1 | -| grpcio | >=1.15.0 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `grpc` provider -are in the `airflow.providers.grpc` package. 
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.grpc` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.grpc.GrpcOperator](https://github.com/apache/airflow/blob/master/airflow/providers/grpc/operators/grpc.py) | [contrib.operators.grpc_operator.GrpcOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/grpc_operator.py) | - - -## Hooks - - -### New hooks - -| New Airflow 2.0 hooks: `airflow.providers.grpc` package | -|:----------------------------------------------------------------------------------------------------------| -| [hooks.grpc.GrpcHook](https://github.com/apache/airflow/blob/master/airflow/providers/grpc/hooks/grpc.py) | - - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25 | Remove all "noinspection" comments native to IntelliJ (#10525) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all 
operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [4eddce22a](https://github.com/apache/airflow/commit/4eddce22a3e0eb605f5661204a005262bbaa54cd) | 2020-07-21 | Add typing for grpc provider (#9884) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [cb0bf4a14](https://github.com/apache/airflow/commit/cb0bf4a142656ee40b43a01660b6f6b08a9840fa) | 2020-03-30 | Remove sql like function in base_hook (#7901) | -| [be2b2baa7](https://github.com/apache/airflow/commit/be2b2baa7c5f53c2d73646e4623cdb6731551b70) | 2020-03-23 | Add missing call to Super class in 'http', 'grpc' & 'slack' providers (#7826) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02 | [AIRFLOW-6708] Set unique logger names (#7330) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | diff --git a/airflow/providers/grpc/CHANGELOG.rst b/airflow/providers/grpc/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/grpc/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. 
See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. diff --git a/airflow/providers/grpc/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/grpc/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index 0dc1f560695a2..0000000000000 --- a/airflow/providers/grpc/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,50 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [fcfc7f124](https://github.com/apache/airflow/commit/fcfc7f12421bd35a366324fe7814c90da8de5735) | 2020-11-04 | Improve reading SSL credentials file in GRPC Hook (#12094) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| 
[4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25 | Remove all "noinspection" comments native to IntelliJ (#10525) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [4eddce22a](https://github.com/apache/airflow/commit/4eddce22a3e0eb605f5661204a005262bbaa54cd) | 2020-07-21 | Add typing for grpc provider (#9884) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| 
[00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [cb0bf4a14](https://github.com/apache/airflow/commit/cb0bf4a142656ee40b43a01660b6f6b08a9840fa) | 2020-03-30 | Remove sql like function in base_hook (#7901) | -| [be2b2baa7](https://github.com/apache/airflow/commit/be2b2baa7c5f53c2d73646e4623cdb6731551b70) | 2020-03-23 | Add missing call to Super class in 'http', 'grpc' & 'slack' providers (#7826) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02 | [AIRFLOW-6708] Set unique logger names (#7330) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | diff --git a/airflow/providers/grpc/README.md b/airflow/providers/grpc/README.md deleted file mode 100644 index 29480f86cdb91..0000000000000 --- a/airflow/providers/grpc/README.md +++ /dev/null @@ -1,145 +0,0 @@ - - - -# Package apache-airflow-providers-grpc - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Hooks](#hooks) - - [New hooks](#new-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for `grpc` provider. All classes for this provider package -are in `airflow.providers.grpc` python package. - - - -## Installation - -NOTE! - -On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option -`--use-deprecated legacy-resolver` to your pip install command. 
- -You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-grpc` - -## PIP requirements - -| PIP package | Version required | -|:---------------------|:-------------------| -| google-auth | >=1.0.0, <2.0.0dev | -| google-auth-httplib2 | >=0.0.1 | -| grpcio | >=1.15.0 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `grpc` provider -are in the `airflow.providers.grpc` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.grpc` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.grpc.GrpcOperator](https://github.com/apache/airflow/blob/master/airflow/providers/grpc/operators/grpc.py) | [contrib.operators.grpc_operator.GrpcOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/grpc_operator.py) | - - -## Hooks - - -### New hooks - -| New Airflow 2.0 hooks: `airflow.providers.grpc` package | -|:----------------------------------------------------------------------------------------------------------| -| [hooks.grpc.GrpcHook](https://github.com/apache/airflow/blob/master/airflow/providers/grpc/hooks/grpc.py) | - - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of 
provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [fcfc7f124](https://github.com/apache/airflow/commit/fcfc7f12421bd35a366324fe7814c90da8de5735) | 2020-11-04 | Improve reading SSL credentials file in GRPC Hook (#12094) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25 | Remove all "noinspection" comments native to IntelliJ (#10525) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [4eddce22a](https://github.com/apache/airflow/commit/4eddce22a3e0eb605f5661204a005262bbaa54cd) | 2020-07-21 | Add typing for grpc provider (#9884) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| 
[12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [cb0bf4a14](https://github.com/apache/airflow/commit/cb0bf4a142656ee40b43a01660b6f6b08a9840fa) | 2020-03-30 | Remove sql like function in base_hook (#7901) | -| [be2b2baa7](https://github.com/apache/airflow/commit/be2b2baa7c5f53c2d73646e4623cdb6731551b70) | 2020-03-23 | Add missing call to Super class in 'http', 'grpc' & 'slack' providers (#7826) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02 | [AIRFLOW-6708] Set unique logger names (#7330) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | diff --git a/airflow/providers/hashicorp/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/hashicorp/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index 32e6e14e470fa..0000000000000 --- a/airflow/providers/hashicorp/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,25 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [df693e0e3](https://github.com/apache/airflow/commit/df693e0e3138f6601c4776cd529d8cb7bcde2f90) | 2020-06-19 | Add more authentication options for HashiCorp Vault classes (#8974) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| 
[d47e070a7](https://github.com/apache/airflow/commit/d47e070a79b574cca043ca9c06f91d47eecb3040) | 2020-06-17 | Add HashiCorp Vault Hook (split-out from Vault secret backend) (#9333) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [d8cb0b5dd](https://github.com/apache/airflow/commit/d8cb0b5ddb02d194742e374d9ac90dd8231f6e80) | 2020-05-04 | Support k8s auth method in Vault Secrets provider (#8640) | -| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09 | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170) | -| [c1c88abfe](https://github.com/apache/airflow/commit/c1c88abfede7a36c3b1d1b511fbc6c03af46d363) | 2020-03-28 | Get Airflow Variables from Hashicorp Vault (#7944) | -| [eb4af4f94](https://github.com/apache/airflow/commit/eb4af4f944c77e67e167bbb6b0a2aaf075a95b50) | 2020-03-28 | Make BaseSecretsBackend.build_path generic (#7948) | -| [686d7d50b](https://github.com/apache/airflow/commit/686d7d50bd21622724d6818021355bc6885fd3de) | 2020-03-25 | Standardize SecretBackend class names (#7846) | -| [eef87b995](https://github.com/apache/airflow/commit/eef87b9953347a65421f315a07dbef37ded9df66) | 2020-03-23 | [AIRFLOW-7105] Unify Secrets Backend method interfaces (#7830) | -| [cdf1809fc](https://github.com/apache/airflow/commit/cdf1809fce0e59c8379a799f1738d8d813abbf51) | 2020-03-23 | [AIRFLOW-7104] Add Secret backend for GCP Secrets Manager (#7795) | -| [a44beaf5b](https://github.com/apache/airflow/commit/a44beaf5bddae2a8de0429af45be5ff78a7d4d4e) | 2020-03-19 | [AIRFLOW-7076] Add support for HashiCorp Vault as Secrets Backend (#7741) | diff --git a/airflow/providers/hashicorp/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/hashicorp/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 44464d0a561e0..0000000000000 --- a/airflow/providers/hashicorp/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,18 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| 
[5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [3867f7662](https://github.com/apache/airflow/commit/3867f7662559761864ec4e7be26b776c64c2f199) | 2020-08-28 | Update Google Cloud branding (#10615) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25 | Remove all "noinspection" comments native to IntelliJ (#10525) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [2f31b3060](https://github.com/apache/airflow/commit/2f31b3060ed8274d5d1b1db7349ce607640b9199) | 2020-07-08 | Get Airflow configs with sensitive data from Secret Backends (#9645) | -| [44d4ae809](https://github.com/apache/airflow/commit/44d4ae809c1e3784ff95b6a5e95113c3412e56b3) | 2020-07-06 | Upgrade to latest pre-commit checks (#9686) | -| [a99aaeb49](https://github.com/apache/airflow/commit/a99aaeb49672e913d5ff79606237f6f3614fc8f5) | 2020-07-03 | Allow setting Hashicorp Vault token from File (#9644) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/hashicorp/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/hashicorp/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 528a7e085f91f..0000000000000 --- a/airflow/providers/hashicorp/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,10 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/hashicorp/BACKPORT_PROVIDER_CHANGES_2020.11.23.md b/airflow/providers/hashicorp/BACKPORT_PROVIDER_CHANGES_2020.11.23.md deleted file mode 100644 index 86dadba692145..0000000000000 --- a/airflow/providers/hashicorp/BACKPORT_PROVIDER_CHANGES_2020.11.23.md +++ /dev/null @@ -1,16 +0,0 @@ - - -### Release 2020.11.23 - -| Commit | Committed | Subject | 
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [19b7e4565](https://github.com/apache/airflow/commit/19b7e4565e6372d50ef0fbb5678a484a7afbdbee) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [dd2442b1e](https://github.com/apache/airflow/commit/dd2442b1e66d4725e7193e0cab0548a4d8c71fbd) | 2020-11-02 | Vault with optional Variables or Connections (#11736) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | diff --git a/airflow/providers/hashicorp/BACKPORT_PROVIDER_README.md b/airflow/providers/hashicorp/BACKPORT_PROVIDER_README.md deleted file mode 100644 index 4f1b30eab89cf..0000000000000 --- a/airflow/providers/hashicorp/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,178 +0,0 @@ - - - -# Package apache-airflow-backport-providers-hashicorp - -Release: 2020.11.23 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Cross provider package dependencies](#cross-provider-package-dependencies) -- [Provider class summary](#provider-classes-summary) - - [Hooks](#hooks) - - [New hooks](#new-hooks) - - [Secrets](#secrets) - - [Moved secrets](#moved-secrets) -- [Releases](#releases) - - [Release 2020.11.23](#release-20201123) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for `hashicorp` provider. All classes for this provider package -are in `airflow.providers.hashicorp` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. 
- - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-hashicorp` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| hvac | ~=0.10 | - -## Cross provider package dependencies - -Those are dependencies that might be needed in order to use all the features of the package. -You need to install the specified backport providers package in order to use them. - -You can install such cross-provider dependencies when installing from PyPI. For example: - -```bash -pip install apache-airflow-backport-providers-hashicorp[google] -``` - -| Dependent package | Extra | -|:-------------------------------------------------------------------------------------------------------------------|:--------| -| [apache-airflow-backport-providers-google](https://github.com/apache/airflow/tree/master/airflow/providers/google) | google | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `hashicorp` provider -are in the `airflow.providers.hashicorp` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Hooks - - -### New hooks - -| New Airflow 2.0 hooks: `airflow.providers.hashicorp` package | -|:------------------------------------------------------------------------------------------------------------------| -| [hooks.vault.VaultHook](https://github.com/apache/airflow/blob/master/airflow/providers/hashicorp/hooks/vault.py) | - - - -## Secrets - - - -### Moved secrets - -| Airflow 2.0 secrets: `airflow.providers.hashicorp` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:-------------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------| -| [secrets.vault.VaultBackend](https://github.com/apache/airflow/blob/master/airflow/providers/hashicorp/secrets/vault.py) | [contrib.secrets.hashicorp_vault.VaultBackend](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/secrets/hashicorp_vault.py) | - - - -## Releases - -### Release 2020.11.23 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [19b7e4565](https://github.com/apache/airflow/commit/19b7e4565e6372d50ef0fbb5678a484a7afbdbee) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages 
(#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [dd2442b1e](https://github.com/apache/airflow/commit/dd2442b1e66d4725e7193e0cab0548a4d8c71fbd) | 2020-11-02 | Vault with optional Variables or Connections (#11736) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [3867f7662](https://github.com/apache/airflow/commit/3867f7662559761864ec4e7be26b776c64c2f199) | 2020-08-28 | Update Google Cloud branding (#10615) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25 | Remove all "noinspection" comments native to IntelliJ (#10525) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [2f31b3060](https://github.com/apache/airflow/commit/2f31b3060ed8274d5d1b1db7349ce607640b9199) | 2020-07-08 | Get Airflow configs with sensitive data from Secret Backends (#9645) | -| 
[44d4ae809](https://github.com/apache/airflow/commit/44d4ae809c1e3784ff95b6a5e95113c3412e56b3) | 2020-07-06 | Upgrade to latest pre-commit checks (#9686) | -| [a99aaeb49](https://github.com/apache/airflow/commit/a99aaeb49672e913d5ff79606237f6f3614fc8f5) | 2020-07-03 | Allow setting Hashicorp Vault token from File (#9644) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [df693e0e3](https://github.com/apache/airflow/commit/df693e0e3138f6601c4776cd529d8cb7bcde2f90) | 2020-06-19 | Add more authentication options for HashiCorp Vault classes (#8974) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [d47e070a7](https://github.com/apache/airflow/commit/d47e070a79b574cca043ca9c06f91d47eecb3040) | 2020-06-17 | Add HashiCorp Vault Hook (split-out from Vault secret backend) (#9333) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [d8cb0b5dd](https://github.com/apache/airflow/commit/d8cb0b5ddb02d194742e374d9ac90dd8231f6e80) | 2020-05-04 | Support k8s auth method in Vault Secrets provider (#8640) | -| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09 | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170) | -| [c1c88abfe](https://github.com/apache/airflow/commit/c1c88abfede7a36c3b1d1b511fbc6c03af46d363) | 2020-03-28 | Get Airflow Variables from Hashicorp Vault (#7944) | -| [eb4af4f94](https://github.com/apache/airflow/commit/eb4af4f944c77e67e167bbb6b0a2aaf075a95b50) | 2020-03-28 | Make BaseSecretsBackend.build_path generic (#7948) | -| [686d7d50b](https://github.com/apache/airflow/commit/686d7d50bd21622724d6818021355bc6885fd3de) | 2020-03-25 | Standardize SecretBackend class names (#7846) | -| [eef87b995](https://github.com/apache/airflow/commit/eef87b9953347a65421f315a07dbef37ded9df66) | 2020-03-23 | [AIRFLOW-7105] Unify Secrets Backend method interfaces (#7830) | -| [cdf1809fc](https://github.com/apache/airflow/commit/cdf1809fce0e59c8379a799f1738d8d813abbf51) | 2020-03-23 | [AIRFLOW-7104] Add Secret backend for GCP Secrets Manager (#7795) | -| [a44beaf5b](https://github.com/apache/airflow/commit/a44beaf5bddae2a8de0429af45be5ff78a7d4d4e) | 2020-03-19 | [AIRFLOW-7076] Add support for HashiCorp Vault as Secrets Backend (#7741) | diff --git a/airflow/providers/hashicorp/CHANGELOG.rst b/airflow/providers/hashicorp/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/hashicorp/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. 
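
For readers moving from the backport package to the Airflow 2.0 provider, a minimal sketch of wiring the relocated `VaultBackend` into Airflow follows. The backend class path (`airflow.providers.hashicorp.secrets.vault.VaultBackend`) comes from the tables above; the Vault URL, mount point, and path values are illustrative placeholders, assuming a development Vault server at `http://127.0.0.1:8200` with a KV engine mounted at `airflow`.

```bash
# Point Airflow's [secrets] section at the relocated VaultBackend via
# environment variables (AIRFLOW__SECTION__KEY overrides airflow.cfg).
export AIRFLOW__SECRETS__BACKEND="airflow.providers.hashicorp.secrets.vault.VaultBackend"

# Assumed values: a dev Vault at 127.0.0.1:8200 with a KV mount named "airflow";
# adjust mount_point, url, and authentication for a real deployment.
export AIRFLOW__SECRETS__BACKEND_KWARGS='{"connections_path": "connections", "variables_path": "variables", "mount_point": "airflow", "url": "http://127.0.0.1:8200"}'
```

With such a configuration, a connection id like `smtp_default` would be resolved from the Vault path `airflow/connections/smtp_default` before the metastore is consulted.
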
diff --git a/airflow/providers/hashicorp/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/hashicorp/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index 4e60d70cda095..0000000000000 --- a/airflow/providers/hashicorp/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,56 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [36a9b0f48](https://github.com/apache/airflow/commit/36a9b0f48baf4a8ef8fc02a450a279948a8c0f02) | 2020-11-20 | Fix the default value for VaultBackend's config_path (#12518) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [7ca0b6f12](https://github.com/apache/airflow/commit/7ca0b6f121c9cec6e25de130f86a56d7c7fbe38c) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) (#12438) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [dd2442b1e](https://github.com/apache/airflow/commit/dd2442b1e66d4725e7193e0cab0548a4d8c71fbd) | 2020-11-02 | Vault with optional Variables or Connections (#11736) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 
2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [3867f7662](https://github.com/apache/airflow/commit/3867f7662559761864ec4e7be26b776c64c2f199) | 2020-08-28 | Update Google Cloud branding (#10615) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25 | Remove all "noinspection" comments native to IntelliJ (#10525) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [2f31b3060](https://github.com/apache/airflow/commit/2f31b3060ed8274d5d1b1db7349ce607640b9199) | 2020-07-08 | Get Airflow configs with sensitive data from Secret Backends (#9645) | -| [44d4ae809](https://github.com/apache/airflow/commit/44d4ae809c1e3784ff95b6a5e95113c3412e56b3) | 2020-07-06 | Upgrade to latest pre-commit checks (#9686) | -| [a99aaeb49](https://github.com/apache/airflow/commit/a99aaeb49672e913d5ff79606237f6f3614fc8f5) | 2020-07-03 | Allow setting Hashicorp Vault token from File (#9644) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [df693e0e3](https://github.com/apache/airflow/commit/df693e0e3138f6601c4776cd529d8cb7bcde2f90) | 2020-06-19 | Add more authentication options for HashiCorp Vault classes (#8974) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [d47e070a7](https://github.com/apache/airflow/commit/d47e070a79b574cca043ca9c06f91d47eecb3040) | 2020-06-17 | Add HashiCorp Vault Hook (split-out from Vault secret backend) (#9333) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [d8cb0b5dd](https://github.com/apache/airflow/commit/d8cb0b5ddb02d194742e374d9ac90dd8231f6e80) | 2020-05-04 | Support k8s auth method in Vault Secrets provider (#8640) | -| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09 | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170) | -| [c1c88abfe](https://github.com/apache/airflow/commit/c1c88abfede7a36c3b1d1b511fbc6c03af46d363) | 2020-03-28 | Get Airflow Variables from Hashicorp Vault (#7944) | -| [eb4af4f94](https://github.com/apache/airflow/commit/eb4af4f944c77e67e167bbb6b0a2aaf075a95b50) | 2020-03-28 | Make BaseSecretsBackend.build_path generic (#7948) | -| [686d7d50b](https://github.com/apache/airflow/commit/686d7d50bd21622724d6818021355bc6885fd3de) | 2020-03-25 | Standardize SecretBackend class names (#7846) | -| [eef87b995](https://github.com/apache/airflow/commit/eef87b9953347a65421f315a07dbef37ded9df66) | 2020-03-23 | [AIRFLOW-7105] Unify Secrets Backend method interfaces (#7830) | -| [cdf1809fc](https://github.com/apache/airflow/commit/cdf1809fce0e59c8379a799f1738d8d813abbf51) | 2020-03-23 | [AIRFLOW-7104] Add Secret backend for GCP Secrets Manager (#7795) | -| [a44beaf5b](https://github.com/apache/airflow/commit/a44beaf5bddae2a8de0429af45be5ff78a7d4d4e) | 2020-03-19 | [AIRFLOW-7076] Add support for HashiCorp Vault as Secrets Backend (#7741) | diff --git a/airflow/providers/hashicorp/README.md b/airflow/providers/hashicorp/README.md deleted file mode 100644 index 2a54b7fa0cf7b..0000000000000 --- a/airflow/providers/hashicorp/README.md +++ /dev/null @@ -1,165 +0,0 @@ - - - -# Package apache-airflow-providers-hashicorp - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Cross provider package dependencies](#cross-provider-package-dependencies) -- [Provider class summary](#provider-classes-summary) - - [Hooks](#hooks) - - [New hooks](#new-hooks) - - [Secrets](#secrets) - - [Moved secrets](#moved-secrets) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for `hashicorp` provider. All classes for this provider package -are in `airflow.providers.hashicorp` python package. - - - -## Installation - -NOTE! - -On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option -`--use-deprecated legacy-resolver` to your pip install command. 
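
For clarity, the two workarounds described above amount to the following shell commands (an illustrative sketch; the package name is the one this README covers):

```bash
# Option 1: downgrade pip so the pre-20.3 resolver is used
pip install --upgrade pip==20.2.4

# Option 2: keep pip 20.3 but opt out of the new resolver for this install
pip install --use-deprecated legacy-resolver apache-airflow-providers-hashicorp
```
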
- -You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-hashicorp` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| hvac | ~=0.10 | - -## Cross provider package dependencies - -Those are dependencies that might be needed in order to use all the features of the package. -You need to install the specified backport providers package in order to use them. - -You can install such cross-provider dependencies when installing from PyPI. For example: - -```bash -pip install apache-airflow-providers-hashicorp[google] -``` - -| Dependent package | Extra | -|:--------------------------------------------------------------------------------------------|:--------| -| [apache-airflow-providers-google](https://pypi.org/project/apache-airflow-providers-google) | google | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `hashicorp` provider -are in the `airflow.providers.hashicorp` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Hooks - - -### New hooks - -| New Airflow 2.0 hooks: `airflow.providers.hashicorp` package | -|:------------------------------------------------------------------------------------------------------------------| -| [hooks.vault.VaultHook](https://github.com/apache/airflow/blob/master/airflow/providers/hashicorp/hooks/vault.py) | - - - -## Secrets - - - -### Moved secrets - -| Airflow 2.0 secrets: `airflow.providers.hashicorp` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:-------------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------| -| [secrets.vault.VaultBackend](https://github.com/apache/airflow/blob/master/airflow/providers/hashicorp/secrets/vault.py) | [contrib.secrets.hashicorp_vault.VaultBackend](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/secrets/hashicorp_vault.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [36a9b0f48](https://github.com/apache/airflow/commit/36a9b0f48baf4a8ef8fc02a450a279948a8c0f02) | 2020-11-20 | Fix the default value for VaultBackend's config_path (#12518) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| 
[7ca0b6f12](https://github.com/apache/airflow/commit/7ca0b6f121c9cec6e25de130f86a56d7c7fbe38c) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) (#12438) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [dd2442b1e](https://github.com/apache/airflow/commit/dd2442b1e66d4725e7193e0cab0548a4d8c71fbd) | 2020-11-02 | Vault with optional Variables or Connections (#11736) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [3867f7662](https://github.com/apache/airflow/commit/3867f7662559761864ec4e7be26b776c64c2f199) | 2020-08-28 | Update Google Cloud branding (#10615) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25 | Remove all "noinspection" comments native to IntelliJ (#10525) | -| 
[ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [2f31b3060](https://github.com/apache/airflow/commit/2f31b3060ed8274d5d1b1db7349ce607640b9199) | 2020-07-08 | Get Airflow configs with sensitive data from Secret Backends (#9645) | -| [44d4ae809](https://github.com/apache/airflow/commit/44d4ae809c1e3784ff95b6a5e95113c3412e56b3) | 2020-07-06 | Upgrade to latest pre-commit checks (#9686) | -| [a99aaeb49](https://github.com/apache/airflow/commit/a99aaeb49672e913d5ff79606237f6f3614fc8f5) | 2020-07-03 | Allow setting Hashicorp Vault token from File (#9644) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [df693e0e3](https://github.com/apache/airflow/commit/df693e0e3138f6601c4776cd529d8cb7bcde2f90) | 2020-06-19 | Add more authentication options for HashiCorp Vault classes (#8974) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [d47e070a7](https://github.com/apache/airflow/commit/d47e070a79b574cca043ca9c06f91d47eecb3040) | 2020-06-17 | Add HashiCorp Vault Hook (split-out from Vault secret backend) (#9333) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [d8cb0b5dd](https://github.com/apache/airflow/commit/d8cb0b5ddb02d194742e374d9ac90dd8231f6e80) | 2020-05-04 | Support k8s auth method in Vault Secrets provider (#8640) | -| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09 | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170) | -| [c1c88abfe](https://github.com/apache/airflow/commit/c1c88abfede7a36c3b1d1b511fbc6c03af46d363) | 2020-03-28 | Get Airflow Variables from Hashicorp Vault (#7944) | -| [eb4af4f94](https://github.com/apache/airflow/commit/eb4af4f944c77e67e167bbb6b0a2aaf075a95b50) | 2020-03-28 | Make BaseSecretsBackend.build_path generic (#7948) | -| [686d7d50b](https://github.com/apache/airflow/commit/686d7d50bd21622724d6818021355bc6885fd3de) | 2020-03-25 | Standardize SecretBackend class names (#7846) | -| [eef87b995](https://github.com/apache/airflow/commit/eef87b9953347a65421f315a07dbef37ded9df66) | 2020-03-23 | [AIRFLOW-7105] Unify Secrets Backend method interfaces (#7830) | -| [cdf1809fc](https://github.com/apache/airflow/commit/cdf1809fce0e59c8379a799f1738d8d813abbf51) | 2020-03-23 | [AIRFLOW-7104] Add Secret backend for GCP Secrets Manager (#7795) | -| [a44beaf5b](https://github.com/apache/airflow/commit/a44beaf5bddae2a8de0429af45be5ff78a7d4d4e) | 2020-03-19 | [AIRFLOW-7076] Add support for HashiCorp Vault as Secrets Backend (#7741) | diff --git a/airflow/providers/http/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/http/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index 27a86dd0bbafd..0000000000000 --- a/airflow/providers/http/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,28 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [40bf8f28f](https://github.com/apache/airflow/commit/40bf8f28f97f17f40d993d207ea740eba54593ee) | 2020-06-18 | Detect automatically the lack of reference to the guide in the operator descriptions (#9290) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| 
[0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [249e80b96](https://github.com/apache/airflow/commit/249e80b960ab3453763903493bbb77651be9073b) | 2020-04-30 | Add http system test (#8591) | -| [ddd005e3b](https://github.com/apache/airflow/commit/ddd005e3b97e82ce715dc6604ff60ed5768de6ea) | 2020-04-18 | [AIRFLOW-5156] Fixed doc strigns for HttpHook (#8434) | -| [d61a476da](https://github.com/apache/airflow/commit/d61a476da3a649bf2c1d347b9cb3abc62eae3ce9) | 2020-04-18 | [AIRFLOW-5156] Added auth type to HttpHook (#8429) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [be2b2baa7](https://github.com/apache/airflow/commit/be2b2baa7c5f53c2d73646e4623cdb6731551b70) | 2020-03-23 | Add missing call to Super class in 'http', 'grpc' & 'slack' providers (#7826) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18 | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) | -| [f3ad5cf61](https://github.com/apache/airflow/commit/f3ad5cf6185b9d406d0fb0a4ecc0b5536f79217a) | 2020-02-03 | [AIRFLOW-4681] Make sensors module pylint compatible (#7309) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | diff --git a/airflow/providers/http/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/http/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 4281c138b3312..0000000000000 --- a/airflow/providers/http/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ 
/dev/null @@ -1,21 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [dc3a4938c](https://github.com/apache/airflow/commit/dc3a4938caa508f4a79985f5f6fa506adf4c29d4) | 2020-08-22 | Fix duplicate task_ids in example_http.py (#10485) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [ac93419d1](https://github.com/apache/airflow/commit/ac93419d1d15fb7779f5dc9cf30b2bca65d13b9e) | 2020-07-22 | Add response_filter parameter to SimpleHttpOperator (#9885) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/http/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/http/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index a4d4201e11f91..0000000000000 --- a/airflow/providers/http/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,10 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 
2020.10.29 | -| [3cddc1182](https://github.com/apache/airflow/commit/3cddc11821ff8f9ed0811384c0643f756a2b3dfa) | 2020-10-16 | Updated template_fields_rendereds for PostgresOperator and SimpleHttpOperator (#11555) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/http/BACKPORT_PROVIDER_CHANGES_2020.11.23.md b/airflow/providers/http/BACKPORT_PROVIDER_CHANGES_2020.11.23.md deleted file mode 100644 index e1091fefbcec4..0000000000000 --- a/airflow/providers/http/BACKPORT_PROVIDER_CHANGES_2020.11.23.md +++ /dev/null @@ -1,17 +0,0 @@ - - -### Release 2020.11.23 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------------------------------| -| [19b7e4565](https://github.com/apache/airflow/commit/19b7e4565e6372d50ef0fbb5678a484a7afbdbee) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [badd89067](https://github.com/apache/airflow/commit/badd890675d3cb3dfc088bff6a1d73dfdc275f31) | 2020-11-09 | Extend the same keyword args callable support in PythonOperator to some other sensors/operators (#11922) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | diff --git a/airflow/providers/http/BACKPORT_PROVIDER_README.md b/airflow/providers/http/BACKPORT_PROVIDER_README.md deleted file mode 100644 index 0fcc61484cb13..0000000000000 --- a/airflow/providers/http/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,175 +0,0 @@ - - - -# Package apache-airflow-backport-providers-http - -Release: 2020.11.23 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [Provider class 
summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Sensors](#sensors) - - [Moved sensors](#moved-sensors) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 2020.11.23](#release-20201123) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for `http` provider. All classes for this provider package -are in `airflow.providers.http` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-http` - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `http` provider -are in the `airflow.providers.http` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.http` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------| -| [operators.http.SimpleHttpOperator](https://github.com/apache/airflow/blob/master/airflow/providers/http/operators/http.py) | [operators.http_operator.SimpleHttpOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/http_operator.py) | - - -## Sensors - - - -### Moved sensors - -| Airflow 2.0 sensors: `airflow.providers.http` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------| -| [sensors.http.HttpSensor](https://github.com/apache/airflow/blob/master/airflow/providers/http/sensors/http.py) | [sensors.http_sensor.HttpSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/sensors/http_sensor.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.http` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------| -| [hooks.http.HttpHook](https://github.com/apache/airflow/blob/master/airflow/providers/http/hooks/http.py) | [hooks.http_hook.HttpHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/http_hook.py) | - - - -## Releases - -### Release 2020.11.23 - -| Commit | Committed | Subject | 
-|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------------------------------| -| [19b7e4565](https://github.com/apache/airflow/commit/19b7e4565e6372d50ef0fbb5678a484a7afbdbee) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [badd89067](https://github.com/apache/airflow/commit/badd890675d3cb3dfc088bff6a1d73dfdc275f31) | 2020-11-09 | Extend the same keyword args callable support in PythonOperator to some other sensors/operators (#11922) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [3cddc1182](https://github.com/apache/airflow/commit/3cddc11821ff8f9ed0811384c0643f756a2b3dfa) | 2020-10-16 | Updated template_fields_rendereds for PostgresOperator and SimpleHttpOperator (#11555) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| 
[5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [dc3a4938c](https://github.com/apache/airflow/commit/dc3a4938caa508f4a79985f5f6fa506adf4c29d4) | 2020-08-22 | Fix duplicate task_ids in example_http.py (#10485) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [ac93419d1](https://github.com/apache/airflow/commit/ac93419d1d15fb7779f5dc9cf30b2bca65d13b9e) | 2020-07-22 | Add response_filter parameter to SimpleHttpOperator (#9885) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [40bf8f28f](https://github.com/apache/airflow/commit/40bf8f28f97f17f40d993d207ea740eba54593ee) | 2020-06-18 | Detect automatically the lack of reference to the guide in the operator descriptions (#9290) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 
release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [249e80b96](https://github.com/apache/airflow/commit/249e80b960ab3453763903493bbb77651be9073b) | 2020-04-30 | Add http system test (#8591) | -| [ddd005e3b](https://github.com/apache/airflow/commit/ddd005e3b97e82ce715dc6604ff60ed5768de6ea) | 2020-04-18 | [AIRFLOW-5156] Fixed doc strigns for HttpHook (#8434) | -| [d61a476da](https://github.com/apache/airflow/commit/d61a476da3a649bf2c1d347b9cb3abc62eae3ce9) | 2020-04-18 | [AIRFLOW-5156] Added auth type to HttpHook (#8429) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [be2b2baa7](https://github.com/apache/airflow/commit/be2b2baa7c5f53c2d73646e4623cdb6731551b70) | 2020-03-23 | Add missing call to Super class in 'http', 'grpc' & 'slack' providers (#7826) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18 | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) | -| [f3ad5cf61](https://github.com/apache/airflow/commit/f3ad5cf6185b9d406d0fb0a4ecc0b5536f79217a) | 2020-02-03 | [AIRFLOW-4681] Make sensors module pylint compatible (#7309) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | diff --git a/airflow/providers/http/CHANGELOG.rst b/airflow/providers/http/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/http/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. 
See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. diff --git a/airflow/providers/http/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/http/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index fc6ef37983d7f..0000000000000 --- a/airflow/providers/http/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,65 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [c1cd50465](https://github.com/apache/airflow/commit/c1cd50465c5473bc817fded5eeb4c425a0529ae5) | 2020-12-05 | Add 'headers' to template_fields in HttpSensor (#12809) | -| [bd90136aa](https://github.com/apache/airflow/commit/bd90136aaf5035e3234fe545b79a3e4aad21efe2) | 2020-11-30 | Move operator guides to provider documentation packages (#12681) | -| [370e7d07d](https://github.com/apache/airflow/commit/370e7d07d1ed1a53b73fe878425fdcd4c71a7ed1) | 2020-11-21 | Fix Python Docstring parameters (#12513) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [7ca0b6f12](https://github.com/apache/airflow/commit/7ca0b6f121c9cec6e25de130f86a56d7c7fbe38c) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) (#12438) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [badd89067](https://github.com/apache/airflow/commit/badd890675d3cb3dfc088bff6a1d73dfdc275f31) | 
2020-11-09 | Extend the same keyword args callable support in PythonOperator to some other sensors/operators (#11922) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [3cddc1182](https://github.com/apache/airflow/commit/3cddc11821ff8f9ed0811384c0643f756a2b3dfa) | 2020-10-16 | Updated template_fields_rendereds for PostgresOperator and SimpleHttpOperator (#11555) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [dc3a4938c](https://github.com/apache/airflow/commit/dc3a4938caa508f4a79985f5f6fa506adf4c29d4) | 2020-08-22 | Fix duplicate task_ids in example_http.py (#10485) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| 
[33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [ac93419d1](https://github.com/apache/airflow/commit/ac93419d1d15fb7779f5dc9cf30b2bca65d13b9e) | 2020-07-22 | Add response_filter parameter to SimpleHttpOperator (#9885) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [40bf8f28f](https://github.com/apache/airflow/commit/40bf8f28f97f17f40d993d207ea740eba54593ee) | 2020-06-18 | Detect automatically the lack of reference to the guide in the operator descriptions (#9290) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [249e80b96](https://github.com/apache/airflow/commit/249e80b960ab3453763903493bbb77651be9073b) | 2020-04-30 | Add http system test (#8591) | -| [ddd005e3b](https://github.com/apache/airflow/commit/ddd005e3b97e82ce715dc6604ff60ed5768de6ea) | 2020-04-18 | [AIRFLOW-5156] Fixed doc strigns for HttpHook (#8434) | -| [d61a476da](https://github.com/apache/airflow/commit/d61a476da3a649bf2c1d347b9cb3abc62eae3ce9) | 2020-04-18 | [AIRFLOW-5156] Added auth type to HttpHook (#8429) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [be2b2baa7](https://github.com/apache/airflow/commit/be2b2baa7c5f53c2d73646e4623cdb6731551b70) | 2020-03-23 | Add missing call to Super class in 'http', 'grpc' & 'slack' providers (#7826) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| 
[4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18 | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) | -| [f3ad5cf61](https://github.com/apache/airflow/commit/f3ad5cf6185b9d406d0fb0a4ecc0b5536f79217a) | 2020-02-03 | [AIRFLOW-4681] Make sensors module pylint compatible (#7309) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) |
diff --git a/airflow/providers/http/README.md b/airflow/providers/http/README.md deleted file mode 100644 index 2068fb9217341..0000000000000 --- a/airflow/providers/http/README.md +++ /dev/null @@ -1,164 +0,0 @@ - - - -# Package apache-airflow-providers-http - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Sensors](#sensors) - - [Moved sensors](#moved-sensors) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for the `http` provider. All classes for this provider package -are in the `airflow.providers.http` Python package. - - - -## Installation - -NOTE! - -In November 2020, a new version of pip (20.3) was released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation, depending on your choice -of extras. In order to install Airflow, you need to either downgrade pip to version 20.2.4 -(`pip install --upgrade pip==20.2.4`) or, if you use pip 20.3, add the option -`--use-deprecated legacy-resolver` to your pip install command. - -You can install this package on top of an existing Airflow 2.* installation via -`pip install apache-airflow-providers-http` - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `http` provider -are in the `airflow.providers.http` package.
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.http` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------| -| [operators.http.SimpleHttpOperator](https://github.com/apache/airflow/blob/master/airflow/providers/http/operators/http.py) | [operators.http_operator.SimpleHttpOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/http_operator.py) | - - -## Sensors - - - -### Moved sensors - -| Airflow 2.0 sensors: `airflow.providers.http` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------| -| [sensors.http.HttpSensor](https://github.com/apache/airflow/blob/master/airflow/providers/http/sensors/http.py) | [sensors.http_sensor.HttpSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/sensors/http_sensor.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.http` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------| -| [hooks.http.HttpHook](https://github.com/apache/airflow/blob/master/airflow/providers/http/hooks/http.py) | [hooks.http_hook.HttpHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/http_hook.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [c1cd50465](https://github.com/apache/airflow/commit/c1cd50465c5473bc817fded5eeb4c425a0529ae5) | 2020-12-05 | Add 'headers' to template_fields in HttpSensor (#12809) | -| [bd90136aa](https://github.com/apache/airflow/commit/bd90136aaf5035e3234fe545b79a3e4aad21efe2) | 2020-11-30 | Move operator guides to provider documentation packages (#12681) | -| [370e7d07d](https://github.com/apache/airflow/commit/370e7d07d1ed1a53b73fe878425fdcd4c71a7ed1) | 2020-11-21 | Fix Python Docstring parameters (#12513) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| 
[008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [7ca0b6f12](https://github.com/apache/airflow/commit/7ca0b6f121c9cec6e25de130f86a56d7c7fbe38c) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) (#12438) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [badd89067](https://github.com/apache/airflow/commit/badd890675d3cb3dfc088bff6a1d73dfdc275f31) | 2020-11-09 | Extend the same keyword args callable support in PythonOperator to some other sensors/operators (#11922) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [3cddc1182](https://github.com/apache/airflow/commit/3cddc11821ff8f9ed0811384c0643f756a2b3dfa) | 2020-10-16 | Updated template_fields_rendereds for PostgresOperator and SimpleHttpOperator (#11555) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| 
[3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [dc3a4938c](https://github.com/apache/airflow/commit/dc3a4938caa508f4a79985f5f6fa506adf4c29d4) | 2020-08-22 | Fix duplicate task_ids in example_http.py (#10485) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [ac93419d1](https://github.com/apache/airflow/commit/ac93419d1d15fb7779f5dc9cf30b2bca65d13b9e) | 2020-07-22 | Add response_filter parameter to SimpleHttpOperator (#9885) | -| [4d74ac211](https://github.com/apache/airflow/commit/4d74ac2111862186598daf92cbf2c525617061c2) | 2020-07-19 | Increase typing for Apache and http provider package (#9729) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [40bf8f28f](https://github.com/apache/airflow/commit/40bf8f28f97f17f40d993d207ea740eba54593ee) | 2020-06-18 | Detect automatically the lack of reference to the guide in the operator descriptions (#9290) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [249e80b96](https://github.com/apache/airflow/commit/249e80b960ab3453763903493bbb77651be9073b) | 2020-04-30 | Add http system test (#8591) | -| [ddd005e3b](https://github.com/apache/airflow/commit/ddd005e3b97e82ce715dc6604ff60ed5768de6ea) | 2020-04-18 | [AIRFLOW-5156] Fixed doc strigns for HttpHook (#8434) | -| [d61a476da](https://github.com/apache/airflow/commit/d61a476da3a649bf2c1d347b9cb3abc62eae3ce9) | 2020-04-18 | [AIRFLOW-5156] Added auth type to HttpHook (#8429) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [be2b2baa7](https://github.com/apache/airflow/commit/be2b2baa7c5f53c2d73646e4623cdb6731551b70) | 2020-03-23 | Add missing call to Super class in 'http', 'grpc' & 'slack' providers (#7826) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18 | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) | -| [f3ad5cf61](https://github.com/apache/airflow/commit/f3ad5cf6185b9d406d0fb0a4ecc0b5536f79217a) | 2020-02-03 | [AIRFLOW-4681] Make sensors module pylint compatible (#7309) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | diff --git a/airflow/providers/imap/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/imap/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index 9b803bb880084..0000000000000 --- a/airflow/providers/imap/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,21 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | 
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02 | [AIRFLOW-6708] Set unique logger names (#7330) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | diff --git a/airflow/providers/imap/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/imap/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 61a09ad36a3bc..0000000000000 --- a/airflow/providers/imap/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,17 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| 
[ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [08dfd8cd0](https://github.com/apache/airflow/commit/08dfd8cd00dae2d7aad53018af04428d933b1ceb) | 2020-09-25 | Increase Type coverage for IMAP provider (#11154) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [0aff69fbd](https://github.com/apache/airflow/commit/0aff69fbd2f5a09c51f5b503ebf1bb72a26d3290) | 2020-07-27 | Add typing to ImapHook (#9887) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/imap/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/imap/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 528a7e085f91f..0000000000000 --- a/airflow/providers/imap/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,10 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/imap/BACKPORT_PROVIDER_README.md b/airflow/providers/imap/BACKPORT_PROVIDER_README.md deleted file mode 100644 index 0dc6054e71138..0000000000000 --- a/airflow/providers/imap/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,133 +0,0 @@ - - - -# Package apache-airflow-backport-providers-imap - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [Provider class summary](#provider-classes-summary) - - [Sensors](#sensors) - - [Moved 
sensors](#moved-sensors) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for the `imap` provider. All classes for this provider package -are in the `airflow.providers.imap` Python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+, you need to upgrade Python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing Airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-imap` - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `imap` provider -are in the `airflow.providers.imap` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Sensors - - - -### Moved sensors - -| Airflow 2.0 sensors: `airflow.providers.imap` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [sensors.imap_attachment.ImapAttachmentSensor](https://github.com/apache/airflow/blob/master/airflow/providers/imap/sensors/imap_attachment.py) | [contrib.sensors.imap_attachment_sensor.ImapAttachmentSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/imap_attachment_sensor.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.imap` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------| -| [hooks.imap.ImapHook](https://github.com/apache/airflow/blob/master/airflow/providers/imap/hooks/imap.py) | [contrib.hooks.imap_hook.ImapHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/imap_hook.py) | - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed |
Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [08dfd8cd0](https://github.com/apache/airflow/commit/08dfd8cd00dae2d7aad53018af04428d933b1ceb) | 2020-09-25 | Increase Type coverage for IMAP provider (#11154) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [0aff69fbd](https://github.com/apache/airflow/commit/0aff69fbd2f5a09c51f5b503ebf1bb72a26d3290) | 2020-07-27 | Add typing to ImapHook (#9887) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02 | [AIRFLOW-6708] Set unique logger names (#7330) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | diff --git a/airflow/providers/imap/CHANGELOG.rst b/airflow/providers/imap/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/imap/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. 
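For reference, here is a minimal sketch of what the contrib-to-providers import moves recorded in the imap tables above look like in DAG code. It is illustrative only, not part of this diff: it assumes an Airflow 2.0 environment with `apache-airflow-providers-imap` installed and an `imap_default` connection configured, and the DAG id, task id, and attachment name are hypothetical.

```python
# Minimal sketch (not from this diff): importing the imap provider classes
# from their Airflow 2.0 locations instead of the old airflow.contrib paths.
from airflow import DAG
from airflow.providers.imap.hooks.imap import ImapHook
from airflow.providers.imap.sensors.imap_attachment import ImapAttachmentSensor
from airflow.utils.dates import days_ago

with DAG(dag_id="imap_provider_example", start_date=days_ago(1), schedule_interval=None) as dag:
    # Wait until a mail attachment with the given (hypothetical) name arrives.
    wait_for_report = ImapAttachmentSensor(
        task_id="wait_for_report",
        imap_attachment_name="report.csv",
        conn_id="imap_default",  # assumed connection id
    )


def mailbox_has_report() -> bool:
    # The hook can also be used directly; it is a context manager that opens
    # and closes the IMAP connection around the call.
    with ImapHook(imap_conn_id="imap_default") as hook:
        return hook.has_mail_attachment(name="report.csv")
```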
diff --git a/airflow/providers/imap/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/imap/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index 585bf18bb3e18..0000000000000 --- a/airflow/providers/imap/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,50 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| 
[0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [08dfd8cd0](https://github.com/apache/airflow/commit/08dfd8cd00dae2d7aad53018af04428d933b1ceb) | 2020-09-25 | Increase Type coverage for IMAP provider (#11154) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [0aff69fbd](https://github.com/apache/airflow/commit/0aff69fbd2f5a09c51f5b503ebf1bb72a26d3290) | 2020-07-27 | Add typing to ImapHook (#9887) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02 | [AIRFLOW-6708] Set unique logger names (#7330) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) |
diff --git a/airflow/providers/imap/README.md b/airflow/providers/imap/README.md deleted file mode 100644 index d56d79d11f97d..0000000000000 --- a/airflow/providers/imap/README.md +++ /dev/null @@ -1,136 +0,0 @@ - - - -# Package apache-airflow-providers-imap - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) - -- [Installation](#installation) -- [Provider class summary](#provider-classes-summary) - - [Sensors](#sensors) - - [Moved sensors](#moved-sensors) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for the `imap` provider. All classes for this provider package -are in the `airflow.providers.imap` Python package. - - - -## Installation - -NOTE! - -In November 2020, a new version of pip (20.3) was released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation, depending on your choice -of extras. In order to install Airflow, you need to either downgrade pip to version 20.2.4 -(`pip install --upgrade pip==20.2.4`) or, if you use pip 20.3, add the option -`--use-deprecated legacy-resolver` to your pip install command. - -You can install this package on top of an existing Airflow 2.* installation via -`pip install apache-airflow-providers-imap` - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `imap` provider -are in the `airflow.providers.imap` package.
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Sensors - - - -### Moved sensors - -| Airflow 2.0 sensors: `airflow.providers.imap` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [sensors.imap_attachment.ImapAttachmentSensor](https://github.com/apache/airflow/blob/master/airflow/providers/imap/sensors/imap_attachment.py) | [contrib.sensors.imap_attachment_sensor.ImapAttachmentSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/imap_attachment_sensor.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.imap` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------| -| [hooks.imap.ImapHook](https://github.com/apache/airflow/blob/master/airflow/providers/imap/hooks/imap.py) | [contrib.hooks.imap_hook.ImapHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/imap_hook.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages 
(#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [08dfd8cd0](https://github.com/apache/airflow/commit/08dfd8cd00dae2d7aad53018af04428d933b1ceb) | 2020-09-25 | Increase Type coverage for IMAP provider (#11154) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [0aff69fbd](https://github.com/apache/airflow/commit/0aff69fbd2f5a09c51f5b503ebf1bb72a26d3290) | 2020-07-27 | Add typing to ImapHook (#9887) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| 
[c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02 | [AIRFLOW-6708] Set unique logger names (#7330) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | diff --git a/airflow/providers/jdbc/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/jdbc/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index b6645a77b819f..0000000000000 --- a/airflow/providers/jdbc/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,20 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) 
| -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [ca8857bea](https://github.com/apache/airflow/commit/ca8857beaaffacc584db8198d0fa90473533549c) | 2020-03-23 | Change from Instance attribute to variable in JdbcOperator.execute (#7819) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | diff --git a/airflow/providers/jdbc/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/jdbc/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index ae58ec15d1bda..0000000000000 --- a/airflow/providers/jdbc/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,17 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [54353f874](https://github.com/apache/airflow/commit/54353f874589f9be236458995147d13e0e763ffc) | 2020-09-27 | Increase type coverage for five different providers (#11170) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [9dd8310dc](https://github.com/apache/airflow/commit/9dd8310dc12a56cef04cc31d02802422106918c6) | 2020-09-19 | Increase typing coverage JDBC provider (#11021) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix 
typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/jdbc/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/jdbc/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index b7467a0db0db0..0000000000000 --- a/airflow/providers/jdbc/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,9 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/jdbc/BACKPORT_PROVIDER_README.md b/airflow/providers/jdbc/BACKPORT_PROVIDER_README.md deleted file mode 100644 index d952876801fb6..0000000000000 --- a/airflow/providers/jdbc/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,138 +0,0 @@ - - - -# Package apache-airflow-backport-providers-jdbc - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for the `jdbc` provider. All classes for this provider package -are in the `airflow.providers.jdbc` Python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+, you need to upgrade to Python 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing Airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-jdbc` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| jaydebeapi | >=1.1.1 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `jdbc` provider -are in the `airflow.providers.jdbc` package.
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.jdbc` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------| -| [operators.jdbc.JdbcOperator](https://github.com/apache/airflow/blob/master/airflow/providers/jdbc/operators/jdbc.py) | [operators.jdbc_operator.JdbcOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/jdbc_operator.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.jdbc` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------| -| [hooks.jdbc.JdbcHook](https://github.com/apache/airflow/blob/master/airflow/providers/jdbc/hooks/jdbc.py) | [hooks.jdbc_hook.JdbcHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/jdbc_hook.py) | - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [54353f874](https://github.com/apache/airflow/commit/54353f874589f9be236458995147d13e0e763ffc) | 2020-09-27 | Increase type coverage for five different providers (#11170) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [9dd8310dc](https://github.com/apache/airflow/commit/9dd8310dc12a56cef04cc31d02802422106918c6) | 2020-09-19 | Increase typing coverage JDBC provider (#11021) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| 
[3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [ca8857bea](https://github.com/apache/airflow/commit/ca8857beaaffacc584db8198d0fa90473533549c) | 2020-03-23 | Change from Instance attribute to variable in JdbcOperator.execute (#7819) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | diff --git a/airflow/providers/jdbc/CHANGELOG.rst b/airflow/providers/jdbc/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/jdbc/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. 
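The moved-operators and moved-hooks tables in the jdbc README above boil down to an import-path change: the class names stay the same, only the modules move. A minimal sketch of the before-and-after imports, assuming the provider's default `jdbc_default` connection id and a purely illustrative query:

```python
# Airflow 1.10.* import locations (the "previous location" column above):
# from airflow.operators.jdbc_operator import JdbcOperator
# from airflow.hooks.jdbc_hook import JdbcHook

# Airflow 2.0 / backport-package import locations:
from airflow.providers.jdbc.hooks.jdbc import JdbcHook
from airflow.providers.jdbc.operators.jdbc import JdbcOperator

# Illustrative usage only -- the connection id and SQL are assumptions,
# not taken from the provider documentation.
run_check = JdbcOperator(
    task_id="run_check",
    jdbc_conn_id="jdbc_default",
    sql="SELECT 1",
)
rows = JdbcHook(jdbc_conn_id="jdbc_default").get_records("SELECT 1")
```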
diff --git a/airflow/providers/jdbc/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/jdbc/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index c5b55c0d22b36..0000000000000 --- a/airflow/providers/jdbc/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,52 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [d5589673a](https://github.com/apache/airflow/commit/d5589673a95aaced0b851ea0a4061a010a924a82) | 2020-12-08 | Move dummy_operator.py to dummy.py (#11178) (#11293) | -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [bd90136aa](https://github.com/apache/airflow/commit/bd90136aaf5035e3234fe545b79a3e4aad21efe2) | 2020-11-30 | Move operator guides to provider documentation packages (#12681) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [a7ad20455](https://github.com/apache/airflow/commit/a7ad20455cdcaa5ec851b7688c3126a904574742) | 2020-10-29 | Rename example JDBC dag (#11946) | -| [ba9c044d2](https://github.com/apache/airflow/commit/ba9c044d20ff784630a09eecc0a30029b0f5e199) | 2020-10-29 | Add How-to guide for JDBC Operator (#11472) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) 
| -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [54353f874](https://github.com/apache/airflow/commit/54353f874589f9be236458995147d13e0e763ffc) | 2020-09-27 | Increase type coverage for five different providers (#11170) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [9dd8310dc](https://github.com/apache/airflow/commit/9dd8310dc12a56cef04cc31d02802422106918c6) | 2020-09-19 | Increase typing coverage JDBC provider (#11021) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [ca8857bea](https://github.com/apache/airflow/commit/ca8857beaaffacc584db8198d0fa90473533549c) | 2020-03-23 | Change from Instance attribute to variable in JdbcOperator.execute (#7819) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | diff --git a/airflow/providers/jdbc/README.md b/airflow/providers/jdbc/README.md deleted file mode 100644 index 0cb8b08782700..0000000000000 --- a/airflow/providers/jdbc/README.md +++ /dev/null @@ -1,145 +0,0 @@ - - - -# Package apache-airflow-providers-jdbc - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) - [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for the `jdbc` provider. All classes for this provider package -are in the `airflow.providers.jdbc` Python package. - - - -## Installation - -NOTE! - -In November 2020, a new version of pip (20.3) was released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors during installation, depending on your choice -of extras. To install Airflow you need to either downgrade pip to version 20.2.4 -(`pip install --upgrade pip==20.2.4`) or, if you use pip 20.3, add the option -`--use-deprecated legacy-resolver` to your `pip install` command. - -You can install this package on top of an existing Airflow 2.* installation via -`pip install apache-airflow-providers-jdbc` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| jaydebeapi | >=1.1.1 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `jdbc` provider -are in the `airflow.providers.jdbc` package.
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.jdbc` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------| -| [operators.jdbc.JdbcOperator](https://github.com/apache/airflow/blob/master/airflow/providers/jdbc/operators/jdbc.py) | [operators.jdbc_operator.JdbcOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/jdbc_operator.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.jdbc` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------| -| [hooks.jdbc.JdbcHook](https://github.com/apache/airflow/blob/master/airflow/providers/jdbc/hooks/jdbc.py) | [hooks.jdbc_hook.JdbcHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/jdbc_hook.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [d5589673a](https://github.com/apache/airflow/commit/d5589673a95aaced0b851ea0a4061a010a924a82) | 2020-12-08 | Move dummy_operator.py to dummy.py (#11178) (#11293) | -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [bd90136aa](https://github.com/apache/airflow/commit/bd90136aaf5035e3234fe545b79a3e4aad21efe2) | 2020-11-30 | Move operator guides to provider documentation packages (#12681) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| 
[85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [a7ad20455](https://github.com/apache/airflow/commit/a7ad20455cdcaa5ec851b7688c3126a904574742) | 2020-10-29 | Rename example JDBC dag (#11946) | -| [ba9c044d2](https://github.com/apache/airflow/commit/ba9c044d20ff784630a09eecc0a30029b0f5e199) | 2020-10-29 | Add How-to guide for JDBC Operator (#11472) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [54353f874](https://github.com/apache/airflow/commit/54353f874589f9be236458995147d13e0e763ffc) | 2020-09-27 | Increase type coverage for five different providers (#11170) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [9dd8310dc](https://github.com/apache/airflow/commit/9dd8310dc12a56cef04cc31d02802422106918c6) | 2020-09-19 | Increase typing coverage JDBC provider (#11021) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| 
[d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [ca8857bea](https://github.com/apache/airflow/commit/ca8857beaaffacc584db8198d0fa90473533549c) | 2020-03-23 | Change from Instance attribute to variable in JdbcOperator.execute (#7819) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | diff --git a/airflow/providers/jenkins/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/jenkins/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index 3fb285d134cec..0000000000000 --- a/airflow/providers/jenkins/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,24 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 
2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [dbcd3d878](https://github.com/apache/airflow/commit/dbcd3d8787741fd8203b6d9bdbc5d1da4b10a15b) | 2020-02-18 | [AIRFLOW-6804] Add the basic test for all example DAGs (#7419) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [633eca1de](https://github.com/apache/airflow/commit/633eca1de5042e95e23aaf2e7680ed3106cb0e87) | 2020-02-02 | [AIRFLOW-6692] Generate excluded_patterns in docs/conf.py (#7304) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [ceea293c1](https://github.com/apache/airflow/commit/ceea293c1652240e7e856c201e4341a87ef97a0f) | 2020-01-28 | [AIRFLOW-6656] Fix AIP-21 moving (#7272) | diff --git a/airflow/providers/jenkins/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/jenkins/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 7dd344d54503c..0000000000000 --- a/airflow/providers/jenkins/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,17 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| 
[ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22 | Replace assigment with Augmented assignment (#10468) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [73a08ed75](https://github.com/apache/airflow/commit/73a08ed757bf9f2af27cfca913200b61528a2d80) | 2020-08-04 | Add type annotation to providers/jenkins (#9947) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/jenkins/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/jenkins/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 528a7e085f91f..0000000000000 --- a/airflow/providers/jenkins/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,10 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/jenkins/BACKPORT_PROVIDER_README.md b/airflow/providers/jenkins/BACKPORT_PROVIDER_README.md deleted file mode 100644 index aa44b72549cf9..0000000000000 --- a/airflow/providers/jenkins/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,143 +0,0 @@ - - - -# Package apache-airflow-backport-providers-jenkins - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [PIP 
requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for the `jenkins` provider. All classes for this provider package -are in the `airflow.providers.jenkins` Python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+, you need to upgrade to Python 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing Airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-jenkins` - -## PIP requirements - -| PIP package | Version required | -|:---------------|:-------------------| -| python-jenkins | >=1.0.0 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `jenkins` provider -are in the `airflow.providers.jenkins` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.jenkins` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.jenkins_job_trigger.JenkinsJobTriggerOperator](https://github.com/apache/airflow/blob/master/airflow/providers/jenkins/operators/jenkins_job_trigger.py) | [contrib.operators.jenkins_job_trigger_operator.JenkinsJobTriggerOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/jenkins_job_trigger_operator.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.jenkins` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------| -| [hooks.jenkins.JenkinsHook](https://github.com/apache/airflow/blob/master/airflow/providers/jenkins/hooks/jenkins.py) | [contrib.hooks.jenkins_hook.JenkinsHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/jenkins_hook.py) | - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check
(#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22 | Replace assigment with Augmented assignment (#10468) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [73a08ed75](https://github.com/apache/airflow/commit/73a08ed757bf9f2af27cfca913200b61528a2d80) | 2020-08-04 | Add type annotation to providers/jenkins (#9947) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| 
[00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [dbcd3d878](https://github.com/apache/airflow/commit/dbcd3d8787741fd8203b6d9bdbc5d1da4b10a15b) | 2020-02-18 | [AIRFLOW-6804] Add the basic test for all example DAGs (#7419) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [633eca1de](https://github.com/apache/airflow/commit/633eca1de5042e95e23aaf2e7680ed3106cb0e87) | 2020-02-02 | [AIRFLOW-6692] Generate excluded_patterns in docs/conf.py (#7304) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [ceea293c1](https://github.com/apache/airflow/commit/ceea293c1652240e7e856c201e4341a87ef97a0f) | 2020-01-28 | [AIRFLOW-6656] Fix AIP-21 moving (#7272) | diff --git a/airflow/providers/jenkins/CHANGELOG.rst b/airflow/providers/jenkins/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/jenkins/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. 
See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. diff --git a/airflow/providers/jenkins/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/jenkins/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index e092fbd100a2f..0000000000000 --- a/airflow/providers/jenkins/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,55 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [a83be6684](https://github.com/apache/airflow/commit/a83be668402884efe2e40af452dac17750d3c48d) | 2020-11-06 | Replace conditional with builtin max (#12122) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [8afdb6ac6](https://github.com/apache/airflow/commit/8afdb6ac6a7997cb14806bc2734c81c00ed8da97) | 2020-10-26 | Fix spellings (#11825) | -| 
[872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22 | Replace assigment with Augmented assignment (#10468) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [73a08ed75](https://github.com/apache/airflow/commit/73a08ed757bf9f2af27cfca913200b61528a2d80) | 2020-08-04 | Add type annotation to providers/jenkins (#9947) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [dbcd3d878](https://github.com/apache/airflow/commit/dbcd3d8787741fd8203b6d9bdbc5d1da4b10a15b) | 2020-02-18 | [AIRFLOW-6804] Add the basic test for all example DAGs (#7419) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [633eca1de](https://github.com/apache/airflow/commit/633eca1de5042e95e23aaf2e7680ed3106cb0e87) | 2020-02-02 | [AIRFLOW-6692] Generate excluded_patterns in docs/conf.py (#7304) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [ceea293c1](https://github.com/apache/airflow/commit/ceea293c1652240e7e856c201e4341a87ef97a0f) | 2020-01-28 | [AIRFLOW-6656] Fix AIP-21 moving (#7272) | diff --git a/airflow/providers/jenkins/README.md b/airflow/providers/jenkins/README.md deleted file mode 100644 index 4551001fdf1ab..0000000000000 --- a/airflow/providers/jenkins/README.md +++ /dev/null @@ -1,148 +0,0 @@ - - - -# Package apache-airflow-providers-jenkins - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for the `jenkins` provider. All classes for this provider package -are in the `airflow.providers.jenkins` python package. - - - -## Installation - -NOTE! - -In November 2020, a new version of PIP (20.3) was released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation, depending on your choice -of extras. 
In order to install Airflow you need to either downgrade pip to version 20.2.4 -`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option -`--use-deprecated legacy-resolver` to your pip install command. - -You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-jenkins` - -## PIP requirements - -| PIP package | Version required | -|:---------------|:-------------------| -| python-jenkins | >=1.0.0 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `jenkins` provider -are in the `airflow.providers.jenkins` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.jenkins` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.jenkins_job_trigger.JenkinsJobTriggerOperator](https://github.com/apache/airflow/blob/master/airflow/providers/jenkins/operators/jenkins_job_trigger.py) | [contrib.operators.jenkins_job_trigger_operator.JenkinsJobTriggerOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/jenkins_job_trigger_operator.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.jenkins` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------| -| [hooks.jenkins.JenkinsHook](https://github.com/apache/airflow/blob/master/airflow/providers/jenkins/hooks/jenkins.py) | [contrib.hooks.jenkins_hook.JenkinsHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/jenkins_hook.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| 
[ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [a83be6684](https://github.com/apache/airflow/commit/a83be668402884efe2e40af452dac17750d3c48d) | 2020-11-06 | Replace conditional with builtin max (#12122) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [8afdb6ac6](https://github.com/apache/airflow/commit/8afdb6ac6a7997cb14806bc2734c81c00ed8da97) | 2020-10-26 | Fix spellings (#11825) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| 
[7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22 | Replace assigment with Augmented assignment (#10468) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [73a08ed75](https://github.com/apache/airflow/commit/73a08ed757bf9f2af27cfca913200b61528a2d80) | 2020-08-04 | Add type annotation to providers/jenkins (#9947) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [dbcd3d878](https://github.com/apache/airflow/commit/dbcd3d8787741fd8203b6d9bdbc5d1da4b10a15b) | 2020-02-18 | [AIRFLOW-6804] Add the basic test for all example DAGs (#7419) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [633eca1de](https://github.com/apache/airflow/commit/633eca1de5042e95e23aaf2e7680ed3106cb0e87) | 2020-02-02 | [AIRFLOW-6692] Generate excluded_patterns in docs/conf.py (#7304) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [ceea293c1](https://github.com/apache/airflow/commit/ceea293c1652240e7e856c201e4341a87ef97a0f) | 2020-01-28 | [AIRFLOW-6656] Fix AIP-21 moving (#7272) | diff --git a/airflow/providers/jira/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/jira/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index 97aa8095b72d9..0000000000000 --- a/airflow/providers/jira/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,19 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| 
[0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [f3ad5cf61](https://github.com/apache/airflow/commit/f3ad5cf6185b9d406d0fb0a4ecc0b5536f79217a) | 2020-02-03 | [AIRFLOW-4681] Make sensors module pylint compatible (#7309) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [4a21b6216](https://github.com/apache/airflow/commit/4a21b62161a8e14f0dbc06f292f4662832c52669) | 2019-12-13 | [AIRFLOW-5959][AIP-21] Move contrib/*/jira to providers (#6661) | diff --git a/airflow/providers/jira/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/jira/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index f4abc943e12c1..0000000000000 --- a/airflow/providers/jira/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,16 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| 
[aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [ba2d6408e](https://github.com/apache/airflow/commit/ba2d6408e64f219e8f53a20a5a149e3d8109db31) | 2020-07-29 | Add typing for jira provider (#10005) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/jira/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/jira/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index b7467a0db0db0..0000000000000 --- a/airflow/providers/jira/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,9 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/jira/BACKPORT_PROVIDER_README.md b/airflow/providers/jira/BACKPORT_PROVIDER_README.md deleted file mode 100644 index b0d9cee3cecee..0000000000000 --- a/airflow/providers/jira/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,150 +0,0 @@ - - - -# Package apache-airflow-backport-providers-jira - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Sensors](#sensors) - - [Moved sensors](#moved-sensors) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for `jira` provider. All classes for this provider package -are in `airflow.providers.jira` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-jira` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| JIRA | >1.0.7 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `jira` provider -are in the `airflow.providers.jira` package. 
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.jira` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.jira.JiraOperator](https://github.com/apache/airflow/blob/master/airflow/providers/jira/operators/jira.py) | [contrib.operators.jira_operator.JiraOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/jira_operator.py) | - - -## Sensors - - - -### Moved sensors - -| Airflow 2.0 sensors: `airflow.providers.jira` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------| -| [sensors.jira.JiraSensor](https://github.com/apache/airflow/blob/master/airflow/providers/jira/sensors/jira.py) | [contrib.sensors.jira_sensor.JiraSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/jira_sensor.py) | -| [sensors.jira.JiraTicketSensor](https://github.com/apache/airflow/blob/master/airflow/providers/jira/sensors/jira.py) | [contrib.sensors.jira_sensor.JiraTicketSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/jira_sensor.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.jira` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------| -| [hooks.jira.JiraHook](https://github.com/apache/airflow/blob/master/airflow/providers/jira/hooks/jira.py) | [contrib.hooks.jira_hook.JiraHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/jira_hook.py) | - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| 
[ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [ba2d6408e](https://github.com/apache/airflow/commit/ba2d6408e64f219e8f53a20a5a149e3d8109db31) | 2020-07-29 | Add typing for jira provider (#10005) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [f3ad5cf61](https://github.com/apache/airflow/commit/f3ad5cf6185b9d406d0fb0a4ecc0b5536f79217a) | 2020-02-03 | [AIRFLOW-4681] Make sensors module pylint compatible (#7309) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [4a21b6216](https://github.com/apache/airflow/commit/4a21b62161a8e14f0dbc06f292f4662832c52669) | 2019-12-13 | [AIRFLOW-5959][AIP-21] Move contrib/*/jira to providers (#6661) | diff --git a/airflow/providers/jira/CHANGELOG.rst b/airflow/providers/jira/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/jira/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. 
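(Aside: the installation note in the provider READMEs removed by this patch — see the `apache-airflow-providers-jenkins` README above — describes two ways around the pip 20.3 resolver issue. A minimal sketch of both, assuming a POSIX shell; the version pin and the `--use-deprecated legacy-resolver` flag are exactly those quoted in the removed text.)

```bash
# Option 1: downgrade pip to 20.2.4, the last release before the 2020 resolver
# became the default, then install the provider on top of an Airflow 2.* installation.
pip install --upgrade pip==20.2.4
pip install apache-airflow-providers-jenkins

# Option 2: stay on pip 20.3+ and opt back into the legacy resolver
# for this one installation.
pip install --use-deprecated legacy-resolver apache-airflow-providers-jenkins
```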
diff --git a/airflow/providers/jira/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/jira/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index 901f7bb32dd51..0000000000000 --- a/airflow/providers/jira/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,46 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed 
month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [ba2d6408e](https://github.com/apache/airflow/commit/ba2d6408e64f219e8f53a20a5a149e3d8109db31) | 2020-07-29 | Add typing for jira provider (#10005) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [f3ad5cf61](https://github.com/apache/airflow/commit/f3ad5cf6185b9d406d0fb0a4ecc0b5536f79217a) | 2020-02-03 | [AIRFLOW-4681] Make sensors module pylint compatible (#7309) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [4a21b6216](https://github.com/apache/airflow/commit/4a21b62161a8e14f0dbc06f292f4662832c52669) | 2019-12-13 | [AIRFLOW-5959][AIP-21] Move contrib/*/jira to providers (#6661) | diff --git a/airflow/providers/jira/README.md b/airflow/providers/jira/README.md deleted file mode 100644 index 7712d08caa9bf..0000000000000 --- a/airflow/providers/jira/README.md +++ /dev/null @@ -1,153 +0,0 @@ - - - -# Package apache-airflow-providers-jira - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Sensors](#sensors) - - [Moved sensors](#moved-sensors) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) - - [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for the `jira` provider. All classes for this provider package -are in the `airflow.providers.jira` python package. - - - -## Installation - -NOTE! - -In November 2020, a new version of PIP (20.3) was released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation, depending on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option -`--use-deprecated legacy-resolver` to your pip install command. - -You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-jira` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| JIRA | >1.0.7 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `jira` provider -are in the `airflow.providers.jira` package. 
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.jira` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.jira.JiraOperator](https://github.com/apache/airflow/blob/master/airflow/providers/jira/operators/jira.py) | [contrib.operators.jira_operator.JiraOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/jira_operator.py) | - - -## Sensors - - - -### Moved sensors - -| Airflow 2.0 sensors: `airflow.providers.jira` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------| -| [sensors.jira.JiraSensor](https://github.com/apache/airflow/blob/master/airflow/providers/jira/sensors/jira.py) | [contrib.sensors.jira_sensor.JiraSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/jira_sensor.py) | -| [sensors.jira.JiraTicketSensor](https://github.com/apache/airflow/blob/master/airflow/providers/jira/sensors/jira.py) | [contrib.sensors.jira_sensor.JiraTicketSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/jira_sensor.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.jira` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------| -| [hooks.jira.JiraHook](https://github.com/apache/airflow/blob/master/airflow/providers/jira/hooks/jira.py) | [contrib.hooks.jira_hook.JiraHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/jira_hook.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| 
[008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator 
constructors (#10097) | -| [ba2d6408e](https://github.com/apache/airflow/commit/ba2d6408e64f219e8f53a20a5a149e3d8109db31) | 2020-07-29 | Add typing for jira provider (#10005) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [f3ad5cf61](https://github.com/apache/airflow/commit/f3ad5cf6185b9d406d0fb0a4ecc0b5536f79217a) | 2020-02-03 | [AIRFLOW-4681] Make sensors module pylint compatible (#7309) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [4a21b6216](https://github.com/apache/airflow/commit/4a21b62161a8e14f0dbc06f292f4662832c52669) | 2019-12-13 | [AIRFLOW-5959][AIP-21] Move contrib/*/jira to providers (#6661) | diff --git a/airflow/providers/microsoft/azure/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/microsoft/azure/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index 1dfaaaadfd419..0000000000000 --- a/airflow/providers/microsoft/azure/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,30 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 
| Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09 | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170) | -| [d99833c9b](https://github.com/apache/airflow/commit/d99833c9b5be9eafc0c7851343ee86b6c20aed40) | 2020-04-03 | [AIRFLOW-4529] Add support for Azure Batch Service (#8024) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [a83eb335e](https://github.com/apache/airflow/commit/a83eb335e58c6a15e96c517a1b492bc79c869ce8) | 2020-03-23 | Add call to Super call in microsoft providers (#7821) | -| [f0e242180](https://github.com/apache/airflow/commit/f0e24218077d4dff8015926d7826477bb0d07f88) | 2020-02-24 | [AIRFLOW-6896] AzureCosmosDBHook: Move DB call out of __init__ (#7520) | -| [4bec1cc48](https://github.com/apache/airflow/commit/4bec1cc489f5d19daf7450c75c3e8057c9709dbd) | 2020-02-24 | [AIRFLOW-6895] AzureFileShareHook: Move DB call out of __init__ (#7519) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [086e30724](https://github.com/apache/airflow/commit/086e307245015d97e89af9aa6c677d6fe817264c) | 2020-02-23 | [AIRFLOW-6890] AzureDataLakeHook: Move DB call out of __init__ (#7513) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [175a16046](https://github.com/apache/airflow/commit/175a1604638016b0a663711cc584496c2fdcd828) | 2020-02-19 | [AIRFLOW-6828] Stop using the zope library (#7448) | -| [1e0024301](https://github.com/apache/airflow/commit/1e00243014382d4cb7152ca7c5011b97cbd733b0) | 2020-02-10 | [AIRFLOW-5176] Add Azure Data Explorer (Kusto) operator (#5785) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| 
[83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [057f3ae3a](https://github.com/apache/airflow/commit/057f3ae3a4afedf6d462ecf58b01dd6304d3e135) | 2020-01-29 | [AIRFLOW-6670][depends on AIRFLOW-6669] Move contrib operators to providers package (#7286) | -| [290330ba6](https://github.com/apache/airflow/commit/290330ba60653686cc6f009d89a377f09f26f35a) | 2020-01-15 | [AIRFLOW-6552] Move Azure classes to providers.microsoft package (#7158) | diff --git a/airflow/providers/microsoft/azure/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/microsoft/azure/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 17fca7b03d415..0000000000000 --- a/airflow/providers/microsoft/azure/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,25 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:----------------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [5093245d6](https://github.com/apache/airflow/commit/5093245d6f77a370fbd2f9e3df35ac6acf46a1c4) | 2020-09-30 | Strict type coverage for Oracle and Yandex provider (#11198) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [f77a11d5b](https://github.com/apache/airflow/commit/f77a11d5b1e9d76b1d57c8a0d653b3ab28f33894) | 2020-09-13 | Add Secrets backend for Microsoft Azure Key Vault (#10898) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [2f552233f](https://github.com/apache/airflow/commit/2f552233f5c99b206c8f4c2088fcc0c05e7e26dc) | 2020-08-21 | Add AzureBaseHook (#9747) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| 
[0bf330ba8](https://github.com/apache/airflow/commit/0bf330ba8681c417fd5a10b3ba01c75600dc5f2e) | 2020-07-24 | Add get_blobs_list method to WasbHook (#9950) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [d3c76da95](https://github.com/apache/airflow/commit/d3c76da95250068161580036a86e26ee2790fa07) | 2020-07-12 | Improve type hinting to provider microsoft (#9774) | -| [23f80f34a](https://github.com/apache/airflow/commit/23f80f34adec86da24e4896168c53d213d01a7f6) | 2020-07-08 | Move gcs & wasb task handlers to their respective provider packages (#9714) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/microsoft/azure/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/microsoft/azure/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index a40a8a36d18a2..0000000000000 --- a/airflow/providers/microsoft/azure/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,20 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [6ce855af1](https://github.com/apache/airflow/commit/6ce855af118daeaa4c249669079ab9d9aad23945) | 2020-10-24 | Fix spelling (#11821) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [f8ff217e2](https://github.com/apache/airflow/commit/f8ff217e2f2152bbb9fc701ff4c0b6eb447ad65c) | 2020-10-18 | Fix incorrect typing and move config args out of extra connection config to operator args (#11635) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [686e0ee7d](https://github.com/apache/airflow/commit/686e0ee7dfb26224e2f91c9af6ef41d59e2f2e96) | 2020-10-11 | Fix incorrect typing, remove hardcoded argument values and improve code in AzureContainerInstancesOperator (#11408) | -| [d2754ef76](https://github.com/apache/airflow/commit/d2754ef76958f8df4dcb6974e2cd2c1edb17935e) | 2020-10-09 | Strict type check for Microsoft (#11359) | -| [832a7850f](https://github.com/apache/airflow/commit/832a7850f12a3a54767d59f1967a9541e0e33293) | 2020-10-08 | Add Azure Blob Storage to GCS transfer operator (#11321) | -| [5d007fd2f](https://github.com/apache/airflow/commit/5d007fd2ff7365229c3d85bc2bbb506ead00247e) | 2020-10-08 | Strict type check for azure hooks (#11342) | -| [b0fcf6755](https://github.com/apache/airflow/commit/b0fcf675595494b306800e1a516548dc0dc671f8) | 2020-10-07 | Add AzureFileShareToGCSOperator (#10991) | -| [c51016b0b](https://github.com/apache/airflow/commit/c51016b0b8e894f8d94c2de408c5fc9b472aba3b) | 2020-10-05 | Add LocalToAzureDataLakeStorageOperator (#10814) | -| [fd682fd70](https://github.com/apache/airflow/commit/fd682fd70a97a1f937786a1a136f0fa929c8fb80) | 2020-10-05 | fix job deletion (#11272) | -| 
[421061878](https://github.com/apache/airflow/commit/4210618789215dfe9cb2ab350f6477d3c6ce365e) | 2020-10-03 | Ensure target_dedicated_nodes or enable_auto_scale is set in AzureBatchOperator (#11251) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/microsoft/azure/BACKPORT_PROVIDER_CHANGES_2020.11.23.md b/airflow/providers/microsoft/azure/BACKPORT_PROVIDER_CHANGES_2020.11.23.md deleted file mode 100644 index 11f8d1bde725a..0000000000000 --- a/airflow/providers/microsoft/azure/BACKPORT_PROVIDER_CHANGES_2020.11.23.md +++ /dev/null @@ -1,19 +0,0 @@ - - -### Release 2020.11.23 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [19b7e4565](https://github.com/apache/airflow/commit/19b7e4565e6372d50ef0fbb5678a484a7afbdbee) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [dd2095f4a](https://github.com/apache/airflow/commit/dd2095f4a8b07c9b1a4c279a3578cd1e23b71a1b) | 2020-11-10 | Simplify string expressions & Use f-string (#12216) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [3ff7e0743](https://github.com/apache/airflow/commit/3ff7e0743a1156efe1d6aaf7b8f82136d0bba08f) | 2020-11-08 | azure key vault optional lookup (#12174) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | diff --git a/airflow/providers/microsoft/azure/BACKPORT_PROVIDER_README.md b/airflow/providers/microsoft/azure/BACKPORT_PROVIDER_README.md deleted file mode 100644 index 2c75b44b6c540..0000000000000 --- a/airflow/providers/microsoft/azure/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,296 +0,0 @@ - - - -# Package apache-airflow-backport-providers-microsoft-azure - -Release: 2020.11.23 - -**Table of 
contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Cross provider package dependencies](#cross-provider-package-dependencies) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [New operators](#new-operators) - - [Moved operators](#moved-operators) - - [Transfer operators](#transfer-operators) - - [New transfer operators](#new-transfer-operators) - - [Moved transfer operators](#moved-transfer-operators) - - [Sensors](#sensors) - - [New sensors](#new-sensors) - - [Moved sensors](#moved-sensors) - - [Hooks](#hooks) - - [New hooks](#new-hooks) - - [Moved hooks](#moved-hooks) - - [Secrets](#secrets) - - [New secrets](#new-secrets) -- [Releases](#releases) - - [Release 2020.11.23](#release-20201123) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport provider package for the `microsoft.azure` provider. All classes for this provider package -are in the `airflow.providers.microsoft.azure` Python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+, you need to upgrade Python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing Airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-microsoft-azure` - -## PIP requirements - -| PIP package | Version required | -|:-----------------------------|:-------------------| -| azure-batch | >=8.0.0 | -| azure-cosmos | >=3.0.1,<4 | -| azure-datalake-store | >=0.0.45 | -| azure-identity | >=1.3.1 | -| azure-keyvault | >=4.1.0 | -| azure-kusto-data | >=0.0.43,<0.1 | -| azure-mgmt-containerinstance | >=1.5.0,<2.0 | -| azure-mgmt-datalake-store | >=0.5.0 | -| azure-mgmt-resource | >=2.2.0 | -| azure-storage | >=0.34.0, <0.37.0 | -| azure-storage-blob | <12.0 | - -## Cross provider package dependencies - -These are dependencies that might be needed in order to use all the features of the package. -You need to install the specified backport provider packages in order to use them. - -You can install such cross-provider dependencies when installing from PyPI. For example: - -```bash -pip install apache-airflow-backport-providers-microsoft-azure[google] -``` - -| Dependent package | Extra | -|:-------------------------------------------------------------------------------------------------------------------|:--------| -| [apache-airflow-backport-providers-google](https://github.com/apache/airflow/tree/master/airflow/providers/google) | google | -| [apache-airflow-backport-providers-oracle](https://github.com/apache/airflow/tree/master/airflow/providers/oracle) | oracle | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `microsoft.azure` provider -are in the `airflow.providers.microsoft.azure` package.
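As a quick smoke test of this package layout (an illustrative sketch only; it assumes the package is installed as shown in the Installation section above, and uses the `hooks.wasb.WasbHook` path listed in the moved-hooks table below):

```bash
# Hypothetical check: the hook should import from the new provider path,
# not from the old airflow.contrib location.
python -c "from airflow.providers.microsoft.azure.hooks.wasb import WasbHook; print(WasbHook.__module__)"
```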
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - -### New operators - -| New Airflow 2.0 operators: `airflow.providers.microsoft.azure` package | -|:-----------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.adx.AzureDataExplorerQueryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/operators/adx.py) | -| [operators.azure_batch.AzureBatchOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/operators/azure_batch.py) | - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.microsoft.azure` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.adls_list.AzureDataLakeStorageListOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/operators/adls_list.py) | [contrib.operators.adls_list_operator.AzureDataLakeStorageListOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/adls_list_operator.py) | -| [operators.azure_container_instances.AzureContainerInstancesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/operators/azure_container_instances.py) | [contrib.operators.azure_container_instances_operator.AzureContainerInstancesOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/azure_container_instances_operator.py) | -| [operators.azure_cosmos.AzureCosmosInsertDocumentOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/operators/azure_cosmos.py) | [contrib.operators.azure_cosmos_operator.AzureCosmosInsertDocumentOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/azure_cosmos_operator.py) | -| [operators.wasb_delete_blob.WasbDeleteBlobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/operators/wasb_delete_blob.py) | [contrib.operators.wasb_delete_blob_operator.WasbDeleteBlobOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/wasb_delete_blob_operator.py) | - - -## Transfer operators - - -### New transfer operators - -| New Airflow 2.0 transfers: `airflow.providers.microsoft.azure` package | -|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [transfers.azure_blob_to_gcs.AzureBlobStorageToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/transfers/azure_blob_to_gcs.py) | -| [transfers.local_to_adls.LocalToAzureDataLakeStorageOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/transfers/local_to_adls.py) | - - -### Moved transfer operators - -| Airflow 2.0 transfers: `airflow.providers.microsoft.azure` 
package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [transfers.file_to_wasb.FileToWasbOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/transfers/file_to_wasb.py) | [contrib.operators.file_to_wasb.FileToWasbOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/file_to_wasb.py) | -| [transfers.oracle_to_azure_data_lake.OracleToAzureDataLakeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py) | [contrib.operators.oracle_to_azure_data_lake_transfer.OracleToAzureDataLakeOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/oracle_to_azure_data_lake_transfer.py) | - - -## Sensors - - -### New sensors - -| New Airflow 2.0 sensors: `airflow.providers.microsoft.azure` package | -|:----------------------------------------------------------------------------------------------------------------------------------------------------------| -| [sensors.azure_cosmos.AzureCosmosDocumentSensor](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/sensors/azure_cosmos.py) | - - -### Moved sensors - -| Airflow 2.0 sensors: `airflow.providers.microsoft.azure` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:---------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------| -| [sensors.wasb.WasbBlobSensor](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/sensors/wasb.py) | [contrib.sensors.wasb_sensor.WasbBlobSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/wasb_sensor.py) | -| [sensors.wasb.WasbPrefixSensor](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/sensors/wasb.py) | [contrib.sensors.wasb_sensor.WasbPrefixSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/wasb_sensor.py) | - - -## Hooks - - -### New hooks - -| New Airflow 2.0 hooks: `airflow.providers.microsoft.azure` package | -|:----------------------------------------------------------------------------------------------------------------------------------------------------| -| [hooks.adx.AzureDataExplorerHook](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/hooks/adx.py) | -| [hooks.azure_batch.AzureBatchHook](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/hooks/azure_batch.py) | -| [hooks.azure_data_lake.AzureDataLakeHook](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/hooks/azure_data_lake.py) | -| [hooks.base_azure.AzureBaseHook](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/hooks/base_azure.py) | - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.microsoft.azure` package | Airflow 1.10.* previous location (usually 
`airflow.contrib`) | -|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [hooks.azure_container_instance.AzureContainerInstanceHook](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/hooks/azure_container_instance.py) | [contrib.hooks.azure_container_instance_hook.AzureContainerInstanceHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/azure_container_instance_hook.py) | -| [hooks.azure_container_registry.AzureContainerRegistryHook](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/hooks/azure_container_registry.py) | [contrib.hooks.azure_container_registry_hook.AzureContainerRegistryHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/azure_container_registry_hook.py) | -| [hooks.azure_container_volume.AzureContainerVolumeHook](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/hooks/azure_container_volume.py) | [contrib.hooks.azure_container_volume_hook.AzureContainerVolumeHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/azure_container_volume_hook.py) | -| [hooks.azure_cosmos.AzureCosmosDBHook](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/hooks/azure_cosmos.py) | [contrib.hooks.azure_cosmos_hook.AzureCosmosDBHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/azure_cosmos_hook.py) | -| [hooks.azure_fileshare.AzureFileShareHook](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/hooks/azure_fileshare.py) | [contrib.hooks.azure_fileshare_hook.AzureFileShareHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/azure_fileshare_hook.py) | -| [hooks.wasb.WasbHook](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/hooks/wasb.py) | [contrib.hooks.wasb_hook.WasbHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/wasb_hook.py) | - - -## Secrets - - -### New secrets - -| New Airflow 2.0 secrets: `airflow.providers.microsoft.azure` package | -|:-----------------------------------------------------------------------------------------------------------------------------------------------------------| -| [secrets.azure_key_vault.AzureKeyVaultBackend](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/secrets/azure_key_vault.py) | - - - - -## Releases - -### Release 2020.11.23 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [19b7e4565](https://github.com/apache/airflow/commit/19b7e4565e6372d50ef0fbb5678a484a7afbdbee) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) 
| -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [dd2095f4a](https://github.com/apache/airflow/commit/dd2095f4a8b07c9b1a4c279a3578cd1e23b71a1b) | 2020-11-10 | Simplify string expressions & Use f-string (#12216) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [3ff7e0743](https://github.com/apache/airflow/commit/3ff7e0743a1156efe1d6aaf7b8f82136d0bba08f) | 2020-11-08 | azure key vault optional lookup (#12174) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [6ce855af1](https://github.com/apache/airflow/commit/6ce855af118daeaa4c249669079ab9d9aad23945) | 2020-10-24 | Fix spelling (#11821) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [f8ff217e2](https://github.com/apache/airflow/commit/f8ff217e2f2152bbb9fc701ff4c0b6eb447ad65c) | 2020-10-18 | Fix incorrect typing and move config args out of extra connection config to operator args (#11635) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [686e0ee7d](https://github.com/apache/airflow/commit/686e0ee7dfb26224e2f91c9af6ef41d59e2f2e96) | 2020-10-11 | Fix incorrect typing, remove hardcoded argument values and improve code in AzureContainerInstancesOperator (#11408) | -| [d2754ef76](https://github.com/apache/airflow/commit/d2754ef76958f8df4dcb6974e2cd2c1edb17935e) | 2020-10-09 | Strict type check for Microsoft (#11359) | -| [832a7850f](https://github.com/apache/airflow/commit/832a7850f12a3a54767d59f1967a9541e0e33293) | 2020-10-08 | Add Azure Blob Storage to GCS transfer operator (#11321) | -| [5d007fd2f](https://github.com/apache/airflow/commit/5d007fd2ff7365229c3d85bc2bbb506ead00247e) | 2020-10-08 | Strict type check for azure hooks (#11342) | -| 
[b0fcf6755](https://github.com/apache/airflow/commit/b0fcf675595494b306800e1a516548dc0dc671f8) | 2020-10-07 | Add AzureFileShareToGCSOperator (#10991) | -| [c51016b0b](https://github.com/apache/airflow/commit/c51016b0b8e894f8d94c2de408c5fc9b472aba3b) | 2020-10-05 | Add LocalToAzureDataLakeStorageOperator (#10814) | -| [fd682fd70](https://github.com/apache/airflow/commit/fd682fd70a97a1f937786a1a136f0fa929c8fb80) | 2020-10-05 | fix job deletion (#11272) | -| [421061878](https://github.com/apache/airflow/commit/4210618789215dfe9cb2ab350f6477d3c6ce365e) | 2020-10-03 | Ensure target_dedicated_nodes or enable_auto_scale is set in AzureBatchOperator (#11251) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:----------------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [5093245d6](https://github.com/apache/airflow/commit/5093245d6f77a370fbd2f9e3df35ac6acf46a1c4) | 2020-09-30 | Strict type coverage for Oracle and Yandex provider (#11198) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [f77a11d5b](https://github.com/apache/airflow/commit/f77a11d5b1e9d76b1d57c8a0d653b3ab28f33894) | 2020-09-13 | Add Secrets backend for Microsoft Azure Key Vault (#10898) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [2f552233f](https://github.com/apache/airflow/commit/2f552233f5c99b206c8f4c2088fcc0c05e7e26dc) | 2020-08-21 | Add AzureBaseHook (#9747) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [0bf330ba8](https://github.com/apache/airflow/commit/0bf330ba8681c417fd5a10b3ba01c75600dc5f2e) | 2020-07-24 | Add get_blobs_list 
method to WasbHook (#9950) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [d3c76da95](https://github.com/apache/airflow/commit/d3c76da95250068161580036a86e26ee2790fa07) | 2020-07-12 | Improve type hinting to provider microsoft (#9774) | -| [23f80f34a](https://github.com/apache/airflow/commit/23f80f34adec86da24e4896168c53d213d01a7f6) | 2020-07-08 | Move gcs & wasb task handlers to their respective provider packages (#9714) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09 | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170) | -| [d99833c9b](https://github.com/apache/airflow/commit/d99833c9b5be9eafc0c7851343ee86b6c20aed40) | 2020-04-03 | [AIRFLOW-4529] Add support for Azure Batch Service (#8024) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [a83eb335e](https://github.com/apache/airflow/commit/a83eb335e58c6a15e96c517a1b492bc79c869ce8) | 2020-03-23 | Add call to Super call in microsoft providers (#7821) | -| [f0e242180](https://github.com/apache/airflow/commit/f0e24218077d4dff8015926d7826477bb0d07f88) | 2020-02-24 | [AIRFLOW-6896] AzureCosmosDBHook: Move DB call out of __init__ (#7520) | -| [4bec1cc48](https://github.com/apache/airflow/commit/4bec1cc489f5d19daf7450c75c3e8057c9709dbd) | 2020-02-24 | [AIRFLOW-6895] AzureFileShareHook: Move DB call out of __init__ (#7519) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [086e30724](https://github.com/apache/airflow/commit/086e307245015d97e89af9aa6c677d6fe817264c) | 2020-02-23 | [AIRFLOW-6890] AzureDataLakeHook: Move DB call out of __init__ (#7513) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [175a16046](https://github.com/apache/airflow/commit/175a1604638016b0a663711cc584496c2fdcd828) | 2020-02-19 | [AIRFLOW-6828] Stop using the zope library (#7448) | -| [1e0024301](https://github.com/apache/airflow/commit/1e00243014382d4cb7152ca7c5011b97cbd733b0) | 2020-02-10 | [AIRFLOW-5176] Add Azure Data Explorer (Kusto) operator (#5785) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [057f3ae3a](https://github.com/apache/airflow/commit/057f3ae3a4afedf6d462ecf58b01dd6304d3e135) | 2020-01-29 | [AIRFLOW-6670][depends on AIRFLOW-6669] Move contrib operators to providers package (#7286) | -| [290330ba6](https://github.com/apache/airflow/commit/290330ba60653686cc6f009d89a377f09f26f35a) | 2020-01-15 | [AIRFLOW-6552] Move Azure classes to providers.microsoft package (#7158) | diff --git 
a/airflow/providers/microsoft/azure/CHANGELOG.rst b/airflow/providers/microsoft/azure/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/microsoft/azure/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. diff --git a/airflow/providers/microsoft/azure/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/microsoft/azure/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index b921e4359e7d8..0000000000000 --- a/airflow/providers/microsoft/azure/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,84 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [bd90136aa](https://github.com/apache/airflow/commit/bd90136aaf5035e3234fe545b79a3e4aad21efe2) | 2020-11-30 | Move operator guides to provider documentation packages (#12681) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [543d88b3a](https://github.com/apache/airflow/commit/543d88b3a1ec7f0a41af390273868d9aed4edb7b) | 2020-11-28 | Add example dag and system tests for azure wasb and fileshare (#12673) | -| [6b3c6add9](https://github.com/apache/airflow/commit/6b3c6add9ea245b43ee367491bf9193d59bd248c) | 2020-11-27 | Update setup.py to get non-conflicting set of dependencies (#12636) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [7ca0b6f12](https://github.com/apache/airflow/commit/7ca0b6f121c9cec6e25de130f86a56d7c7fbe38c) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) (#12438) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| 
[6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [dd2095f4a](https://github.com/apache/airflow/commit/dd2095f4a8b07c9b1a4c279a3578cd1e23b71a1b) | 2020-11-10 | Simplify string expressions & Use f-string (#12216) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [3ff7e0743](https://github.com/apache/airflow/commit/3ff7e0743a1156efe1d6aaf7b8f82136d0bba08f) | 2020-11-08 | azure key vault optional lookup (#12174) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [6ce855af1](https://github.com/apache/airflow/commit/6ce855af118daeaa4c249669079ab9d9aad23945) | 2020-10-24 | Fix spelling (#11821) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [f8ff217e2](https://github.com/apache/airflow/commit/f8ff217e2f2152bbb9fc701ff4c0b6eb447ad65c) | 2020-10-18 | Fix incorrect typing and move config args out of extra connection config to operator args (#11635) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [686e0ee7d](https://github.com/apache/airflow/commit/686e0ee7dfb26224e2f91c9af6ef41d59e2f2e96) | 2020-10-11 | Fix incorrect typing, remove hardcoded argument values and improve code in AzureContainerInstancesOperator (#11408) | -| [d2754ef76](https://github.com/apache/airflow/commit/d2754ef76958f8df4dcb6974e2cd2c1edb17935e) | 2020-10-09 | Strict type check for Microsoft (#11359) | -| [832a7850f](https://github.com/apache/airflow/commit/832a7850f12a3a54767d59f1967a9541e0e33293) | 2020-10-08 | Add Azure Blob Storage to GCS transfer operator (#11321) | -| [5d007fd2f](https://github.com/apache/airflow/commit/5d007fd2ff7365229c3d85bc2bbb506ead00247e) | 2020-10-08 | Strict type check for azure hooks (#11342) | -| [b0fcf6755](https://github.com/apache/airflow/commit/b0fcf675595494b306800e1a516548dc0dc671f8) | 2020-10-07 | Add AzureFileShareToGCSOperator (#10991) | -| 
[c51016b0b](https://github.com/apache/airflow/commit/c51016b0b8e894f8d94c2de408c5fc9b472aba3b) | 2020-10-05 | Add LocalToAzureDataLakeStorageOperator (#10814) | -| [fd682fd70](https://github.com/apache/airflow/commit/fd682fd70a97a1f937786a1a136f0fa929c8fb80) | 2020-10-05 | fix job deletion (#11272) | -| [421061878](https://github.com/apache/airflow/commit/4210618789215dfe9cb2ab350f6477d3c6ce365e) | 2020-10-03 | Ensure target_dedicated_nodes or enable_auto_scale is set in AzureBatchOperator (#11251) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [5093245d6](https://github.com/apache/airflow/commit/5093245d6f77a370fbd2f9e3df35ac6acf46a1c4) | 2020-09-30 | Strict type coverage for Oracle and Yandex provider (#11198) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [f77a11d5b](https://github.com/apache/airflow/commit/f77a11d5b1e9d76b1d57c8a0d653b3ab28f33894) | 2020-09-13 | Add Secrets backend for Microsoft Azure Key Vault (#10898) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [2f552233f](https://github.com/apache/airflow/commit/2f552233f5c99b206c8f4c2088fcc0c05e7e26dc) | 2020-08-21 | Add AzureBaseHook (#9747) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [0bf330ba8](https://github.com/apache/airflow/commit/0bf330ba8681c417fd5a10b3ba01c75600dc5f2e) | 2020-07-24 | Add get_blobs_list method to WasbHook (#9950) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [d3c76da95](https://github.com/apache/airflow/commit/d3c76da95250068161580036a86e26ee2790fa07) | 2020-07-12 | Improve type hinting to provider microsoft (#9774) | -| 
[23f80f34a](https://github.com/apache/airflow/commit/23f80f34adec86da24e4896168c53d213d01a7f6) | 2020-07-08 | Move gcs & wasb task handlers to their respective provider packages (#9714) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09 | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170) | -| [d99833c9b](https://github.com/apache/airflow/commit/d99833c9b5be9eafc0c7851343ee86b6c20aed40) | 2020-04-03 | [AIRFLOW-4529] Add support for Azure Batch Service (#8024) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [a83eb335e](https://github.com/apache/airflow/commit/a83eb335e58c6a15e96c517a1b492bc79c869ce8) | 2020-03-23 | Add call to Super call in microsoft providers (#7821) | -| [f0e242180](https://github.com/apache/airflow/commit/f0e24218077d4dff8015926d7826477bb0d07f88) | 2020-02-24 | [AIRFLOW-6896] AzureCosmosDBHook: Move DB call out of __init__ (#7520) | -| [4bec1cc48](https://github.com/apache/airflow/commit/4bec1cc489f5d19daf7450c75c3e8057c9709dbd) | 2020-02-24 | [AIRFLOW-6895] AzureFileShareHook: Move DB call out of __init__ (#7519) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [086e30724](https://github.com/apache/airflow/commit/086e307245015d97e89af9aa6c677d6fe817264c) | 2020-02-23 | [AIRFLOW-6890] AzureDataLakeHook: Move DB call out of __init__ (#7513) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit 
imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [175a16046](https://github.com/apache/airflow/commit/175a1604638016b0a663711cc584496c2fdcd828) | 2020-02-19 | [AIRFLOW-6828] Stop using the zope library (#7448) | -| [1e0024301](https://github.com/apache/airflow/commit/1e00243014382d4cb7152ca7c5011b97cbd733b0) | 2020-02-10 | [AIRFLOW-5176] Add Azure Data Explorer (Kusto) operator (#5785) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [057f3ae3a](https://github.com/apache/airflow/commit/057f3ae3a4afedf6d462ecf58b01dd6304d3e135) | 2020-01-29 | [AIRFLOW-6670][depends on AIRFLOW-6669] Move contrib operators to providers package (#7286) | -| [290330ba6](https://github.com/apache/airflow/commit/290330ba60653686cc6f009d89a377f09f26f35a) | 2020-01-15 | [AIRFLOW-6552] Move Azure classes to providers.microsoft package (#7158) | diff --git a/airflow/providers/microsoft/azure/README.md b/airflow/providers/microsoft/azure/README.md deleted file mode 100644 index fd9ac6a900d94..0000000000000 --- a/airflow/providers/microsoft/azure/README.md +++ /dev/null @@ -1,285 +0,0 @@ - - - -# Package apache-airflow-providers-microsoft-azure - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Cross provider package dependencies](#cross-provider-package-dependencies) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [New operators](#new-operators) - - [Moved operators](#moved-operators) - - [Transfer operators](#transfer-operators) - - [New transfer operators](#new-transfer-operators) - - [Moved transfer operators](#moved-transfer-operators) - - [Sensors](#sensors) - - [New sensors](#new-sensors) - - [Moved sensors](#moved-sensors) - - [Hooks](#hooks) - - [New hooks](#new-hooks) - - [Moved hooks](#moved-hooks) - - [Secrets](#secrets) - - [New secrets](#new-secrets) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for the `microsoft.azure` provider. All classes for this provider package -are in the `airflow.providers.microsoft.azure` Python package. - - - -## Installation - -NOTE! - -In November 2020, a new version of pip (20.3) was released with a new 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors during installation, depending on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -(`pip install --upgrade pip==20.2.4`) or, if you use pip 20.3, add the option -`--use-deprecated legacy-resolver` to your pip install command.
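For example, the two workarounds described in the note above could look as follows (an illustrative sketch only, using the package name from the installation command below):

```bash
# Workaround 1: downgrade pip before installing the provider package
pip install --upgrade pip==20.2.4
pip install apache-airflow-providers-microsoft-azure

# Workaround 2: stay on pip 20.3 and opt into the legacy resolver
pip install --use-deprecated legacy-resolver apache-airflow-providers-microsoft-azure
```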
- -You can install this package on top of an existing Airflow 2.* installation via -`pip install apache-airflow-providers-microsoft-azure` - -## PIP requirements - -| PIP package | Version required | -|:-----------------------------|:-------------------| -| azure-batch | >=8.0.0 | -| azure-cosmos | >=3.0.1,<4 | -| azure-datalake-store | >=0.0.45 | -| azure-identity | >=1.3.1 | -| azure-keyvault | >=4.1.0 | -| azure-kusto-data | >=0.0.43,<0.1 | -| azure-mgmt-containerinstance | >=1.5.0,<2.0 | -| azure-mgmt-datalake-store | >=0.5.0 | -| azure-mgmt-resource | >=2.2.0 | -| azure-storage | >=0.34.0, <0.37.0 | - -## Cross provider package dependencies - -These are dependencies that might be needed in order to use all the features of the package. -You need to install the specified provider packages in order to use them. - -You can install such cross-provider dependencies when installing from PyPI. For example: - -```bash -pip install apache-airflow-providers-microsoft-azure[google] -``` - -| Dependent package | Extra | -|:--------------------------------------------------------------------------------------------|:--------| -| [apache-airflow-providers-google](https://pypi.org/project/apache-airflow-providers-google) | google | -| [apache-airflow-providers-oracle](https://pypi.org/project/apache-airflow-providers-oracle) | oracle | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `microsoft.azure` provider -are in the `airflow.providers.microsoft.azure` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - -### New operators - -| New Airflow 2.0 operators: `airflow.providers.microsoft.azure` package | -|:-----------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.adx.AzureDataExplorerQueryOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/operators/adx.py) | -| [operators.azure_batch.AzureBatchOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/operators/azure_batch.py) | - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.microsoft.azure` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.adls_list.AzureDataLakeStorageListOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/operators/adls_list.py) | [contrib.operators.adls_list_operator.AzureDataLakeStorageListOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/adls_list_operator.py) | -| [operators.azure_container_instances.AzureContainerInstancesOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/operators/azure_container_instances.py) |
[contrib.operators.azure_container_instances_operator.AzureContainerInstancesOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/azure_container_instances_operator.py) | -| [operators.azure_cosmos.AzureCosmosInsertDocumentOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/operators/azure_cosmos.py) | [contrib.operators.azure_cosmos_operator.AzureCosmosInsertDocumentOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/azure_cosmos_operator.py) | -| [operators.wasb_delete_blob.WasbDeleteBlobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/operators/wasb_delete_blob.py) | [contrib.operators.wasb_delete_blob_operator.WasbDeleteBlobOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/wasb_delete_blob_operator.py) | - - -## Transfer operators - - -### New transfer operators - -| New Airflow 2.0 transfers: `airflow.providers.microsoft.azure` package | -|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [transfers.azure_blob_to_gcs.AzureBlobStorageToGCSOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/transfers/azure_blob_to_gcs.py) | -| [transfers.local_to_adls.LocalToAzureDataLakeStorageOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/transfers/local_to_adls.py) | - - -### Moved transfer operators - -| Airflow 2.0 transfers: `airflow.providers.microsoft.azure` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [transfers.file_to_wasb.FileToWasbOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/transfers/file_to_wasb.py) | [contrib.operators.file_to_wasb.FileToWasbOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/file_to_wasb.py) | -| [transfers.oracle_to_azure_data_lake.OracleToAzureDataLakeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/transfers/oracle_to_azure_data_lake.py) | [contrib.operators.oracle_to_azure_data_lake_transfer.OracleToAzureDataLakeOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/oracle_to_azure_data_lake_transfer.py) | - - -## Sensors - - -### New sensors - -| New Airflow 2.0 sensors: `airflow.providers.microsoft.azure` package | -|:----------------------------------------------------------------------------------------------------------------------------------------------------------| -| [sensors.azure_cosmos.AzureCosmosDocumentSensor](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/sensors/azure_cosmos.py) | - - -### Moved sensors - -| Airflow 2.0 sensors: `airflow.providers.microsoft.azure` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | 
-|:---------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------| -| [sensors.wasb.WasbBlobSensor](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/sensors/wasb.py) | [contrib.sensors.wasb_sensor.WasbBlobSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/wasb_sensor.py) | -| [sensors.wasb.WasbPrefixSensor](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/sensors/wasb.py) | [contrib.sensors.wasb_sensor.WasbPrefixSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/wasb_sensor.py) | - - -## Hooks - - -### New hooks - -| New Airflow 2.0 hooks: `airflow.providers.microsoft.azure` package | -|:----------------------------------------------------------------------------------------------------------------------------------------------------| -| [hooks.adx.AzureDataExplorerHook](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/hooks/adx.py) | -| [hooks.azure_batch.AzureBatchHook](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/hooks/azure_batch.py) | -| [hooks.azure_data_lake.AzureDataLakeHook](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/hooks/azure_data_lake.py) | -| [hooks.base_azure.AzureBaseHook](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/hooks/base_azure.py) | - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.microsoft.azure` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [hooks.azure_container_instance.AzureContainerInstanceHook](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/hooks/azure_container_instance.py) | [contrib.hooks.azure_container_instance_hook.AzureContainerInstanceHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/azure_container_instance_hook.py) | -| [hooks.azure_container_registry.AzureContainerRegistryHook](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/hooks/azure_container_registry.py) | [contrib.hooks.azure_container_registry_hook.AzureContainerRegistryHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/azure_container_registry_hook.py) | -| [hooks.azure_container_volume.AzureContainerVolumeHook](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/hooks/azure_container_volume.py) | [contrib.hooks.azure_container_volume_hook.AzureContainerVolumeHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/azure_container_volume_hook.py) | -| [hooks.azure_cosmos.AzureCosmosDBHook](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/hooks/azure_cosmos.py) | [contrib.hooks.azure_cosmos_hook.AzureCosmosDBHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/azure_cosmos_hook.py) | -| 
[hooks.azure_fileshare.AzureFileShareHook](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/hooks/azure_fileshare.py) | [contrib.hooks.azure_fileshare_hook.AzureFileShareHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/azure_fileshare_hook.py) | -| [hooks.wasb.WasbHook](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/hooks/wasb.py) | [contrib.hooks.wasb_hook.WasbHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/wasb_hook.py) | - - -## Secrets - - -### New secrets - -| New Airflow 2.0 secrets: `airflow.providers.microsoft.azure` package | -|:-----------------------------------------------------------------------------------------------------------------------------------------------------------| -| [secrets.azure_key_vault.AzureKeyVaultBackend](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/azure/secrets/azure_key_vault.py) | - - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [bd90136aa](https://github.com/apache/airflow/commit/bd90136aaf5035e3234fe545b79a3e4aad21efe2) | 2020-11-30 | Move operator guides to provider documentation packages (#12681) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [543d88b3a](https://github.com/apache/airflow/commit/543d88b3a1ec7f0a41af390273868d9aed4edb7b) | 2020-11-28 | Add example dag and system tests for azure wasb and fileshare (#12673) | -| [6b3c6add9](https://github.com/apache/airflow/commit/6b3c6add9ea245b43ee367491bf9193d59bd248c) | 2020-11-27 | Update setup.py to get non-conflicting set of dependencies (#12636) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [7ca0b6f12](https://github.com/apache/airflow/commit/7ca0b6f121c9cec6e25de130f86a56d7c7fbe38c) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) (#12438) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| 
[dd2095f4a](https://github.com/apache/airflow/commit/dd2095f4a8b07c9b1a4c279a3578cd1e23b71a1b) | 2020-11-10 | Simplify string expressions & Use f-string (#12216) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [3ff7e0743](https://github.com/apache/airflow/commit/3ff7e0743a1156efe1d6aaf7b8f82136d0bba08f) | 2020-11-08 | azure key vault optional lookup (#12174) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [6ce855af1](https://github.com/apache/airflow/commit/6ce855af118daeaa4c249669079ab9d9aad23945) | 2020-10-24 | Fix spelling (#11821) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [f8ff217e2](https://github.com/apache/airflow/commit/f8ff217e2f2152bbb9fc701ff4c0b6eb447ad65c) | 2020-10-18 | Fix incorrect typing and move config args out of extra connection config to operator args (#11635) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [686e0ee7d](https://github.com/apache/airflow/commit/686e0ee7dfb26224e2f91c9af6ef41d59e2f2e96) | 2020-10-11 | Fix incorrect typing, remove hardcoded argument values and improve code in AzureContainerInstancesOperator (#11408) | -| [d2754ef76](https://github.com/apache/airflow/commit/d2754ef76958f8df4dcb6974e2cd2c1edb17935e) | 2020-10-09 | Strict type check for Microsoft (#11359) | -| [832a7850f](https://github.com/apache/airflow/commit/832a7850f12a3a54767d59f1967a9541e0e33293) | 2020-10-08 | Add Azure Blob Storage to GCS transfer operator (#11321) | -| [5d007fd2f](https://github.com/apache/airflow/commit/5d007fd2ff7365229c3d85bc2bbb506ead00247e) | 2020-10-08 | Strict type check for azure hooks (#11342) | -| [b0fcf6755](https://github.com/apache/airflow/commit/b0fcf675595494b306800e1a516548dc0dc671f8) | 2020-10-07 | Add AzureFileShareToGCSOperator (#10991) | -| [c51016b0b](https://github.com/apache/airflow/commit/c51016b0b8e894f8d94c2de408c5fc9b472aba3b) | 2020-10-05 | Add LocalToAzureDataLakeStorageOperator (#10814) | -| [fd682fd70](https://github.com/apache/airflow/commit/fd682fd70a97a1f937786a1a136f0fa929c8fb80) | 2020-10-05 | fix job deletion (#11272) | -| 
[421061878](https://github.com/apache/airflow/commit/4210618789215dfe9cb2ab350f6477d3c6ce365e) | 2020-10-03 | Ensure target_dedicated_nodes or enable_auto_scale is set in AzureBatchOperator (#11251) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [5093245d6](https://github.com/apache/airflow/commit/5093245d6f77a370fbd2f9e3df35ac6acf46a1c4) | 2020-09-30 | Strict type coverage for Oracle and Yandex provider (#11198) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [f77a11d5b](https://github.com/apache/airflow/commit/f77a11d5b1e9d76b1d57c8a0d653b3ab28f33894) | 2020-09-13 | Add Secrets backend for Microsoft Azure Key Vault (#10898) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [2f552233f](https://github.com/apache/airflow/commit/2f552233f5c99b206c8f4c2088fcc0c05e7e26dc) | 2020-08-21 | Add AzureBaseHook (#9747) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [0bf330ba8](https://github.com/apache/airflow/commit/0bf330ba8681c417fd5a10b3ba01c75600dc5f2e) | 2020-07-24 | Add get_blobs_list method to WasbHook (#9950) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [d3c76da95](https://github.com/apache/airflow/commit/d3c76da95250068161580036a86e26ee2790fa07) | 2020-07-12 | Improve type hinting to provider microsoft (#9774) | -| [23f80f34a](https://github.com/apache/airflow/commit/23f80f34adec86da24e4896168c53d213d01a7f6) | 2020-07-08 | Move gcs & wasb task handlers to their respective provider packages (#9714) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release 
(#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09 | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170) | -| [d99833c9b](https://github.com/apache/airflow/commit/d99833c9b5be9eafc0c7851343ee86b6c20aed40) | 2020-04-03 | [AIRFLOW-4529] Add support for Azure Batch Service (#8024) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [a83eb335e](https://github.com/apache/airflow/commit/a83eb335e58c6a15e96c517a1b492bc79c869ce8) | 2020-03-23 | Add call to Super call in microsoft providers (#7821) | -| [f0e242180](https://github.com/apache/airflow/commit/f0e24218077d4dff8015926d7826477bb0d07f88) | 2020-02-24 | [AIRFLOW-6896] AzureCosmosDBHook: Move DB call out of __init__ (#7520) | -| [4bec1cc48](https://github.com/apache/airflow/commit/4bec1cc489f5d19daf7450c75c3e8057c9709dbd) | 2020-02-24 | [AIRFLOW-6895] AzureFileShareHook: Move DB call out of __init__ (#7519) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [086e30724](https://github.com/apache/airflow/commit/086e307245015d97e89af9aa6c677d6fe817264c) | 2020-02-23 | [AIRFLOW-6890] AzureDataLakeHook: Move DB call out of __init__ (#7513) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [175a16046](https://github.com/apache/airflow/commit/175a1604638016b0a663711cc584496c2fdcd828) | 2020-02-19 | [AIRFLOW-6828] Stop using the zope library (#7448) | -| [1e0024301](https://github.com/apache/airflow/commit/1e00243014382d4cb7152ca7c5011b97cbd733b0) | 
2020-02-10 | [AIRFLOW-5176] Add Azure Data Explorer (Kusto) operator (#5785) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [057f3ae3a](https://github.com/apache/airflow/commit/057f3ae3a4afedf6d462ecf58b01dd6304d3e135) | 2020-01-29 | [AIRFLOW-6670][depends on AIRFLOW-6669] Move contrib operators to providers package (#7286) | -| [290330ba6](https://github.com/apache/airflow/commit/290330ba60653686cc6f009d89a377f09f26f35a) | 2020-01-15 | [AIRFLOW-6552] Move Azure classes to providers.microsoft package (#7158) | diff --git a/airflow/providers/microsoft/azure/hooks/azure_data_factory.py b/airflow/providers/microsoft/azure/hooks/azure_data_factory.py new file mode 100644 index 0000000000000..d6c686be83611 --- /dev/null +++ b/airflow/providers/microsoft/azure/hooks/azure_data_factory.py @@ -0,0 +1,716 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import inspect +from functools import wraps +from typing import Any, Callable, Optional + +from azure.mgmt.datafactory import DataFactoryManagementClient +from azure.mgmt.datafactory.models import ( + CreateRunResponse, + Dataset, + DatasetResource, + Factory, + LinkedService, + LinkedServiceResource, + PipelineResource, + PipelineRun, + Trigger, + TriggerResource, +) +from msrestazure.azure_operation import AzureOperationPoller + +from airflow.exceptions import AirflowException +from airflow.providers.microsoft.azure.hooks.base_azure import AzureBaseHook + + +def provide_targeted_factory(func: Callable) -> Callable: + """ + Provide the targeted factory to the decorated function in case it isn't specified. + + If ``resource_group_name`` or ``factory_name`` is not provided it defaults to the value specified in + the connection extras. 
+ """ + signature = inspect.signature(func) + + @wraps(func) + def wrapper(*args, **kwargs) -> Callable: + bound_args = signature.bind(*args, **kwargs) + + def bind_argument(arg, default_key): + if arg not in bound_args.arguments: + self = args[0] + conn = self.get_connection(self.conn_id) + default_value = conn.extra_dejson.get(default_key) + + if not default_value: + raise AirflowException("Could not determine the targeted data factory.") + + bound_args.arguments[arg] = conn.extra_dejson[default_key] + + bind_argument("resource_group_name", "resourceGroup") + bind_argument("factory_name", "factory") + + return func(*bound_args.args, **bound_args.kwargs) + + return wrapper + + +class AzureDataFactoryHook(AzureBaseHook): # pylint: disable=too-many-public-methods + """ + A hook to interact with Azure Data Factory. + + :param conn_id: The Azure Data Factory connection id. + """ + + def __init__(self, conn_id: str = "azure_data_factory_default"): + super().__init__(sdk_client=DataFactoryManagementClient, conn_id=conn_id) + self._conn: DataFactoryManagementClient = None + + def get_conn(self) -> DataFactoryManagementClient: + if not self._conn: + self._conn = super().get_conn() + + return self._conn + + @provide_targeted_factory + def get_factory( + self, resource_group_name: Optional[str] = None, factory_name: Optional[str] = None, **config: Any + ) -> Factory: + """ + Get the factory. + + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :return: The factory. + """ + return self.get_conn().factories.get(resource_group_name, factory_name, **config) + + def _factory_exists(self, resource_group_name, factory_name) -> bool: + """Return whether or not the factory already exists.""" + factories = { + factory.name for factory in self.get_conn().factories.list_by_resource_group(resource_group_name) + } + + return factory_name in factories + + @provide_targeted_factory + def update_factory( + self, + factory: Factory, + resource_group_name: Optional[str] = None, + factory_name: Optional[str] = None, + **config: Any, + ) -> Factory: + """ + Update the factory. + + :param factory: The factory resource definition. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :raise AirflowException: If the factory does not exist. + :return: The factory. + """ + if not self._factory_exists(resource_group_name, factory): + raise AirflowException(f"Factory {factory!r} does not exist.") + + return self.get_conn().factories.create_or_update( + resource_group_name, factory_name, factory, **config + ) + + @provide_targeted_factory + def create_factory( + self, + factory: Factory, + resource_group_name: Optional[str] = None, + factory_name: Optional[str] = None, + **config: Any, + ) -> Factory: + """ + Create the factory. + + :param factory: The factory resource definition. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :raise AirflowException: If the factory already exists. + :return: The factory. 
+ """ + if self._factory_exists(resource_group_name, factory): + raise AirflowException(f"Factory {factory!r} already exists.") + + return self.get_conn().factories.create_or_update( + resource_group_name, factory_name, factory, **config + ) + + @provide_targeted_factory + def delete_factory( + self, resource_group_name: Optional[str] = None, factory_name: Optional[str] = None, **config: Any + ) -> None: + """ + Delete the factory. + + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + """ + self.get_conn().factories.delete(resource_group_name, factory_name, **config) + + @provide_targeted_factory + def get_linked_service( + self, + linked_service_name: str, + resource_group_name: Optional[str] = None, + factory_name: Optional[str] = None, + **config: Any, + ) -> LinkedServiceResource: + """ + Get the linked service. + + :param linked_service_name: The linked service name. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :return: The linked service. + """ + return self.get_conn().linked_services.get( + resource_group_name, factory_name, linked_service_name, **config + ) + + def _linked_service_exists(self, resource_group_name, factory_name, linked_service_name) -> bool: + """Return whether or not the linked service already exists.""" + linked_services = { + linked_service.name + for linked_service in self.get_conn().linked_services.list_by_factory( + resource_group_name, factory_name + ) + } + + return linked_service_name in linked_services + + @provide_targeted_factory + def update_linked_service( + self, + linked_service_name: str, + linked_service: LinkedService, + resource_group_name: Optional[str] = None, + factory_name: Optional[str] = None, + **config: Any, + ) -> LinkedServiceResource: + """ + Update the linked service. + + :param linked_service_name: The linked service name. + :param linked_service: The linked service resource definition. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :raise AirflowException: If the linked service does not exist. + :return: The linked service. + """ + if not self._linked_service_exists(resource_group_name, factory_name, linked_service_name): + raise AirflowException(f"Linked service {linked_service_name!r} does not exist.") + + return self.get_conn().linked_services.create_or_update( + resource_group_name, factory_name, linked_service_name, linked_service, **config + ) + + @provide_targeted_factory + def create_linked_service( + self, + linked_service_name: str, + linked_service: LinkedService, + resource_group_name: Optional[str] = None, + factory_name: Optional[str] = None, + **config: Any, + ) -> LinkedServiceResource: + """ + Create the linked service. + + :param linked_service_name: The linked service name. + :param linked_service: The linked service resource definition. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :raise AirflowException: If the linked service already exists. + :return: The linked service. 
+ """ + if self._linked_service_exists(resource_group_name, factory_name, linked_service_name): + raise AirflowException(f"Linked service {linked_service_name!r} already exists.") + + return self.get_conn().linked_services.create_or_update( + resource_group_name, factory_name, linked_service_name, linked_service, **config + ) + + @provide_targeted_factory + def delete_linked_service( + self, + linked_service_name: str, + resource_group_name: Optional[str] = None, + factory_name: Optional[str] = None, + **config: Any, + ) -> None: + """ + Delete the linked service: + + :param linked_service_name: The linked service name. + :param resource_group_name: The linked service name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + """ + self.get_conn().linked_services.delete( + resource_group_name, factory_name, linked_service_name, **config + ) + + @provide_targeted_factory + def get_dataset( + self, + dataset_name: str, + resource_group_name: Optional[str] = None, + factory_name: Optional[str] = None, + **config: Any, + ) -> DatasetResource: + """ + Get the dataset. + + :param dataset_name: The dataset name. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :return: The dataset. + """ + return self.get_conn().datasets.get(resource_group_name, factory_name, dataset_name, **config) + + def _dataset_exists(self, resource_group_name, factory_name, dataset_name) -> bool: + """Return whether or not the dataset already exists.""" + datasets = { + dataset.name + for dataset in self.get_conn().datasets.list_by_factory(resource_group_name, factory_name) + } + + return dataset_name in datasets + + @provide_targeted_factory + def update_dataset( + self, + dataset_name: str, + dataset: Dataset, + resource_group_name: Optional[str] = None, + factory_name: Optional[str] = None, + **config: Any, + ) -> DatasetResource: + """ + Update the dataset. + + :param dataset_name: The dataset name. + :param dataset: The dataset resource definition. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :raise AirflowException: If the dataset does not exist. + :return: The dataset. + """ + if not self._dataset_exists(resource_group_name, factory_name, dataset_name): + raise AirflowException(f"Dataset {dataset_name!r} does not exist.") + + return self.get_conn().datasets.create_or_update( + resource_group_name, factory_name, dataset_name, dataset, **config + ) + + @provide_targeted_factory + def create_dataset( + self, + dataset_name: str, + dataset: Dataset, + resource_group_name: Optional[str] = None, + factory_name: Optional[str] = None, + **config: Any, + ) -> DatasetResource: + """ + Create the dataset. + + :param dataset_name: The dataset name. + :param dataset: The dataset resource definition. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :raise AirflowException: If the dataset already exists. + :return: The dataset. 
+ """ + if self._dataset_exists(resource_group_name, factory_name, dataset_name): + raise AirflowException(f"Dataset {dataset_name!r} already exists.") + + return self.get_conn().datasets.create_or_update( + resource_group_name, factory_name, dataset_name, dataset, **config + ) + + @provide_targeted_factory + def delete_dataset( + self, + dataset_name: str, + resource_group_name: Optional[str] = None, + factory_name: Optional[str] = None, + **config: Any, + ) -> None: + """ + Delete the dataset: + + :param dataset_name: The dataset name. + :param resource_group_name: The dataset name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + """ + self.get_conn().datasets.delete(resource_group_name, factory_name, dataset_name, **config) + + @provide_targeted_factory + def get_pipeline( + self, + pipeline_name: str, + resource_group_name: Optional[str] = None, + factory_name: Optional[str] = None, + **config: Any, + ) -> PipelineResource: + """ + Get the pipeline. + + :param pipeline_name: The pipeline name. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :return: The pipeline. + """ + return self.get_conn().pipelines.get(resource_group_name, factory_name, pipeline_name, **config) + + def _pipeline_exists(self, resource_group_name, factory_name, pipeline_name) -> bool: + """Return whether or not the pipeline already exists.""" + pipelines = { + pipeline.name + for pipeline in self.get_conn().pipelines.list_by_factory(resource_group_name, factory_name) + } + + return pipeline_name in pipelines + + @provide_targeted_factory + def update_pipeline( + self, + pipeline_name: str, + pipeline: PipelineResource, + resource_group_name: Optional[str] = None, + factory_name: Optional[str] = None, + **config: Any, + ) -> PipelineResource: + """ + Update the pipeline. + + :param pipeline_name: The pipeline name. + :param pipeline: The pipeline resource definition. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :raise AirflowException: If the pipeline does not exist. + :return: The pipeline. + """ + if not self._pipeline_exists(resource_group_name, factory_name, pipeline_name): + raise AirflowException(f"Pipeline {pipeline_name!r} does not exist.") + + return self.get_conn().pipelines.create_or_update( + resource_group_name, factory_name, pipeline_name, pipeline, **config + ) + + @provide_targeted_factory + def create_pipeline( + self, + pipeline_name: str, + pipeline: PipelineResource, + resource_group_name: Optional[str] = None, + factory_name: Optional[str] = None, + **config: Any, + ) -> PipelineResource: + """ + Create the pipeline. + + :param pipeline_name: The pipeline name. + :param pipeline: The pipeline resource definition. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :raise AirflowException: If the pipeline already exists. + :return: The pipeline. 
+ """ + if self._pipeline_exists(resource_group_name, factory_name, pipeline_name): + raise AirflowException(f"Pipeline {pipeline_name!r} already exists.") + + return self.get_conn().pipelines.create_or_update( + resource_group_name, factory_name, pipeline_name, pipeline, **config + ) + + @provide_targeted_factory + def delete_pipeline( + self, + pipeline_name: str, + resource_group_name: Optional[str] = None, + factory_name: Optional[str] = None, + **config: Any, + ) -> None: + """ + Delete the pipeline: + + :param pipeline_name: The pipeline name. + :param resource_group_name: The pipeline name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + """ + self.get_conn().pipelines.delete(resource_group_name, factory_name, pipeline_name, **config) + + @provide_targeted_factory + def run_pipeline( + self, + pipeline_name: str, + resource_group_name: Optional[str] = None, + factory_name: Optional[str] = None, + **config: Any, + ) -> CreateRunResponse: + """ + Run a pipeline. + + :param pipeline_name: The pipeline name. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :return: The pipeline run. + """ + return self.get_conn().pipelines.create_run( + resource_group_name, factory_name, pipeline_name, **config + ) + + @provide_targeted_factory + def get_pipeline_run( + self, + run_id: str, + resource_group_name: Optional[str] = None, + factory_name: Optional[str] = None, + **config: Any, + ) -> PipelineRun: + """ + Get the pipeline run. + + :param run_id: The pipeline run identifier. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :return: The pipeline run. + """ + return self.get_conn().pipeline_runs.get(resource_group_name, factory_name, run_id, **config) + + @provide_targeted_factory + def cancel_pipeline_run( + self, + run_id: str, + resource_group_name: Optional[str] = None, + factory_name: Optional[str] = None, + **config: Any, + ) -> None: + """ + Cancel the pipeline run. + + :param run_id: The pipeline run identifier. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + """ + self.get_conn().pipeline_runs.cancel(resource_group_name, factory_name, run_id, **config) + + @provide_targeted_factory + def get_trigger( + self, + trigger_name: str, + resource_group_name: Optional[str] = None, + factory_name: Optional[str] = None, + **config: Any, + ) -> TriggerResource: + """ + Get the trigger. + + :param trigger_name: The trigger name. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :return: The trigger. 
+ """ + return self.get_conn().triggers.get(resource_group_name, factory_name, trigger_name, **config) + + def _trigger_exists(self, resource_group_name, factory_name, trigger_name) -> bool: + """Return whether or not the trigger already exists.""" + triggers = { + trigger.name + for trigger in self.get_conn().triggers.list_by_factory(resource_group_name, factory_name) + } + + return trigger_name in triggers + + @provide_targeted_factory + def update_trigger( + self, + trigger_name: str, + trigger: Trigger, + resource_group_name: Optional[str] = None, + factory_name: Optional[str] = None, + **config: Any, + ) -> TriggerResource: + """ + Update the trigger. + + :param trigger_name: The trigger name. + :param trigger: The trigger resource definition. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :raise AirflowException: If the trigger does not exist. + :return: The trigger. + """ + if not self._trigger_exists(resource_group_name, factory_name, trigger_name): + raise AirflowException(f"Trigger {trigger_name!r} does not exist.") + + return self.get_conn().triggers.create_or_update( + resource_group_name, factory_name, trigger_name, trigger, **config + ) + + @provide_targeted_factory + def create_trigger( + self, + trigger_name: str, + trigger: Trigger, + resource_group_name: Optional[str] = None, + factory_name: Optional[str] = None, + **config: Any, + ) -> TriggerResource: + """ + Create the trigger. + + :param trigger_name: The trigger name. + :param trigger: The trigger resource definition. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :raise AirflowException: If the trigger already exists. + :return: The trigger. + """ + if self._trigger_exists(resource_group_name, factory_name, trigger_name): + raise AirflowException(f"Trigger {trigger_name!r} already exists.") + + return self.get_conn().triggers.create_or_update( + resource_group_name, factory_name, trigger_name, trigger, **config + ) + + @provide_targeted_factory + def delete_trigger( + self, + trigger_name: str, + resource_group_name: Optional[str] = None, + factory_name: Optional[str] = None, + **config: Any, + ) -> None: + """ + Delete the trigger. + + :param trigger_name: The trigger name. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + """ + self.get_conn().triggers.delete(resource_group_name, factory_name, trigger_name, **config) + + @provide_targeted_factory + def start_trigger( + self, + trigger_name: str, + resource_group_name: Optional[str] = None, + factory_name: Optional[str] = None, + **config: Any, + ) -> AzureOperationPoller: + """ + Start the trigger. + + :param trigger_name: The trigger name. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :return: An Azure operation poller. + """ + return self.get_conn().triggers.start(resource_group_name, factory_name, trigger_name, **config) + + @provide_targeted_factory + def stop_trigger( + self, + trigger_name: str, + resource_group_name: Optional[str] = None, + factory_name: Optional[str] = None, + **config: Any, + ) -> AzureOperationPoller: + """ + Stop the trigger. + + :param trigger_name: The trigger name. + :param resource_group_name: The resource group name. 
+ :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + :return: An Azure operation poller. + """ + return self.get_conn().triggers.stop(resource_group_name, factory_name, trigger_name, **config) + + @provide_targeted_factory + def rerun_trigger( + self, + trigger_name: str, + run_id: str, + resource_group_name: Optional[str] = None, + factory_name: Optional[str] = None, + **config: Any, + ) -> None: + """ + Rerun the trigger. + + :param trigger_name: The trigger name. + :param run_id: The trigger run identifier. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + """ + return self.get_conn().trigger_runs.rerun( + resource_group_name, factory_name, trigger_name, run_id, **config + ) + + @provide_targeted_factory + def cancel_trigger( + self, + trigger_name: str, + run_id: str, + resource_group_name: Optional[str] = None, + factory_name: Optional[str] = None, + **config: Any, + ) -> None: + """ + Cancel the trigger. + + :param trigger_name: The trigger name. + :param run_id: The trigger run identifier. + :param resource_group_name: The resource group name. + :param factory_name: The factory name. + :param config: Extra parameters for the ADF client. + """ + self.get_conn().trigger_runs.cancel(resource_group_name, factory_name, trigger_name, run_id, **config) diff --git a/airflow/providers/microsoft/azure/log/wasb_task_handler.py b/airflow/providers/microsoft/azure/log/wasb_task_handler.py index 3f12135aff62e..a17b7c70e5e48 100644 --- a/airflow/providers/microsoft/azure/log/wasb_task_handler.py +++ b/airflow/providers/microsoft/azure/log/wasb_task_handler.py @@ -66,6 +66,7 @@ def hook(self): 'the Wasb connection exists.', remote_conn_id, ) + return None def set_context(self, ti) -> None: super().set_context(ti) @@ -159,6 +160,7 @@ def wasb_read(self, remote_log_location: str, return_error: bool = False): # return error if needed if return_error: return msg + return None def wasb_write(self, log: str, remote_log_location: str, append: bool = True) -> None: """ diff --git a/airflow/providers/microsoft/azure/provider.yaml b/airflow/providers/microsoft/azure/provider.yaml index 39b36ab90904e..da7b330263c38 100644 --- a/airflow/providers/microsoft/azure/provider.yaml +++ b/airflow/providers/microsoft/azure/provider.yaml @@ -30,25 +30,34 @@ integrations: tags: [azure] - integration-name: Microsoft Azure Blob Storage external-doc-url: https://azure.microsoft.com/en-us/services/storage/blobs/ + logo: /integration-logos/azure/Blob Storage.svg tags: [azure] - integration-name: Microsoft Azure Container Instances external-doc-url: https://azure.microsoft.com/en-us/services/container-instances/ + logo: /integration-logos/azure/Container Instances.svg tags: [azure] - integration-name: Microsoft Azure Cosmos DB external-doc-url: https://azure.microsoft.com/en-us/services/cosmos-db/ + logo: /integration-logos/azure/Azure Cosmos DB.svg tags: [azure] - integration-name: Microsoft Azure Data Explorer external-doc-url: https://azure.microsoft.com/en-us/services/data-explorer/ tags: [azure] - integration-name: Microsoft Azure Data Lake Storage external-doc-url: https://azure.microsoft.com/en-us/services/storage/data-lake-storage/ + logo: /integration-logos/azure/Data Lake Storage.svg tags: [azure] - integration-name: Microsoft Azure Files external-doc-url: https://azure.microsoft.com/en-us/services/storage/files/ + logo: /integration-logos/azure/Azure Files.svg tags: 
[azure] - integration-name: Microsoft Azure FileShare external-doc-url: https://cloud.google.com/storage/ tags: [azure] + - integration-name: Microsoft Azure Data Factory + external-doc-url: https://azure.microsoft.com/en-us/services/data-factory/ + logo: /integration-logos/azure/Azure Data Factory.svg + tags: [azure] - integration-name: Microsoft Azure external-doc-url: https://azure.microsoft.com/ tags: [azure] @@ -108,6 +117,9 @@ hooks: - integration-name: Microsoft Azure Blob Storage python-modules: - airflow.providers.microsoft.azure.hooks.wasb + - integration-name: Microsoft Azure Data Factory + python-modules: + - airflow.providers.microsoft.azure.hooks.azure_data_factory transfers: - source-integration-name: Local @@ -133,3 +145,4 @@ hook-class-names: - airflow.providers.microsoft.azure.hooks.azure_data_lake.AzureDataLakeHook - airflow.providers.microsoft.azure.hooks.azure_container_instance.AzureContainerInstanceHook - airflow.providers.microsoft.azure.hooks.wasb.WasbHook + - airflow.providers.microsoft.azure.hooks.azure_data_factory.AzureDataFactoryHook diff --git a/airflow/providers/microsoft/mssql/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/microsoft/mssql/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index 7eccb66646447..0000000000000 --- a/airflow/providers/microsoft/mssql/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,20 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/microsoft/mssql/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/microsoft/mssql/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 3f91c6fef2add..0000000000000 --- a/airflow/providers/microsoft/mssql/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,14 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number 
for fresh release (#9408) | diff --git a/airflow/providers/microsoft/mssql/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/microsoft/mssql/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 189d230156fc3..0000000000000 --- a/airflow/providers/microsoft/mssql/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,13 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [765d29ecc](https://github.com/apache/airflow/commit/765d29ecc9fd6a3220efa0a6c4ce10848f5cbf82) | 2020-10-15 | Pymssql is maintained again (#11537) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [1845cd11b](https://github.com/apache/airflow/commit/1845cd11b77f302777ab854e84bef9c212c604a0) | 2020-10-11 | Strict type check for google ads and cloud hooks (#11390) | -| [d2754ef76](https://github.com/apache/airflow/commit/d2754ef76958f8df4dcb6974e2cd2c1edb17935e) | 2020-10-09 | Strict type check for Microsoft (#11359) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/microsoft/mssql/BACKPORT_PROVIDER_CHANGES_2020.11.23.md b/airflow/providers/microsoft/mssql/BACKPORT_PROVIDER_CHANGES_2020.11.23.md deleted file mode 100644 index c5db3f406aa27..0000000000000 --- a/airflow/providers/microsoft/mssql/BACKPORT_PROVIDER_CHANGES_2020.11.23.md +++ /dev/null @@ -1,15 +0,0 @@ - - -### Release 2020.11.23 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [19b7e4565](https://github.com/apache/airflow/commit/19b7e4565e6372d50ef0fbb5678a484a7afbdbee) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages 
scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | diff --git a/airflow/providers/microsoft/mssql/BACKPORT_PROVIDER_README.md b/airflow/providers/microsoft/mssql/BACKPORT_PROVIDER_README.md deleted file mode 100644 index 9132a9981bb09..0000000000000 --- a/airflow/providers/microsoft/mssql/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,171 +0,0 @@ - - - -# Package apache-airflow-backport-providers-microsoft-mssql - -Release: 2020.11.23 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Cross provider package dependencies](#cross-provider-package-dependencies) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 2020.11.23](#release-20201123) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for `microsoft.mssql` provider. All classes for this provider package -are in `airflow.providers.microsoft.mssql` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-microsoft-mssql` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| pymssql | ~=2.1,>=2.1.5 | - -## Cross provider package dependencies - -Those are dependencies that might be needed in order to use all the features of the package. -You need to install the specified backport providers package in order to use them. - -You can install such cross-provider dependencies when installing from PyPI. For example: - -```bash -pip install apache-airflow-backport-providers-microsoft-mssql[odbc] -``` - -| Dependent package | Extra | -|:---------------------------------------------------------------------------------------------------------------|:--------| -| [apache-airflow-backport-providers-odbc](https://github.com/apache/airflow/tree/master/airflow/providers/odbc) | odbc | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `microsoft.mssql` provider -are in the `airflow.providers.microsoft.mssql` package. 
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.microsoft.mssql` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:------------------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------| -| [operators.mssql.MsSqlOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/mssql/operators/mssql.py) | [operators.mssql_operator.MsSqlOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/mssql_operator.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.microsoft.mssql` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------| -| [hooks.mssql.MsSqlHook](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/mssql/hooks/mssql.py) | [hooks.mssql_hook.MsSqlHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/mssql_hook.py) | - - - -## Releases - -### Release 2020.11.23 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [19b7e4565](https://github.com/apache/airflow/commit/19b7e4565e6372d50ef0fbb5678a484a7afbdbee) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | - - -### Release 2020.10.29 - -| Commit | Committed | Subject | 
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [765d29ecc](https://github.com/apache/airflow/commit/765d29ecc9fd6a3220efa0a6c4ce10848f5cbf82) | 2020-10-15 | Pymssql is maintained again (#11537) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [1845cd11b](https://github.com/apache/airflow/commit/1845cd11b77f302777ab854e84bef9c212c604a0) | 2020-10-11 | Strict type check for google ads and cloud hooks (#11390) | -| [d2754ef76](https://github.com/apache/airflow/commit/d2754ef76958f8df4dcb6974e2cd2c1edb17935e) | 2020-10-09 | Strict type check for Microsoft (#11359) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare 
backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/microsoft/mssql/CHANGELOG.rst b/airflow/providers/microsoft/mssql/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/microsoft/mssql/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. 
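To make the 1.0.0 provider release concrete, here is a minimal Airflow 2.0 DAG sketch using `MsSqlOperator`. The DAG id, schedule, connection id, and SQL are illustrative assumptions only.

```python
from datetime import datetime

from airflow import DAG
from airflow.providers.microsoft.mssql.operators.mssql import MsSqlOperator

with DAG(
    dag_id="example_mssql_provider",    # hypothetical DAG id
    start_date=datetime(2021, 1, 1),
    schedule_interval=None,
    catchup=False,
) as dag:
    create_table = MsSqlOperator(
        task_id="create_table",
        mssql_conn_id="mssql_default",  # assumed connection id
        sql="""
            CREATE TABLE example_users (  -- hypothetical table
                id INT PRIMARY KEY,
                name NVARCHAR(100)
            );
        """,
    )
```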
diff --git a/airflow/providers/microsoft/mssql/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/microsoft/mssql/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index 86d8bb363cdbc..0000000000000 --- a/airflow/providers/microsoft/mssql/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,49 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [7ca0b6f12](https://github.com/apache/airflow/commit/7ca0b6f121c9cec6e25de130f86a56d7c7fbe38c) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) (#12438) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [765d29ecc](https://github.com/apache/airflow/commit/765d29ecc9fd6a3220efa0a6c4ce10848f5cbf82) | 2020-10-15 | Pymssql is maintained again (#11537) | -| 
[16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [1845cd11b](https://github.com/apache/airflow/commit/1845cd11b77f302777ab854e84bef9c212c604a0) | 2020-10-11 | Strict type check for google ads and cloud hooks (#11390) | -| [d2754ef76](https://github.com/apache/airflow/commit/d2754ef76958f8df4dcb6974e2cd2c1edb17935e) | 2020-10-09 | Strict type check for Microsoft (#11359) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/microsoft/mssql/README.md b/airflow/providers/microsoft/mssql/README.md deleted file mode 100644 index 4902d66eda5f2..0000000000000 --- a/airflow/providers/microsoft/mssql/README.md +++ /dev/null @@ -1,158 +0,0 @@ - - - -# Package apache-airflow-providers-microsoft-mssql - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Cross provider package dependencies](#cross-provider-package-dependencies) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for `microsoft.mssql` provider. All classes for this provider package -are in `airflow.providers.microsoft.mssql` python package. - - - -## Installation - -NOTE! - -On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option -`--use-deprecated legacy-resolver` to your pip install command. - -You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-microsoft-mssql` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| pymssql | ~=2.1,>=2.1.5 | - -## Cross provider package dependencies - -Those are dependencies that might be needed in order to use all the features of the package. 
-You need to install the specified backport providers package in order to use them. - -You can install such cross-provider dependencies when installing from PyPI. For example: - -```bash -pip install apache-airflow-providers-microsoft-mssql[odbc] -``` - -| Dependent package | Extra | -|:----------------------------------------------------------------------------------------|:--------| -| [apache-airflow-providers-odbc](https://pypi.org/project/apache-airflow-providers-odbc) | odbc | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `microsoft.mssql` provider -are in the `airflow.providers.microsoft.mssql` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.microsoft.mssql` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:------------------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------| -| [operators.mssql.MsSqlOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/mssql/operators/mssql.py) | [operators.mssql_operator.MsSqlOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/mssql_operator.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.microsoft.mssql` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------| -| [hooks.mssql.MsSqlHook](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/mssql/hooks/mssql.py) | [hooks.mssql_hook.MsSqlHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/mssql_hook.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| 
[7ca0b6f12](https://github.com/apache/airflow/commit/7ca0b6f121c9cec6e25de130f86a56d7c7fbe38c) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) (#12438) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [765d29ecc](https://github.com/apache/airflow/commit/765d29ecc9fd6a3220efa0a6c4ce10848f5cbf82) | 2020-10-15 | Pymssql is maintained again (#11537) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [1845cd11b](https://github.com/apache/airflow/commit/1845cd11b77f302777ab854e84bef9c212c604a0) | 2020-10-11 | Strict type check for google ads and cloud hooks (#11390) | -| [d2754ef76](https://github.com/apache/airflow/commit/d2754ef76958f8df4dcb6974e2cd2c1edb17935e) | 2020-10-09 | Strict type check for Microsoft (#11359) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| 
[cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/microsoft/winrm/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/microsoft/winrm/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index 3e2d796c0f514..0000000000000 --- a/airflow/providers/microsoft/winrm/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,21 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | 
-|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [a83eb335e](https://github.com/apache/airflow/commit/a83eb335e58c6a15e96c517a1b492bc79c869ce8) | 2020-03-23 | Add call to Super call in microsoft providers (#7821) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | diff --git a/airflow/providers/microsoft/winrm/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/microsoft/winrm/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index fef603bf86c79..0000000000000 --- a/airflow/providers/microsoft/winrm/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,15 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to 
October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/microsoft/winrm/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/microsoft/winrm/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 0a1f4309ca0aa..0000000000000 --- a/airflow/providers/microsoft/winrm/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,10 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [d2754ef76](https://github.com/apache/airflow/commit/d2754ef76958f8df4dcb6974e2cd2c1edb17935e) | 2020-10-09 | Strict type check for Microsoft (#11359) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/microsoft/winrm/BACKPORT_PROVIDER_CHANGES_2020.11.23.md b/airflow/providers/microsoft/winrm/BACKPORT_PROVIDER_CHANGES_2020.11.23.md deleted file mode 100644 index 222a4691ea330..0000000000000 --- a/airflow/providers/microsoft/winrm/BACKPORT_PROVIDER_CHANGES_2020.11.23.md +++ /dev/null @@ -1,16 +0,0 @@ - - -### Release 2020.11.23 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [19b7e4565](https://github.com/apache/airflow/commit/19b7e4565e6372d50ef0fbb5678a484a7afbdbee) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport 
provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | diff --git a/airflow/providers/microsoft/winrm/BACKPORT_PROVIDER_README.md b/airflow/providers/microsoft/winrm/BACKPORT_PROVIDER_README.md deleted file mode 100644 index ead3b187382e3..0000000000000 --- a/airflow/providers/microsoft/winrm/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,155 +0,0 @@ - - - -# Package apache-airflow-backport-providers-microsoft-winrm - -Release: 2020.11.23 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 2020.11.23](#release-20201123) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for `microsoft.winrm` provider. All classes for this provider package -are in `airflow.providers.microsoft.winrm` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-microsoft-winrm` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| pywinrm | ~=0.4 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `microsoft.winrm` provider -are in the `airflow.providers.microsoft.winrm` package. 
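As with the mssql backport package, the practical effect is an import-path change; a minimal sketch, assuming the backport package is installed on Airflow 1.10.*:

```python
# Airflow 1.10.* contrib locations:
# from airflow.contrib.hooks.winrm_hook import WinRMHook
# from airflow.contrib.operators.winrm_operator import WinRMOperator

# With apache-airflow-backport-providers-microsoft-winrm installed,
# the Airflow 2.0 paths can be used on Airflow 1.10.*:
from airflow.providers.microsoft.winrm.hooks.winrm import WinRMHook
from airflow.providers.microsoft.winrm.operators.winrm import WinRMOperator
```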
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.microsoft.winrm` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:------------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.winrm.WinRMOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/winrm/operators/winrm.py) | [contrib.operators.winrm_operator.WinRMOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/winrm_operator.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.microsoft.winrm` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------| -| [hooks.winrm.WinRMHook](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/winrm/hooks/winrm.py) | [contrib.hooks.winrm_hook.WinRMHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/winrm_hook.py) | - - - -## Releases - -### Release 2020.11.23 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [19b7e4565](https://github.com/apache/airflow/commit/19b7e4565e6372d50ef0fbb5678a484a7afbdbee) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| 
[5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [d2754ef76](https://github.com/apache/airflow/commit/d2754ef76958f8df4dcb6974e2cd2c1edb17935e) | 2020-10-09 | Strict type check for Microsoft (#11359) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 
2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [a83eb335e](https://github.com/apache/airflow/commit/a83eb335e58c6a15e96c517a1b492bc79c869ce8) | 2020-03-23 | Add call to Super call in microsoft providers (#7821) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | diff --git a/airflow/providers/microsoft/winrm/CHANGELOG.rst b/airflow/providers/microsoft/winrm/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/microsoft/winrm/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. 
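For reference, a minimal Airflow 2.0 DAG sketch for the 1.0.0 winrm provider. The connection id, target host setup, and command are illustrative assumptions.

```python
from datetime import datetime

from airflow import DAG
from airflow.providers.microsoft.winrm.hooks.winrm import WinRMHook
from airflow.providers.microsoft.winrm.operators.winrm import WinRMOperator

with DAG(
    dag_id="example_winrm_provider",    # hypothetical DAG id
    start_date=datetime(2021, 1, 1),
    schedule_interval=None,
    catchup=False,
) as dag:
    # "winrm_default" is an assumed connection id pointing at a Windows host.
    winrm_hook = WinRMHook(ssh_conn_id="winrm_default")

    run_command = WinRMOperator(
        task_id="run_ipconfig",
        winrm_hook=winrm_hook,
        command="ipconfig /all",
    )
```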
diff --git a/airflow/providers/microsoft/winrm/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/microsoft/winrm/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index 38f3468e15711..0000000000000 --- a/airflow/providers/microsoft/winrm/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,48 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------| -| [d5589673a](https://github.com/apache/airflow/commit/d5589673a95aaced0b851ea0a4061a010a924a82) | 2020-12-08 | Move dummy_operator.py to dummy.py (#11178) (#11293) | -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [7ca0b6f12](https://github.com/apache/airflow/commit/7ca0b6f121c9cec6e25de130f86a56d7c7fbe38c) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) (#12438) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [d2754ef76](https://github.com/apache/airflow/commit/d2754ef76958f8df4dcb6974e2cd2c1edb17935e) | 2020-10-09 | Strict type check for Microsoft (#11359) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) 
| 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [a83eb335e](https://github.com/apache/airflow/commit/a83eb335e58c6a15e96c517a1b492bc79c869ce8) | 2020-03-23 | Add call to Super call in microsoft providers (#7821) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | diff --git a/airflow/providers/microsoft/winrm/README.md b/airflow/providers/microsoft/winrm/README.md deleted file mode 100644 index fe6007d407a5b..0000000000000 --- a/airflow/providers/microsoft/winrm/README.md +++ /dev/null @@ -1,141 +0,0 @@ - - - -# Package apache-airflow-providers-microsoft-winrm - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for `microsoft.winrm` provider. All classes for this provider package -are in `airflow.providers.microsoft.winrm` python package. - - - -## Installation - -NOTE! - -On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option -`--use-deprecated legacy-resolver` to your pip install command. 
- -You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-microsoft-winrm` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| pywinrm | ~=0.4 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `microsoft.winrm` provider -are in the `airflow.providers.microsoft.winrm` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.microsoft.winrm` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:------------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.winrm.WinRMOperator](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/winrm/operators/winrm.py) | [contrib.operators.winrm_operator.WinRMOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/winrm_operator.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.microsoft.winrm` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------| -| [hooks.winrm.WinRMHook](https://github.com/apache/airflow/blob/master/airflow/providers/microsoft/winrm/hooks/winrm.py) | [contrib.hooks.winrm_hook.WinRMHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/winrm_hook.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------| -| [d5589673a](https://github.com/apache/airflow/commit/d5589673a95aaced0b851ea0a4061a010a924a82) | 2020-12-08 | Move dummy_operator.py to dummy.py (#11178) (#11293) | -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [7ca0b6f12](https://github.com/apache/airflow/commit/7ca0b6f121c9cec6e25de130f86a56d7c7fbe38c) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) (#12438) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks 
ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [d2754ef76](https://github.com/apache/airflow/commit/d2754ef76958f8df4dcb6974e2cd2c1edb17935e) | 2020-10-09 | Strict type check for Microsoft (#11359) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 
release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [a83eb335e](https://github.com/apache/airflow/commit/a83eb335e58c6a15e96c517a1b492bc79c869ce8) | 2020-03-23 | Add call to Super call in microsoft providers (#7821) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | diff --git a/airflow/providers/mongo/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/mongo/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index b5e7598f6b8b7..0000000000000 --- a/airflow/providers/mongo/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,18 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| 
[0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/mongo/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/mongo/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 9df430415ee2c..0000000000000 --- a/airflow/providers/mongo/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,15 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [46cdb0e08](https://github.com/apache/airflow/commit/46cdb0e08045f84029ac727cbaf6040acd592810) | 2020-07-19 | Add type hinting for mongo provider (#9875) | -| 
[d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/mongo/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/mongo/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 528a7e085f91f..0000000000000 --- a/airflow/providers/mongo/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,10 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/mongo/BACKPORT_PROVIDER_README.md b/airflow/providers/mongo/BACKPORT_PROVIDER_README.md deleted file mode 100644 index 89bd5b1520715..0000000000000 --- a/airflow/providers/mongo/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,136 +0,0 @@ - - - -# Package apache-airflow-backport-providers-mongo - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Sensors](#sensors) - - [Moved sensors](#moved-sensors) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for `mongo` provider. All classes for this provider package -are in `airflow.providers.mongo` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-mongo` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| dnspython | >=1.13.0,<2.0.0 | -| pymongo | >=3.6.0 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `mongo` provider -are in the `airflow.providers.mongo` package. 
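As a quick orientation before the class tables, here is a minimal sensor sketch under assumed names; the `mongo_default` connection id, the collection, and the query document are placeholders, and the import path matches the moved-sensors table below:

```python
# A minimal sketch, assuming a Mongo connection named "mongo_default" and a
# collection/filter invented for illustration.
from datetime import datetime

from airflow import DAG
from airflow.providers.mongo.sensors.mongo import MongoSensor

with DAG(
    dag_id="mongo_sensor_example",     # assumed DAG id
    start_date=datetime(2021, 1, 1),
    schedule_interval=None,
) as dag:
    wait_for_document = MongoSensor(
        task_id="wait_for_document",
        mongo_conn_id="mongo_default",
        collection="orders",           # assumed collection name
        query={"status": "ready"},     # assumed filter document
        poke_interval=60,              # re-check every minute
    )
```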
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Sensors - - - -### Moved sensors - -| Airflow 2.0 sensors: `airflow.providers.mongo` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------| -| [sensors.mongo.MongoSensor](https://github.com/apache/airflow/blob/master/airflow/providers/mongo/sensors/mongo.py) | [contrib.sensors.mongo_sensor.MongoSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/mongo_sensor.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.mongo` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------| -| [hooks.mongo.MongoHook](https://github.com/apache/airflow/blob/master/airflow/providers/mongo/hooks/mongo.py) | [contrib.hooks.mongo_hook.MongoHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/mongo_hook.py) | - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| 
[cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [46cdb0e08](https://github.com/apache/airflow/commit/46cdb0e08045f84029ac727cbaf6040acd592810) | 2020-07-19 | Add type hinting for mongo provider (#9875) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/mongo/CHANGELOG.rst b/airflow/providers/mongo/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/mongo/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. 
The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. diff --git a/airflow/providers/mongo/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/mongo/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index a228c3386b1f3..0000000000000 --- a/airflow/providers/mongo/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,46 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features 
(#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [46cdb0e08](https://github.com/apache/airflow/commit/46cdb0e08045f84029ac727cbaf6040acd592810) | 2020-07-19 | Add type hinting for mongo provider (#9875) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/mongo/README.md b/airflow/providers/mongo/README.md deleted file mode 100644 index c5dd39819ec6e..0000000000000 --- a/airflow/providers/mongo/README.md +++ /dev/null @@ -1,140 +0,0 @@ - - - -# Package apache-airflow-providers-mongo - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Sensors](#sensors) - - [Moved sensors](#moved-sensors) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for `mongo` provider. All classes for this provider package -are in `airflow.providers.mongo` python package. - - - -## Installation - -NOTE! - -On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option -`--use-deprecated legacy-resolver` to your pip install command. - -You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-mongo` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| dnspython | >=1.13.0,<2.0.0 | -| pymongo | >=3.6.0 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `mongo` provider -are in the `airflow.providers.mongo` package. 
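For orientation, a minimal hook sketch is shown below; the connection id, collection name, and query are assumptions for illustration, and the import path matches the moved-hooks table that follows:

```python
# A minimal sketch of using the hook from its new provider location, assuming
# a Mongo connection named "mongo_default" and a collection invented for
# illustration.
from airflow.providers.mongo.hooks.mongo import MongoHook

hook = MongoHook(conn_id="mongo_default")
# find() wraps pymongo's collection.find(); when mongo_db is not given, the
# database configured on the connection is used.
for doc in hook.find(mongo_collection="orders", query={"status": "ready"}):
    print(doc)
```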
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Sensors - - - -### Moved sensors - -| Airflow 2.0 sensors: `airflow.providers.mongo` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------| -| [sensors.mongo.MongoSensor](https://github.com/apache/airflow/blob/master/airflow/providers/mongo/sensors/mongo.py) | [contrib.sensors.mongo_sensor.MongoSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/mongo_sensor.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.mongo` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------| -| [hooks.mongo.MongoHook](https://github.com/apache/airflow/blob/master/airflow/providers/mongo/hooks/mongo.py) | [contrib.hooks.mongo_hook.MongoHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/mongo_hook.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases 
(#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [46cdb0e08](https://github.com/apache/airflow/commit/46cdb0e08045f84029ac727cbaf6040acd592810) | 2020-07-19 | Add type hinting for mongo provider (#9875) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| 
[0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/mysql/ADDITIONAL_INFO.md b/airflow/providers/mysql/ADDITIONAL_INFO.md deleted file mode 100644 index 1c961988b4a4f..0000000000000 --- a/airflow/providers/mysql/ADDITIONAL_INFO.md +++ /dev/null @@ -1,24 +0,0 @@ - - -# Mysql client requirements - -The version of MySQL server has to be 5.6.4+. The exact version upper bound depends -on the version of ``mysqlclient`` package. For example, ``mysqlclient`` 1.3.12 can only be -used with MySQL server 5.6.4 through 5.7. diff --git a/airflow/providers/mysql/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/mysql/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index 5867df338c31d..0000000000000 --- a/airflow/providers/mysql/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,28 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [68d1714f2](https://github.com/apache/airflow/commit/68d1714f296989b7aad1a04b75dc033e76afb747) | 2020-04-04 | [AIRFLOW-6822] AWS hooks should cache boto3 client (#7541) | -| [329e6a5f7](https://github.com/apache/airflow/commit/329e6a5f72bc2e3fc19391754256d974179a6ce0) | 2020-04-01 | [AIRFLOW-5907] Add S3 to MySql Operator (#6578) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [b39468d28](https://github.com/apache/airflow/commit/b39468d2878554ba60863656364b4a95eda30685) | 2020-03-09 | [AIRFLOW-5922] Add option to specify the mysql client library used in MySqlHook (#6576) | -| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18 | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) | -| [94fccca97](https://github.com/apache/airflow/commit/94fccca97030ee59d89f302a98137b17e7b01a33) | 2020-02-04 | [AIRFLOW-XXXX] Add pre-commit check for utf-8 file encoding (#7347) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [1e576f123](https://github.com/apache/airflow/commit/1e576f12343b30c2a37ab3f4f62ee3aa30326e77) | 2020-02-02 | [AIRFLOW-6680] Last changes for AIP-21 (#7301) | -| [057f3ae3a](https://github.com/apache/airflow/commit/057f3ae3a4afedf6d462ecf58b01dd6304d3e135) | 2020-01-29 | [AIRFLOW-6670][depends on AIRFLOW-6669] Move contrib operators to providers package (#7286) | -| [82c0e5aff](https://github.com/apache/airflow/commit/82c0e5aff6004f636b98e207c3caec40b403fbbe) | 2020-01-28 | [AIRFLOW-6655] Move AWS classes to providers (#7271) | -| [eee34ee80](https://github.com/apache/airflow/commit/eee34ee8080bb7bc81294c3fbd8be93bbf795367) | 2020-01-24 | [AIRFLOW-4204] Update super() calls (#7248) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/mysql/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/mysql/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 2b269d69987c7..0000000000000 --- a/airflow/providers/mysql/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,18 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | 
-|:-----------------------------------------------------------------------------------------------|:------------|:----------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [d1bce91bb](https://github.com/apache/airflow/commit/d1bce91bb21d5a468fa6a0207156c28fe1ca6513) | 2020-08-25 | PyDocStyle: Enable D403: Capitalized first word of docstring (#10530) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [01f37967c](https://github.com/apache/airflow/commit/01f37967c938f3f11b08517f5920f31aca89676f) | 2020-08-18 | Add typing coverage to mysql providers package (#10095) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/mysql/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/mysql/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 528a7e085f91f..0000000000000 --- a/airflow/providers/mysql/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,10 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/mysql/BACKPORT_PROVIDER_README.md b/airflow/providers/mysql/BACKPORT_PROVIDER_README.md 
deleted file mode 100644 index d0bedbf7aaeb9..0000000000000 --- a/airflow/providers/mysql/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,188 +0,0 @@ - - - -# Package apache-airflow-backport-providers-mysql - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Cross provider package dependencies](#cross-provider-package-dependencies) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Transfer operators](#transfer-operators) - - [New transfer operators](#new-transfer-operators) - - [Moved transfer operators](#moved-transfer-operators) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for `mysql` provider. All classes for this provider package -are in `airflow.providers.mysql` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-mysql` - -## PIP requirements - -| PIP package | Version required | -|:-----------------------|:-------------------| -| mysql-connector-python | >=8.0.11, <=8.0.18 | -| mysqlclient | >=1.3.6,<1.4 | - -## Cross provider package dependencies - -Those are dependencies that might be needed in order to use all the features of the package. -You need to install the specified backport providers package in order to use them. - -You can install such cross-provider dependencies when installing from PyPI. For example: - -```bash -pip install apache-airflow-backport-providers-mysql[amazon] -``` - -| Dependent package | Extra | -|:---------------------------------------------------------------------------------------------------------------------|:--------| -| [apache-airflow-backport-providers-amazon](https://github.com/apache/airflow/tree/master/airflow/providers/amazon) | amazon | -| [apache-airflow-backport-providers-presto](https://github.com/apache/airflow/tree/master/airflow/providers/presto) | presto | -| [apache-airflow-backport-providers-vertica](https://github.com/apache/airflow/tree/master/airflow/providers/vertica) | vertica | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `mysql` provider -are in the `airflow.providers.mysql` package. 
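For orientation, a minimal hook sketch is shown below; the connection id, table, and query are assumptions for illustration, and the import path matches the moved-hooks table that follows:

```python
# A minimal sketch, assuming a MySQL connection named "mysql_default" and a
# table invented for illustration. The import path follows the "Moved hooks"
# table below.
from airflow.providers.mysql.hooks.mysql import MySqlHook

hook = MySqlHook(mysql_conn_id="mysql_default")
# get_records() comes from the shared DB-API hook base class and returns a
# list of row tuples.
rows = hook.get_records("SELECT id, state FROM task_audit LIMIT 10")
for row in rows:
    print(row)
```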
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.mysql` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------| -| [operators.mysql.MySqlOperator](https://github.com/apache/airflow/blob/master/airflow/providers/mysql/operators/mysql.py) | [operators.mysql_operator.MySqlOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/mysql_operator.py) | - - -## Transfer operators - - -### New transfer operators - -| New Airflow 2.0 transfers: `airflow.providers.mysql` package | -|:------------------------------------------------------------------------------------------------------------------------------------------| -| [transfers.s3_to_mysql.S3ToMySqlOperator](https://github.com/apache/airflow/blob/master/airflow/providers/mysql/transfers/s3_to_mysql.py) | - - -### Moved transfer operators - -| Airflow 2.0 transfers: `airflow.providers.mysql` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:---------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [transfers.presto_to_mysql.PrestoToMySqlOperator](https://github.com/apache/airflow/blob/master/airflow/providers/mysql/transfers/presto_to_mysql.py) | [operators.presto_to_mysql.PrestoToMySqlTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/presto_to_mysql.py) | -| [transfers.vertica_to_mysql.VerticaToMySqlOperator](https://github.com/apache/airflow/blob/master/airflow/providers/mysql/transfers/vertica_to_mysql.py) | [contrib.operators.vertica_to_mysql.VerticaToMySqlTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/vertica_to_mysql.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.mysql` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------| -| [hooks.mysql.MySqlHook](https://github.com/apache/airflow/blob/master/airflow/providers/mysql/hooks/mysql.py) | [hooks.mysql_hook.MySqlHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/mysql_hook.py) | - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 
2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:----------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [d1bce91bb](https://github.com/apache/airflow/commit/d1bce91bb21d5a468fa6a0207156c28fe1ca6513) | 2020-08-25 | PyDocStyle: Enable D403: Capitalized first word of docstring (#10530) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [01f37967c](https://github.com/apache/airflow/commit/01f37967c938f3f11b08517f5920f31aca89676f) | 2020-08-18 | Add typing coverage to mysql providers package (#10095) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 
2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [68d1714f2](https://github.com/apache/airflow/commit/68d1714f296989b7aad1a04b75dc033e76afb747) | 2020-04-04 | [AIRFLOW-6822] AWS hooks should cache boto3 client (#7541) | -| [329e6a5f7](https://github.com/apache/airflow/commit/329e6a5f72bc2e3fc19391754256d974179a6ce0) | 2020-04-01 | [AIRFLOW-5907] Add S3 to MySql Operator (#6578) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [b39468d28](https://github.com/apache/airflow/commit/b39468d2878554ba60863656364b4a95eda30685) | 2020-03-09 | [AIRFLOW-5922] Add option to specify the mysql client library used in MySqlHook (#6576) | -| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18 | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) | -| [94fccca97](https://github.com/apache/airflow/commit/94fccca97030ee59d89f302a98137b17e7b01a33) | 2020-02-04 | [AIRFLOW-XXXX] Add pre-commit check for utf-8 file encoding (#7347) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [1e576f123](https://github.com/apache/airflow/commit/1e576f12343b30c2a37ab3f4f62ee3aa30326e77) | 2020-02-02 | [AIRFLOW-6680] Last changes for AIP-21 (#7301) | -| [057f3ae3a](https://github.com/apache/airflow/commit/057f3ae3a4afedf6d462ecf58b01dd6304d3e135) | 2020-01-29 | [AIRFLOW-6670][depends on AIRFLOW-6669] Move contrib operators to providers package (#7286) | -| [82c0e5aff](https://github.com/apache/airflow/commit/82c0e5aff6004f636b98e207c3caec40b403fbbe) | 2020-01-28 | [AIRFLOW-6655] Move AWS classes to providers (#7271) | -| [eee34ee80](https://github.com/apache/airflow/commit/eee34ee8080bb7bc81294c3fbd8be93bbf795367) | 2020-01-24 | [AIRFLOW-4204] Update super() calls (#7248) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/mysql/CHANGELOG.rst b/airflow/providers/mysql/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/mysql/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. 
Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. diff --git a/airflow/providers/mysql/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/mysql/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index 9fe91de79c0d6..0000000000000 --- a/airflow/providers/mysql/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,61 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [bd90136aa](https://github.com/apache/airflow/commit/bd90136aaf5035e3234fe545b79a3e4aad21efe2) | 2020-11-30 | Move operator guides to provider documentation packages (#12681) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| 
[75f229601](https://github.com/apache/airflow/commit/75f229601edebfc25b295683a2200d1f1d69dceb) | 2020-11-04 | Adding MySql howto-documentation and example DAG (#12077) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [d1bce91bb](https://github.com/apache/airflow/commit/d1bce91bb21d5a468fa6a0207156c28fe1ca6513) | 2020-08-25 | PyDocStyle: Enable D403: Capitalized first word of docstring (#10530) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [01f37967c](https://github.com/apache/airflow/commit/01f37967c938f3f11b08517f5920f31aca89676f) | 2020-08-18 | Add typing coverage to mysql providers package (#10095) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| 
[12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [68d1714f2](https://github.com/apache/airflow/commit/68d1714f296989b7aad1a04b75dc033e76afb747) | 2020-04-04 | [AIRFLOW-6822] AWS hooks should cache boto3 client (#7541) | -| [329e6a5f7](https://github.com/apache/airflow/commit/329e6a5f72bc2e3fc19391754256d974179a6ce0) | 2020-04-01 | [AIRFLOW-5907] Add S3 to MySql Operator (#6578) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [b39468d28](https://github.com/apache/airflow/commit/b39468d2878554ba60863656364b4a95eda30685) | 2020-03-09 | [AIRFLOW-5922] Add option to specify the mysql client library used in MySqlHook (#6576) | -| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18 | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) | -| [94fccca97](https://github.com/apache/airflow/commit/94fccca97030ee59d89f302a98137b17e7b01a33) | 2020-02-04 | [AIRFLOW-XXXX] Add pre-commit check for utf-8 file encoding (#7347) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [1e576f123](https://github.com/apache/airflow/commit/1e576f12343b30c2a37ab3f4f62ee3aa30326e77) | 2020-02-02 | [AIRFLOW-6680] Last changes for AIP-21 (#7301) | -| [057f3ae3a](https://github.com/apache/airflow/commit/057f3ae3a4afedf6d462ecf58b01dd6304d3e135) | 2020-01-29 | [AIRFLOW-6670][depends on AIRFLOW-6669] Move contrib operators to providers package (#7286) | -| [82c0e5aff](https://github.com/apache/airflow/commit/82c0e5aff6004f636b98e207c3caec40b403fbbe) | 2020-01-28 | [AIRFLOW-6655] Move AWS classes to providers (#7271) | -| 
[eee34ee80](https://github.com/apache/airflow/commit/eee34ee8080bb7bc81294c3fbd8be93bbf795367) | 2020-01-24 | [AIRFLOW-4204] Update super() calls (#7248) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/mysql/README.md b/airflow/providers/mysql/README.md deleted file mode 100644 index 39894ecbc314c..0000000000000 --- a/airflow/providers/mysql/README.md +++ /dev/null @@ -1,194 +0,0 @@ - - - -# Package apache-airflow-providers-mysql - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Cross provider package dependencies](#cross-provider-package-dependencies) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Transfer operators](#transfer-operators) - - [New transfer operators](#new-transfer-operators) - - [Moved transfer operators](#moved-transfer-operators) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for `mysql` provider. All classes for this provider package -are in `airflow.providers.mysql` python package. - - - -## Installation - -NOTE! - -On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option -`--use-deprecated legacy-resolver` to your pip install command. - -You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-mysql` - -## PIP requirements - -| PIP package | Version required | -|:-----------------------|:-------------------| -| mysql-connector-python | >=8.0.11, <=8.0.18 | -| mysqlclient | >=1.3.6,<1.4 | - -## Cross provider package dependencies - -Those are dependencies that might be needed in order to use all the features of the package. -You need to install the specified backport providers package in order to use them. - -You can install such cross-provider dependencies when installing from PyPI. For example: - -```bash -pip install apache-airflow-providers-mysql[amazon] -``` - -| Dependent package | Extra | -|:----------------------------------------------------------------------------------------------|:--------| -| [apache-airflow-providers-amazon](https://pypi.org/project/apache-airflow-providers-amazon) | amazon | -| [apache-airflow-providers-presto](https://pypi.org/project/apache-airflow-providers-presto) | presto | -| [apache-airflow-providers-vertica](https://pypi.org/project/apache-airflow-providers-vertica) | vertica | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `mysql` provider -are in the `airflow.providers.mysql` package. 
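To make the tables below concrete, here is a hedged sketch of one of the transfer operators; the query, table names, and reliance on the default `presto_default`/`mysql_default` connection ids are illustrative assumptions:

```python
from airflow.providers.mysql.transfers.presto_to_mysql import PrestoToMySqlOperator

# Placeholder query and tables; the preoperator clears the target
# so re-running the task does not double-load data.
load_counts = PrestoToMySqlOperator(
    task_id='presto_to_mysql',
    sql='SELECT name, count(*) AS cnt FROM events GROUP BY name',
    mysql_table='stats.event_counts',
    mysql_preoperator='TRUNCATE TABLE stats.event_counts',
)
```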
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.mysql` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------| -| [operators.mysql.MySqlOperator](https://github.com/apache/airflow/blob/master/airflow/providers/mysql/operators/mysql.py) | [operators.mysql_operator.MySqlOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/mysql_operator.py) | - - -## Transfer operators - - -### New transfer operators - -| New Airflow 2.0 transfers: `airflow.providers.mysql` package | -|:------------------------------------------------------------------------------------------------------------------------------------------| -| [transfers.s3_to_mysql.S3ToMySqlOperator](https://github.com/apache/airflow/blob/master/airflow/providers/mysql/transfers/s3_to_mysql.py) | - - -### Moved transfer operators - -| Airflow 2.0 transfers: `airflow.providers.mysql` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:---------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [transfers.presto_to_mysql.PrestoToMySqlOperator](https://github.com/apache/airflow/blob/master/airflow/providers/mysql/transfers/presto_to_mysql.py) | [operators.presto_to_mysql.PrestoToMySqlTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/presto_to_mysql.py) | -| [transfers.vertica_to_mysql.VerticaToMySqlOperator](https://github.com/apache/airflow/blob/master/airflow/providers/mysql/transfers/vertica_to_mysql.py) | [contrib.operators.vertica_to_mysql.VerticaToMySqlTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/vertica_to_mysql.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.mysql` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------| -| [hooks.mysql.MySqlHook](https://github.com/apache/airflow/blob/master/airflow/providers/mysql/hooks/mysql.py) | [hooks.mysql_hook.MySqlHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/mysql_hook.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| 
[9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [bd90136aa](https://github.com/apache/airflow/commit/bd90136aaf5035e3234fe545b79a3e4aad21efe2) | 2020-11-30 | Move operator guides to provider documentation packages (#12681) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [75f229601](https://github.com/apache/airflow/commit/75f229601edebfc25b295683a2200d1f1d69dceb) | 2020-11-04 | Adding MySql howto-documentation and example DAG (#12077) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in 
backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [d1bce91bb](https://github.com/apache/airflow/commit/d1bce91bb21d5a468fa6a0207156c28fe1ca6513) | 2020-08-25 | PyDocStyle: Enable D403: Capitalized first word of docstring (#10530) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [01f37967c](https://github.com/apache/airflow/commit/01f37967c938f3f11b08517f5920f31aca89676f) | 2020-08-18 | Add typing coverage to mysql providers package (#10095) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [68d1714f2](https://github.com/apache/airflow/commit/68d1714f296989b7aad1a04b75dc033e76afb747) | 2020-04-04 | [AIRFLOW-6822] AWS hooks should cache boto3 client (#7541) | -| [329e6a5f7](https://github.com/apache/airflow/commit/329e6a5f72bc2e3fc19391754256d974179a6ce0) | 2020-04-01 | [AIRFLOW-5907] Add S3 to MySql Operator (#6578) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [b39468d28](https://github.com/apache/airflow/commit/b39468d2878554ba60863656364b4a95eda30685) | 2020-03-09 | [AIRFLOW-5922] Add option to specify the mysql client library used in MySqlHook (#6576) | -| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18 | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) | -| [94fccca97](https://github.com/apache/airflow/commit/94fccca97030ee59d89f302a98137b17e7b01a33) | 2020-02-04 | [AIRFLOW-XXXX] Add pre-commit check for utf-8 file encoding (#7347) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [1e576f123](https://github.com/apache/airflow/commit/1e576f12343b30c2a37ab3f4f62ee3aa30326e77) | 2020-02-02 | [AIRFLOW-6680] Last changes for AIP-21 (#7301) | -| [057f3ae3a](https://github.com/apache/airflow/commit/057f3ae3a4afedf6d462ecf58b01dd6304d3e135) | 2020-01-29 | [AIRFLOW-6670][depends on AIRFLOW-6669] Move contrib operators to providers package (#7286) | -| [82c0e5aff](https://github.com/apache/airflow/commit/82c0e5aff6004f636b98e207c3caec40b403fbbe) | 2020-01-28 | [AIRFLOW-6655] Move AWS classes to providers (#7271) | -| [eee34ee80](https://github.com/apache/airflow/commit/eee34ee8080bb7bc81294c3fbd8be93bbf795367) | 2020-01-24 | [AIRFLOW-4204] Update super() calls (#7248) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/mysql/provider.yaml b/airflow/providers/mysql/provider.yaml index df3b34321e243..9eea5d1cf98b8 100644 --- a/airflow/providers/mysql/provider.yaml +++ b/airflow/providers/mysql/provider.yaml @@ -48,9 +48,12 @@ transfers: - source-integration-name: Amazon Simple Storage Service (S3) target-integration-name: MySQL python-module: airflow.providers.mysql.transfers.s3_to_mysql - - source-integration-name: Snowflake + - source-integration-name: Presto target-integration-name: MySQL python-module: 
airflow.providers.mysql.transfers.presto_to_mysql + - source-integration-name: Trino + target-integration-name: MySQL + python-module: airflow.providers.mysql.transfers.trino_to_mysql hook-class-names: - airflow.providers.mysql.hooks.mysql.MySqlHook diff --git a/airflow/providers/mysql/transfers/trino_to_mysql.py b/airflow/providers/mysql/transfers/trino_to_mysql.py new file mode 100644 index 0000000000000..b97550e116d8f --- /dev/null +++ b/airflow/providers/mysql/transfers/trino_to_mysql.py @@ -0,0 +1,83 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from typing import Dict, Optional + +from airflow.models import BaseOperator +from airflow.providers.mysql.hooks.mysql import MySqlHook +from airflow.providers.trino.hooks.trino import TrinoHook +from airflow.utils.decorators import apply_defaults + + +class TrinoToMySqlOperator(BaseOperator): + """ + Moves data from Trino to MySQL. Note that for now the data is loaded + into memory before being pushed to MySQL, so this operator should + be used for small amounts of data. + + :param sql: SQL query to execute against Trino. (templated) + :type sql: str + :param mysql_table: target MySQL table, use dot notation to target a + specific database. (templated) + :type mysql_table: str + :param mysql_conn_id: source mysql connection + :type mysql_conn_id: str + :param trino_conn_id: source trino connection + :type trino_conn_id: str + :param mysql_preoperator: sql statement to run against mysql prior to + import, typically used to truncate or delete existing rows + so the incoming data replaces them, allowing the task to be idempotent (running + the task twice won't double load data). 
(templated) + :type mysql_preoperator: str + """ + + template_fields = ('sql', 'mysql_table', 'mysql_preoperator') + template_ext = ('.sql',) + template_fields_renderers = {"mysql_preoperator": "sql"} + ui_color = '#a0e08c' + + @apply_defaults + def __init__( + self, + *, + sql: str, + mysql_table: str, + trino_conn_id: str = 'trino_default', + mysql_conn_id: str = 'mysql_default', + mysql_preoperator: Optional[str] = None, + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.sql = sql + self.mysql_table = mysql_table + self.mysql_conn_id = mysql_conn_id + self.mysql_preoperator = mysql_preoperator + self.trino_conn_id = trino_conn_id + + def execute(self, context: Dict) -> None: + trino = TrinoHook(trino_conn_id=self.trino_conn_id) + self.log.info("Extracting data from Trino: %s", self.sql) + results = trino.get_records(self.sql) + + mysql = MySqlHook(mysql_conn_id=self.mysql_conn_id) + if self.mysql_preoperator: + self.log.info("Running MySQL preoperator") + self.log.info(self.mysql_preoperator) + mysql.run(self.mysql_preoperator) + + self.log.info("Inserting rows into MySQL") + mysql.insert_rows(table=self.mysql_table, rows=results) diff --git a/airflow/providers/neo4j/CHANGELOG.rst b/airflow/providers/neo4j/CHANGELOG.rst new file mode 100644 index 0000000000000..38614f7c70e06 --- /dev/null +++ b/airflow/providers/neo4j/CHANGELOG.rst @@ -0,0 +1,26 @@ + + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. diff --git a/dev/provider_packages/PROVIDER_CHANGES_TEMPLATE.md.jinja2 b/airflow/providers/neo4j/README.md similarity index 89% rename from dev/provider_packages/PROVIDER_CHANGES_TEMPLATE.md.jinja2 rename to airflow/providers/neo4j/README.md index cf700c5f89e53..ef14affc68c62 100644 --- a/dev/provider_packages/PROVIDER_CHANGES_TEMPLATE.md.jinja2 +++ b/airflow/providers/neo4j/README.md @@ -1,4 +1,4 @@ -{# + diff --git a/airflow/providers/neo4j/__init__.py b/airflow/providers/neo4j/__init__.py new file mode 100644 index 0000000000000..217e5db960782 --- /dev/null +++ b/airflow/providers/neo4j/__init__.py @@ -0,0 +1,17 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/airflow/providers/neo4j/example_dags/__init__.py b/airflow/providers/neo4j/example_dags/__init__.py new file mode 100644 index 0000000000000..217e5db960782 --- /dev/null +++ b/airflow/providers/neo4j/example_dags/__init__.py @@ -0,0 +1,17 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/airflow/providers/neo4j/example_dags/example_neo4j.py b/airflow/providers/neo4j/example_dags/example_neo4j.py new file mode 100644 index 0000000000000..7d6f2fc7df1b0 --- /dev/null +++ b/airflow/providers/neo4j/example_dags/example_neo4j.py @@ -0,0 +1,48 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +""" +Example use of Neo4j related operators. +""" + +from airflow import DAG +from airflow.providers.neo4j.operators.neo4j import Neo4jOperator +from airflow.utils.dates import days_ago + +default_args = { + 'owner': 'airflow', +} + +dag = DAG( + 'example_neo4j', + default_args=default_args, + start_date=days_ago(2), + tags=['example'], +) + +# [START run_query_neo4j_operator] + +neo4j_task = Neo4jOperator( + task_id='run_neo4j_query', + neo4j_conn_id='neo4j_conn_id', + sql='MATCH (tom {name: "Tom Hanks"}) RETURN tom', + dag=dag, +) + +# [END run_query_neo4j_operator] + +neo4j_task diff --git a/airflow/providers/neo4j/hooks/__init__.py b/airflow/providers/neo4j/hooks/__init__.py new file mode 100644 index 0000000000000..217e5db960782 --- /dev/null +++ b/airflow/providers/neo4j/hooks/__init__.py @@ -0,0 +1,17 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/airflow/providers/neo4j/hooks/neo4j.py b/airflow/providers/neo4j/hooks/neo4j.py new file mode 100644 index 0000000000000..d473b0144e0d3 --- /dev/null +++ b/airflow/providers/neo4j/hooks/neo4j.py @@ -0,0 +1,117 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +"""This module allows connecting to a Neo4j database.""" + +from neo4j import GraphDatabase, Neo4jDriver, Result + +from airflow.hooks.base import BaseHook +from airflow.models import Connection + + +class Neo4jHook(BaseHook): + """ + Interact with Neo4j. + + Performs a connection to Neo4j and runs the query. + """ + + conn_name_attr = 'neo4j_conn_id' + default_conn_name = 'neo4j_default' + conn_type = 'neo4j' + hook_name = 'Neo4j' + + def __init__(self, conn_id: str = default_conn_name, *args, **kwargs) -> None: + super().__init__(*args, **kwargs) + self.neo4j_conn_id = conn_id + self.connection = kwargs.pop("connection", None) + self.client = None + self.extras = None + self.uri = None + + def get_conn(self) -> Neo4jDriver: + """ + Function that initiates a new Neo4j connection + with username, password and database schema. + """ + self.connection = self.get_connection(self.neo4j_conn_id) + self.extras = self.connection.extra_dejson.copy() + + self.uri = self.get_uri(self.connection) + self.log.info('URI: %s', self.uri) + + if self.client is not None: + return self.client + + is_encrypted = self.connection.extra_dejson.get('encrypted', False) + + self.client = GraphDatabase.driver( + self.uri, auth=(self.connection.login, self.connection.password), encrypted=is_encrypted + ) + + return self.client + + def get_uri(self, conn: Connection) -> str: + """ + Build the URI based on extras: + - Default - uses bolt scheme (bolt://) + - neo4j_scheme - neo4j:// + - certs_self_signed - neo4j+ssc:// + - certs_trusted_ca - neo4j+s:// + :param conn: connection object. 
+ :return: uri + """ + use_neo4j_scheme = conn.extra_dejson.get('neo4j_scheme', False) + scheme = 'neo4j' if use_neo4j_scheme else 'bolt' + + # Self signed certificates + ssc = conn.extra_dejson.get('certs_self_signed', False) + + # Only certificates signed by CA. + trusted_ca = conn.extra_dejson.get('certs_trusted_ca', False) + encryption_scheme = '' + + if ssc: + encryption_scheme = '+ssc' + elif trusted_ca: + encryption_scheme = '+s' + + return '{scheme}{encryption_scheme}://{host}:{port}'.format( + scheme=scheme, + encryption_scheme=encryption_scheme, + host=conn.host, + port='7687' if conn.port is None else f'{conn.port}', + ) + + def run(self, query) -> Result: + """ + Function to create a neo4j session + and execute the query in the session. + + + :param query: Neo4j query + :return: Result + """ + driver = self.get_conn() + if not self.connection.schema: + with driver.session() as session: + result = session.run(query) + else: + with driver.session(database=self.connection.schema) as session: + result = session.run(query) + return result diff --git a/airflow/providers/neo4j/operators/__init__.py b/airflow/providers/neo4j/operators/__init__.py new file mode 100644 index 0000000000000..217e5db960782 --- /dev/null +++ b/airflow/providers/neo4j/operators/__init__.py @@ -0,0 +1,17 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/airflow/providers/neo4j/operators/neo4j.py b/airflow/providers/neo4j/operators/neo4j.py new file mode 100644 index 0000000000000..20df9cb7d6b8f --- /dev/null +++ b/airflow/providers/neo4j/operators/neo4j.py @@ -0,0 +1,62 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from typing import Dict, Iterable, Mapping, Optional, Union + +from airflow.models import BaseOperator +from airflow.providers.neo4j.hooks.neo4j import Neo4jHook +from airflow.utils.decorators import apply_defaults + + +class Neo4jOperator(BaseOperator): + """ + Executes sql code in a specific Neo4j database + + .. 
seealso:: + For more information on how to use this operator, take a look at the guide: + :ref:`howto/operator:Neo4jOperator` + + :param sql: the sql code to be executed. Can receive a str representing a + sql statement, a list of str (sql statements) + :type sql: str or list[str] + :param neo4j_conn_id: reference to a specific Neo4j database + :type neo4j_conn_id: str + """ + + @apply_defaults + def __init__( + self, + *, + sql: str, + neo4j_conn_id: str = 'neo4j_default', + parameters: Optional[Union[Mapping, Iterable]] = None, + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.neo4j_conn_id = neo4j_conn_id + self.sql = sql + self.parameters = parameters + self.hook = None + + def get_hook(self): + """Function to retrieve the Neo4j Hook.""" + return Neo4jHook(conn_id=self.neo4j_conn_id) + + def execute(self, context: Dict) -> None: + self.log.info('Executing: %s', self.sql) + self.hook = self.get_hook() + self.hook.run(self.sql) diff --git a/airflow/providers/neo4j/provider.yaml b/airflow/providers/neo4j/provider.yaml new file mode 100644 index 0000000000000..9081694729fca --- /dev/null +++ b/airflow/providers/neo4j/provider.yaml @@ -0,0 +1,44 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
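Before the provider metadata that follows, a short hedged sketch of how the connection extras handled by `Neo4jHook.get_uri` above translate into driver URIs; the host, credentials, and extras here are placeholders, not values from this change:

```python
from airflow.models import Connection
from airflow.providers.neo4j.hooks.neo4j import Neo4jHook

# A throwaway Connection object with assumed placeholder values.
conn = Connection(
    conn_id='neo4j_conn_id',
    conn_type='neo4j',
    host='localhost',
    port=7687,
    login='neo4j',
    password='placeholder',
    extra='{"neo4j_scheme": true, "certs_self_signed": true}',
)
hook = Neo4jHook(conn_id='neo4j_conn_id')
# get_uri only inspects the Connection passed in, so no metadata DB is needed:
print(hook.get_uri(conn))  # -> neo4j+ssc://localhost:7687
```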
+ +--- +package-name: apache-airflow-providers-neo4j +name: Neo4j +description: | + `Neo4j <https://neo4j.com/>`__ + +versions: + - 1.0.0 +integrations: + - integration-name: Neo4j + external-doc-url: https://neo4j.com/ + how-to-guide: + - /docs/apache-airflow-providers-neo4j/operators/neo4j.rst + tags: [software] + +operators: + - integration-name: Neo4j + python-modules: + - airflow.providers.neo4j.operators.neo4j + +hooks: + - integration-name: Neo4j + python-modules: + - airflow.providers.neo4j.hooks.neo4j + +hook-class-names: + - airflow.providers.neo4j.hooks.neo4j.Neo4jHook diff --git a/airflow/providers/odbc/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/odbc/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index db942bb7f282d..0000000000000 --- a/airflow/providers/odbc/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,17 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [2b06d0a3d](https://github.com/apache/airflow/commit/2b06d0a3deb4a4fcc64ee1948bb484e457096474) | 2020-01-21 | [AIRFLOW-6603] Remove unnecessary pylint warnings (#7224) | -| [2a819b11f](https://github.com/apache/airflow/commit/2a819b11fb8dfba7b3c9b500d07467b455724506) | 2020-01-19 | [AIRFLOW-6296] add OdbcHook & deprecation warning for pymssql (#6850) | diff --git a/airflow/providers/odbc/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/odbc/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 6df32f6d2d8ad..0000000000000 --- a/airflow/providers/odbc/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,13 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [54353f874](https://github.com/apache/airflow/commit/54353f874589f9be236458995147d13e0e763ffc) | 2020-09-27 | Increase type coverage for five different providers (#11170) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/odbc/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/odbc/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 528a7e085f91f..0000000000000 --- a/airflow/providers/odbc/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,10 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| 
[349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/odbc/BACKPORT_PROVIDER_README.md b/airflow/providers/odbc/BACKPORT_PROVIDER_README.md deleted file mode 100644 index 1e316a45a01ed..0000000000000 --- a/airflow/providers/odbc/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,119 +0,0 @@ - - - -# Package apache-airflow-backport-providers-odbc - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Hooks](#hooks) - - [New hooks](#new-hooks) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for `odbc` provider. All classes for this provider package -are in `airflow.providers.odbc` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-odbc` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| pyodbc | | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `odbc` provider -are in the `airflow.providers.odbc` package. 
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Hooks - - -### New hooks - -| New Airflow 2.0 hooks: `airflow.providers.odbc` package | -|:----------------------------------------------------------------------------------------------------------| -| [hooks.odbc.OdbcHook](https://github.com/apache/airflow/blob/master/airflow/providers/odbc/hooks/odbc.py) | - - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [54353f874](https://github.com/apache/airflow/commit/54353f874589f9be236458995147d13e0e763ffc) | 2020-09-27 | Increase type coverage for five different providers (#11170) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 
2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [2b06d0a3d](https://github.com/apache/airflow/commit/2b06d0a3deb4a4fcc64ee1948bb484e457096474) | 2020-01-21 | [AIRFLOW-6603] Remove unnecessary pylint warnings (#7224) | -| [2a819b11f](https://github.com/apache/airflow/commit/2a819b11fb8dfba7b3c9b500d07467b455724506) | 2020-01-19 | [AIRFLOW-6296] add OdbcHook & deprecation warning for pymssql (#6850) | diff --git a/airflow/providers/odbc/CHANGELOG.rst b/airflow/providers/odbc/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/odbc/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. 
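The README deleted above documents the `OdbcHook` class but never shows it in use. As a non-authoritative sketch, assuming an Airflow 2.* environment with `apache-airflow-providers-odbc` installed and an ODBC connection stored under the hook's default `odbc_default` connection id, the hook could be exercised like this:

```python
# Hedged sketch only: assumes the provider package is installed, the database
# is reachable through pyodbc, and an Airflow connection with conn_id
# "odbc_default" (the hook's default connection name) exists.
from airflow.providers.odbc.hooks.odbc import OdbcHook

hook = OdbcHook(odbc_conn_id="odbc_default")
# get_records() is inherited from the common DB-API hook base class.
rows = hook.get_records("SELECT 1")
print(rows)
```

As the PIP requirements table above notes, `pyodbc` (plus a working ODBC driver on the host) is required before the hook can connect.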
diff --git a/airflow/providers/odbc/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/odbc/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index e5dd7f68a7081..0000000000000 --- a/airflow/providers/odbc/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,43 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [de3b1e687](https://github.com/apache/airflow/commit/de3b1e687b26c524c6909b7b4dfbb60d25019751) | 2020-11-28 | Move connection guides to provider documentation packages (#12653) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for 
provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [54353f874](https://github.com/apache/airflow/commit/54353f874589f9be236458995147d13e0e763ffc) | 2020-09-27 | Increase type coverage for five different providers (#11170) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [2b06d0a3d](https://github.com/apache/airflow/commit/2b06d0a3deb4a4fcc64ee1948bb484e457096474) | 2020-01-21 | [AIRFLOW-6603] Remove unnecessary pylint warnings (#7224) | -| [2a819b11f](https://github.com/apache/airflow/commit/2a819b11fb8dfba7b3c9b500d07467b455724506) | 2020-01-19 | [AIRFLOW-6296] add OdbcHook & deprecation warning for pymssql (#6850) | diff --git a/airflow/providers/odbc/README.md b/airflow/providers/odbc/README.md deleted file mode 100644 index c97865c76d432..0000000000000 --- a/airflow/providers/odbc/README.md +++ /dev/null @@ -1,123 +0,0 @@ - - - -# Package apache-airflow-providers-odbc - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Hooks](#hooks) - - [New hooks](#new-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for `odbc` provider. All classes for this provider package -are in `airflow.providers.odbc` python package. - - - -## Installation - -NOTE! - -On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option -`--use-deprecated legacy-resolver` to your pip install command. - -You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-odbc` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| pyodbc | | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `odbc` provider -are in the `airflow.providers.odbc` package. 
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Hooks - - -### New hooks - -| New Airflow 2.0 hooks: `airflow.providers.odbc` package | -|:----------------------------------------------------------------------------------------------------------| -| [hooks.odbc.OdbcHook](https://github.com/apache/airflow/blob/master/airflow/providers/odbc/hooks/odbc.py) | - - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [de3b1e687](https://github.com/apache/airflow/commit/de3b1e687b26c524c6909b7b4dfbb60d25019751) | 2020-11-28 | Move connection guides to provider documentation packages (#12653) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| 
[349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [54353f874](https://github.com/apache/airflow/commit/54353f874589f9be236458995147d13e0e763ffc) | 2020-09-27 | Increase type coverage for five different providers (#11170) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [2b06d0a3d](https://github.com/apache/airflow/commit/2b06d0a3deb4a4fcc64ee1948bb484e457096474) | 2020-01-21 | [AIRFLOW-6603] Remove unnecessary pylint warnings (#7224) | -| [2a819b11f](https://github.com/apache/airflow/commit/2a819b11fb8dfba7b3c9b500d07467b455724506) | 2020-01-19 | [AIRFLOW-6296] add OdbcHook & deprecation warning for pymssql (#6850) | diff --git a/airflow/providers/openfaas/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/openfaas/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index 74ebf187c51d4..0000000000000 --- a/airflow/providers/openfaas/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,18 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [5f784ae5c](https://github.com/apache/airflow/commit/5f784ae5c0e629ebe117874029b4a9d789587be0) | 2020-03-14 | [AIRFLOW-7061] Rename openfass to openfaas (#7721) | diff --git a/airflow/providers/openfaas/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/openfaas/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 2286d4c215448..0000000000000 --- a/airflow/providers/openfaas/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,13 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [fc8d38d5f](https://github.com/apache/airflow/commit/fc8d38d5f22a59db08d845de55e65e818f3be4dc) | 2020-07-20 | improve typing for openfaas provider (#9883) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/openfaas/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/openfaas/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 528a7e085f91f..0000000000000 --- a/airflow/providers/openfaas/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,10 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| 
[b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/openfaas/BACKPORT_PROVIDER_README.md b/airflow/providers/openfaas/BACKPORT_PROVIDER_README.md deleted file mode 100644 index c7c92b752bc50..0000000000000 --- a/airflow/providers/openfaas/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,113 +0,0 @@ - - - -# Package apache-airflow-backport-providers-openfaas - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [Provider class summary](#provider-classes-summary) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for `openfaas` provider. All classes for this provider package -are in `airflow.providers.openfaas` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-openfaas` - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `openfaas` provider -are in the `airflow.providers.openfaas` package. 
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.openfaas` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------| -| [hooks.openfaas.OpenFaasHook](https://github.com/apache/airflow/blob/master/airflow/providers/openfaas/hooks/openfaas.py) | [contrib.hooks.openfaas_hook.OpenFaasHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/openfaas_hook.py) | - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [fc8d38d5f](https://github.com/apache/airflow/commit/fc8d38d5f22a59db08d845de55e65e818f3be4dc) | 2020-07-20 | improve typing for openfaas provider (#9883) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 
2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [5f784ae5c](https://github.com/apache/airflow/commit/5f784ae5c0e629ebe117874029b4a9d789587be0) | 2020-03-14 | [AIRFLOW-7061] Rename openfass to openfaas (#7721) | diff --git a/airflow/providers/openfaas/CHANGELOG.rst b/airflow/providers/openfaas/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/openfaas/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. 
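The moved-hooks table in the README deleted above maps the Airflow 1.10.* `contrib` import path to the new provider package. A hedged before/after sketch (the function name below is a placeholder, and the `open_faas_default` connection id is the hook's default per the provider sources):

```python
# Airflow 1.10.* location, per the moved-hooks table:
#   from airflow.contrib.hooks.openfaas_hook import OpenFaasHook
# Airflow 2.0 location, with apache-airflow-providers-openfaas installed:
from airflow.providers.openfaas.hooks.openfaas import OpenFaasHook

# "my-function" is a placeholder; the conn_id argument defaults to
# "open_faas_default", which should point at your OpenFaaS gateway.
hook = OpenFaasHook(function_name="my-function")
```

Only the import path changes with the move to the provider package; the class itself keeps the same behaviour.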
diff --git a/airflow/providers/openfaas/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/openfaas/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index e73ebaf58fe41..0000000000000 --- a/airflow/providers/openfaas/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,40 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages 
(#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [fc8d38d5f](https://github.com/apache/airflow/commit/fc8d38d5f22a59db08d845de55e65e818f3be4dc) | 2020-07-20 | improve typing for openfaas provider (#9883) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [5f784ae5c](https://github.com/apache/airflow/commit/5f784ae5c0e629ebe117874029b4a9d789587be0) | 2020-03-14 | [AIRFLOW-7061] Rename openfass to openfaas (#7721) | diff --git a/airflow/providers/openfaas/README.md b/airflow/providers/openfaas/README.md deleted file mode 100644 index 0e26549d00804..0000000000000 --- a/airflow/providers/openfaas/README.md +++ /dev/null @@ -1,113 +0,0 @@ - - - -# Package apache-airflow-providers-openfaas - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [Provider class summary](#provider-classes-summary) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for `openfaas` provider. All classes for this provider package -are in `airflow.providers.openfaas` python package. - - - -## Installation - -NOTE! 
- -On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option -`--use-deprecated legacy-resolver` to your pip install command. - -You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-openfaas` - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `openfaas` provider -are in the `airflow.providers.openfaas` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.openfaas` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------| -| [hooks.openfaas.OpenFaasHook](https://github.com/apache/airflow/blob/master/airflow/providers/openfaas/hooks/openfaas.py) | [contrib.hooks.openfaas_hook.OpenFaasHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/openfaas_hook.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| 
[4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [fc8d38d5f](https://github.com/apache/airflow/commit/fc8d38d5f22a59db08d845de55e65e818f3be4dc) | 2020-07-20 | improve typing for openfaas provider (#9883) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [5f784ae5c](https://github.com/apache/airflow/commit/5f784ae5c0e629ebe117874029b4a9d789587be0) | 2020-03-14 | [AIRFLOW-7061] Rename openfass to openfaas (#7721) | diff --git a/airflow/providers/opsgenie/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/opsgenie/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index 2002231a6c4ed..0000000000000 --- a/airflow/providers/opsgenie/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,21 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [305053cb7](https://github.com/apache/airflow/commit/305053cb7c72220fbe32e4fab7cb1da49d5a14e2) | 2020-02-23 | [AIRFLOW-6889] Change mutable argument value in OpsgenieAlertHook (#7512) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/opsgenie/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/opsgenie/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index c7dc162be04cc..0000000000000 --- a/airflow/providers/opsgenie/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,15 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [720912f67](https://github.com/apache/airflow/commit/720912f67b3af0bdcbac64d6b8bf6d51c6247e26) | 2020-10-02 | Strict type check for multiple providers (#11229) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 
| Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/opsgenie/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/opsgenie/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 528a7e085f91f..0000000000000 --- a/airflow/providers/opsgenie/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,10 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/opsgenie/BACKPORT_PROVIDER_README.md b/airflow/providers/opsgenie/BACKPORT_PROVIDER_README.md deleted file mode 100644 index bd483bfc5dc1d..0000000000000 --- a/airflow/providers/opsgenie/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,147 +0,0 @@ - - - -# Package apache-airflow-backport-providers-opsgenie - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [Cross provider package dependencies](#cross-provider-package-dependencies) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for `opsgenie` provider. All classes for this provider package -are in `airflow.providers.opsgenie` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-opsgenie` - -## Cross provider package dependencies - -Those are dependencies that might be needed in order to use all the features of the package. -You need to install the specified backport providers package in order to use them. - -You can install such cross-provider dependencies when installing from PyPI. 
For example: - -```bash -pip install apache-airflow-backport-providers-opsgenie[http] -``` - -| Dependent package | Extra | -|:---------------------------------------------------------------------------------------------------------------|:--------| -| [apache-airflow-backport-providers-http](https://github.com/apache/airflow/tree/master/airflow/providers/http) | http | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `opsgenie` provider -are in the `airflow.providers.opsgenie` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.opsgenie` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:-------------------------------------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.opsgenie_alert.OpsgenieAlertOperator](https://github.com/apache/airflow/blob/master/airflow/providers/opsgenie/operators/opsgenie_alert.py) | [contrib.operators.opsgenie_alert_operator.OpsgenieAlertOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/opsgenie_alert_operator.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.opsgenie` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:-------------------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------| -| [hooks.opsgenie_alert.OpsgenieAlertHook](https://github.com/apache/airflow/blob/master/airflow/providers/opsgenie/hooks/opsgenie_alert.py) | [contrib.hooks.opsgenie_alert_hook.OpsgenieAlertHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/opsgenie_alert_hook.py) | - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| 
[ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [720912f67](https://github.com/apache/airflow/commit/720912f67b3af0bdcbac64d6b8bf6d51c6247e26) | 2020-10-02 | Strict type check for multiple providers (#11229) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [305053cb7](https://github.com/apache/airflow/commit/305053cb7c72220fbe32e4fab7cb1da49d5a14e2) | 2020-02-23 | [AIRFLOW-6889] Change mutable argument value in OpsgenieAlertHook (#7512) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/opsgenie/CHANGELOG.rst b/airflow/providers/opsgenie/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/opsgenie/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. 
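The moved-operators and moved-hooks tables in the deleted README above map the old `airflow.contrib` locations to the new `airflow.providers.opsgenie` paths; for DAG authors that import path is the only change. Below is a minimal, illustrative sketch of the new usage (it is not file content from this change): it assumes Airflow 2.0 with the opsgenie provider installed; `opsgenie_default` is the provider's documented default connection id, while the DAG id and message text are invented for the example.

```python
# Minimal sketch: the class name is unchanged from Airflow 1.10 — only the
# module moved from airflow.contrib.operators.opsgenie_alert_operator to the
# provider package path shown in the moved-operators table above.
from datetime import datetime

from airflow import DAG
from airflow.providers.opsgenie.operators.opsgenie_alert import OpsgenieAlertOperator

with DAG(
    dag_id="opsgenie_alert_example",  # illustrative DAG id
    start_date=datetime(2021, 1, 1),
    schedule_interval=None,
) as dag:
    send_alert = OpsgenieAlertOperator(
        task_id="send_alert",
        message="Example alert raised from an Airflow task",  # required alert text
        # Assumed connection id (the provider's default); the hook reads the
        # Opsgenie API key from this connection.
        opsgenie_conn_id="opsgenie_default",
    )
```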
diff --git a/airflow/providers/opsgenie/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/opsgenie/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index fe70172c8df9e..0000000000000 --- a/airflow/providers/opsgenie/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,45 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| 
[720912f67](https://github.com/apache/airflow/commit/720912f67b3af0bdcbac64d6b8bf6d51c6247e26) | 2020-10-02 | Strict type check for multiple providers (#11229) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [305053cb7](https://github.com/apache/airflow/commit/305053cb7c72220fbe32e4fab7cb1da49d5a14e2) | 2020-02-23 | [AIRFLOW-6889] Change mutable argument value in OpsgenieAlertHook (#7512) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/opsgenie/README.md b/airflow/providers/opsgenie/README.md deleted file mode 100644 index 163107ccae180..0000000000000 --- a/airflow/providers/opsgenie/README.md +++ /dev/null @@ -1,147 +0,0 @@ - - - -# Package apache-airflow-providers-opsgenie - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) - -[Installation](#installation) - -[Cross provider package dependencies](#cross-provider-package-dependencies) - -[Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for the `opsgenie` provider. All classes for this provider package -are in the `airflow.providers.opsgenie` python package. - - - -## Installation - -NOTE! - -In November 2020, a new version of pip (20.3) was released with a new 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to installation errors, depending on your choice -of extras. To install Airflow you need to either downgrade pip to version 20.2.4 -(`pip install --upgrade pip==20.2.4`) or, if you use pip 20.3, add the option -`--use-deprecated legacy-resolver` to your pip install command. - -You can install this package on top of an existing Airflow 2.* installation via -`pip install apache-airflow-providers-opsgenie` - -## Cross provider package dependencies - -Those are dependencies that might be needed in order to use all the features of the package.
-You need to install the specified provider packages in order to use them. - -You can install such cross-provider dependencies when installing from PyPI. For example: - -```bash -pip install apache-airflow-providers-opsgenie[http] -``` - -| Dependent package | Extra | -|:----------------------------------------------------------------------------------------|:--------| -| [apache-airflow-providers-http](https://pypi.org/project/apache-airflow-providers-http) | http | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `opsgenie` provider -are in the `airflow.providers.opsgenie` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.opsgenie` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:-------------------------------------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.opsgenie_alert.OpsgenieAlertOperator](https://github.com/apache/airflow/blob/master/airflow/providers/opsgenie/operators/opsgenie_alert.py) | [contrib.operators.opsgenie_alert_operator.OpsgenieAlertOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/opsgenie_alert_operator.py) | - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.opsgenie` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:-------------------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------| -| [hooks.opsgenie_alert.OpsgenieAlertHook](https://github.com/apache/airflow/blob/master/airflow/providers/opsgenie/hooks/opsgenie_alert.py) | [contrib.hooks.opsgenie_alert_hook.OpsgenieAlertHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/opsgenie_alert_hook.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| 
[7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [720912f67](https://github.com/apache/airflow/commit/720912f67b3af0bdcbac64d6b8bf6d51c6247e26) | 2020-10-02 | Strict type check for multiple providers (#11229) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| 
[c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [305053cb7](https://github.com/apache/airflow/commit/305053cb7c72220fbe32e4fab7cb1da49d5a14e2) | 2020-02-23 | [AIRFLOW-6889] Change mutable argument value in OpsgenieAlertHook (#7512) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/oracle/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/oracle/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index 6c98819176636..0000000000000 --- a/airflow/providers/oracle/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,20 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| 
[f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [057f3ae3a](https://github.com/apache/airflow/commit/057f3ae3a4afedf6d462ecf58b01dd6304d3e135) | 2020-01-29 | [AIRFLOW-6670][depends on AIRFLOW-6669] Move contrib operators to providers package (#7286) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/oracle/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/oracle/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 550de8fa4bfb5..0000000000000 --- a/airflow/providers/oracle/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,17 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [5093245d6](https://github.com/apache/airflow/commit/5093245d6f77a370fbd2f9e3df35ac6acf46a1c4) | 2020-09-30 | Strict type coverage for Oracle and Yandex provider (#11198) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | 
Fix broken Markdown refernces in Providers README (#10483) | -| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22 | Replace assigment with Augmented assignment (#10468) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/oracle/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/oracle/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 50ad6077367a3..0000000000000 --- a/airflow/providers/oracle/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,11 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12 | Remove redundant None provided as default to dict.get() (#11448) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/oracle/BACKPORT_PROVIDER_README.md b/airflow/providers/oracle/BACKPORT_PROVIDER_README.md deleted file mode 100644 index 9148246abcf2c..0000000000000 --- a/airflow/providers/oracle/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,153 +0,0 @@ - - - -# Package apache-airflow-backport-providers-oracle - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Transfer operators](#transfer-operators) - - [Moved transfer operators](#moved-transfer-operators) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for `oracle` provider. All classes for this provider package -are in `airflow.providers.oracle` python package. 
- -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-oracle` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| cx_Oracle | >=5.1.2 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `oracle` provider -are in the `airflow.providers.oracle` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.oracle` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------| -| [operators.oracle.OracleOperator](https://github.com/apache/airflow/blob/master/airflow/providers/oracle/operators/oracle.py) | [operators.oracle_operator.OracleOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/oracle_operator.py) | - - -## Transfer operators - - - -### Moved transfer operators - -| Airflow 2.0 transfers: `airflow.providers.oracle` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [transfers.oracle_to_oracle.OracleToOracleOperator](https://github.com/apache/airflow/blob/master/airflow/providers/oracle/transfers/oracle_to_oracle.py) | [contrib.operators.oracle_to_oracle_transfer.OracleToOracleTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/oracle_to_oracle_transfer.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.oracle` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------| -| [hooks.oracle.OracleHook](https://github.com/apache/airflow/blob/master/airflow/providers/oracle/hooks/oracle.py) | [hooks.oracle_hook.OracleHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/oracle_hook.py) | - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| 
[349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12 | Remove redundant None provided as default to dict.get() (#11448) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [5093245d6](https://github.com/apache/airflow/commit/5093245d6f77a370fbd2f9e3df35ac6acf46a1c4) | 2020-09-30 | Strict type coverage for Oracle and Yandex provider (#11198) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22 | Replace assigment with Augmented assignment (#10468) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages 
(#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [057f3ae3a](https://github.com/apache/airflow/commit/057f3ae3a4afedf6d462ecf58b01dd6304d3e135) | 2020-01-29 | [AIRFLOW-6670][depends on AIRFLOW-6669] Move contrib operators to providers package (#7286) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/oracle/CHANGELOG.rst b/airflow/providers/oracle/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/oracle/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. 
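The oracle tables above record the same kind of move, plus the `cx_Oracle >=5.1.2` requirement from the PIP requirements table. Here is a minimal, illustrative sketch of the new hook import (again, not file content from this change), assuming Airflow 2.0 with the oracle provider and cx_Oracle installed; the `oracle_default` connection id is the conventional default, and the helper function and table name are invented for the example.

```python
# Minimal sketch: OracleHook moved from airflow.hooks.oracle_hook to the
# provider package path shown in the moved-hooks table above.
from airflow.providers.oracle.hooks.oracle import OracleHook


def count_rows(table_name: str) -> int:
    """Return the row count of a table through the configured Oracle connection."""
    # "oracle_default" is an assumed connection id defined in Airflow.
    hook = OracleHook(oracle_conn_id="oracle_default")
    # get_first() is inherited from the shared DbApiHook base class and
    # returns the first row of the result set.
    first_row = hook.get_first(f"SELECT COUNT(*) FROM {table_name}")
    return first_row[0]
```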
diff --git a/airflow/providers/oracle/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/oracle/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index 5f416db7770d6..0000000000000 --- a/airflow/providers/oracle/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,51 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for 
provider packages for Airflow 2.0 (#11487) | -| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12 | Remove redundant None provided as default to dict.get() (#11448) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [5093245d6](https://github.com/apache/airflow/commit/5093245d6f77a370fbd2f9e3df35ac6acf46a1c4) | 2020-09-30 | Strict type coverage for Oracle and Yandex provider (#11198) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22 | Replace assigment with Augmented assignment (#10468) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [057f3ae3a](https://github.com/apache/airflow/commit/057f3ae3a4afedf6d462ecf58b01dd6304d3e135) | 2020-01-29 | [AIRFLOW-6670][depends on AIRFLOW-6669] Move contrib operators to providers package (#7286) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/oracle/README.md b/airflow/providers/oracle/README.md deleted file mode 100644 index a6bfc07514177..0000000000000 --- a/airflow/providers/oracle/README.md +++ /dev/null @@ -1,157 +0,0 @@ - - - -# Package apache-airflow-providers-oracle - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) - -[Installation](#installation) - -[PIP requirements](#pip-requirements) - -[Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Transfer operators](#transfer-operators) - - [Moved transfer operators](#moved-transfer-operators) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for the `oracle` provider. All classes for this provider package -are in the `airflow.providers.oracle` python package. - - - -## Installation - -NOTE! - -In November 2020, a new version of pip (20.3) was released with a new 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to installation errors, depending on your choice -of extras. To install Airflow you need to either downgrade pip to version 20.2.4 -(`pip install --upgrade pip==20.2.4`) or, if you use pip 20.3, add the option -`--use-deprecated legacy-resolver` to your pip install command. - -You can install this package on top of an existing Airflow 2.* installation via -`pip install apache-airflow-providers-oracle` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| cx_Oracle | >=5.1.2 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `oracle` provider -are in the `airflow.providers.oracle` package. 
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.oracle` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------| -| [operators.oracle.OracleOperator](https://github.com/apache/airflow/blob/master/airflow/providers/oracle/operators/oracle.py) | [operators.oracle_operator.OracleOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/oracle_operator.py) | - - -## Transfer operators - - - -### Moved transfer operators - -| Airflow 2.0 transfers: `airflow.providers.oracle` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [transfers.oracle_to_oracle.OracleToOracleOperator](https://github.com/apache/airflow/blob/master/airflow/providers/oracle/transfers/oracle_to_oracle.py) | [contrib.operators.oracle_to_oracle_transfer.OracleToOracleTransfer](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/oracle_to_oracle_transfer.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.oracle` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------| -| [hooks.oracle.OracleHook](https://github.com/apache/airflow/blob/master/airflow/providers/oracle/hooks/oracle.py) | [hooks.oracle_hook.OracleHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/oracle_hook.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release 
(#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12 | Remove redundant None provided as default to dict.get() (#11448) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [5093245d6](https://github.com/apache/airflow/commit/5093245d6f77a370fbd2f9e3df35ac6acf46a1c4) | 2020-09-30 | Strict type coverage for Oracle and Yandex provider (#11198) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| 
[7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22 | Replace assigment with Augmented assignment (#10468) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [057f3ae3a](https://github.com/apache/airflow/commit/057f3ae3a4afedf6d462ecf58b01dd6304d3e135) | 2020-01-29 | [AIRFLOW-6670][depends on AIRFLOW-6669] Move contrib operators to providers package (#7286) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/pagerduty/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/pagerduty/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index bada75ab3c869..0000000000000 --- 
a/airflow/providers/pagerduty/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,19 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/pagerduty/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/pagerduty/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 28a1f1f660553..0000000000000 --- a/airflow/providers/pagerduty/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,12 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| 
[fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/pagerduty/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/pagerduty/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 8a027331edcf5..0000000000000 --- a/airflow/providers/pagerduty/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,10 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [3ee618623](https://github.com/apache/airflow/commit/3ee618623be6079ed177da793b490cb7436d5cb6) | 2020-10-20 | Switch PagerdutyHook from pypd to use pdpyras instead (#11151) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/pagerduty/BACKPORT_PROVIDER_README.md b/airflow/providers/pagerduty/BACKPORT_PROVIDER_README.md deleted file mode 100644 index b8ed3b8922afb..0000000000000 --- a/airflow/providers/pagerduty/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,120 +0,0 @@ - - - -# Package apache-airflow-backport-providers-pagerduty - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for `pagerduty` provider. All classes for this provider package -are in `airflow.providers.pagerduty` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-pagerduty` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| pdpyras | >=4.1.2,<5 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `pagerduty` provider -are in the `airflow.providers.pagerduty` package. 
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.pagerduty` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------| -| [hooks.pagerduty.PagerdutyHook](https://github.com/apache/airflow/blob/master/airflow/providers/pagerduty/hooks/pagerduty.py) | [contrib.hooks.pagerduty_hook.PagerdutyHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/pagerduty_hook.py) | - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [3ee618623](https://github.com/apache/airflow/commit/3ee618623be6079ed177da793b490cb7436d5cb6) | 2020-10-20 | Switch PagerdutyHook from pypd to use pdpyras instead (#11151) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| 
[c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/pagerduty/CHANGELOG.rst b/airflow/providers/pagerduty/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/pagerduty/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. 
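The moved-hooks table above records only the rename; as a minimal sketch of the one-line import swap it implies for DAG code (the commented 1.10 path mirrors the table's right-hand column; the `pagerduty_default` connection id is an assumption for illustration):

```python
# Airflow 1.10.* location, per the moved-hooks table above:
# from airflow.contrib.hooks.pagerduty_hook import PagerdutyHook

# Airflow 2.0 provider-package location:
from airflow.providers.pagerduty.hooks.pagerduty import PagerdutyHook

# Assumed usage: credentials are read from an Airflow connection; the
# connection id shown here is hypothetical.
hook = PagerdutyHook(pagerduty_conn_id="pagerduty_default")
```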
diff --git a/airflow/providers/pagerduty/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/pagerduty/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index 5bb93d92bf444..0000000000000 --- a/airflow/providers/pagerduty/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,40 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [3ee618623](https://github.com/apache/airflow/commit/3ee618623be6079ed177da793b490cb7436d5cb6) | 2020-10-20 | Switch PagerdutyHook from pypd to use pdpyras instead (#11151) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | 
Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/pagerduty/README.md b/airflow/providers/pagerduty/README.md deleted file mode 100644 index f9c921ce2980b..0000000000000 --- a/airflow/providers/pagerduty/README.md +++ /dev/null @@ -1,120 +0,0 @@ - - - -# Package apache-airflow-providers-pagerduty - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for `pagerduty` provider. All classes for this provider package -are in `airflow.providers.pagerduty` python package. 
- - - -## Installation - -NOTE! - -On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option -`--use-deprecated legacy-resolver` to your pip install command. - -You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-pagerduty` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| pdpyras | >=4.1.2,<5 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `pagerduty` provider -are in the `airflow.providers.pagerduty` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.pagerduty` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------| -| [hooks.pagerduty.PagerdutyHook](https://github.com/apache/airflow/blob/master/airflow/providers/pagerduty/hooks/pagerduty.py) | [contrib.hooks.pagerduty_hook.PagerdutyHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/pagerduty_hook.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| 
[b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [3ee618623](https://github.com/apache/airflow/commit/3ee618623be6079ed177da793b490cb7436d5cb6) | 2020-10-20 | Switch PagerdutyHook from pypd to use pdpyras instead (#11151) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/papermill/ADDITIONAL_INFO.md b/airflow/providers/papermill/ADDITIONAL_INFO.md deleted file mode 100644 index d9dc56a6f3d28..0000000000000 --- a/airflow/providers/papermill/ADDITIONAL_INFO.md +++ /dev/null @@ -1,23 +0,0 @@ - - -## Additional notes - -Papermill operator is the only one to work with AUTO inlets for now (for lineage support). -However, AUTO inlets are a feature of Airflow 2 that has not been backported to 1.10.*, so lineage support is not available in the backport package. diff --git a/airflow/providers/papermill/CHANGELOG.rst b/airflow/providers/papermill/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/papermill/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. 
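For the lineage note in the `ADDITIONAL_INFO.md` removed above, a minimal sketch of what the Airflow-2-only AUTO inlets look like with the papermill operator (notebook paths, parameters, and the surrounding DAG are assumptions for illustration):

```python
from airflow.lineage import AUTO  # lineage constant available in Airflow 2 only
from airflow.providers.papermill.operators.papermill import PapermillOperator

# Assumed notebook paths; papermill injects `parameters` into the notebook.
run_notebook = PapermillOperator(
    task_id="run_example_notebook",
    input_nb="/tmp/hello_world.ipynb",
    output_nb="/tmp/out-{{ execution_date }}.ipynb",
    parameters={"msgs": "Ran from Airflow at {{ execution_date }}!"},
    inlets=AUTO,  # let Airflow resolve inlets automatically for lineage
)
```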
diff --git a/airflow/providers/papermill/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/papermill/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index c5194ab1c5605..0000000000000 --- a/airflow/providers/papermill/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,35 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------| -| [bd90136aa](https://github.com/apache/airflow/commit/bd90136aaf5035e3234fe545b79a3e4aad21efe2) | 2020-11-30 | Move operator guides to provider documentation packages (#12681) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [cb7c67dea](https://github.com/apache/airflow/commit/cb7c67dea9cd9b9c5de10e355b63039446003149) | 2020-10-20 | Fix example DAGs in pip packages (#11687) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 
2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [cdb3f2545](https://github.com/apache/airflow/commit/cdb3f25456e49d0199cd7ccd680626dac01c9be6) | 2020-05-26 | All classes in backport providers are now importable in Airflow 1.10 (#8991) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [4c81bcd86](https://github.com/apache/airflow/commit/4c81bcd8601fa08efa570ee231f8f103ef830304) | 2020-02-01 | [AIRFLOW-6698] Add shorthand notation for lineage (#7314) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/papermill/README.md b/airflow/providers/papermill/README.md deleted file mode 100644 index 5d9fd9abcd910..0000000000000 --- a/airflow/providers/papermill/README.md +++ /dev/null @@ -1,121 +0,0 @@ - - - -# Package apache-airflow-providers-papermill - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) - -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for `papermill` provider. All classes for this provider package -are in `airflow.providers.papermill` python package. - - -## Additional notes - -Papermill operator is the only one to work with AUTO inlets for now (for lineage support). -However, AUTO inlets are a feature of Airflow 2 that has not been backported to 1.10.*, so lineage support is not available in the backport package. - - -## Installation - -NOTE! - -On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option -`--use-deprecated legacy-resolver` to your pip install command. 
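Concretely, the two workarounds described in the note come down to one of the following (a sketch of the shell commands; the package name matches the install command below):

```bash
# Option 1: pin pip below the 2020 resolver, then install normally
pip install --upgrade pip==20.2.4
pip install apache-airflow-providers-papermill

# Option 2: stay on pip 20.3 and opt back into the legacy resolver
pip install --use-deprecated legacy-resolver apache-airflow-providers-papermill
```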
- -You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-papermill` - -## PIP requirements - -| PIP package | Version required | -|:-----------------------|:-------------------| -| papermill[all] | >=1.2.1 | -| nteract-scrapbook[all] | >=0.3.1 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `papermill` provider -are in the `airflow.providers.papermill` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.papermill` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:------------------------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.papermill.PapermillOperator](https://github.com/apache/airflow/blob/master/airflow/providers/papermill/operators/papermill.py) | [operators.papermill_operator.PapermillOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/papermill_operator.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------| -| [bd90136aa](https://github.com/apache/airflow/commit/bd90136aaf5035e3234fe545b79a3e4aad21efe2) | 2020-11-30 | Move operator guides to provider documentation packages (#12681) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [cb7c67dea](https://github.com/apache/airflow/commit/cb7c67dea9cd9b9c5de10e355b63039446003149) | 2020-10-20 | Fix 
example DAGs in pip packages (#11687) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [cdb3f2545](https://github.com/apache/airflow/commit/cdb3f25456e49d0199cd7ccd680626dac01c9be6) | 2020-05-26 | All classes in backport providers are now importable in Airflow 1.10 (#8991) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [4c81bcd86](https://github.com/apache/airflow/commit/4c81bcd8601fa08efa570ee231f8f103ef830304) | 2020-02-01 | [AIRFLOW-6698] Add shorthand notation for lineage (#7314) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/plexus/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/plexus/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 2386b540fb9a2..0000000000000 --- a/airflow/providers/plexus/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,10 +0,0 @@ - - -### 
Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [0161b5ea2](https://github.com/apache/airflow/commit/0161b5ea2b805d62a0317e5cab6f797b92c8abf1) | 2020-09-26 | Increasing type coverage for multiple provider (#11159) | -| [b9dc3c51b](https://github.com/apache/airflow/commit/b9dc3c51ba2cba1c61d327488cecf2623d6445b3) | 2020-09-10 | Added Plexus as an Airflow provider (#10591) | diff --git a/airflow/providers/plexus/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/plexus/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 97d92642b416c..0000000000000 --- a/airflow/providers/plexus/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,10 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [56d72e3ff](https://github.com/apache/airflow/commit/56d72e3ff8798a2662847355d1b73b2c1f57b31f) | 2020-10-24 | Replace non-empty sets with set literals (#11810) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/plexus/BACKPORT_PROVIDER_README.md b/airflow/providers/plexus/BACKPORT_PROVIDER_README.md deleted file mode 100644 index 77ac87b89d5fb..0000000000000 --- a/airflow/providers/plexus/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,111 +0,0 @@ - - - -# Package apache-airflow-backport-providers-plexus - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [New operators](#new-operators) - - [Hooks](#hooks) - - [New hooks](#new-hooks) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - -## Backport package - -This is a backport providers package for `plexus` provider. All classes for this provider package -are in `airflow.providers.plexus` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. 
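As a sketch of the Airflow 2.0 import paths listed in the New operators and New hooks tables further down (imports only; constructor arguments are not covered by this README):

```python
# Paths taken from the class-summary tables below; these classes are new in
# this provider, so there is no airflow.contrib predecessor to migrate from.
from airflow.providers.plexus.hooks.plexus import PlexusHook
from airflow.providers.plexus.operators.job import PlexusJobOperator
```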
- - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-plexus` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| arrow | >=0.16.0 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `plexus` provider -are in the `airflow.providers.plexus` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - -### New operators - -| New Airflow 2.0 operators: `airflow.providers.plexus` package | -|:---------------------------------------------------------------------------------------------------------------------------| -| [operators.job.PlexusJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/plexus/operators/job.py) | - - - -## Hooks - - -### New hooks - -| New Airflow 2.0 hooks: `airflow.providers.plexus` package | -|:------------------------------------------------------------------------------------------------------------------| -| [hooks.plexus.PlexusHook](https://github.com/apache/airflow/blob/master/airflow/providers/plexus/hooks/plexus.py) | - - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [56d72e3ff](https://github.com/apache/airflow/commit/56d72e3ff8798a2662847355d1b73b2c1f57b31f) | 2020-10-24 | Replace non-empty sets with set literals (#11810) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:--------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [0161b5ea2](https://github.com/apache/airflow/commit/0161b5ea2b805d62a0317e5cab6f797b92c8abf1) | 2020-09-26 | Increasing type coverage for multiple provider (#11159) | -| [b9dc3c51b](https://github.com/apache/airflow/commit/b9dc3c51ba2cba1c61d327488cecf2623d6445b3) | 2020-09-10 | Added Plexus as an Airflow provider (#10591) | diff --git a/airflow/providers/plexus/CHANGELOG.rst b/airflow/providers/plexus/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/plexus/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. 
See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. diff --git a/airflow/providers/plexus/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/plexus/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index dd9e979417afc..0000000000000 --- a/airflow/providers/plexus/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,26 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| 
[56d72e3ff](https://github.com/apache/airflow/commit/56d72e3ff8798a2662847355d1b73b2c1f57b31f) | 2020-10-24 | Replace non-empty sets with set literals (#11810) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [0161b5ea2](https://github.com/apache/airflow/commit/0161b5ea2b805d62a0317e5cab6f797b92c8abf1) | 2020-09-26 | Increasing type coverage for multiple provider (#11159) | -| [b9dc3c51b](https://github.com/apache/airflow/commit/b9dc3c51ba2cba1c61d327488cecf2623d6445b3) | 2020-09-10 | Added Plexus as an Airflow provider (#10591) | diff --git a/airflow/providers/plexus/README.md b/airflow/providers/plexus/README.md deleted file mode 100644 index 2e5353fa95a6d..0000000000000 --- a/airflow/providers/plexus/README.md +++ /dev/null @@ -1,119 +0,0 @@ - - - -# Package apache-airflow-providers-plexus - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [New operators](#new-operators) - - [Hooks](#hooks) - - [New hooks](#new-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for `plexus` provider. All classes for this provider package -are in `airflow.providers.plexus` python package. - - - -## Installation - -NOTE! - -On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option -`--use-deprecated legacy-resolver` to your pip install command. - -You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-plexus` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| arrow | >=0.16.0 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `plexus` provider -are in the `airflow.providers.plexus` package. 
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - -### New operators - -| New Airflow 2.0 operators: `airflow.providers.plexus` package | -|:---------------------------------------------------------------------------------------------------------------------------| -| [operators.job.PlexusJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/plexus/operators/job.py) | - - - -## Hooks - - -### New hooks - -| New Airflow 2.0 hooks: `airflow.providers.plexus` package | -|:------------------------------------------------------------------------------------------------------------------| -| [hooks.plexus.PlexusHook](https://github.com/apache/airflow/blob/master/airflow/providers/plexus/hooks/plexus.py) | - - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [56d72e3ff](https://github.com/apache/airflow/commit/56d72e3ff8798a2662847355d1b73b2c1f57b31f) | 2020-10-24 | Replace non-empty sets with set literals 
(#11810) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [0161b5ea2](https://github.com/apache/airflow/commit/0161b5ea2b805d62a0317e5cab6f797b92c8abf1) | 2020-09-26 | Increasing type coverage for multiple provider (#11159) | -| [b9dc3c51b](https://github.com/apache/airflow/commit/b9dc3c51ba2cba1c61d327488cecf2623d6445b3) | 2020-09-10 | Added Plexus as an Airflow provider (#10591) | diff --git a/airflow/providers/postgres/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/postgres/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index 3759f8cb5cceb..0000000000000 --- a/airflow/providers/postgres/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,23 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [a28c66f23](https://github.com/apache/airflow/commit/a28c66f23d373cd0f8bfc765a515f21d4b66a0e9) | 2020-04-30 | [AIRFLOW-4734] Upsert functionality for PostgresHook.insert_rows() (#8625) | -| [68d1714f2](https://github.com/apache/airflow/commit/68d1714f296989b7aad1a04b75dc033e76afb747) | 2020-04-04 | [AIRFLOW-6822] AWS hooks should cache boto3 client (#7541) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18 | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [82c0e5aff](https://github.com/apache/airflow/commit/82c0e5aff6004f636b98e207c3caec40b403fbbe) | 2020-01-28 | [AIRFLOW-6655] Move AWS classes to providers (#7271) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/postgres/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/postgres/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 521911e321b2d..0000000000000 --- a/airflow/providers/postgres/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,16 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [eff1525d1](https://github.com/apache/airflow/commit/eff1525d124f7f697e4fbe5cc9a54ed32ec0138d) | 2020-09-16 | Increase typing coverage for postgres provider (#10864) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| 
[3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/postgres/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/postgres/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 95a5149d13014..0000000000000 --- a/airflow/providers/postgres/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,13 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [a4dc11fae](https://github.com/apache/airflow/commit/a4dc11fae63d56bc6cbb029525113948862fd45d) | 2020-10-19 | Change to pass all extra connection paramaters to psycopg2 (#11019) | -| [3cddc1182](https://github.com/apache/airflow/commit/3cddc11821ff8f9ed0811384c0643f756a2b3dfa) | 2020-10-16 | Updated template_fields_rendereds for PostgresOperator and SimpleHttpOperator (#11555) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12 | Remove redundant None provided as default to dict.get() (#11448) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/postgres/BACKPORT_PROVIDER_README.md b/airflow/providers/postgres/BACKPORT_PROVIDER_README.md deleted file mode 100644 index 1b65fe6d3a551..0000000000000 --- a/airflow/providers/postgres/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,160 +0,0 @@ - - - -# Package apache-airflow-backport-providers-postgres - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Cross provider package dependencies](#cross-provider-package-dependencies) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 
2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for `postgres` provider. All classes for this provider package -are in `airflow.providers.postgres` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-postgres` - -## PIP requirements - -| PIP package | Version required | -|:----------------|:-------------------| -| psycopg2-binary | >=2.7.4 | - -## Cross provider package dependencies - -Those are dependencies that might be needed in order to use all the features of the package. -You need to install the specified backport providers package in order to use them. - -You can install such cross-provider dependencies when installing from PyPI. For example: - -```bash -pip install apache-airflow-backport-providers-postgres[amazon] -``` - -| Dependent package | Extra | -|:-------------------------------------------------------------------------------------------------------------------|:--------| -| [apache-airflow-backport-providers-amazon](https://github.com/apache/airflow/tree/master/airflow/providers/amazon) | amazon | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `postgres` provider -are in the `airflow.providers.postgres` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.postgres` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.postgres.PostgresOperator](https://github.com/apache/airflow/blob/master/airflow/providers/postgres/operators/postgres.py) | [operators.postgres_operator.PostgresOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/postgres_operator.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.postgres` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------| -| [hooks.postgres.PostgresHook](https://github.com/apache/airflow/blob/master/airflow/providers/postgres/hooks/postgres.py) | [hooks.postgres_hook.PostgresHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/postgres_hook.py) | - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 
2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [a4dc11fae](https://github.com/apache/airflow/commit/a4dc11fae63d56bc6cbb029525113948862fd45d) | 2020-10-19 | Change to pass all extra connection paramaters to psycopg2 (#11019) | -| [3cddc1182](https://github.com/apache/airflow/commit/3cddc11821ff8f9ed0811384c0643f756a2b3dfa) | 2020-10-16 | Updated template_fields_rendereds for PostgresOperator and SimpleHttpOperator (#11555) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12 | Remove redundant None provided as default to dict.get() (#11448) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [eff1525d1](https://github.com/apache/airflow/commit/eff1525d124f7f697e4fbe5cc9a54ed32ec0138d) | 2020-09-16 | Increase typing coverage for postgres provider (#10864) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| 
[c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [a28c66f23](https://github.com/apache/airflow/commit/a28c66f23d373cd0f8bfc765a515f21d4b66a0e9) | 2020-04-30 | [AIRFLOW-4734] Upsert functionality for PostgresHook.insert_rows() (#8625) | -| [68d1714f2](https://github.com/apache/airflow/commit/68d1714f296989b7aad1a04b75dc033e76afb747) | 2020-04-04 | [AIRFLOW-6822] AWS hooks should cache boto3 client (#7541) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18 | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [82c0e5aff](https://github.com/apache/airflow/commit/82c0e5aff6004f636b98e207c3caec40b403fbbe) | 2020-01-28 | [AIRFLOW-6655] Move AWS classes to providers (#7271) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/postgres/CHANGELOG.rst b/airflow/providers/postgres/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/postgres/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. 
Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. diff --git a/airflow/providers/postgres/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/postgres/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index 51b2a0efea197..0000000000000 --- a/airflow/providers/postgres/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,54 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| 
[349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [a4dc11fae](https://github.com/apache/airflow/commit/a4dc11fae63d56bc6cbb029525113948862fd45d) | 2020-10-19 | Change to pass all extra connection paramaters to psycopg2 (#11019) | -| [3cddc1182](https://github.com/apache/airflow/commit/3cddc11821ff8f9ed0811384c0643f756a2b3dfa) | 2020-10-16 | Updated template_fields_rendereds for PostgresOperator and SimpleHttpOperator (#11555) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12 | Remove redundant None provided as default to dict.get() (#11448) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [eff1525d1](https://github.com/apache/airflow/commit/eff1525d124f7f697e4fbe5cc9a54ed32ec0138d) | 2020-09-16 | Increase typing coverage for postgres provider (#10864) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [a28c66f23](https://github.com/apache/airflow/commit/a28c66f23d373cd0f8bfc765a515f21d4b66a0e9) | 2020-04-30 | [AIRFLOW-4734] Upsert functionality for PostgresHook.insert_rows() (#8625) | -| [68d1714f2](https://github.com/apache/airflow/commit/68d1714f296989b7aad1a04b75dc033e76afb747) | 2020-04-04 | [AIRFLOW-6822] AWS hooks should cache boto3 client (#7541) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18 | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [82c0e5aff](https://github.com/apache/airflow/commit/82c0e5aff6004f636b98e207c3caec40b403fbbe) | 2020-01-28 | [AIRFLOW-6655] Move AWS classes to providers (#7271) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/postgres/README.md b/airflow/providers/postgres/README.md deleted file mode 100644 index 59bb2fe9da06e..0000000000000 --- a/airflow/providers/postgres/README.md +++ /dev/null @@ -1,163 +0,0 @@ - - - -# Package apache-airflow-providers-postgres - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Cross provider package dependencies](#cross-provider-package-dependencies) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for `postgres` provider. All classes for this provider package -are in `airflow.providers.postgres` python package. - - - -## Installation - -NOTE! - -On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option -`--use-deprecated legacy-resolver` to your pip install command. 
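Spelled out, the two workarounds described in the note look like this (a sketch; the package name is the one from this README):

```bash
# Option 1: downgrade pip to the last release before the 2020 resolver.
pip install --upgrade pip==20.2.4
pip install apache-airflow-providers-postgres

# Option 2: stay on pip 20.3 but fall back to the legacy resolver.
pip install --use-deprecated legacy-resolver apache-airflow-providers-postgres
```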
- -You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-postgres` - -## PIP requirements - -| PIP package | Version required | -|:----------------|:-------------------| -| psycopg2-binary | >=2.7.4 | - -## Cross provider package dependencies - -Those are dependencies that might be needed in order to use all the features of the package. -You need to install the specified backport providers package in order to use them. - -You can install such cross-provider dependencies when installing from PyPI. For example: - -```bash -pip install apache-airflow-providers-postgres[amazon] -``` - -| Dependent package | Extra | -|:--------------------------------------------------------------------------------------------|:--------| -| [apache-airflow-providers-amazon](https://pypi.org/project/apache-airflow-providers-amazon) | amazon | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `postgres` provider -are in the `airflow.providers.postgres` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.postgres` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.postgres.PostgresOperator](https://github.com/apache/airflow/blob/master/airflow/providers/postgres/operators/postgres.py) | [operators.postgres_operator.PostgresOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/postgres_operator.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.postgres` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------| -| [hooks.postgres.PostgresHook](https://github.com/apache/airflow/blob/master/airflow/providers/postgres/hooks/postgres.py) | [hooks.postgres_hook.PostgresHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/postgres_hook.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| 
[c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [a4dc11fae](https://github.com/apache/airflow/commit/a4dc11fae63d56bc6cbb029525113948862fd45d) | 2020-10-19 | Change to pass all extra connection paramaters to psycopg2 (#11019) | -| [3cddc1182](https://github.com/apache/airflow/commit/3cddc11821ff8f9ed0811384c0643f756a2b3dfa) | 2020-10-16 | Updated template_fields_rendereds for PostgresOperator and SimpleHttpOperator (#11555) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12 | Remove redundant None provided as default to dict.get() (#11448) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [eff1525d1](https://github.com/apache/airflow/commit/eff1525d124f7f697e4fbe5cc9a54ed32ec0138d) | 
2020-09-16 | Increase typing coverage for postgres provider (#10864) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [a28c66f23](https://github.com/apache/airflow/commit/a28c66f23d373cd0f8bfc765a515f21d4b66a0e9) | 2020-04-30 | [AIRFLOW-4734] Upsert functionality for PostgresHook.insert_rows() (#8625) | -| [68d1714f2](https://github.com/apache/airflow/commit/68d1714f296989b7aad1a04b75dc033e76afb747) | 2020-04-04 | [AIRFLOW-6822] AWS hooks should cache boto3 client (#7541) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18 | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [82c0e5aff](https://github.com/apache/airflow/commit/82c0e5aff6004f636b98e207c3caec40b403fbbe) | 2020-01-28 | [AIRFLOW-6655] Move AWS classes to providers (#7271) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/presto/ADDITIONAL_INFO.md b/airflow/providers/presto/ADDITIONAL_INFO.md deleted file mode 100644 index 479ae500c548b..0000000000000 --- a/airflow/providers/presto/ADDITIONAL_INFO.md +++ /dev/null @@ -1,30 +0,0 @@ - - -## Additional limitations - -The presto provider might not work correctly with Kerberos + SSL when it is installed together with the -Snowflake provider. It's because of Snowflake monkeypatching the urllib3 -library as described in [this issue](https://github.com/snowflakedb/snowflake-connector-python/issues/324) -the offending code is [here](https://github.com/snowflakedb/snowflake-connector-python/blob/133d6215f7920d304c5f2d466bae38127c1b836d/src/snowflake/connector/network.py#L89-L92) - -In the future Snowflake plans to get rid of the monkeypatching. - -You can keep track of [the issue](https://github.com/apache/airflow/issues/12881) in order to know when the -issue will be resolved. 
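Because this limitation depends only on what else happens to be installed in the environment, one way to surface it early is a pre-flight check for the Snowflake connector. This is a hypothetical guard, not part of the provider:

```bash
# Hypothetical pre-flight check: warn when snowflake-connector-python is
# present, since its urllib3 monkeypatching (linked above) can break
# Kerberos + SSL Presto connections. `pip show` exits non-zero when the
# package is not installed.
if pip show snowflake-connector-python >/dev/null 2>&1; then
    echo "WARNING: snowflake-connector-python detected;" \
         "Presto Kerberos+SSL connections may be affected (see linked issues)."
fi
```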
diff --git a/airflow/providers/presto/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/presto/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index cb5963e4349a5..0000000000000 --- a/airflow/providers/presto/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,21 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [1100cea1f](https://github.com/apache/airflow/commit/1100cea1fb9e010e6f4acc699c6d54d056c0541c) | 2020-05-03 | Remove _get_pretty_exception_message in PrestoHook | -| [35834c380](https://github.com/apache/airflow/commit/35834c3809ce6f5f1dcff130d0e68cabed7f72de) | 2020-03-26 | Remove Presto check operators (#7884) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [029c84e55](https://github.com/apache/airflow/commit/029c84e5527b6db6bdbdbe026f455da325bedef3) | 2020-03-18 | [AIRFLOW-5421] Add Presto to GCS transfer operator (#7718) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/presto/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/presto/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 6b08cea66db3a..0000000000000 --- a/airflow/providers/presto/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,14 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | 
-|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [720912f67](https://github.com/apache/airflow/commit/720912f67b3af0bdcbac64d6b8bf6d51c6247e26) | 2020-10-02 | Strict type check for multiple providers (#11229) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [8f8db8959](https://github.com/apache/airflow/commit/8f8db8959e526be54d700845d36ee9f315bae2ea) | 2020-08-12 | DbApiHook: Support kwargs in get_pandas_df (#9730) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/presto/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/presto/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 3ebd63b63e9cb..0000000000000 --- a/airflow/providers/presto/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,11 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [1543923c1](https://github.com/apache/airflow/commit/1543923c197f658533ca0a0bb259b59a002cce43) | 2020-10-20 | Add Kerberos Auth for PrestoHook (#10488) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/presto/BACKPORT_PROVIDER_README.md b/airflow/providers/presto/BACKPORT_PROVIDER_README.md deleted file mode 100644 index ffdee0c4cf397..0000000000000 --- a/airflow/providers/presto/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,125 +0,0 @@ - - - -# Package apache-airflow-backport-providers-presto - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 
2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for `presto` provider. All classes for this provider package -are in `airflow.providers.presto` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-presto` - -## PIP requirements - -| PIP package | Version required | -|:---------------------|:-------------------| -| presto-python-client | >=0.7.0,<0.8 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `presto` provider -are in the `airflow.providers.presto` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.presto` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------| -| [hooks.presto.PrestoHook](https://github.com/apache/airflow/blob/master/airflow/providers/presto/hooks/presto.py) | [hooks.presto_hook.PrestoHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/presto_hook.py) | - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [1543923c1](https://github.com/apache/airflow/commit/1543923c197f658533ca0a0bb259b59a002cce43) | 2020-10-20 | Add Kerberos Auth for PrestoHook (#10488) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [720912f67](https://github.com/apache/airflow/commit/720912f67b3af0bdcbac64d6b8bf6d51c6247e26) | 2020-10-02 | Strict type check 
for multiple providers (#11229) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [8f8db8959](https://github.com/apache/airflow/commit/8f8db8959e526be54d700845d36ee9f315bae2ea) | 2020-08-12 | DbApiHook: Support kwargs in get_pandas_df (#9730) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [1100cea1f](https://github.com/apache/airflow/commit/1100cea1fb9e010e6f4acc699c6d54d056c0541c) | 2020-05-03 | Remove _get_pretty_exception_message in PrestoHook | -| [35834c380](https://github.com/apache/airflow/commit/35834c3809ce6f5f1dcff130d0e68cabed7f72de) | 2020-03-26 | Remove Presto check operators (#7884) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [029c84e55](https://github.com/apache/airflow/commit/029c84e5527b6db6bdbdbe026f455da325bedef3) | 2020-03-18 | [AIRFLOW-5421] Add Presto to GCS transfer operator (#7718) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/presto/CHANGELOG.rst b/airflow/providers/presto/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/presto/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. 
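The "Moved hooks" table above gives the old and new import paths for `PrestoHook`. A short migration sketch (illustrative; `presto_conn_id` is the hook's connection parameter and `presto_default` its conventional default connection id, assumed here):

```python
# Airflow 1.10.x import path (pre-move):
# from airflow.hooks.presto_hook import PrestoHook

# Airflow 2.0 / backport provider package import path, as listed in the table above:
from airflow.providers.presto.hooks.presto import PrestoHook

# PrestoHook is a DbApiHook subclass, so the usual query helpers apply.
hook = PrestoHook(presto_conn_id="presto_default")
rows = hook.get_records("SELECT 1")
print(rows)
```

Only the import line changes; the hook's behaviour and connection configuration stay the same.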
diff --git a/airflow/providers/presto/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/presto/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index f36376eb5b850..0000000000000 --- a/airflow/providers/presto/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,48 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [5c74c3a5c](https://github.com/apache/airflow/commit/5c74c3a5c1bc6424a068f1dd21a2d999b92cd8c5) | 2020-12-09 | Add explanation of the Presto Kerberos + SSL + snowflake problem (#12939) | -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [1543923c1](https://github.com/apache/airflow/commit/1543923c197f658533ca0a0bb259b59a002cce43) | 2020-10-20 | Add Kerberos Auth for PrestoHook (#10488) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added 
support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [720912f67](https://github.com/apache/airflow/commit/720912f67b3af0bdcbac64d6b8bf6d51c6247e26) | 2020-10-02 | Strict type check for multiple providers (#11229) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [8f8db8959](https://github.com/apache/airflow/commit/8f8db8959e526be54d700845d36ee9f315bae2ea) | 2020-08-12 | DbApiHook: Support kwargs in get_pandas_df (#9730) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [1100cea1f](https://github.com/apache/airflow/commit/1100cea1fb9e010e6f4acc699c6d54d056c0541c) | 2020-05-03 | Remove _get_pretty_exception_message in PrestoHook | -| [35834c380](https://github.com/apache/airflow/commit/35834c3809ce6f5f1dcff130d0e68cabed7f72de) | 2020-03-26 | Remove Presto check operators (#7884) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [029c84e55](https://github.com/apache/airflow/commit/029c84e5527b6db6bdbdbe026f455da325bedef3) | 2020-03-18 | [AIRFLOW-5421] Add Presto to GCS transfer operator (#7718) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/presto/README.md b/airflow/providers/presto/README.md deleted file mode 100644 index 0e59de21876e7..0000000000000 --- a/airflow/providers/presto/README.md +++ /dev/null @@ -1,140 +0,0 @@ - - - -# Package apache-airflow-providers-presto - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) - [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for `presto` provider. All classes for this provider package -are in `airflow.providers.presto` python package. - - -## Additional limitations - -The presto provider might not work correctly with Kerberos + SSL when it is installed together with the -Snowflake provider. This is because the Snowflake connector monkeypatches the urllib3 -library, as described in [this issue](https://github.com/snowflakedb/snowflake-connector-python/issues/324); -the offending code is [here](https://github.com/snowflakedb/snowflake-connector-python/blob/133d6215f7920d304c5f2d466bae38127c1b836d/src/snowflake/connector/network.py#L89-L92). - -Snowflake plans to get rid of the monkeypatching in a future release. - -You can keep track of [the issue](https://github.com/apache/airflow/issues/12881) to know when this -limitation is resolved. - - -## Installation - -NOTE! - -In November 2020, a new version of pip (20.3) was released with a new 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to installation errors, depending on your choice -of extras.
To install Airflow you need to either downgrade pip to version 20.2.4 -(`pip install --upgrade pip==20.2.4`) or, if you use pip 20.3, add the option -`--use-deprecated legacy-resolver` to your `pip install` command. - -You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-presto` - -## PIP requirements - -| PIP package | Version required | -|:---------------------|:-------------------| -| presto-python-client | >=0.7.0,<0.8 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `presto` provider -are in the `airflow.providers.presto` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.presto` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------| -| [hooks.presto.PrestoHook](https://github.com/apache/airflow/blob/master/airflow/providers/presto/hooks/presto.py) | [hooks.presto_hook.PrestoHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/presto_hook.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [5c74c3a5c](https://github.com/apache/airflow/commit/5c74c3a5c1bc6424a068f1dd21a2d999b92cd8c5) | 2020-12-09 | Add explanation of the Presto Kerberos + SSL + snowflake problem (#12939) | -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| 
[59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [1543923c1](https://github.com/apache/airflow/commit/1543923c197f658533ca0a0bb259b59a002cce43) | 2020-10-20 | Add Kerberos Auth for PrestoHook (#10488) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [720912f67](https://github.com/apache/airflow/commit/720912f67b3af0bdcbac64d6b8bf6d51c6247e26) | 2020-10-02 | Strict type check for multiple providers (#11229) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [8f8db8959](https://github.com/apache/airflow/commit/8f8db8959e526be54d700845d36ee9f315bae2ea) | 2020-08-12 | DbApiHook: Support kwargs in get_pandas_df (#9730) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| 
[00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [1100cea1f](https://github.com/apache/airflow/commit/1100cea1fb9e010e6f4acc699c6d54d056c0541c) | 2020-05-03 | Remove _get_pretty_exception_message in PrestoHook | -| [35834c380](https://github.com/apache/airflow/commit/35834c3809ce6f5f1dcff130d0e68cabed7f72de) | 2020-03-26 | Remove Presto check operators (#7884) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [029c84e55](https://github.com/apache/airflow/commit/029c84e5527b6db6bdbdbe026f455da325bedef3) | 2020-03-18 | [AIRFLOW-5421] Add Presto to GCS transfer operator (#7718) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/qubole/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/qubole/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index d64c20c63530d..0000000000000 --- a/airflow/providers/qubole/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,30 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4b06fde0f](https://github.com/apache/airflow/commit/4b06fde0f10ce178b3c336c5d901e3b089f2863d) | 2020-05-12 | Fix Flake8 errors (#8841) | -| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09 | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [de7e934ca](https://github.com/apache/airflow/commit/de7e934ca3f21ce82f67accf92811b3ac044476f) | 2020-03-17 | [AIRFLOW-7079] Remove redundant code for storing template_fields (#7750) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18 | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) | -| [f3ad5cf61](https://github.com/apache/airflow/commit/f3ad5cf6185b9d406d0fb0a4ecc0b5536f79217a) | 2020-02-03 | [AIRFLOW-4681] Make sensors module pylint compatible (#7309) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02 | [AIRFLOW-6708] Set unique logger names (#7330) | -| [a2d6a2f85](https://github.com/apache/airflow/commit/a2d6a2f85e07c38be479e91e4a27981f308f4711) | 2020-01-31 | [AIRFLOW-6687] Switch kubernetes tests to example_dags (#7299) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/qubole/BACKPORT_PROVIDER_CHANGES_2020.10.05.md 
b/airflow/providers/qubole/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 6e321a27bc267..0000000000000 --- a/airflow/providers/qubole/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,23 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:----------------------------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [720912f67](https://github.com/apache/airflow/commit/720912f67b3af0bdcbac64d6b8bf6d51c6247e26) | 2020-10-02 | Strict type check for multiple providers (#11229) | -| [c58d60635](https://github.com/apache/airflow/commit/c58d60635dbab1a91f38e989f72f91645cb7eb62) | 2020-09-11 | Update qubole_hook to not remove pool as an arg for qubole_operator (#10820) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [36aa88ffc](https://github.com/apache/airflow/commit/36aa88ffc1e3feb5c6f4520871a4f6e3196c0804) | 2020-09-03 | Add jupytercmd and fix task failure when notify set as true in qubole operator (#10599) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [d1bce91bb](https://github.com/apache/airflow/commit/d1bce91bb21d5a468fa6a0207156c28fe1ca6513) | 2020-08-25 | PyDocStyle: Enable D403: Capitalized first word of docstring (#10530) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [3190db524](https://github.com/apache/airflow/commit/3190db52469f9d9a338231a9e8e7f333a6fbb638) | 2020-06-24 | [AIRFLOW-9347] Fix QuboleHook unable to add list to tags (#9349) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/qubole/BACKPORT_PROVIDER_CHANGES_2020.10.29.md 
b/airflow/providers/qubole/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 528a7e085f91f..0000000000000 --- a/airflow/providers/qubole/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,10 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/qubole/BACKPORT_PROVIDER_README.md b/airflow/providers/qubole/BACKPORT_PROVIDER_README.md deleted file mode 100644 index 90c45ed1f3b8b..0000000000000 --- a/airflow/providers/qubole/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,173 +0,0 @@ - - - -# Package apache-airflow-backport-providers-qubole - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Sensors](#sensors) - - [Moved sensors](#moved-sensors) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for `qubole` provider. All classes for this provider package -are in `airflow.providers.qubole` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-qubole` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| qds-sdk | >=1.10.4 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `qubole` provider -are in the `airflow.providers.qubole` package. 
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.qubole` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.qubole.QuboleOperator](https://github.com/apache/airflow/blob/master/airflow/providers/qubole/operators/qubole.py) | [contrib.operators.qubole_operator.QuboleOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/qubole_operator.py) | -| [operators.qubole_check.QuboleCheckOperator](https://github.com/apache/airflow/blob/master/airflow/providers/qubole/operators/qubole_check.py) | [contrib.operators.qubole_check_operator.QuboleCheckOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/qubole_check_operator.py) | -| [operators.qubole_check.QuboleValueCheckOperator](https://github.com/apache/airflow/blob/master/airflow/providers/qubole/operators/qubole_check.py) | [contrib.operators.qubole_check_operator.QuboleValueCheckOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/qubole_check_operator.py) | - - -## Sensors - - - -### Moved sensors - -| Airflow 2.0 sensors: `airflow.providers.qubole` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:---------------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------| -| [sensors.qubole.QuboleFileSensor](https://github.com/apache/airflow/blob/master/airflow/providers/qubole/sensors/qubole.py) | [contrib.sensors.qubole_sensor.QuboleFileSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/qubole_sensor.py) | -| [sensors.qubole.QubolePartitionSensor](https://github.com/apache/airflow/blob/master/airflow/providers/qubole/sensors/qubole.py) | [contrib.sensors.qubole_sensor.QubolePartitionSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/qubole_sensor.py) | -| [sensors.qubole.QuboleSensor](https://github.com/apache/airflow/blob/master/airflow/providers/qubole/sensors/qubole.py) | [contrib.sensors.qubole_sensor.QuboleSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/qubole_sensor.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.qubole` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:-----------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------| -| [hooks.qubole.QuboleHook](https://github.com/apache/airflow/blob/master/airflow/providers/qubole/hooks/qubole.py) | 
[contrib.hooks.qubole_hook.QuboleHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/qubole_hook.py) | -| [hooks.qubole_check.QuboleCheckHook](https://github.com/apache/airflow/blob/master/airflow/providers/qubole/hooks/qubole_check.py) | [contrib.hooks.qubole_check_hook.QuboleCheckHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/qubole_check_hook.py) | - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:----------------------------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [720912f67](https://github.com/apache/airflow/commit/720912f67b3af0bdcbac64d6b8bf6d51c6247e26) | 2020-10-02 | Strict type check for multiple providers (#11229) | -| [c58d60635](https://github.com/apache/airflow/commit/c58d60635dbab1a91f38e989f72f91645cb7eb62) | 2020-09-11 | Update qubole_hook to not remove pool as an arg for qubole_operator (#10820) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [36aa88ffc](https://github.com/apache/airflow/commit/36aa88ffc1e3feb5c6f4520871a4f6e3196c0804) | 2020-09-03 | Add jupytercmd and fix task failure when notify set as true in qubole operator (#10599) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [d1bce91bb](https://github.com/apache/airflow/commit/d1bce91bb21d5a468fa6a0207156c28fe1ca6513) | 2020-08-25 | PyDocStyle: Enable D403: Capitalized first word of docstring (#10530) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| 
[24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [3190db524](https://github.com/apache/airflow/commit/3190db52469f9d9a338231a9e8e7f333a6fbb638) | 2020-06-24 | [AIRFLOW-9347] Fix QuboleHook unable to add list to tags (#9349) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4b06fde0f](https://github.com/apache/airflow/commit/4b06fde0f10ce178b3c336c5d901e3b089f2863d) | 2020-05-12 | Fix Flake8 errors (#8841) | -| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09 | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [de7e934ca](https://github.com/apache/airflow/commit/de7e934ca3f21ce82f67accf92811b3ac044476f) | 2020-03-17 | [AIRFLOW-7079] Remove redundant code for storing template_fields (#7750) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18 | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) | -| [f3ad5cf61](https://github.com/apache/airflow/commit/f3ad5cf6185b9d406d0fb0a4ecc0b5536f79217a) | 2020-02-03 | [AIRFLOW-4681] Make sensors module pylint compatible (#7309) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02 | [AIRFLOW-6708] Set unique logger names (#7330) | -| [a2d6a2f85](https://github.com/apache/airflow/commit/a2d6a2f85e07c38be479e91e4a27981f308f4711) | 2020-01-31 | [AIRFLOW-6687] Switch kubernetes tests to example_dags (#7299) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/qubole/CHANGELOG.rst b/airflow/providers/qubole/CHANGELOG.rst new file 
mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/qubole/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. diff --git a/airflow/providers/qubole/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/qubole/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index a4e3e19df2260..0000000000000 --- a/airflow/providers/qubole/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,67 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [d5589673a](https://github.com/apache/airflow/commit/d5589673a95aaced0b851ea0a4061a010a924a82) | 2020-12-08 | Move dummy_operator.py to dummy.py (#11178) (#11293) | -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [1dcd3e13f](https://github.com/apache/airflow/commit/1dcd3e13fd0a078fc9440e91b77f6f87aa60dd3b) | 2020-12-05 | Add support for extra links coming from the providers (#12472) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| 
[b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [720912f67](https://github.com/apache/airflow/commit/720912f67b3af0bdcbac64d6b8bf6d51c6247e26) | 2020-10-02 | Strict type check for multiple providers (#11229) | -| [c58d60635](https://github.com/apache/airflow/commit/c58d60635dbab1a91f38e989f72f91645cb7eb62) | 2020-09-11 | Update qubole_hook to not remove pool as an arg for qubole_operator (#10820) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [36aa88ffc](https://github.com/apache/airflow/commit/36aa88ffc1e3feb5c6f4520871a4f6e3196c0804) | 2020-09-03 | Add jupytercmd and fix task failure when notify set as true in qubole operator (#10599) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [d1bce91bb](https://github.com/apache/airflow/commit/d1bce91bb21d5a468fa6a0207156c28fe1ca6513) | 2020-08-25 | PyDocStyle: Enable D403: Capitalized first word of docstring (#10530) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| 
[aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [3190db524](https://github.com/apache/airflow/commit/3190db52469f9d9a338231a9e8e7f333a6fbb638) | 2020-06-24 | [AIRFLOW-9347] Fix QuboleHook unable to add list to tags (#9349) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4b06fde0f](https://github.com/apache/airflow/commit/4b06fde0f10ce178b3c336c5d901e3b089f2863d) | 2020-05-12 | Fix Flake8 errors (#8841) | -| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09 | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [de7e934ca](https://github.com/apache/airflow/commit/de7e934ca3f21ce82f67accf92811b3ac044476f) | 2020-03-17 | [AIRFLOW-7079] Remove redundant code for storing template_fields (#7750) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] 
Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18 | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) | -| [f3ad5cf61](https://github.com/apache/airflow/commit/f3ad5cf6185b9d406d0fb0a4ecc0b5536f79217a) | 2020-02-03 | [AIRFLOW-4681] Make sensors module pylint compatible (#7309) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02 | [AIRFLOW-6708] Set unique logger names (#7330) | -| [a2d6a2f85](https://github.com/apache/airflow/commit/a2d6a2f85e07c38be479e91e4a27981f308f4711) | 2020-01-31 | [AIRFLOW-6687] Switch kubernetes tests to example_dags (#7299) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/qubole/README.md b/airflow/providers/qubole/README.md deleted file mode 100644 index 7d24eec2e09a6..0000000000000 --- a/airflow/providers/qubole/README.md +++ /dev/null @@ -1,178 +0,0 @@ - - - -# Package apache-airflow-providers-qubole - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Sensors](#sensors) - - [Moved sensors](#moved-sensors) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for the `qubole` provider. All classes for this provider package -are in the `airflow.providers.qubole` Python package. - - - -## Installation - -NOTE! - -In November 2020, a new version of pip (20.3) was released with a new 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors during installation, depending on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 with -`pip install --upgrade pip==20.2.4` or, if you use pip 20.3, add the option -`--use-deprecated legacy-resolver` to your `pip install` command. - -You can install this package on top of an existing Airflow 2.* installation via -`pip install apache-airflow-providers-qubole` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| qds-sdk | >=1.10.4 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `qubole` provider -are in the `airflow.providers.qubole` package.
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.qubole` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.qubole.QuboleOperator](https://github.com/apache/airflow/blob/master/airflow/providers/qubole/operators/qubole.py) | [contrib.operators.qubole_operator.QuboleOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/qubole_operator.py) | -| [operators.qubole_check.QuboleCheckOperator](https://github.com/apache/airflow/blob/master/airflow/providers/qubole/operators/qubole_check.py) | [contrib.operators.qubole_check_operator.QuboleCheckOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/qubole_check_operator.py) | -| [operators.qubole_check.QuboleValueCheckOperator](https://github.com/apache/airflow/blob/master/airflow/providers/qubole/operators/qubole_check.py) | [contrib.operators.qubole_check_operator.QuboleValueCheckOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/qubole_check_operator.py) | - - -## Sensors - - - -### Moved sensors - -| Airflow 2.0 sensors: `airflow.providers.qubole` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:---------------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------| -| [sensors.qubole.QuboleFileSensor](https://github.com/apache/airflow/blob/master/airflow/providers/qubole/sensors/qubole.py) | [contrib.sensors.qubole_sensor.QuboleFileSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/qubole_sensor.py) | -| [sensors.qubole.QubolePartitionSensor](https://github.com/apache/airflow/blob/master/airflow/providers/qubole/sensors/qubole.py) | [contrib.sensors.qubole_sensor.QubolePartitionSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/qubole_sensor.py) | -| [sensors.qubole.QuboleSensor](https://github.com/apache/airflow/blob/master/airflow/providers/qubole/sensors/qubole.py) | [contrib.sensors.qubole_sensor.QuboleSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/qubole_sensor.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.qubole` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:-----------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------| -| [hooks.qubole.QuboleHook](https://github.com/apache/airflow/blob/master/airflow/providers/qubole/hooks/qubole.py) | 
[contrib.hooks.qubole_hook.QuboleHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/qubole_hook.py) | -| [hooks.qubole_check.QuboleCheckHook](https://github.com/apache/airflow/blob/master/airflow/providers/qubole/hooks/qubole_check.py) | [contrib.hooks.qubole_check_hook.QuboleCheckHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/qubole_check_hook.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [d5589673a](https://github.com/apache/airflow/commit/d5589673a95aaced0b851ea0a4061a010a924a82) | 2020-12-08 | Move dummy_operator.py to dummy.py (#11178) (#11293) | -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [1dcd3e13f](https://github.com/apache/airflow/commit/1dcd3e13fd0a078fc9440e91b77f6f87aa60dd3b) | 2020-12-05 | Add support for extra links coming from the providers (#12472) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| 
[872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [720912f67](https://github.com/apache/airflow/commit/720912f67b3af0bdcbac64d6b8bf6d51c6247e26) | 2020-10-02 | Strict type check for multiple providers (#11229) | -| [c58d60635](https://github.com/apache/airflow/commit/c58d60635dbab1a91f38e989f72f91645cb7eb62) | 2020-09-11 | Update qubole_hook to not remove pool as an arg for qubole_operator (#10820) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [36aa88ffc](https://github.com/apache/airflow/commit/36aa88ffc1e3feb5c6f4520871a4f6e3196c0804) | 2020-09-03 | Add jupytercmd and fix task failure when notify set as true in qubole operator (#10599) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [d1bce91bb](https://github.com/apache/airflow/commit/d1bce91bb21d5a468fa6a0207156c28fe1ca6513) | 2020-08-25 | PyDocStyle: Enable D403: Capitalized first word of docstring (#10530) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [3190db524](https://github.com/apache/airflow/commit/3190db52469f9d9a338231a9e8e7f333a6fbb638) | 2020-06-24 | [AIRFLOW-9347] Fix QuboleHook unable to add list to tags (#9349) | -| 
[d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4b06fde0f](https://github.com/apache/airflow/commit/4b06fde0f10ce178b3c336c5d901e3b089f2863d) | 2020-05-12 | Fix Flake8 errors (#8841) | -| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09 | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [de7e934ca](https://github.com/apache/airflow/commit/de7e934ca3f21ce82f67accf92811b3ac044476f) | 2020-03-17 | [AIRFLOW-7079] Remove redundant code for storing template_fields (#7750) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18 | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) | -| [f3ad5cf61](https://github.com/apache/airflow/commit/f3ad5cf6185b9d406d0fb0a4ecc0b5536f79217a) | 2020-02-03 | [AIRFLOW-4681] Make sensors module pylint compatible 
(#7309) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02 | [AIRFLOW-6708] Set unique logger names (#7330) | -| [a2d6a2f85](https://github.com/apache/airflow/commit/a2d6a2f85e07c38be479e91e4a27981f308f4711) | 2020-01-31 | [AIRFLOW-6687] Switch kubernetes tests to example_dags (#7299) | -| [83c037873](https://github.com/apache/airflow/commit/83c037873ff694eed67ba8b30f2d9c88b2c7c6f2) | 2020-01-30 | [AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/redis/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/redis/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index 43524a2eb477f..0000000000000 --- a/airflow/providers/redis/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,19 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [a3f02c462](https://github.com/apache/airflow/commit/a3f02c4627c28ad524cca73031670722cd6d8253) | 2020-01-24 | [AIRFLOW-6493] Add SSL configuration to Redis hook connections (#7234) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/redis/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/redis/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index a71e2c4f1ce9f..0000000000000 --- a/airflow/providers/redis/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,19 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25 | Remove all "noinspection" comments native to IntelliJ (#10525) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| 
[aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [0a2acf0b6](https://github.com/apache/airflow/commit/0a2acf0b6542b717f87dee6bbff43397bbb0e83b) | 2020-07-14 | Add type annotations for redis provider (#9815) | -| [e13a14c87](https://github.com/apache/airflow/commit/e13a14c8730f4f633d996dd7d3468fe827136a84) | 2020-06-21 | Enable & Fix Whitespace related PyDocStyle Checks (#9458) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/redis/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/redis/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 50ad6077367a3..0000000000000 --- a/airflow/providers/redis/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,11 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12 | Remove redundant None provided as default to dict.get() (#11448) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/redis/BACKPORT_PROVIDER_README.md b/airflow/providers/redis/BACKPORT_PROVIDER_README.md deleted file mode 100644 index c4b4ae7f20490..0000000000000 --- a/airflow/providers/redis/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,155 +0,0 @@ - - - -# Package apache-airflow-backport-providers-redis - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) - [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Sensors](#sensors) - - [Moved sensors](#moved-sensors) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for the `redis` provider. All classes for this provider package -are in the `airflow.providers.redis` Python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+, you need to upgrade Python to 3.6+ if you -want to use this backport package.
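In practice this means that, once the backport package is installed (see the Installation section that follows), a DAG running on Airflow 1.10.* can already use the Airflow 2.0 import paths. A minimal sketch, assuming the backport package is installed and a Redis connection with the stock `redis_default` id is configured:

```python
# A minimal sketch, assuming Airflow 1.10.* with
# apache-airflow-backport-providers-redis installed (installation below).
# The 2.0-style import path replaces the old airflow.contrib location;
# "redis_default" is an assumed, pre-configured Redis connection id.
from airflow.providers.redis.hooks.redis import RedisHook

hook = RedisHook(redis_conn_id="redis_default")
client = hook.get_conn()  # a plain redis.Redis client from the `redis` package
client.set("airflow:ping", "pong")
print(client.get("airflow:ping"))
```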
- - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-redis` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| redis | ~=3.2 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `redis` provider -are in the `airflow.providers.redis` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.redis` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:-------------------------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.redis_publish.RedisPublishOperator](https://github.com/apache/airflow/blob/master/airflow/providers/redis/operators/redis_publish.py) | [contrib.operators.redis_publish_operator.RedisPublishOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/redis_publish_operator.py) | - - -## Sensors - - - -### Moved sensors - -| Airflow 2.0 sensors: `airflow.providers.redis` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:------------------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [sensors.redis_key.RedisKeySensor](https://github.com/apache/airflow/blob/master/airflow/providers/redis/sensors/redis_key.py) | [contrib.sensors.redis_key_sensor.RedisKeySensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/redis_key_sensor.py) | -| [sensors.redis_pub_sub.RedisPubSubSensor](https://github.com/apache/airflow/blob/master/airflow/providers/redis/sensors/redis_pub_sub.py) | [contrib.sensors.redis_pub_sub_sensor.RedisPubSubSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/redis_pub_sub_sensor.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.redis` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------| -| [hooks.redis.RedisHook](https://github.com/apache/airflow/blob/master/airflow/providers/redis/hooks/redis.py) | [contrib.hooks.redis_hook.RedisHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/redis_hook.py) | - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | 
Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12 | Remove redundant None provided as default to dict.get() (#11448) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25 | Remove all "noinspection" comments native to IntelliJ (#10525) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [0a2acf0b6](https://github.com/apache/airflow/commit/0a2acf0b6542b717f87dee6bbff43397bbb0e83b) | 2020-07-14 | Add type annotations for redis provider (#9815) | -| [e13a14c87](https://github.com/apache/airflow/commit/e13a14c8730f4f633d996dd7d3468fe827136a84) | 2020-06-21 | Enable & Fix Whitespace related PyDocStyle Checks (#9458) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 
2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [a3f02c462](https://github.com/apache/airflow/commit/a3f02c4627c28ad524cca73031670722cd6d8253) | 2020-01-24 | [AIRFLOW-6493] Add SSL configuration to Redis hook connections (#7234) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/redis/CHANGELOG.rst b/airflow/providers/redis/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/redis/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. 
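The moved-classes tables in the README above translate directly into import changes in user DAGs. A hedged sketch of a migrated DAG follows; the dag/task ids, key, and channel names are illustrative, and only the import locations come from the tables:

```python
# Old Airflow 1.10 imports, for comparison:
#   from airflow.contrib.operators.redis_publish_operator import RedisPublishOperator
#   from airflow.contrib.sensors.redis_key_sensor import RedisKeySensor
from datetime import datetime

from airflow import DAG
from airflow.providers.redis.operators.redis_publish import RedisPublishOperator
from airflow.providers.redis.sensors.redis_key import RedisKeySensor

with DAG(dag_id="redis_example", start_date=datetime(2021, 1, 1), schedule_interval=None) as dag:
    # Wait until some external process has written the key...
    wait_for_key = RedisKeySensor(
        task_id="wait_for_key",
        redis_conn_id="redis_default",  # assumed connection id
        key="my_key",
    )
    # ...then announce it on a pub/sub channel.
    publish = RedisPublishOperator(
        task_id="publish",
        redis_conn_id="redis_default",
        channel="my_channel",
        message="my_key is ready",
    )
    wait_for_key >> publish
```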
diff --git a/airflow/providers/redis/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/redis/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index 48f492d9ea6bd..0000000000000 --- a/airflow/providers/redis/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,52 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [250436d96](https://github.com/apache/airflow/commit/250436d962c8c950d38c1eb5e54a998891648cc9) | 2020-11-10 | Fix spelling in Python files (#12230) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 
(#11487) | -| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12 | Remove redundant None provided as default to dict.get() (#11448) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25 | Remove all "noinspection" comments native to IntelliJ (#10525) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [0a2acf0b6](https://github.com/apache/airflow/commit/0a2acf0b6542b717f87dee6bbff43397bbb0e83b) | 2020-07-14 | Add type annotations for redis provider (#9815) | -| [e13a14c87](https://github.com/apache/airflow/commit/e13a14c8730f4f633d996dd7d3468fe827136a84) | 2020-06-21 | Enable & Fix Whitespace related PyDocStyle Checks (#9458) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [a3f02c462](https://github.com/apache/airflow/commit/a3f02c4627c28ad524cca73031670722cd6d8253) | 2020-01-24 | [AIRFLOW-6493] Add SSL configuration to Redis hook connections (#7234) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/redis/README.md b/airflow/providers/redis/README.md deleted file mode 100644 index 8263506c206c8..0000000000000 --- a/airflow/providers/redis/README.md +++ /dev/null @@ -1,159 +0,0 @@ - - - -# Package apache-airflow-providers-redis - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Sensors](#sensors) - - [Moved sensors](#moved-sensors) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for the `redis` provider. All classes for this provider package -are in the `airflow.providers.redis` Python package. - - - -## Installation - -NOTE! - -In November 2020, a new version of pip (20.3) was released with a new 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors during installation, depending on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 with -`pip install --upgrade pip==20.2.4` or, if you use pip 20.3, add the option -`--use-deprecated legacy-resolver` to your `pip install` command. - -You can install this package on top of an existing Airflow 2.* installation via -`pip install apache-airflow-providers-redis` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| redis | ~=3.2 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `redis` provider -are in the `airflow.providers.redis` package.
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.redis` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:-------------------------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.redis_publish.RedisPublishOperator](https://github.com/apache/airflow/blob/master/airflow/providers/redis/operators/redis_publish.py) | [contrib.operators.redis_publish_operator.RedisPublishOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/redis_publish_operator.py) | - - -## Sensors - - - -### Moved sensors - -| Airflow 2.0 sensors: `airflow.providers.redis` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:------------------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [sensors.redis_key.RedisKeySensor](https://github.com/apache/airflow/blob/master/airflow/providers/redis/sensors/redis_key.py) | [contrib.sensors.redis_key_sensor.RedisKeySensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/redis_key_sensor.py) | -| [sensors.redis_pub_sub.RedisPubSubSensor](https://github.com/apache/airflow/blob/master/airflow/providers/redis/sensors/redis_pub_sub.py) | [contrib.sensors.redis_pub_sub_sensor.RedisPubSubSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/redis_pub_sub_sensor.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.redis` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------| -| [hooks.redis.RedisHook](https://github.com/apache/airflow/blob/master/airflow/providers/redis/hooks/redis.py) | [contrib.hooks.redis_hook.RedisHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/redis_hook.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| 
[c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [250436d96](https://github.com/apache/airflow/commit/250436d962c8c950d38c1eb5e54a998891648cc9) | 2020-11-10 | Fix spelling in Python files (#12230) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12 | Remove redundant None provided as default to dict.get() (#11448) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| 
[3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25 | Remove all "noinspection" comments native to IntelliJ (#10525) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [0a2acf0b6](https://github.com/apache/airflow/commit/0a2acf0b6542b717f87dee6bbff43397bbb0e83b) | 2020-07-14 | Add type annotations for redis provider (#9815) | -| [e13a14c87](https://github.com/apache/airflow/commit/e13a14c8730f4f633d996dd7d3468fe827136a84) | 2020-06-21 | Enable & Fix Whitespace related PyDocStyle Checks (#9458) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [a3f02c462](https://github.com/apache/airflow/commit/a3f02c4627c28ad524cca73031670722cd6d8253) | 2020-01-24 | [AIRFLOW-6493] Add SSL configuration to Redis hook connections (#7234) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/salesforce/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/salesforce/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index 07ad9b2c08a93..0000000000000 --- a/airflow/providers/salesforce/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,27 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [ff342fc23](https://github.com/apache/airflow/commit/ff342fc230982dc5d88acfd5e5eab75187256b58) | 2020-05-17 | Added SalesforceHook missing method to return only dataframe (#8565) (#8644) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09 | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170) | -| [954619283](https://github.com/apache/airflow/commit/95461928365f255c79ab4a164ce60d8eebea29d7) | 2020-03-26 | bumping simple-salesforce to 1.0.0 (#7857) | -| [31efc931e](https://github.com/apache/airflow/commit/31efc931e32841b7da8decd576cafa1e5a6f6d95) | 2020-03-23 | Add missing call to Super class in 'salesforce' provider (#7824) | -| [6140356b8](https://github.com/apache/airflow/commit/6140356b80f68906e89ccf46941a949bdc4d43fa) | 2020-03-12 | [AIRFLOW-6481] Fix bug in SalesforceHook (#7703) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [61a8bb658](https://github.com/apache/airflow/commit/61a8bb65818521ccbb846e647103535b3e36b26d) | 2020-02-22 | [AIRFLOW-6879] Fix Failing CI: Update New import paths (#7500) | -| [a9ad0a929](https://github.com/apache/airflow/commit/a9ad0a929851b6912e0bb8551f1ff80b50281944) | 2020-02-22 | [AIRFLOW-6790] Add basic Tableau Integration (#7410) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02 | [AIRFLOW-6708] Set unique logger names (#7330) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/salesforce/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/salesforce/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 58614ad8e50ce..0000000000000 --- a/airflow/providers/salesforce/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,18 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| 
[5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [45669bea4](https://github.com/apache/airflow/commit/45669bea4f690021b69a7d2afa8be9567ae0c49d) | 2020-09-24 | Increasing type coverage for salesforce provide (#11135) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/salesforce/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/salesforce/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 50ad6077367a3..0000000000000 --- a/airflow/providers/salesforce/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,11 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12 | Remove redundant None provided as default to dict.get() (#11448) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/salesforce/BACKPORT_PROVIDER_README.md b/airflow/providers/salesforce/BACKPORT_PROVIDER_README.md deleted file mode 100644 index 6b6d997c9ef00..0000000000000 --- a/airflow/providers/salesforce/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,168 +0,0 @@ 
- - - -# Package apache-airflow-backport-providers-salesforce - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [New operators](#new-operators) - - [Sensors](#sensors) - - [New sensors](#new-sensors) - - [Hooks](#hooks) - - [New hooks](#new-hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for `salesforce` provider. All classes for this provider package -are in `airflow.providers.salesforce` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-salesforce` - -## PIP requirements - -| PIP package | Version required | -|:------------------|:-------------------| -| simple-salesforce | >=1.0.0 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `salesforce` provider -are in the `airflow.providers.salesforce` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - -### New operators - -| New Airflow 2.0 operators: `airflow.providers.salesforce` package | -|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.tableau_refresh_workbook.TableauRefreshWorkbookOperator](https://github.com/apache/airflow/blob/master/airflow/providers/salesforce/operators/tableau_refresh_workbook.py) | - - - -## Sensors - - -### New sensors - -| New Airflow 2.0 sensors: `airflow.providers.salesforce` package | -|:--------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [sensors.tableau_job_status.TableauJobStatusSensor](https://github.com/apache/airflow/blob/master/airflow/providers/salesforce/sensors/tableau_job_status.py) | - - - -## Hooks - - -### New hooks - -| New Airflow 2.0 hooks: `airflow.providers.salesforce` package | -|:-------------------------------------------------------------------------------------------------------------------------| -| [hooks.tableau.TableauHook](https://github.com/apache/airflow/blob/master/airflow/providers/salesforce/hooks/tableau.py) | - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.salesforce` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------| -| [hooks.salesforce.SalesforceHook](https://github.com/apache/airflow/blob/master/airflow/providers/salesforce/hooks/salesforce.py) | 
[contrib.hooks.salesforce_hook.SalesforceHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/salesforce_hook.py) | - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12 | Remove redundant None provided as default to dict.get() (#11448) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [45669bea4](https://github.com/apache/airflow/commit/45669bea4f690021b69a7d2afa8be9567ae0c49d) | 2020-09-24 | Increasing type coverage for salesforce provide (#11135) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 
2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [ff342fc23](https://github.com/apache/airflow/commit/ff342fc230982dc5d88acfd5e5eab75187256b58) | 2020-05-17 | Added SalesforceHook missing method to return only dataframe (#8565) (#8644) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09 | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170) | -| [954619283](https://github.com/apache/airflow/commit/95461928365f255c79ab4a164ce60d8eebea29d7) | 2020-03-26 | bumping simple-salesforce to 1.0.0 (#7857) | -| [31efc931e](https://github.com/apache/airflow/commit/31efc931e32841b7da8decd576cafa1e5a6f6d95) | 2020-03-23 | Add missing call to Super class in 'salesforce' provider (#7824) | -| [6140356b8](https://github.com/apache/airflow/commit/6140356b80f68906e89ccf46941a949bdc4d43fa) | 2020-03-12 | [AIRFLOW-6481] Fix bug in SalesforceHook (#7703) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [61a8bb658](https://github.com/apache/airflow/commit/61a8bb65818521ccbb846e647103535b3e36b26d) | 2020-02-22 | [AIRFLOW-6879] Fix Failing CI: Update New import paths (#7500) | -| [a9ad0a929](https://github.com/apache/airflow/commit/a9ad0a929851b6912e0bb8551f1ff80b50281944) | 2020-02-22 | [AIRFLOW-6790] Add basic Tableau Integration (#7410) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| 
[cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02 | [AIRFLOW-6708] Set unique logger names (#7330) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/salesforce/CHANGELOG.rst b/airflow/providers/salesforce/CHANGELOG.rst new file mode 100644 index 0000000000000..b4eb0edda964a --- /dev/null +++ b/airflow/providers/salesforce/CHANGELOG.rst @@ -0,0 +1,41 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.2 +..... + +Tableau provider moved to a separate 'tableau' provider + +Things done: + + - Tableau classes import classes from the 'tableau' provider with a deprecation warning + + +1.0.1 +..... + +Updated documentation and readme files. + + +1.0.0 +..... + +Initial version of the provider. diff --git a/airflow/providers/salesforce/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/salesforce/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index d5b752fafd737..0000000000000 --- a/airflow/providers/salesforce/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,58 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 
2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12 | Remove redundant None provided as default to dict.get() (#11448) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [45669bea4](https://github.com/apache/airflow/commit/45669bea4f690021b69a7d2afa8be9567ae0c49d) | 2020-09-24 | Increasing type coverage for salesforce provide (#11135) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | 
Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [ff342fc23](https://github.com/apache/airflow/commit/ff342fc230982dc5d88acfd5e5eab75187256b58) | 2020-05-17 | Added SalesforceHook missing method to return only dataframe (#8565) (#8644) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09 | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170) | -| [954619283](https://github.com/apache/airflow/commit/95461928365f255c79ab4a164ce60d8eebea29d7) | 2020-03-26 | bumping simple-salesforce to 1.0.0 (#7857) | -| [31efc931e](https://github.com/apache/airflow/commit/31efc931e32841b7da8decd576cafa1e5a6f6d95) | 2020-03-23 | Add missing call to Super class in 'salesforce' provider (#7824) | -| [6140356b8](https://github.com/apache/airflow/commit/6140356b80f68906e89ccf46941a949bdc4d43fa) | 2020-03-12 | [AIRFLOW-6481] Fix bug in SalesforceHook (#7703) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [61a8bb658](https://github.com/apache/airflow/commit/61a8bb65818521ccbb846e647103535b3e36b26d) | 2020-02-22 | [AIRFLOW-6879] Fix Failing CI: Update New import paths (#7500) | -| [a9ad0a929](https://github.com/apache/airflow/commit/a9ad0a929851b6912e0bb8551f1ff80b50281944) | 2020-02-22 | [AIRFLOW-6790] Add basic Tableau Integration (#7410) | -| 
[97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02 | [AIRFLOW-6708] Set unique logger names (#7330) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/salesforce/README.md b/airflow/providers/salesforce/README.md deleted file mode 100644 index 2a3841539d1ca..0000000000000 --- a/airflow/providers/salesforce/README.md +++ /dev/null @@ -1,171 +0,0 @@ - - - -# Package apache-airflow-providers-salesforce - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [New operators](#new-operators) - - [Sensors](#sensors) - - [New sensors](#new-sensors) - - [Hooks](#hooks) - - [New hooks](#new-hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for `salesforce` provider. All classes for this provider package -are in `airflow.providers.salesforce` python package. - - - -## Installation - -NOTE! - -On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option -`--use-deprecated legacy-resolver` to your pip install command. - -You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-salesforce` - -## PIP requirements - -| PIP package | Version required | -|:------------------|:-------------------| -| simple-salesforce | >=1.0.0 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `salesforce` provider -are in the `airflow.providers.salesforce` package. 
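As an editorial illustration (not part of the original README), the same import-path migration for the classes in the tables below:

```python
# Airflow 1.10.* location (see the "Moved hooks" table below):
# from airflow.contrib.hooks.salesforce_hook import SalesforceHook

# Airflow 2.0 provider package locations:
from airflow.providers.salesforce.hooks.salesforce import SalesforceHook
from airflow.providers.salesforce.hooks.tableau import TableauHook
from airflow.providers.salesforce.sensors.tableau_job_status import TableauJobStatusSensor
```

Note that elsewhere in this diff the Tableau classes gain deprecation shims, so the preferred import for them becomes the `airflow.providers.tableau` package.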
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - -### New operators - -| New Airflow 2.0 operators: `airflow.providers.salesforce` package | -|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.tableau_refresh_workbook.TableauRefreshWorkbookOperator](https://github.com/apache/airflow/blob/master/airflow/providers/salesforce/operators/tableau_refresh_workbook.py) | - - - -## Sensors - - -### New sensors - -| New Airflow 2.0 sensors: `airflow.providers.salesforce` package | -|:--------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [sensors.tableau_job_status.TableauJobStatusSensor](https://github.com/apache/airflow/blob/master/airflow/providers/salesforce/sensors/tableau_job_status.py) | - - - -## Hooks - - -### New hooks - -| New Airflow 2.0 hooks: `airflow.providers.salesforce` package | -|:-------------------------------------------------------------------------------------------------------------------------| -| [hooks.tableau.TableauHook](https://github.com/apache/airflow/blob/master/airflow/providers/salesforce/hooks/tableau.py) | - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.salesforce` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------| -| [hooks.salesforce.SalesforceHook](https://github.com/apache/airflow/blob/master/airflow/providers/salesforce/hooks/salesforce.py) | [contrib.hooks.salesforce_hook.SalesforceHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/salesforce_hook.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| 
[6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12 | Remove redundant None provided as default to dict.get() (#11448) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [45669bea4](https://github.com/apache/airflow/commit/45669bea4f690021b69a7d2afa8be9567ae0c49d) | 2020-09-24 | Increasing type coverage for salesforce provide (#11135) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| 
[24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [ff342fc23](https://github.com/apache/airflow/commit/ff342fc230982dc5d88acfd5e5eab75187256b58) | 2020-05-17 | Added SalesforceHook missing method to return only dataframe (#8565) (#8644) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [87969a350](https://github.com/apache/airflow/commit/87969a350ddd41e9e77776af6d780b31e363eaca) | 2020-04-09 | [AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170) | -| [954619283](https://github.com/apache/airflow/commit/95461928365f255c79ab4a164ce60d8eebea29d7) | 2020-03-26 | bumping simple-salesforce to 1.0.0 (#7857) | -| [31efc931e](https://github.com/apache/airflow/commit/31efc931e32841b7da8decd576cafa1e5a6f6d95) | 2020-03-23 | Add missing call to Super class in 'salesforce' provider (#7824) | -| [6140356b8](https://github.com/apache/airflow/commit/6140356b80f68906e89ccf46941a949bdc4d43fa) | 2020-03-12 | [AIRFLOW-6481] Fix bug in SalesforceHook (#7703) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| 
[61a8bb658](https://github.com/apache/airflow/commit/61a8bb65818521ccbb846e647103535b3e36b26d) | 2020-02-22 | [AIRFLOW-6879] Fix Failing CI: Update New import paths (#7500) | -| [a9ad0a929](https://github.com/apache/airflow/commit/a9ad0a929851b6912e0bb8551f1ff80b50281944) | 2020-02-22 | [AIRFLOW-6790] Add basic Tableau Integration (#7410) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [cf141506a](https://github.com/apache/airflow/commit/cf141506a25dbba279b85500d781f7e056540721) | 2020-02-02 | [AIRFLOW-6708] Set unique logger names (#7330) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/salesforce/hooks/tableau.py b/airflow/providers/salesforce/hooks/tableau.py index 51c2f98a2f8d6..cf5f7f3e041ea 100644 --- a/airflow/providers/salesforce/hooks/tableau.py +++ b/airflow/providers/salesforce/hooks/tableau.py @@ -14,102 +14,14 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -from enum import Enum -from typing import Any, Optional -from tableauserverclient import Pager, PersonalAccessTokenAuth, Server, TableauAuth -from tableauserverclient.server import Auth +import warnings -from airflow.hooks.base import BaseHook +# pylint: disable=unused-import +from airflow.providers.tableau.hooks.tableau import TableauHook, TableauJobFinishCode # noqa - -class TableauJobFinishCode(Enum): - """ - The finish code indicates the status of the job. - - .. seealso:: https://help.tableau.com/current/api/rest_api/en-us/REST/rest_api_ref.htm#query_job - - """ - - PENDING = -1 - SUCCESS = 0 - ERROR = 1 - CANCELED = 2 - - -class TableauHook(BaseHook): - """ - Connects to the Tableau Server Instance and allows to communicate with it. - - .. seealso:: https://tableau.github.io/server-client-python/docs/ - - :param site_id: The id of the site where the workbook belongs to. - It will connect to the default site if you don't provide an id. - :type site_id: Optional[str] - :param tableau_conn_id: The Tableau Connection id containing the credentials - to authenticate to the Tableau Server. - :type tableau_conn_id: str - """ - - conn_name_attr = 'tableau_conn_id' - default_conn_name = 'tableau_default' - conn_type = 'tableau' - hook_name = 'Tableau' - - def __init__(self, site_id: Optional[str] = None, tableau_conn_id: str = default_conn_name) -> None: - super().__init__() - self.tableau_conn_id = tableau_conn_id - self.conn = self.get_connection(self.tableau_conn_id) - self.site_id = site_id or self.conn.extra_dejson.get('site_id', '') - self.server = Server(self.conn.host, use_server_version=True) - self.tableau_conn = None - - def __enter__(self): - if not self.tableau_conn: - self.tableau_conn = self.get_conn() - return self - - def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None: - self.server.auth.sign_out() - - def get_conn(self) -> Auth.contextmgr: - """ - Signs in to the Tableau Server and automatically signs out if used as ContextManager. - - :return: an authorized Tableau Server Context Manager object. 
- :rtype: tableauserverclient.server.Auth.contextmgr - """ - if self.conn.login and self.conn.password: - return self._auth_via_password() - if 'token_name' in self.conn.extra_dejson and 'personal_access_token' in self.conn.extra_dejson: - return self._auth_via_token() - raise NotImplementedError('No Authentication method found for given Credentials!') - - def _auth_via_password(self) -> Auth.contextmgr: - tableau_auth = TableauAuth( - username=self.conn.login, password=self.conn.password, site_id=self.site_id - ) - return self.server.auth.sign_in(tableau_auth) - - def _auth_via_token(self) -> Auth.contextmgr: - tableau_auth = PersonalAccessTokenAuth( - token_name=self.conn.extra_dejson['token_name'], - personal_access_token=self.conn.extra_dejson['personal_access_token'], - site_id=self.site_id, - ) - return self.server.auth.sign_in_with_personal_access_token(tableau_auth) - - def get_all(self, resource_name: str) -> Pager: - """ - Get all items of the given resource. - - .. seealso:: https://tableau.github.io/server-client-python/docs/page-through-results - - :param resource_name: The name of the resource to paginate. - For example: jobs or workbooks - :type resource_name: str - :return: all items by returning a Pager. - :rtype: tableauserverclient.Pager - """ - resource = getattr(self.server, resource_name) - return Pager(resource.get) +warnings.warn( + "This module is deprecated. Please use `airflow.providers.tableau.hooks.tableau`.", + DeprecationWarning, + stacklevel=2, +) diff --git a/airflow/providers/salesforce/operators/tableau_refresh_workbook.py b/airflow/providers/salesforce/operators/tableau_refresh_workbook.py index 7d4ffdcc3e1b0..309af331c6584 100644 --- a/airflow/providers/salesforce/operators/tableau_refresh_workbook.py +++ b/airflow/providers/salesforce/operators/tableau_refresh_workbook.py @@ -14,84 +14,16 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -from typing import Optional -from tableauserverclient import WorkbookItem +import warnings -from airflow.exceptions import AirflowException -from airflow.models import BaseOperator -from airflow.providers.salesforce.hooks.tableau import TableauHook -from airflow.utils.decorators import apply_defaults +# pylint: disable=unused-import +from airflow.providers.tableau.operators.tableau_refresh_workbook import ( # noqa + TableauRefreshWorkbookOperator, +) - -class TableauRefreshWorkbookOperator(BaseOperator): - """ - Refreshes a Tableau Workbook/Extract - - .. seealso:: https://tableau.github.io/server-client-python/docs/api-ref#workbooks - - :param workbook_name: The name of the workbook to refresh. - :type workbook_name: str - :param site_id: The id of the site where the workbook belongs to. - :type site_id: Optional[str] - :param blocking: By default the extract refresh will be blocking means it will wait until it has finished. - :type blocking: bool - :param tableau_conn_id: The Tableau Connection id containing the credentials - to authenticate to the Tableau Server. 
- :type tableau_conn_id: str - """ - - @apply_defaults - def __init__( - self, - *, - workbook_name: str, - site_id: Optional[str] = None, - blocking: bool = True, - tableau_conn_id: str = 'tableau_default', - **kwargs, - ) -> None: - super().__init__(**kwargs) - self.workbook_name = workbook_name - self.site_id = site_id - self.blocking = blocking - self.tableau_conn_id = tableau_conn_id - - def execute(self, context: dict) -> str: - """ - Executes the Tableau Extract Refresh and pushes the job id to xcom. - - :param context: The task context during execution. - :type context: dict - :return: the id of the job that executes the extract refresh - :rtype: str - """ - with TableauHook(self.site_id, self.tableau_conn_id) as tableau_hook: - workbook = self._get_workbook_by_name(tableau_hook) - - job_id = self._refresh_workbook(tableau_hook, workbook.id) - if self.blocking: - from airflow.providers.salesforce.sensors.tableau_job_status import TableauJobStatusSensor - - TableauJobStatusSensor( - job_id=job_id, - site_id=self.site_id, - tableau_conn_id=self.tableau_conn_id, - task_id='wait_until_succeeded', - dag=None, - ).execute(context={}) - self.log.info('Workbook %s has been successfully refreshed.', self.workbook_name) - return job_id - - def _get_workbook_by_name(self, tableau_hook: TableauHook) -> WorkbookItem: - for workbook in tableau_hook.get_all(resource_name='workbooks'): - if workbook.name == self.workbook_name: - self.log.info('Found matching workbook with id %s', workbook.id) - return workbook - - raise AirflowException(f'Workbook {self.workbook_name} not found!') - - def _refresh_workbook(self, tableau_hook: TableauHook, workbook_id: str) -> str: - job = tableau_hook.server.workbooks.refresh(workbook_id) - self.log.info('Refreshing Workbook %s...', self.workbook_name) - return job.id +warnings.warn( + "This module is deprecated. Please use `airflow.providers.tableau.operators.tableau_refresh_workbook`.", + DeprecationWarning, + stacklevel=2, +) diff --git a/airflow/providers/salesforce/provider.yaml b/airflow/providers/salesforce/provider.yaml index fe739ff133add..c0992d86271c2 100644 --- a/airflow/providers/salesforce/provider.yaml +++ b/airflow/providers/salesforce/provider.yaml @@ -22,6 +22,8 @@ description: | `Salesforce `__ versions: + - 1.0.2 + - 1.0.1 - 1.0.0 integrations: @@ -40,10 +42,12 @@ sensors: - airflow.providers.salesforce.sensors.tableau_job_status hooks: + - integration-name: Tableau + python-modules: + - airflow.providers.salesforce.hooks.tableau - integration-name: Salesforce python-modules: - airflow.providers.salesforce.hooks.salesforce - - airflow.providers.salesforce.hooks.tableau hook-class-names: - airflow.providers.salesforce.hooks.tableau.TableauHook diff --git a/airflow/providers/salesforce/sensors/tableau_job_status.py b/airflow/providers/salesforce/sensors/tableau_job_status.py index 4939203a2e7f6..076159ee73e24 100644 --- a/airflow/providers/salesforce/sensors/tableau_job_status.py +++ b/airflow/providers/salesforce/sensors/tableau_job_status.py @@ -14,63 +14,17 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
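The shim modules introduced in this diff (the Tableau hook and operator above, and the sensor whose hunk follows) all use the same pattern: re-export the classes from the new `tableau` provider and call `warnings.warn(..., DeprecationWarning)` at import time. A minimal sketch of what user code observes, assuming a fresh interpreter so the module-level warning actually fires:

```python
import warnings

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    # The old salesforce path still works but now delegates to the tableau provider
    from airflow.providers.salesforce.hooks.tableau import TableauHook  # noqa: F401

# The shim's module-level warnings.warn(...) is recorded here
assert any(issubclass(w.category, DeprecationWarning) for w in caught)
```

The forward-looking import is `from airflow.providers.tableau.hooks.tableau import TableauHook`.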
-from typing import Optional -from airflow.exceptions import AirflowException -from airflow.providers.salesforce.hooks.tableau import TableauHook, TableauJobFinishCode -from airflow.sensors.base import BaseSensorOperator -from airflow.utils.decorators import apply_defaults +import warnings +# pylint: disable=unused-import +from airflow.providers.tableau.sensors.tableau_job_status import ( # noqa + TableauJobFailedException, + TableauJobStatusSensor, +) -class TableauJobFailedException(AirflowException): - """An exception that indicates that a Job failed to complete.""" - - -class TableauJobStatusSensor(BaseSensorOperator): - """ - Watches the status of a Tableau Server Job. - - .. seealso:: https://tableau.github.io/server-client-python/docs/api-ref#jobs - - :param job_id: The job to watch. - :type job_id: str - :param site_id: The id of the site where the workbook belongs to. - :type site_id: Optional[str] - :param tableau_conn_id: The Tableau Connection id containing the credentials - to authenticate to the Tableau Server. - :type tableau_conn_id: str - """ - - template_fields = ('job_id',) - - @apply_defaults - def __init__( - self, - *, - job_id: str, - site_id: Optional[str] = None, - tableau_conn_id: str = 'tableau_default', - **kwargs, - ) -> None: - super().__init__(**kwargs) - self.tableau_conn_id = tableau_conn_id - self.job_id = job_id - self.site_id = site_id - - def poke(self, context: dict) -> bool: - """ - Pokes until the job has successfully finished. - - :param context: The task context during execution. - :type context: dict - :return: True if it succeeded and False if not. - :rtype: bool - """ - with TableauHook(self.site_id, self.tableau_conn_id) as tableau_hook: - finish_code = TableauJobFinishCode( - int(tableau_hook.server.jobs.get_by_id(self.job_id).finish_code) - ) - self.log.info('Current finishCode is %s (%s)', finish_code.name, finish_code.value) - if finish_code in [TableauJobFinishCode.ERROR, TableauJobFinishCode.CANCELED]: - raise TableauJobFailedException('The Tableau Refresh Workbook Job failed!') - return finish_code == TableauJobFinishCode.SUCCESS +warnings.warn( + "This module is deprecated. 
Please use `airflow.providers.tableau.sensors.tableau_job_status`.", + DeprecationWarning, + stacklevel=2, +) diff --git a/airflow/providers/samba/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/samba/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index 085f57de4862a..0000000000000 --- a/airflow/providers/samba/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,19 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/samba/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/samba/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 487f0e843e9fe..0000000000000 --- a/airflow/providers/samba/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,13 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 
2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [720912f67](https://github.com/apache/airflow/commit/720912f67b3af0bdcbac64d6b8bf6d51c6247e26) | 2020-10-02 | Strict type check for multiple providers (#11229) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/samba/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/samba/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 528a7e085f91f..0000000000000 --- a/airflow/providers/samba/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,10 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/samba/BACKPORT_PROVIDER_README.md b/airflow/providers/samba/BACKPORT_PROVIDER_README.md deleted file mode 100644 index 6d13f19db073c..0000000000000 --- a/airflow/providers/samba/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,121 +0,0 @@ - - - -# Package apache-airflow-backport-providers-samba - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for `samba` provider. All classes for this provider package -are in `airflow.providers.samba` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. 
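The three salesforce Tableau modules above (hook, operator, sensor) are reduced to import shims: each re-exports the relocated classes from `airflow.providers.tableau.*` and emits a `DeprecationWarning` at import time, so existing DAGs keep working while signalling the move. A minimal migration sketch, assuming a fresh interpreter with both the salesforce and tableau provider packages installed (`TableauHook` is the class moved in this diff):

```python
import warnings

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    # Old location still works: importing the shim module triggers its
    # module-level warnings.warn(..., DeprecationWarning, stacklevel=2).
    # (Only on first import -- Python caches modules in sys.modules.)
    from airflow.providers.salesforce.hooks.tableau import TableauHook  # noqa: F401

assert any(issubclass(w.category, DeprecationWarning) for w in caught)

# Preferred import path going forward:
from airflow.providers.tableau.hooks.tableau import TableauHook  # noqa: F401
```

Note that Python suppresses `DeprecationWarning` by default outside `__main__` and test runners, so the shims stay silent in most deployments unless warning filters are enabled, for example via `python -W default::DeprecationWarning`.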
- - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-samba` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| pysmbclient | >=0.1.3 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `samba` provider -are in the `airflow.providers.samba` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.samba` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------| -| [hooks.samba.SambaHook](https://github.com/apache/airflow/blob/master/airflow/providers/samba/hooks/samba.py) | [hooks.samba_hook.SambaHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/samba_hook.py) | - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [720912f67](https://github.com/apache/airflow/commit/720912f67b3af0bdcbac64d6b8bf6d51c6247e26) | 2020-10-02 | Strict type check for multiple providers (#11229) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed 
release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/samba/CHANGELOG.rst b/airflow/providers/samba/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/samba/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. 
+ + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. diff --git a/airflow/providers/samba/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/samba/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index 0f3798c999b66..0000000000000 --- a/airflow/providers/samba/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,43 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| 
[ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [720912f67](https://github.com/apache/airflow/commit/720912f67b3af0bdcbac64d6b8bf6d51c6247e26) | 2020-10-02 | Strict type check for multiple providers (#11229) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/samba/README.md b/airflow/providers/samba/README.md deleted file mode 100644 index 699408132b81f..0000000000000 --- a/airflow/providers/samba/README.md +++ /dev/null @@ -1,123 +0,0 @@ - - - -# Package apache-airflow-providers-samba - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for `samba` provider. All classes for this provider package -are in `airflow.providers.samba` python package. - - - -## Installation - -NOTE! - -On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option -`--use-deprecated legacy-resolver` to your pip install command. - -You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-samba` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| pysmbclient | >=0.1.3 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `samba` provider -are in the `airflow.providers.samba` package. 
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.samba` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------| -| [hooks.samba.SambaHook](https://github.com/apache/airflow/blob/master/airflow/providers/samba/hooks/samba.py) | [hooks.samba_hook.SambaHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/samba_hook.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| 
[349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [720912f67](https://github.com/apache/airflow/commit/720912f67b3af0bdcbac64d6b8bf6d51c6247e26) | 2020-10-02 | Strict type check for multiple providers (#11229) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/segment/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/segment/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index 3fbe69dbbdbeb..0000000000000 --- a/airflow/providers/segment/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,19 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/segment/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/segment/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index b731ae13e733e..0000000000000 --- a/airflow/providers/segment/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,16 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [85cc2a605](https://github.com/apache/airflow/commit/85cc2a605e82c6ad26f310e989b365d9d490f2f3) | 2020-08-03 | Add typing annotations to Segment provider (#10120) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| 
[d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/segment/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/segment/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 528a7e085f91f..0000000000000 --- a/airflow/providers/segment/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,10 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/segment/BACKPORT_PROVIDER_README.md b/airflow/providers/segment/BACKPORT_PROVIDER_README.md deleted file mode 100644 index 838ac6192a4f6..0000000000000 --- a/airflow/providers/segment/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,137 +0,0 @@ - - - -# Package apache-airflow-backport-providers-segment - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for `segment` provider. All classes for this provider package -are in `airflow.providers.segment` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-segment` - -## PIP requirements - -| PIP package | Version required | -|:-----------------|:-------------------| -| analytics-python | >=1.2.9 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `segment` provider -are in the `airflow.providers.segment` package. 
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.segment` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.segment_track_event.SegmentTrackEventOperator](https://github.com/apache/airflow/blob/master/airflow/providers/segment/operators/segment_track_event.py) | [contrib.operators.segment_track_event_operator.SegmentTrackEventOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/segment_track_event_operator.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.segment` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------| -| [hooks.segment.SegmentHook](https://github.com/apache/airflow/blob/master/airflow/providers/segment/hooks/segment.py) | [contrib.hooks.segment_hook.SegmentHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/segment_hook.py) | - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on 
Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [85cc2a605](https://github.com/apache/airflow/commit/85cc2a605e82c6ad26f310e989b365d9d490f2f3) | 2020-08-03 | Add typing annotations to Segment provider (#10120) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/segment/CHANGELOG.rst b/airflow/providers/segment/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/segment/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. 
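Because the relocated Tableau operator and sensor keep the constructor signatures removed earlier in this diff, migrating a DAG is purely an import-path change. A hypothetical usage sketch against Airflow 2.0 follows; the workbook name and connection id are placeholders, and `blocking=False` makes the operator return the refresh job id (pushed to XCom by `execute()`, as in the removed code) so a standalone sensor can wait on it:

```python
from airflow import DAG
from airflow.providers.tableau.operators.tableau_refresh_workbook import (
    TableauRefreshWorkbookOperator,
)
from airflow.providers.tableau.sensors.tableau_job_status import TableauJobStatusSensor
from airflow.utils.dates import days_ago

with DAG("tableau_refresh", start_date=days_ago(1), schedule_interval=None) as dag:
    refresh = TableauRefreshWorkbookOperator(
        task_id="refresh_workbook",
        workbook_name="MyWorkbook",   # placeholder: workbook is looked up by name
        blocking=False,               # return immediately; job id goes to XCom
    )
    wait = TableauJobStatusSensor(
        task_id="wait_for_refresh",
        job_id=refresh.output,        # XComArg: 'job_id' is a templated field
        tableau_conn_id="tableau_default",
    )
    refresh >> wait
```

With `blocking=True` (the default) the operator waits internally by running the same sensor in-line, so the separate sensor task is only needed when the wait should be an independently retryable step.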
diff --git a/airflow/providers/segment/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/segment/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index ed0863bb9b22b..0000000000000 --- a/airflow/providers/segment/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,47 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers 
README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [85cc2a605](https://github.com/apache/airflow/commit/85cc2a605e82c6ad26f310e989b365d9d490f2f3) | 2020-08-03 | Add typing annotations to Segment provider (#10120) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/segment/README.md b/airflow/providers/segment/README.md deleted file mode 100644 index 36e23c5150322..0000000000000 --- a/airflow/providers/segment/README.md +++ /dev/null @@ -1,140 +0,0 @@ - - - -# Package apache-airflow-providers-segment - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for `segment` provider. All classes for this provider package -are in `airflow.providers.segment` python package. - - - -## Installation - -NOTE! - -On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option -`--use-deprecated legacy-resolver` to your pip install command. - -You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-segment` - -## PIP requirements - -| PIP package | Version required | -|:-----------------|:-------------------| -| analytics-python | >=1.2.9 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `segment` provider -are in the `airflow.providers.segment` package. 
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.segment` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.segment_track_event.SegmentTrackEventOperator](https://github.com/apache/airflow/blob/master/airflow/providers/segment/operators/segment_track_event.py) | [contrib.operators.segment_track_event_operator.SegmentTrackEventOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/segment_track_event_operator.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.segment` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------| -| [hooks.segment.SegmentHook](https://github.com/apache/airflow/blob/master/airflow/providers/segment/hooks/segment.py) | [contrib.hooks.segment_hook.SegmentHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/segment_hook.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update 
provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [85cc2a605](https://github.com/apache/airflow/commit/85cc2a605e82c6ad26f310e989b365d9d490f2f3) | 2020-08-03 | Add typing annotations to Segment provider (#10120) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| 
[f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/sendgrid/CHANGELOG.rst b/airflow/providers/sendgrid/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/sendgrid/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. 
diff --git a/airflow/providers/sendgrid/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/sendgrid/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index 5ffa06d01407e..0000000000000 --- a/airflow/providers/sendgrid/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,16 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------| -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [c5806efb5](https://github.com/apache/airflow/commit/c5806efb54ad06049e13a5fc7df2f03846fe566e) | 2020-11-10 | Added missing sendgrid readme (#12245) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12 | Remove redundant None provided as default to dict.get() (#11448) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [7269d15ad](https://github.com/apache/airflow/commit/7269d15adfb74188359757b1705485f5d368486a) | 2020-08-03 | [GH-9708] Add type coverage to Sendgrid module (#10134) | -| [a97400d0d](https://github.com/apache/airflow/commit/a97400d0d89ccd6de0cab3a50c58a2969d164a0d) | 2020-06-28 | Move out sendgrid emailer from airflow.contrib (#9355) | diff --git a/airflow/providers/sendgrid/README.md b/airflow/providers/sendgrid/README.md deleted file mode 100644 index eee58da774fcd..0000000000000 --- a/airflow/providers/sendgrid/README.md +++ /dev/null @@ -1,83 +0,0 @@ - - - -# Package apache-airflow-providers-sendgrid - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) - -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for the `sendgrid` provider. All classes for this provider package -are in the `airflow.providers.sendgrid` python package. - - - -## Installation - -NOTE! - -In November 2020, a new version of pip (20.3) was released with a new 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to installation errors, depending on your choice -of extras. To install Airflow you need to either downgrade pip to version 20.2.4 with -`pip install --upgrade pip==20.2.4` or, if you use pip 20.3, add the option -`--use-deprecated legacy-resolver` to your pip install command.
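For example, the two workarounds above look like this (a sketch only; the package name is the one from this README, and the same flags apply to any pip install):

```bash
# Option 1: pin pip back to the last release that predates the 2020 resolver
pip install --upgrade pip==20.2.4

# Option 2: stay on pip 20.3 but opt into the legacy resolver for this install
pip install --use-deprecated legacy-resolver apache-airflow-providers-sendgrid
```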
- -You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-sendgrid` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| sendgrid | >=6.0.0,<7 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `sendgrid` provider -are in the `airflow.providers.sendgrid` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------| -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [c5806efb5](https://github.com/apache/airflow/commit/c5806efb54ad06049e13a5fc7df2f03846fe566e) | 2020-11-10 | Added missing sendgrid readme (#12245) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12 | Remove redundant None provided as default to dict.get() (#11448) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [7269d15ad](https://github.com/apache/airflow/commit/7269d15adfb74188359757b1705485f5d368486a) | 2020-08-03 | [GH-9708] Add type coverage to Sendgrid module (#10134) | -| [a97400d0d](https://github.com/apache/airflow/commit/a97400d0d89ccd6de0cab3a50c58a2969d164a0d) | 2020-06-28 | Move out sendgrid emailer from airflow.contrib (#9355) | diff --git a/airflow/providers/sftp/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/sftp/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index 1018d78e0a5ab..0000000000000 --- a/airflow/providers/sftp/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,21 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | 
Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [bac0ab27c](https://github.com/apache/airflow/commit/bac0ab27cfc89e715efddc97214fcd7738084361) | 2020-03-30 | close sftp connection without error (#7953) | -| [42eef3821](https://github.com/apache/airflow/commit/42eef38217e709bc7a7f71bf0286e9e61293a43e) | 2020-03-07 | [AIRFLOW-6877] Add cross-provider dependencies as extras (#7506) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [ceea293c1](https://github.com/apache/airflow/commit/ceea293c1652240e7e856c201e4341a87ef97a0f) | 2020-01-28 | [AIRFLOW-6656] Fix AIP-21 moving (#7272) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | -| [69629a5a9](https://github.com/apache/airflow/commit/69629a5a948ab2c4ac04a4a4dca6ac86d19c11bd) | 2019-12-09 | [AIRFLOW-5807] Move SFTP from contrib to providers. 
(#6464) | diff --git a/airflow/providers/sftp/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/sftp/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 205878a7405da..0000000000000 --- a/airflow/providers/sftp/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,18 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [0161b5ea2](https://github.com/apache/airflow/commit/0161b5ea2b805d62a0317e5cab6f797b92c8abf1) | 2020-09-26 | Increasing type coverage for multiple provider (#11159) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [4e3799fec](https://github.com/apache/airflow/commit/4e3799fec4c23d0f43603a0489c5a6158aeba035) | 2020-08-02 | [AIRFLOW-4541] Replace os.mkdirs usage with pathlib.Path(path).mkdir (#10117) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/sftp/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/sftp/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 24a056334198e..0000000000000 --- a/airflow/providers/sftp/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,13 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [ae791e191](https://github.com/apache/airflow/commit/ae791e19163b4ee6e84940d2567adbf0a2626fb4) | 2020-10-21 | Fix formatting errors introduced in #11720 (#11733) | -| 
[1fb3c28e1](https://github.com/apache/airflow/commit/1fb3c28e1a4fd54c9d83dccd413659a7a87c7315) | 2020-10-21 | Add support for setting ciphers for SFTPHook (#11720) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12 | Remove redundant None provided as default to dict.get() (#11448) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/sftp/BACKPORT_PROVIDER_README.md b/airflow/providers/sftp/BACKPORT_PROVIDER_README.md deleted file mode 100644 index a172ff359cefe..0000000000000 --- a/airflow/providers/sftp/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,175 +0,0 @@ - - - -# Package apache-airflow-backport-providers-sftp - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Cross provider package dependencies](#cross-provider-package-dependencies) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Sensors](#sensors) - - [Moved sensors](#moved-sensors) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for `sftp` provider. All classes for this provider package -are in `airflow.providers.sftp` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-sftp` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| paramiko | >=2.6.0 | -| pysftp | >=0.2.9 | -| sshtunnel | >=0.1.4,<0.2 | - -## Cross provider package dependencies - -Those are dependencies that might be needed in order to use all the features of the package. -You need to install the specified backport providers package in order to use them. - -You can install such cross-provider dependencies when installing from PyPI. For example: - -```bash -pip install apache-airflow-backport-providers-sftp[ssh] -``` - -| Dependent package | Extra | -|:-------------------------------------------------------------------------------------------------------------|:--------| -| [apache-airflow-backport-providers-ssh](https://github.com/apache/airflow/tree/master/airflow/providers/ssh) | ssh | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `sftp` provider -are in the `airflow.providers.sftp` package. 
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.sftp` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.sftp.SFTPOperator](https://github.com/apache/airflow/blob/master/airflow/providers/sftp/operators/sftp.py) | [contrib.operators.sftp_operator.SFTPOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sftp_operator.py) | - - -## Sensors - - - -### Moved sensors - -| Airflow 2.0 sensors: `airflow.providers.sftp` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------| -| [sensors.sftp.SFTPSensor](https://github.com/apache/airflow/blob/master/airflow/providers/sftp/sensors/sftp.py) | [contrib.sensors.sftp_sensor.SFTPSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/sftp_sensor.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.sftp` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------| -| [hooks.sftp.SFTPHook](https://github.com/apache/airflow/blob/master/airflow/providers/sftp/hooks/sftp.py) | [contrib.hooks.sftp_hook.SFTPHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/sftp_hook.py) | - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [ae791e191](https://github.com/apache/airflow/commit/ae791e19163b4ee6e84940d2567adbf0a2626fb4) | 2020-10-21 | Fix formatting errors introduced in #11720 (#11733) | -| [1fb3c28e1](https://github.com/apache/airflow/commit/1fb3c28e1a4fd54c9d83dccd413659a7a87c7315) | 2020-10-21 | Add support for setting ciphers for SFTPHook (#11720) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12 | Remove redundant None provided as default to dict.get() (#11448) | -| 
[0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [0161b5ea2](https://github.com/apache/airflow/commit/0161b5ea2b805d62a0317e5cab6f797b92c8abf1) | 2020-09-26 | Increasing type coverage for multiple provider (#11159) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [4e3799fec](https://github.com/apache/airflow/commit/4e3799fec4c23d0f43603a0489c5a6158aeba035) | 2020-08-02 | [AIRFLOW-4541] Replace os.mkdirs usage with pathlib.Path(path).mkdir (#10117) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [bac0ab27c](https://github.com/apache/airflow/commit/bac0ab27cfc89e715efddc97214fcd7738084361) | 2020-03-30 | close sftp connection without error (#7953) | -| [42eef3821](https://github.com/apache/airflow/commit/42eef38217e709bc7a7f71bf0286e9e61293a43e) | 2020-03-07 | [AIRFLOW-6877] Add cross-provider dependencies as extras (#7506) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [ceea293c1](https://github.com/apache/airflow/commit/ceea293c1652240e7e856c201e4341a87ef97a0f) | 2020-01-28 | [AIRFLOW-6656] Fix AIP-21 moving (#7272) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | -| [69629a5a9](https://github.com/apache/airflow/commit/69629a5a948ab2c4ac04a4a4dca6ac86d19c11bd) | 2019-12-09 | [AIRFLOW-5807] Move SFTP from contrib to providers. (#6464) | diff --git a/airflow/providers/sftp/CHANGELOG.rst b/airflow/providers/sftp/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/sftp/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. 
diff --git a/airflow/providers/sftp/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/sftp/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index a9f86ccf961ec..0000000000000 --- a/airflow/providers/sftp/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,54 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [ae791e191](https://github.com/apache/airflow/commit/ae791e19163b4ee6e84940d2567adbf0a2626fb4) | 2020-10-21 | Fix formatting errors introduced in #11720 (#11733) | -| [1fb3c28e1](https://github.com/apache/airflow/commit/1fb3c28e1a4fd54c9d83dccd413659a7a87c7315) | 2020-10-21 | Add support for setting ciphers for SFTPHook (#11720) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | 
-| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12 | Remove redundant None provided as default to dict.get() (#11448) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [0161b5ea2](https://github.com/apache/airflow/commit/0161b5ea2b805d62a0317e5cab6f797b92c8abf1) | 2020-09-26 | Increasing type coverage for multiple provider (#11159) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [4e3799fec](https://github.com/apache/airflow/commit/4e3799fec4c23d0f43603a0489c5a6158aeba035) | 2020-08-02 | [AIRFLOW-4541] Replace os.mkdirs usage with pathlib.Path(path).mkdir (#10117) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [bac0ab27c](https://github.com/apache/airflow/commit/bac0ab27cfc89e715efddc97214fcd7738084361) | 2020-03-30 | close sftp connection without error (#7953) | -| [42eef3821](https://github.com/apache/airflow/commit/42eef38217e709bc7a7f71bf0286e9e61293a43e) | 2020-03-07 | [AIRFLOW-6877] Add cross-provider dependencies as extras (#7506) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [ceea293c1](https://github.com/apache/airflow/commit/ceea293c1652240e7e856c201e4341a87ef97a0f) | 2020-01-28 | [AIRFLOW-6656] Fix AIP-21 moving (#7272) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | -| [69629a5a9](https://github.com/apache/airflow/commit/69629a5a948ab2c4ac04a4a4dca6ac86d19c11bd) | 2019-12-09 | [AIRFLOW-5807] Move SFTP from contrib to providers. (#6464) | diff --git a/airflow/providers/sftp/README.md b/airflow/providers/sftp/README.md deleted file mode 100644 index 44eb453420892..0000000000000 --- a/airflow/providers/sftp/README.md +++ /dev/null @@ -1,178 +0,0 @@ - - - -# Package apache-airflow-providers-sftp - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Cross provider package dependencies](#cross-provider-package-dependencies) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Sensors](#sensors) - - [Moved sensors](#moved-sensors) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for the `sftp` provider. All classes for this provider package -are in the `airflow.providers.sftp` python package. - - - -## Installation - -NOTE! - -In November 2020, a new version of pip (20.3) was released with a new 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to installation errors, depending on your choice -of extras. To install Airflow you need to either downgrade pip to version 20.2.4 with -`pip install --upgrade pip==20.2.4` or, if you use pip 20.3, add the option -`--use-deprecated legacy-resolver` to your pip install command.
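As a concrete sketch of the second workaround above, with pip 20.3 the install command for this package becomes:

```bash
pip install --use-deprecated legacy-resolver apache-airflow-providers-sftp
```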
- -You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-sftp` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| paramiko | >=2.6.0 | -| pysftp | >=0.2.9 | -| sshtunnel | >=0.1.4,<0.2 | - -## Cross provider package dependencies - -These are dependencies that might be needed in order to use all the features of the package. -You need to install the specified provider packages in order to use them. - -You can install such cross-provider dependencies when installing from PyPI. For example: - -```bash -pip install apache-airflow-providers-sftp[ssh] -``` - -| Dependent package | Extra | -|:--------------------------------------------------------------------------------------|:--------| -| [apache-airflow-providers-ssh](https://pypi.org/project/apache-airflow-providers-ssh) | ssh | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `sftp` provider -are in the `airflow.providers.sftp` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.sftp` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.sftp.SFTPOperator](https://github.com/apache/airflow/blob/master/airflow/providers/sftp/operators/sftp.py) | [contrib.operators.sftp_operator.SFTPOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/sftp_operator.py) | - - -## Sensors - - - -### Moved sensors - -| Airflow 2.0 sensors: `airflow.providers.sftp` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------| -| [sensors.sftp.SFTPSensor](https://github.com/apache/airflow/blob/master/airflow/providers/sftp/sensors/sftp.py) | [contrib.sensors.sftp_sensor.SFTPSensor](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/sensors/sftp_sensor.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.sftp` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------| -| [hooks.sftp.SFTPHook](https://github.com/apache/airflow/blob/master/airflow/providers/sftp/hooks/sftp.py) | [contrib.hooks.sftp_hook.SFTPHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/sftp_hook.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject |
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [ae791e191](https://github.com/apache/airflow/commit/ae791e19163b4ee6e84940d2567adbf0a2626fb4) | 2020-10-21 | Fix formatting errors introduced in #11720 (#11733) | -| [1fb3c28e1](https://github.com/apache/airflow/commit/1fb3c28e1a4fd54c9d83dccd413659a7a87c7315) | 2020-10-21 | Add support for setting ciphers for SFTPHook (#11720) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12 | Remove redundant None provided 
as default to dict.get() (#11448) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [0161b5ea2](https://github.com/apache/airflow/commit/0161b5ea2b805d62a0317e5cab6f797b92c8abf1) | 2020-09-26 | Increasing type coverage for multiple provider (#11159) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [4e3799fec](https://github.com/apache/airflow/commit/4e3799fec4c23d0f43603a0489c5a6158aeba035) | 2020-08-02 | [AIRFLOW-4541] Replace os.mkdirs usage with pathlib.Path(path).mkdir (#10117) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [bac0ab27c](https://github.com/apache/airflow/commit/bac0ab27cfc89e715efddc97214fcd7738084361) | 2020-03-30 | close sftp connection without error (#7953) | -| [42eef3821](https://github.com/apache/airflow/commit/42eef38217e709bc7a7f71bf0286e9e61293a43e) | 2020-03-07 | [AIRFLOW-6877] Add cross-provider dependencies as extras (#7506) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [ceea293c1](https://github.com/apache/airflow/commit/ceea293c1652240e7e856c201e4341a87ef97a0f) | 2020-01-28 | [AIRFLOW-6656] Fix AIP-21 moving (#7272) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | -| [69629a5a9](https://github.com/apache/airflow/commit/69629a5a948ab2c4ac04a4a4dca6ac86d19c11bd) | 2019-12-09 | [AIRFLOW-5807] Move SFTP from contrib to providers. (#6464) | diff --git a/airflow/providers/sftp/hooks/sftp.py b/airflow/providers/sftp/hooks/sftp.py index 498f362c10179..e2a991e8704f2 100644 --- a/airflow/providers/sftp/hooks/sftp.py +++ b/airflow/providers/sftp/hooks/sftp.py @@ -115,6 +115,12 @@ def get_conn(self) -> pysftp.Connection: cnopts = pysftp.CnOpts() if self.no_host_key_check: cnopts.hostkeys = None + else: + if self.host_key is not None: + cnopts.hostkeys.add(self.remote_host, 'ssh-rsa', self.host_key) + else: + pass # will fall back to the system host keys if none are explicitly specified in the conn extra + cnopts.compression = self.compress cnopts.ciphers = self.ciphers conn_params = { diff --git a/airflow/providers/singularity/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/singularity/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index 51da61dc263a0..0000000000000 --- a/airflow/providers/singularity/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,19 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -|
diff --git a/airflow/providers/singularity/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/singularity/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index 51da61dc263a0..0000000000000 --- a/airflow/providers/singularity/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,19 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [e742ef7c7](https://github.com/apache/airflow/commit/e742ef7c704c18bf69b7a7235adb7f75e742f902) | 2020-05-23 | Fix typo in test_project_structure (#8978) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [42c59755a](https://github.com/apache/airflow/commit/42c59755affd49cd35bea8464e2a4c9256084d88) | 2020-05-09 | Update example SingularityOperator DAG (#8790) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [0bb687990](https://github.com/apache/airflow/commit/0bb687990b94da7445f4ba081592de8cea73119e) | 2020-02-23 | [AIRFLOW-4030] second attempt to add singularity to airflow (#7191) | diff --git a/airflow/providers/singularity/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/singularity/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 95edc96cf0a6a..0000000000000 --- a/airflow/providers/singularity/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,17 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [54353f874](https://github.com/apache/airflow/commit/54353f874589f9be236458995147d13e0e763ffc) | 2020-09-27 | Increase type coverage for five different providers (#11170) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22 | Replace assigment with Augmented assignment (#10468) | -|
[cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/singularity/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/singularity/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 4fe31e5b82243..0000000000000 --- a/airflow/providers/singularity/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,10 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [483068745](https://github.com/apache/airflow/commit/48306874538eea7cfd42358d5ebb59705204bfc4) | 2020-10-24 | Use Python 3 style super classes (#11806) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/singularity/BACKPORT_PROVIDER_README.md b/airflow/providers/singularity/BACKPORT_PROVIDER_README.md deleted file mode 100644 index e322459570c2d..0000000000000 --- a/airflow/providers/singularity/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,125 +0,0 @@ - - - -# Package apache-airflow-backport-providers-singularity - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [New operators](#new-operators) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for `singularity` provider. All classes for this provider package -are in `airflow.providers.singularity` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. 
- - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-singularity` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| spython | >=0.0.56 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `singularity` provider -are in the `airflow.providers.singularity` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - -### New operators - -| New Airflow 2.0 operators: `airflow.providers.singularity` package | -|:--------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.singularity.SingularityOperator](https://github.com/apache/airflow/blob/master/airflow/providers/singularity/operators/singularity.py) | - - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [483068745](https://github.com/apache/airflow/commit/48306874538eea7cfd42358d5ebb59705204bfc4) | 2020-10-24 | Use Python 3 style super classes (#11806) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [54353f874](https://github.com/apache/airflow/commit/54353f874589f9be236458995147d13e0e763ffc) | 2020-09-27 | Increase type coverage for five different providers (#11170) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22 | Replace assigment with Augmented assignment (#10468) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 
2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [e742ef7c7](https://github.com/apache/airflow/commit/e742ef7c704c18bf69b7a7235adb7f75e742f902) | 2020-05-23 | Fix typo in test_project_structure (#8978) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [42c59755a](https://github.com/apache/airflow/commit/42c59755affd49cd35bea8464e2a4c9256084d88) | 2020-05-09 | Update example SingularityOperator DAG (#8790) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [0bb687990](https://github.com/apache/airflow/commit/0bb687990b94da7445f4ba081592de8cea73119e) | 2020-02-23 | [AIRFLOW-4030] second attempt to add singularity to airflow (#7191) | diff --git a/airflow/providers/singularity/CHANGELOG.rst b/airflow/providers/singularity/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/singularity/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. 
See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. diff --git a/airflow/providers/singularity/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/singularity/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index 620f655e6b0d8..0000000000000 --- a/airflow/providers/singularity/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,44 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------| -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [483068745](https://github.com/apache/airflow/commit/48306874538eea7cfd42358d5ebb59705204bfc4) | 2020-10-24 | Use Python 3 style super classes (#11806) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| 
[0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [54353f874](https://github.com/apache/airflow/commit/54353f874589f9be236458995147d13e0e763ffc) | 2020-09-27 | Increase type coverage for five different providers (#11170) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22 | Replace assigment with Augmented assignment (#10468) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [e742ef7c7](https://github.com/apache/airflow/commit/e742ef7c704c18bf69b7a7235adb7f75e742f902) | 2020-05-23 | Fix typo in test_project_structure (#8978) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [42c59755a](https://github.com/apache/airflow/commit/42c59755affd49cd35bea8464e2a4c9256084d88) | 2020-05-09 | Update example SingularityOperator DAG (#8790) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [0bb687990](https://github.com/apache/airflow/commit/0bb687990b94da7445f4ba081592de8cea73119e) | 2020-02-23 | [AIRFLOW-4030] second attempt to add singularity to airflow (#7191) | diff --git a/airflow/providers/singularity/README.md b/airflow/providers/singularity/README.md deleted file mode 100644 index 0d313c10cf5c8..0000000000000 --- a/airflow/providers/singularity/README.md +++ /dev/null @@ -1,124 +0,0 @@ - - - -# Package apache-airflow-providers-singularity - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [New operators](#new-operators) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for `singularity` provider. All classes for this provider package -are in `airflow.providers.singularity` python package. - - - -## Installation - -NOTE! - -On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option -`--use-deprecated legacy-resolver` to your pip install command. - -You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-singularity` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| spython | >=0.0.56 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `singularity` provider -are in the `airflow.providers.singularity` package. 
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - -### New operators - -| New Airflow 2.0 operators: `airflow.providers.singularity` package | -|:--------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.singularity.SingularityOperator](https://github.com/apache/airflow/blob/master/airflow/providers/singularity/operators/singularity.py) | - - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------------| -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [483068745](https://github.com/apache/airflow/commit/48306874538eea7cfd42358d5ebb59705204bfc4) | 2020-10-24 | Use Python 3 style super classes (#11806) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| 
[5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [54353f874](https://github.com/apache/airflow/commit/54353f874589f9be236458995147d13e0e763ffc) | 2020-09-27 | Increase type coverage for five different providers (#11170) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [7c206a82a](https://github.com/apache/airflow/commit/7c206a82a6f074abcc4898a005ecd2c84a920054) | 2020-08-22 | Replace assigment with Augmented assignment (#10468) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [e742ef7c7](https://github.com/apache/airflow/commit/e742ef7c704c18bf69b7a7235adb7f75e742f902) | 2020-05-23 | Fix typo in test_project_structure (#8978) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [42c59755a](https://github.com/apache/airflow/commit/42c59755affd49cd35bea8464e2a4c9256084d88) | 2020-05-09 | Update example SingularityOperator DAG (#8790) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [0bb687990](https://github.com/apache/airflow/commit/0bb687990b94da7445f4ba081592de8cea73119e) | 2020-02-23 | [AIRFLOW-4030] second attempt to add singularity to airflow (#7191) | diff --git a/airflow/providers/slack/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/slack/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index fce5309ea9655..0000000000000 --- a/airflow/providers/slack/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,23 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [5cf46fad1](https://github.com/apache/airflow/commit/5cf46fad1e0a9cdde213258b2064e16d30d3160e) | 2020-05-29 | Add SlackAPIFileOperator impementing files.upload from Slack API (#9004) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [427257c2e](https://github.com/apache/airflow/commit/427257c2e2ffc886ef9f516e9c4d015a4ede9bbd) | 2020-05-24 | Remove defunct code from setup.py (#8982) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [578fc514c](https://github.com/apache/airflow/commit/578fc514cd325b7d190bdcfb749a384d101238fa) | 2020-05-12 | [AIRFLOW-4543] Update slack operator to support slackclient v2 (#5519) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [be2b2baa7](https://github.com/apache/airflow/commit/be2b2baa7c5f53c2d73646e4623cdb6731551b70) | 2020-03-23 | Add missing call to Super class in 'http', 'grpc' & 'slack' providers (#7826) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/slack/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/slack/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 1abf8a64702bb..0000000000000 --- a/airflow/providers/slack/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,23 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [720912f67](https://github.com/apache/airflow/commit/720912f67b3af0bdcbac64d6b8bf6d51c6247e26) | 2020-10-02 | Strict type check for multiple providers (#11229) | -| [0161b5ea2](https://github.com/apache/airflow/commit/0161b5ea2b805d62a0317e5cab6f797b92c8abf1) | 2020-09-26 | Increasing type coverage for multiple provider (#11159) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| 
[fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [d1bce91bb](https://github.com/apache/airflow/commit/d1bce91bb21d5a468fa6a0207156c28fe1ca6513) | 2020-08-25 | PyDocStyle: Enable D403: Capitalized first word of docstring (#10530) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25 | Remove all "noinspection" comments native to IntelliJ (#10525) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7cc1c8bc0](https://github.com/apache/airflow/commit/7cc1c8bc0031f1d9839baaa5a6c7a9bc7ec37ead) | 2020-07-25 | Updates the slack WebClient call to use the instance variable - token (#9995) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [df8efd04f](https://github.com/apache/airflow/commit/df8efd04f394afc4b5affb677bc78d8b7bd5275a) | 2020-06-21 | Enable & Fix "Docstring Content Issues" PyDocStyle Check (#9460) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/slack/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/slack/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index cbcf8dd81c10d..0000000000000 --- a/airflow/providers/slack/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,12 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [483068745](https://github.com/apache/airflow/commit/48306874538eea7cfd42358d5ebb59705204bfc4) | 2020-10-24 | Use Python 3 style super classes (#11806) | -| [4fb5c017f](https://github.com/apache/airflow/commit/4fb5c017fe5ca41ed95547a857c9c39efc4f1476) | 2020-10-21 | Check response status in slack webhook hook. 
(#11620) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/slack/BACKPORT_PROVIDER_README.md b/airflow/providers/slack/BACKPORT_PROVIDER_README.md deleted file mode 100644 index 7863eb454e4cc..0000000000000 --- a/airflow/providers/slack/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,176 +0,0 @@ - - - -# Package apache-airflow-backport-providers-slack - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Cross provider package dependencies](#cross-provider-package-dependencies) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [New operators](#new-operators) - - [Moved operators](#moved-operators) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for `slack` provider. All classes for this provider package -are in `airflow.providers.slack` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-slack` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| slackclient | >=2.0.0,<3.0.0 | - -## Cross provider package dependencies - -Those are dependencies that might be needed in order to use all the features of the package. -You need to install the specified backport providers package in order to use them. - -You can install such cross-provider dependencies when installing from PyPI. For example: - -```bash -pip install apache-airflow-backport-providers-slack[http] -``` - -| Dependent package | Extra | -|:---------------------------------------------------------------------------------------------------------------|:--------| -| [apache-airflow-backport-providers-http](https://github.com/apache/airflow/tree/master/airflow/providers/http) | http | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `slack` provider -are in the `airflow.providers.slack` package. 
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - -### New operators - -| New Airflow 2.0 operators: `airflow.providers.slack` package | -|:---------------------------------------------------------------------------------------------------------------------------------| -| [operators.slack.SlackAPIFileOperator](https://github.com/apache/airflow/blob/master/airflow/providers/slack/operators/slack.py) | - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.slack` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:-------------------------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.slack.SlackAPIOperator](https://github.com/apache/airflow/blob/master/airflow/providers/slack/operators/slack.py) | [operators.slack_operator.SlackAPIOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/slack_operator.py) | -| [operators.slack.SlackAPIPostOperator](https://github.com/apache/airflow/blob/master/airflow/providers/slack/operators/slack.py) | [operators.slack_operator.SlackAPIPostOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/slack_operator.py) | -| [operators.slack_webhook.SlackWebhookOperator](https://github.com/apache/airflow/blob/master/airflow/providers/slack/operators/slack_webhook.py) | [contrib.operators.slack_webhook_operator.SlackWebhookOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/slack_webhook_operator.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.slack` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:-------------------------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------| -| [hooks.slack.SlackHook](https://github.com/apache/airflow/blob/master/airflow/providers/slack/hooks/slack.py) | [hooks.slack_hook.SlackHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/slack_hook.py) | -| [hooks.slack_webhook.SlackWebhookHook](https://github.com/apache/airflow/blob/master/airflow/providers/slack/hooks/slack_webhook.py) | [contrib.hooks.slack_webhook_hook.SlackWebhookHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/slack_webhook_hook.py) | - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [483068745](https://github.com/apache/airflow/commit/48306874538eea7cfd42358d5ebb59705204bfc4) | 2020-10-24 | Use Python 3 style super classes (#11806) | -| 
[4fb5c017f](https://github.com/apache/airflow/commit/4fb5c017fe5ca41ed95547a857c9c39efc4f1476) | 2020-10-21 | Check response status in slack webhook hook. (#11620) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [720912f67](https://github.com/apache/airflow/commit/720912f67b3af0bdcbac64d6b8bf6d51c6247e26) | 2020-10-02 | Strict type check for multiple providers (#11229) | -| [0161b5ea2](https://github.com/apache/airflow/commit/0161b5ea2b805d62a0317e5cab6f797b92c8abf1) | 2020-09-26 | Increasing type coverage for multiple provider (#11159) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [d1bce91bb](https://github.com/apache/airflow/commit/d1bce91bb21d5a468fa6a0207156c28fe1ca6513) | 2020-08-25 | PyDocStyle: Enable D403: Capitalized first word of docstring (#10530) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25 | Remove all "noinspection" comments native to IntelliJ (#10525) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7cc1c8bc0](https://github.com/apache/airflow/commit/7cc1c8bc0031f1d9839baaa5a6c7a9bc7ec37ead) | 2020-07-25 | Updates the slack WebClient call to use the instance variable - token (#9995) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| 
[df8efd04f](https://github.com/apache/airflow/commit/df8efd04f394afc4b5affb677bc78d8b7bd5275a) | 2020-06-21 | Enable & Fix "Docstring Content Issues" PyDocStyle Check (#9460) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [5cf46fad1](https://github.com/apache/airflow/commit/5cf46fad1e0a9cdde213258b2064e16d30d3160e) | 2020-05-29 | Add SlackAPIFileOperator impementing files.upload from Slack API (#9004) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [427257c2e](https://github.com/apache/airflow/commit/427257c2e2ffc886ef9f516e9c4d015a4ede9bbd) | 2020-05-24 | Remove defunct code from setup.py (#8982) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [578fc514c](https://github.com/apache/airflow/commit/578fc514cd325b7d190bdcfb749a384d101238fa) | 2020-05-12 | [AIRFLOW-4543] Update slack operator to support slackclient v2 (#5519) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [be2b2baa7](https://github.com/apache/airflow/commit/be2b2baa7c5f53c2d73646e4623cdb6731551b70) | 2020-03-23 | Add missing call to Super class in 'http', 'grpc' & 'slack' providers (#7826) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes 
to providers package (#7265) | diff --git a/airflow/providers/slack/CHANGELOG.rst b/airflow/providers/slack/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/slack/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. diff --git a/airflow/providers/slack/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/slack/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index 79362d5268287..0000000000000 --- a/airflow/providers/slack/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,60 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [2947e0999](https://github.com/apache/airflow/commit/2947e0999979fad1f2c98aeb4f1e46297e4c9864) | 2020-12-02 | SlackWebhookHook use password instead of extra (#12674) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [dd2095f4a](https://github.com/apache/airflow/commit/dd2095f4a8b07c9b1a4c279a3578cd1e23b71a1b) | 2020-11-10 | Simplify string expressions & Use f-string (#12216) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| 
[41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [483068745](https://github.com/apache/airflow/commit/48306874538eea7cfd42358d5ebb59705204bfc4) | 2020-10-24 | Use Python 3 style super classes (#11806) | -| [4fb5c017f](https://github.com/apache/airflow/commit/4fb5c017fe5ca41ed95547a857c9c39efc4f1476) | 2020-10-21 | Check response status in slack webhook hook. (#11620) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [720912f67](https://github.com/apache/airflow/commit/720912f67b3af0bdcbac64d6b8bf6d51c6247e26) | 2020-10-02 | Strict type check for multiple providers (#11229) | -| [0161b5ea2](https://github.com/apache/airflow/commit/0161b5ea2b805d62a0317e5cab6f797b92c8abf1) | 2020-09-26 | Increasing type coverage for multiple provider (#11159) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [d1bce91bb](https://github.com/apache/airflow/commit/d1bce91bb21d5a468fa6a0207156c28fe1ca6513) | 2020-08-25 | PyDocStyle: Enable D403: Capitalized first word of docstring (#10530) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [2f2d8dbfa](https://github.com/apache/airflow/commit/2f2d8dbfafefb4be3dd80f22f31c649c8498f148) | 2020-08-25 | Remove all "noinspection" comments native to IntelliJ (#10525) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| 
[aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7cc1c8bc0](https://github.com/apache/airflow/commit/7cc1c8bc0031f1d9839baaa5a6c7a9bc7ec37ead) | 2020-07-25 | Updates the slack WebClient call to use the instance variable - token (#9995) | -| [33f0cd265](https://github.com/apache/airflow/commit/33f0cd2657b2e77ea3477e0c93f13f1474be628e) | 2020-07-22 | apply_default keeps the function signature for mypy (#9784) | -| [df8efd04f](https://github.com/apache/airflow/commit/df8efd04f394afc4b5affb677bc78d8b7bd5275a) | 2020-06-21 | Enable & Fix "Docstring Content Issues" PyDocStyle Check (#9460) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [5cf46fad1](https://github.com/apache/airflow/commit/5cf46fad1e0a9cdde213258b2064e16d30d3160e) | 2020-05-29 | Add SlackAPIFileOperator impementing files.upload from Slack API (#9004) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [427257c2e](https://github.com/apache/airflow/commit/427257c2e2ffc886ef9f516e9c4d015a4ede9bbd) | 2020-05-24 | Remove defunct code from setup.py (#8982) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [578fc514c](https://github.com/apache/airflow/commit/578fc514cd325b7d190bdcfb749a384d101238fa) | 2020-05-12 | [AIRFLOW-4543] Update slack operator to support slackclient v2 (#5519) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [be2b2baa7](https://github.com/apache/airflow/commit/be2b2baa7c5f53c2d73646e4623cdb6731551b70) | 2020-03-23 | Add missing call to Super class in 'http', 'grpc' & 'slack' providers (#7826) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/slack/README.md b/airflow/providers/slack/README.md index 7a630c66eb1a3..ea4968ac1e2b1 100644 --- a/airflow/providers/slack/README.md +++ b/airflow/providers/slack/README.md @@ -61,7 +61,7 @@ You can install this package on top of an existing airflow 2.* installation via | PIP package | Version required | |:--------------|:-------------------| -| slackclient | >=2.0.0,<3.0.0 | +| slack_sdk | >=3.0.0,<4.0.0 | ## Cross provider package dependencies diff --git a/airflow/providers/slack/hooks/slack.py b/airflow/providers/slack/hooks/slack.py index 6f27091dd7d18..da449a7f0c349 100644 --- a/airflow/providers/slack/hooks/slack.py +++ b/airflow/providers/slack/hooks/slack.py @@ -18,7 +18,7 @@ """Hook for Slack""" from typing import Any, Optional -from slack import WebClient +from slack_sdk import WebClient from airflow.exceptions import AirflowException from airflow.hooks.base import BaseHook @@ -41,7 +41,7 @@ class SlackHook(BaseHook): # noqa slack_hook.call("chat.postMessage", json={"channel": "#random", "text": "Hello world!"}) # Call method from Slack SDK (you have to handle errors yourself) - # For more details check https://slack.dev/python-slackclient/basic_usage.html#sending-a-message + # For more details check https://slack.dev/python-slack-sdk/web/index.html#messaging slack_hook.client.chat_postMessage(channel="#random", text="Hello world!") :param token: Slack API token diff --git a/airflow/providers/snowflake/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/snowflake/BACKPORT_PROVIDER_CHANGES_2020.06.24.md 
deleted file mode 100644 index b79d82d5853e7..0000000000000 --- a/airflow/providers/snowflake/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,24 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [1c9374d25](https://github.com/apache/airflow/commit/1c9374d2573483dd66f5c35032e24140864e72c0) | 2020-06-03 | Add snowflake to slack operator (#9023) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [a546a10b1](https://github.com/apache/airflow/commit/a546a10b13b1f7a119071d8d2001cb17ccdcbbf7) | 2020-05-16 | Add Snowflake system test (#8422) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [cd635dd7d](https://github.com/apache/airflow/commit/cd635dd7d57cab2f41efac2d3d94e8f80a6c96d6) | 2020-05-10 | [AIRFLOW-5906] Add authenticator parameter to snowflake_hook (#8642) | -| [297ad3088](https://github.com/apache/airflow/commit/297ad30885eeb77c062f37df78a78f381e7d140e) | 2020-04-20 | Fix Snowflake hook conn id (#8423) | -| [cf1109d66](https://github.com/apache/airflow/commit/cf1109d661991943bb4861a0468ba4bc8946376d) | 2020-02-07 | [AIRFLOW-6755] Fix snowflake hook bug and tests (#7380) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [eee34ee80](https://github.com/apache/airflow/commit/eee34ee8080bb7bc81294c3fbd8be93bbf795367) | 2020-01-24 | [AIRFLOW-4204] Update super() calls (#7248) | -| [17af3beea](https://github.com/apache/airflow/commit/17af3beea5095d9aec81c06404614ca6d1057a45) | 2020-01-21 | [AIRFLOW-5816] Add S3 to snowflake operator (#6469) | diff --git a/airflow/providers/snowflake/BACKPORT_PROVIDER_CHANGES_2020.10.05.md 
b/airflow/providers/snowflake/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 726aa464ebeb7..0000000000000 --- a/airflow/providers/snowflake/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,20 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:----------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [0161b5ea2](https://github.com/apache/airflow/commit/0161b5ea2b805d62a0317e5cab6f797b92c8abf1) | 2020-09-26 | Increasing type coverage for multiple provider (#11159) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [d1bce91bb](https://github.com/apache/airflow/commit/d1bce91bb21d5a468fa6a0207156c28fe1ca6513) | 2020-08-25 | PyDocStyle: Enable D403: Capitalized first word of docstring (#10530) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [f6734b3b8](https://github.com/apache/airflow/commit/f6734b3b850d33d3712763f93c114e80f5af9ffb) | 2020-08-12 | Enable Sphinx spellcheck for doc generation (#10280) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/snowflake/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/snowflake/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 56a2b983bb700..0000000000000 --- a/airflow/providers/snowflake/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,12 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport 
providers readmes/setup for 2020.10.29 | -| [483068745](https://github.com/apache/airflow/commit/48306874538eea7cfd42358d5ebb59705204bfc4) | 2020-10-24 | Use Python 3 style super classes (#11806) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12 | Remove redundant None provided as default to dict.get() (#11448) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/snowflake/BACKPORT_PROVIDER_CHANGES_2020.11.23.md b/airflow/providers/snowflake/BACKPORT_PROVIDER_CHANGES_2020.11.23.md deleted file mode 100644 index 40c2b1c60a5e0..0000000000000 --- a/airflow/providers/snowflake/BACKPORT_PROVIDER_CHANGES_2020.11.23.md +++ /dev/null @@ -1,19 +0,0 @@ - - -### Release 2020.11.23 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [19b7e4565](https://github.com/apache/airflow/commit/19b7e4565e6372d50ef0fbb5678a484a7afbdbee) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [9276607b5](https://github.com/apache/airflow/commit/9276607b58bedfb2128c63fabec85d77e7dba07f) | 2020-11-12 | Add session_parameters option to snowflake_hook (#12071) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [d363adb61](https://github.com/apache/airflow/commit/d363adb6187e9cba1d965f424c95058fa933df1f) | 2020-10-31 | Adding SnowflakeOperator howto-documentation and example DAG (#11975) | -| [ecc3a4df0](https://github.com/apache/airflow/commit/ecc3a4df0da67f258c3ad04733d6e561d8266c93) | 2020-10-30 | Add autocommit property for snowflake connection (#10838) | -| 
[5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | diff --git a/airflow/providers/snowflake/BACKPORT_PROVIDER_README.md b/airflow/providers/snowflake/BACKPORT_PROVIDER_README.md deleted file mode 100644 index c56c06dfd487b..0000000000000 --- a/airflow/providers/snowflake/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,200 +0,0 @@ - - - -# Package apache-airflow-backport-providers-snowflake - -Release: 2020.11.23 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Cross provider package dependencies](#cross-provider-package-dependencies) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [New operators](#new-operators) - - [Transfer operators](#transfer-operators) - - [New transfer operators](#new-transfer-operators) - - [Hooks](#hooks) - - [New hooks](#new-hooks) -- [Releases](#releases) - - [Release 2020.11.23](#release-20201123) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for `snowflake` provider. All classes for this provider package -are in `airflow.providers.snowflake` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-snowflake` - -## PIP requirements - -| PIP package | Version required | -|:---------------------------|:-------------------| -| requests | <2.24.0 | -| snowflake-connector-python | >=1.5.2 | -| snowflake-sqlalchemy | >=1.1.0 | - -## Cross provider package dependencies - -Those are dependencies that might be needed in order to use all the features of the package. -You need to install the specified backport providers package in order to use them. - -You can install such cross-provider dependencies when installing from PyPI. For example: - -```bash -pip install apache-airflow-backport-providers-snowflake[slack] -``` - -| Dependent package | Extra | -|:-----------------------------------------------------------------------------------------------------------------|:--------| -| [apache-airflow-backport-providers-slack](https://github.com/apache/airflow/tree/master/airflow/providers/slack) | slack | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `snowflake` provider -are in the `airflow.providers.snowflake` package. 
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - -### New operators - -| New Airflow 2.0 operators: `airflow.providers.snowflake` package | -|:------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.snowflake.SnowflakeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/snowflake/operators/snowflake.py) | - - - -## Transfer operators - - -### New transfer operators - -| New Airflow 2.0 transfers: `airflow.providers.snowflake` package | -|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [transfers.s3_to_snowflake.S3ToSnowflakeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/snowflake/transfers/s3_to_snowflake.py) | -| [transfers.snowflake_to_slack.SnowflakeToSlackOperator](https://github.com/apache/airflow/blob/master/airflow/providers/snowflake/transfers/snowflake_to_slack.py) | - - - -## Hooks - - -### New hooks - -| New Airflow 2.0 hooks: `airflow.providers.snowflake` package | -|:------------------------------------------------------------------------------------------------------------------------------| -| [hooks.snowflake.SnowflakeHook](https://github.com/apache/airflow/blob/master/airflow/providers/snowflake/hooks/snowflake.py) | - - - - -## Releases - -### Release 2020.11.23 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [19b7e4565](https://github.com/apache/airflow/commit/19b7e4565e6372d50ef0fbb5678a484a7afbdbee) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [9276607b5](https://github.com/apache/airflow/commit/9276607b58bedfb2128c63fabec85d77e7dba07f) | 2020-11-12 | Add session_parameters option to snowflake_hook (#12071) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - 
Python Auto Formmatter (#9550) | -| [d363adb61](https://github.com/apache/airflow/commit/d363adb6187e9cba1d965f424c95058fa933df1f) | 2020-10-31 | Adding SnowflakeOperator howto-documentation and example DAG (#11975) | -| [ecc3a4df0](https://github.com/apache/airflow/commit/ecc3a4df0da67f258c3ad04733d6e561d8266c93) | 2020-10-30 | Add autocommit property for snowflake connection (#10838) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [483068745](https://github.com/apache/airflow/commit/48306874538eea7cfd42358d5ebb59705204bfc4) | 2020-10-24 | Use Python 3 style super classes (#11806) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12 | Remove redundant None provided as default to dict.get() (#11448) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:----------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [0161b5ea2](https://github.com/apache/airflow/commit/0161b5ea2b805d62a0317e5cab6f797b92c8abf1) | 2020-09-26 | Increasing type coverage for multiple provider (#11159) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [d1bce91bb](https://github.com/apache/airflow/commit/d1bce91bb21d5a468fa6a0207156c28fe1ca6513) | 2020-08-25 | PyDocStyle: Enable D403: Capitalized first word of docstring (#10530) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [f6734b3b8](https://github.com/apache/airflow/commit/f6734b3b850d33d3712763f93c114e80f5af9ffb) | 2020-08-12 | Enable Sphinx spellcheck for doc generation (#10280) | -| 
[cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [1c9374d25](https://github.com/apache/airflow/commit/1c9374d2573483dd66f5c35032e24140864e72c0) | 2020-06-03 | Add snowflake to slack operator (#9023) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [a546a10b1](https://github.com/apache/airflow/commit/a546a10b13b1f7a119071d8d2001cb17ccdcbbf7) | 2020-05-16 | Add Snowflake system test (#8422) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [cd635dd7d](https://github.com/apache/airflow/commit/cd635dd7d57cab2f41efac2d3d94e8f80a6c96d6) | 2020-05-10 | [AIRFLOW-5906] Add authenticator parameter to snowflake_hook (#8642) | -| [297ad3088](https://github.com/apache/airflow/commit/297ad30885eeb77c062f37df78a78f381e7d140e) | 2020-04-20 | Fix Snowflake hook conn id (#8423) | -| [cf1109d66](https://github.com/apache/airflow/commit/cf1109d661991943bb4861a0468ba4bc8946376d) | 2020-02-07 | [AIRFLOW-6755] Fix snowflake hook bug and tests (#7380) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [eee34ee80](https://github.com/apache/airflow/commit/eee34ee8080bb7bc81294c3fbd8be93bbf795367) | 2020-01-24 | [AIRFLOW-4204] Update super() calls (#7248) | -| [17af3beea](https://github.com/apache/airflow/commit/17af3beea5095d9aec81c06404614ca6d1057a45) | 2020-01-21 | [AIRFLOW-5816] Add S3 to snowflake operator (#6469) | diff --git a/airflow/providers/snowflake/CHANGELOG.rst b/airflow/providers/snowflake/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/snowflake/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. 
diff --git a/airflow/providers/snowflake/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/snowflake/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index 78060e1b10af3..0000000000000 --- a/airflow/providers/snowflake/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,65 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [bd90136aa](https://github.com/apache/airflow/commit/bd90136aaf5035e3234fe545b79a3e4aad21efe2) | 2020-11-30 | Move operator guides to provider documentation packages (#12681) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [ef4af2135](https://github.com/apache/airflow/commit/ef4af2135171c6e451f1407ea1a280ea875f2175) | 2020-11-22 | Move providers docs to separate package + Spell-check in a common job with docs-build (#12527) | -| [234d68938](https://github.com/apache/airflow/commit/234d689387ef89222bfdee481987c37d1e79b5af) | 2020-11-21 | Fix S3ToSnowflakeOperator docstring (#12504) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [7ca0b6f12](https://github.com/apache/airflow/commit/7ca0b6f121c9cec6e25de130f86a56d7c7fbe38c) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) (#12438) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [9276607b5](https://github.com/apache/airflow/commit/9276607b58bedfb2128c63fabec85d77e7dba07f) | 2020-11-12 | Add session_parameters option to snowflake_hook (#12071) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| 
[4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [d363adb61](https://github.com/apache/airflow/commit/d363adb6187e9cba1d965f424c95058fa933df1f) | 2020-10-31 | Adding SnowflakeOperator howto-documentation and example DAG (#11975) | -| [ecc3a4df0](https://github.com/apache/airflow/commit/ecc3a4df0da67f258c3ad04733d6e561d8266c93) | 2020-10-30 | Add autocommit property for snowflake connection (#10838) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [483068745](https://github.com/apache/airflow/commit/48306874538eea7cfd42358d5ebb59705204bfc4) | 2020-10-24 | Use Python 3 style super classes (#11806) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12 | Remove redundant None provided as default to dict.get() (#11448) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [0161b5ea2](https://github.com/apache/airflow/commit/0161b5ea2b805d62a0317e5cab6f797b92c8abf1) | 2020-09-26 | Increasing type coverage for multiple provider (#11159) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [d1bce91bb](https://github.com/apache/airflow/commit/d1bce91bb21d5a468fa6a0207156c28fe1ca6513) | 2020-08-25 | PyDocStyle: Enable D403: Capitalized first word of docstring (#10530) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [f6734b3b8](https://github.com/apache/airflow/commit/f6734b3b850d33d3712763f93c114e80f5af9ffb) | 2020-08-12 | Enable Sphinx spellcheck for doc generation (#10280) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| 
[aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [1c9374d25](https://github.com/apache/airflow/commit/1c9374d2573483dd66f5c35032e24140864e72c0) | 2020-06-03 | Add snowflake to slack operator (#9023) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [a546a10b1](https://github.com/apache/airflow/commit/a546a10b13b1f7a119071d8d2001cb17ccdcbbf7) | 2020-05-16 | Add Snowflake system test (#8422) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [cd635dd7d](https://github.com/apache/airflow/commit/cd635dd7d57cab2f41efac2d3d94e8f80a6c96d6) | 2020-05-10 | [AIRFLOW-5906] Add authenticator parameter to snowflake_hook (#8642) | -| [297ad3088](https://github.com/apache/airflow/commit/297ad30885eeb77c062f37df78a78f381e7d140e) | 2020-04-20 | Fix Snowflake hook conn id (#8423) | -| [cf1109d66](https://github.com/apache/airflow/commit/cf1109d661991943bb4861a0468ba4bc8946376d) | 2020-02-07 | [AIRFLOW-6755] Fix snowflake hook bug and tests (#7380) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [eee34ee80](https://github.com/apache/airflow/commit/eee34ee8080bb7bc81294c3fbd8be93bbf795367) | 2020-01-24 | [AIRFLOW-4204] Update super() calls (#7248) | -| [17af3beea](https://github.com/apache/airflow/commit/17af3beea5095d9aec81c06404614ca6d1057a45) | 2020-01-21 | [AIRFLOW-5816] Add S3 to snowflake operator (#6469) | diff --git 
a/airflow/providers/snowflake/README.md b/airflow/providers/snowflake/README.md deleted file mode 100644 index fedd176ade33d..0000000000000 --- a/airflow/providers/snowflake/README.md +++ /dev/null @@ -1,192 +0,0 @@ - - - -# Package apache-airflow-providers-snowflake - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Cross provider package dependencies](#cross-provider-package-dependencies) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [New operators](#new-operators) - - [Transfer operators](#transfer-operators) - - [New transfer operators](#new-transfer-operators) - - [Hooks](#hooks) - - [New hooks](#new-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for `snowflake` provider. All classes for this provider package -are in `airflow.providers.snowflake` python package. - - - -## Installation - -NOTE! - -On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option -`--use-deprecated legacy-resolver` to your pip install command. - -You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-snowflake` - -## PIP requirements - -| PIP package | Version required | -|:---------------------------|:-------------------| -| azure-storage-blob | | -| azure-storage-common | | -| requests | <2.24.0 | -| snowflake-connector-python | >=1.5.2 | -| snowflake-sqlalchemy | >=1.1.0 | - -## Cross provider package dependencies - -Those are dependencies that might be needed in order to use all the features of the package. -You need to install the specified backport providers package in order to use them. - -You can install such cross-provider dependencies when installing from PyPI. For example: - -```bash -pip install apache-airflow-providers-snowflake[slack] -``` - -| Dependent package | Extra | -|:------------------------------------------------------------------------------------------|:--------| -| [apache-airflow-providers-slack](https://pypi.org/project/apache-airflow-providers-slack) | slack | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `snowflake` provider -are in the `airflow.providers.snowflake` package. 
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - -### New operators - -| New Airflow 2.0 operators: `airflow.providers.snowflake` package | -|:------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.snowflake.SnowflakeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/snowflake/operators/snowflake.py) | - - - -## Transfer operators - - -### New transfer operators - -| New Airflow 2.0 transfers: `airflow.providers.snowflake` package | -|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [transfers.s3_to_snowflake.S3ToSnowflakeOperator](https://github.com/apache/airflow/blob/master/airflow/providers/snowflake/transfers/s3_to_snowflake.py) | -| [transfers.snowflake_to_slack.SnowflakeToSlackOperator](https://github.com/apache/airflow/blob/master/airflow/providers/snowflake/transfers/snowflake_to_slack.py) | - - - -## Hooks - - -### New hooks - -| New Airflow 2.0 hooks: `airflow.providers.snowflake` package | -|:------------------------------------------------------------------------------------------------------------------------------| -| [hooks.snowflake.SnowflakeHook](https://github.com/apache/airflow/blob/master/airflow/providers/snowflake/hooks/snowflake.py) | - - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [bd90136aa](https://github.com/apache/airflow/commit/bd90136aaf5035e3234fe545b79a3e4aad21efe2) | 2020-11-30 | Move operator guides to provider documentation packages (#12681) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [ef4af2135](https://github.com/apache/airflow/commit/ef4af2135171c6e451f1407ea1a280ea875f2175) | 2020-11-22 | Move providers docs to separate package + Spell-check in a common job with docs-build (#12527) | -| [234d68938](https://github.com/apache/airflow/commit/234d689387ef89222bfdee481987c37d1e79b5af) | 2020-11-21 | Fix S3ToSnowflakeOperator docstring (#12504) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [7ca0b6f12](https://github.com/apache/airflow/commit/7ca0b6f121c9cec6e25de130f86a56d7c7fbe38c) | 2020-11-18 | Enable Markdownlint rule MD003/heading-style/header-style (#12427) (#12438) | -| 
[ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [9276607b5](https://github.com/apache/airflow/commit/9276607b58bedfb2128c63fabec85d77e7dba07f) | 2020-11-12 | Add session_parameters option to snowflake_hook (#12071) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [d363adb61](https://github.com/apache/airflow/commit/d363adb6187e9cba1d965f424c95058fa933df1f) | 2020-10-31 | Adding SnowflakeOperator howto-documentation and example DAG (#11975) | -| [ecc3a4df0](https://github.com/apache/airflow/commit/ecc3a4df0da67f258c3ad04733d6e561d8266c93) | 2020-10-30 | Add autocommit property for snowflake connection (#10838) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [483068745](https://github.com/apache/airflow/commit/48306874538eea7cfd42358d5ebb59705204bfc4) | 2020-10-24 | Use Python 3 style super classes (#11806) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [d305876be](https://github.com/apache/airflow/commit/d305876bee328287ff391a29cc1cd632468cc731) | 2020-10-12 | Remove redundant None provided as default to dict.get() (#11448) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [0161b5ea2](https://github.com/apache/airflow/commit/0161b5ea2b805d62a0317e5cab6f797b92c8abf1) | 2020-09-26 | Increasing type coverage for multiple provider 
(#11159) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [d1bce91bb](https://github.com/apache/airflow/commit/d1bce91bb21d5a468fa6a0207156c28fe1ca6513) | 2020-08-25 | PyDocStyle: Enable D403: Capitalized first word of docstring (#10530) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [f6734b3b8](https://github.com/apache/airflow/commit/f6734b3b850d33d3712763f93c114e80f5af9ffb) | 2020-08-12 | Enable Sphinx spellcheck for doc generation (#10280) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [24c8e4c2d](https://github.com/apache/airflow/commit/24c8e4c2d6e359ecc2c7d6275dccc68de4a82832) | 2020-08-06 | Changes to all the constructors to remove the args argument (#10163) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [1c9374d25](https://github.com/apache/airflow/commit/1c9374d2573483dd66f5c35032e24140864e72c0) | 2020-06-03 | Add snowflake to slack operator (#9023) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [a546a10b1](https://github.com/apache/airflow/commit/a546a10b13b1f7a119071d8d2001cb17ccdcbbf7) | 2020-05-16 | Add Snowflake system test (#8422) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [cd635dd7d](https://github.com/apache/airflow/commit/cd635dd7d57cab2f41efac2d3d94e8f80a6c96d6) | 2020-05-10 | [AIRFLOW-5906] Add authenticator parameter to snowflake_hook (#8642) | -| [297ad3088](https://github.com/apache/airflow/commit/297ad30885eeb77c062f37df78a78f381e7d140e) | 2020-04-20 | Fix Snowflake hook conn id (#8423) | -| [cf1109d66](https://github.com/apache/airflow/commit/cf1109d661991943bb4861a0468ba4bc8946376d) | 2020-02-07 | [AIRFLOW-6755] Fix snowflake hook bug and tests (#7380) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [eee34ee80](https://github.com/apache/airflow/commit/eee34ee8080bb7bc81294c3fbd8be93bbf795367) | 2020-01-24 | [AIRFLOW-4204] Update super() calls (#7248) | -| [17af3beea](https://github.com/apache/airflow/commit/17af3beea5095d9aec81c06404614ca6d1057a45) | 2020-01-21 | [AIRFLOW-5816] Add S3 to snowflake operator (#6469) | diff --git a/airflow/providers/sqlite/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/sqlite/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index d359b8c271f8f..0000000000000 --- a/airflow/providers/sqlite/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,19 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/sqlite/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/sqlite/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 5ebbe5caa4f64..0000000000000 --- a/airflow/providers/sqlite/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,17 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [1bca31b54](https://github.com/apache/airflow/commit/1bca31b541c9c39fb8e79131e1dd4a868b5122d4) | 2020-08-04 | Add type annotations for Sqlite (#10157) | -| [4a0fdb630](https://github.com/apache/airflow/commit/4a0fdb6308400ddda38b0904cfe14b5872e5c0eb) | 2020-08-04 | Use conn_name_attr for SqliteHook connection (#10156) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [e13a14c87](https://github.com/apache/airflow/commit/e13a14c8730f4f633d996dd7d3468fe827136a84) | 2020-06-21 | 
Enable & Fix Whitespace related PyDocStyle Checks (#9458) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/sqlite/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/sqlite/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 528a7e085f91f..0000000000000 --- a/airflow/providers/sqlite/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,10 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/sqlite/BACKPORT_PROVIDER_README.md b/airflow/providers/sqlite/BACKPORT_PROVIDER_README.md deleted file mode 100644 index a1ff8af709c3c..0000000000000 --- a/airflow/providers/sqlite/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,131 +0,0 @@ - - - -# Package apache-airflow-backport-providers-sqlite - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for `sqlite` provider. All classes for this provider package -are in `airflow.providers.sqlite` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-sqlite` - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `sqlite` provider -are in the `airflow.providers.sqlite` package. 
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.sqlite` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------| -| [operators.sqlite.SqliteOperator](https://github.com/apache/airflow/blob/master/airflow/providers/sqlite/operators/sqlite.py) | [operators.sqlite_operator.SqliteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/sqlite_operator.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.sqlite` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------| -| [hooks.sqlite.SqliteHook](https://github.com/apache/airflow/blob/master/airflow/providers/sqlite/hooks/sqlite.py) | [hooks.sqlite_hook.SqliteHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/sqlite_hook.py) | - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| 
[cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [1bca31b54](https://github.com/apache/airflow/commit/1bca31b541c9c39fb8e79131e1dd4a868b5122d4) | 2020-08-04 | Add type annotations for Sqlite (#10157) | -| [4a0fdb630](https://github.com/apache/airflow/commit/4a0fdb6308400ddda38b0904cfe14b5872e5c0eb) | 2020-08-04 | Use conn_name_attr for SqliteHook connection (#10156) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [e13a14c87](https://github.com/apache/airflow/commit/e13a14c8730f4f633d996dd7d3468fe827136a84) | 2020-06-21 | Enable & Fix Whitespace related PyDocStyle Checks (#9458) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/sqlite/CHANGELOG.rst b/airflow/providers/sqlite/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/sqlite/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. 
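For readers tracking the class moves catalogued in the tables above, the change reduces to a one-line import swap per class. The sketch below is illustrative only and not part of this diff: the DAG id, schedule, and SQL statement are invented, while `sqlite_default` is the stock connection id that ships with Airflow.

```python
from datetime import datetime

from airflow import DAG

# Legacy Airflow 1.10.* location:
#   from airflow.operators.sqlite_operator import SqliteOperator
# Airflow 2.0 provider-package location (also importable on 1.10
# once the backport package is installed):
from airflow.providers.sqlite.operators.sqlite import SqliteOperator

# Hypothetical DAG, used only to show the new import path in context.
with DAG(
    dag_id="sqlite_provider_demo",
    start_date=datetime(2021, 1, 1),
    schedule_interval=None,
) as dag:
    create_table = SqliteOperator(
        task_id="create_table",
        sqlite_conn_id="sqlite_default",
        sql="CREATE TABLE IF NOT EXISTS notes (id INTEGER PRIMARY KEY, body TEXT)",
    )
```

The hook moves the same way: `airflow.hooks.sqlite_hook.SqliteHook` becomes `airflow.providers.sqlite.hooks.sqlite.SqliteHook`, with the connection now resolved through the `sqlite_conn_id` attribute (#10156).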
diff --git a/airflow/providers/sqlite/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/sqlite/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index 4b9fb4284bcc1..0000000000000 --- a/airflow/providers/sqlite/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,47 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | 
Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [1bca31b54](https://github.com/apache/airflow/commit/1bca31b541c9c39fb8e79131e1dd4a868b5122d4) | 2020-08-04 | Add type annotations for Sqlite (#10157) | -| [4a0fdb630](https://github.com/apache/airflow/commit/4a0fdb6308400ddda38b0904cfe14b5872e5c0eb) | 2020-08-04 | Use conn_name_attr for SqliteHook connection (#10156) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [e13a14c87](https://github.com/apache/airflow/commit/e13a14c8730f4f633d996dd7d3468fe827136a84) | 2020-06-21 | Enable & Fix Whitespace related PyDocStyle Checks (#9458) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/sqlite/README.md b/airflow/providers/sqlite/README.md deleted file mode 100644 index 5b065008c1d76..0000000000000 --- a/airflow/providers/sqlite/README.md +++ /dev/null @@ -1,133 +0,0 @@ - - - -# Package apache-airflow-providers-sqlite - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) - [Installation](#installation) - [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for the `sqlite` provider. All classes for this provider package -are in the `airflow.providers.sqlite` Python package. - - - -## Installation - -NOTE! - -In November 2020, a new version of pip (20.3) was released with a new 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation, depending on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -(`pip install --upgrade pip==20.2.4`) or, if you use pip 20.3, add the option -`--use-deprecated legacy-resolver` to your pip install command. - -You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-sqlite` - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `sqlite` provider -are in the `airflow.providers.sqlite` package.
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.sqlite` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------| -| [operators.sqlite.SqliteOperator](https://github.com/apache/airflow/blob/master/airflow/providers/sqlite/operators/sqlite.py) | [operators.sqlite_operator.SqliteOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/operators/sqlite_operator.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.sqlite` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------| -| [hooks.sqlite.SqliteHook](https://github.com/apache/airflow/blob/master/airflow/providers/sqlite/hooks/sqlite.py) | [hooks.sqlite_hook.SqliteHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/sqlite_hook.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases 
(#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [1bca31b54](https://github.com/apache/airflow/commit/1bca31b541c9c39fb8e79131e1dd4a868b5122d4) | 2020-08-04 | Add type annotations for Sqlite (#10157) | -| [4a0fdb630](https://github.com/apache/airflow/commit/4a0fdb6308400ddda38b0904cfe14b5872e5c0eb) | 2020-08-04 | Use conn_name_attr for SqliteHook connection (#10156) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [e13a14c87](https://github.com/apache/airflow/commit/e13a14c8730f4f633d996dd7d3468fe827136a84) | 2020-06-21 | Enable & Fix Whitespace related PyDocStyle Checks (#9458) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| 
[0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [059eda05f](https://github.com/apache/airflow/commit/059eda05f82fefce4410f44f761f945a27d83daf) | 2020-01-21 | [AIRFLOW-6610] Move software classes to providers package (#7231) | diff --git a/airflow/providers/ssh/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/ssh/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index c70dc37157416..0000000000000 --- a/airflow/providers/ssh/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,21 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [21cc7d729](https://github.com/apache/airflow/commit/21cc7d729827e9f3af0698bf647b2d41fc87b11c) | 2020-05-10 | Document default timeout value for SSHOperator (#8744) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [74c2a6ded](https://github.com/apache/airflow/commit/74c2a6ded4d615de8e1b1c04a25146344138e920) | 2020-03-23 | Add call to Super class in 'ftp' & 'ssh' providers (#7822) | -| [df24b4337](https://github.com/apache/airflow/commit/df24b43370ca5812273ecd91d35104e023a407e6) | 2020-02-14 | [AIRFLOW-6800] Close file object after parsing ssh config (#7415) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | diff --git a/airflow/providers/ssh/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/ssh/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index b0660bb72d7d0..0000000000000 --- a/airflow/providers/ssh/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,19 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [b6d5d1e98](https://github.com/apache/airflow/commit/b6d5d1e985ffc19867647ea0b35fa14c2cdfb59a) | 2020-10-01 | Strict type checking for SSH (#11216) | -| [68fa29bff](https://github.com/apache/airflow/commit/68fa29bff0203bc02b85ef93b7617770219c260a) | 2020-09-25 | Added support for encrypted private keys in SSHHook (#11097) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo 
in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [2248a5da1](https://github.com/apache/airflow/commit/2248a5da1d83ca901ec24d5809e718bbbd2c3894) | 2020-06-29 | Expose option: look_for_keys in ssh_hook via extras (#8793) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/ssh/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/ssh/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 7dc5b61443364..0000000000000 --- a/airflow/providers/ssh/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,11 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [27e637fbe](https://github.com/apache/airflow/commit/27e637fbe3f17737e898774ff151448f4f0aa129) | 2020-10-09 | Bugfix: Error in SSHOperator when command is None (#11361) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/ssh/BACKPORT_PROVIDER_README.md b/airflow/providers/ssh/BACKPORT_PROVIDER_README.md deleted file mode 100644 index a94717f0f499a..0000000000000 --- a/airflow/providers/ssh/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,145 +0,0 @@ - - - -# Package apache-airflow-backport-providers-ssh - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for `ssh` provider. All classes for this provider package -are in `airflow.providers.ssh` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. 
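Several entries above changed how `SSHHook` resolves its connection, notably `look_for_keys` exposed through connection extras (#8793). As a rough sketch of what that looks like in practice, and assuming a reachable host, the snippet below defines the `ssh_default` connection through Airflow's standard `AIRFLOW_CONN_<ID>` environment-variable mechanism; the host, user, and extra values are invented for illustration.

```python
import os

# Airflow parses AIRFLOW_CONN_<CONN_ID> env vars as connection URIs;
# query parameters land in the connection's extras, which is where
# SSHHook picks up options such as look_for_keys (#8793).
os.environ["AIRFLOW_CONN_SSH_DEFAULT"] = (
    "ssh://demo_user@demo-host.example.com:22"
    "?look_for_keys=false&no_host_key_check=true"
)

from airflow.providers.ssh.hooks.ssh import SSHHook

hook = SSHHook(ssh_conn_id="ssh_default")  # timeout defaults to 10s, documented in #8744
with hook.get_conn() as client:  # a plain paramiko.SSHClient
    _, stdout, _ = client.exec_command("uname -a")
    print(stdout.read().decode())
```

On Airflow 1.10 the same snippet works once the backport package described below is installed, since the backports register the `airflow.providers.ssh` import paths.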
- - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-ssh` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| paramiko | >=2.6.0 | -| pysftp | >=0.2.9 | -| sshtunnel | >=0.1.4,<0.2 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `ssh` provider -are in the `airflow.providers.ssh` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.ssh` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.ssh.SSHOperator](https://github.com/apache/airflow/blob/master/airflow/providers/ssh/operators/ssh.py) | [contrib.operators.ssh_operator.SSHOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/ssh_operator.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.ssh` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------| -| [hooks.ssh.SSHHook](https://github.com/apache/airflow/blob/master/airflow/providers/ssh/hooks/ssh.py) | [contrib.hooks.ssh_hook.SSHHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/ssh_hook.py) | - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [27e637fbe](https://github.com/apache/airflow/commit/27e637fbe3f17737e898774ff151448f4f0aa129) | 2020-10-09 | Bugfix: Error in SSHOperator when command is None (#11361) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| 
[5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [b6d5d1e98](https://github.com/apache/airflow/commit/b6d5d1e985ffc19867647ea0b35fa14c2cdfb59a) | 2020-10-01 | Strict type checking for SSH (#11216) | -| [68fa29bff](https://github.com/apache/airflow/commit/68fa29bff0203bc02b85ef93b7617770219c260a) | 2020-09-25 | Added support for encrypted private keys in SSHHook (#11097) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [2248a5da1](https://github.com/apache/airflow/commit/2248a5da1d83ca901ec24d5809e718bbbd2c3894) | 2020-06-29 | Expose option: look_for_keys in ssh_hook via extras (#8793) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [21cc7d729](https://github.com/apache/airflow/commit/21cc7d729827e9f3af0698bf647b2d41fc87b11c) | 2020-05-10 | Document default timeout value for SSHOperator (#8744) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [74c2a6ded](https://github.com/apache/airflow/commit/74c2a6ded4d615de8e1b1c04a25146344138e920) | 2020-03-23 | Add call to Super class in 'ftp' & 'ssh' providers (#7822) | -| [df24b4337](https://github.com/apache/airflow/commit/df24b43370ca5812273ecd91d35104e023a407e6) | 2020-02-14 | [AIRFLOW-6800] Close file object after parsing ssh config (#7415) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | diff --git a/airflow/providers/ssh/CHANGELOG.rst b/airflow/providers/ssh/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/ssh/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. 
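The same one-line import migration applies to the `ssh` classes listed in the moved-operators and moved-hooks tables above. A minimal sketch under the Airflow 2.0 paths follows; the DAG id, schedule, and command are assumptions made for illustration.

```python
from datetime import datetime

from airflow import DAG

# Legacy Airflow 1.10.* locations:
#   from airflow.contrib.operators.ssh_operator import SSHOperator
#   from airflow.contrib.hooks.ssh_hook import SSHHook
# Airflow 2.0 provider-package location:
from airflow.providers.ssh.operators.ssh import SSHOperator

# Hypothetical DAG, used only to show the new import path in context.
with DAG(
    dag_id="ssh_provider_demo",
    start_date=datetime(2021, 1, 1),
    schedule_interval=None,
) as dag:
    run_remote = SSHOperator(
        task_id="run_remote",
        ssh_conn_id="ssh_default",
        command="echo 'hello from the remote host'",
    )
```

`SSHHook` moves identically, from `airflow.contrib.hooks.ssh_hook` to `airflow.providers.ssh.hooks.ssh`.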
diff --git a/airflow/providers/ssh/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/ssh/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index 0677d4ccde703..0000000000000 --- a/airflow/providers/ssh/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,54 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [dd2095f4a](https://github.com/apache/airflow/commit/dd2095f4a8b07c9b1a4c279a3578cd1e23b71a1b) | 2020-11-10 | Simplify string expressions & Use f-string (#12216) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) 
| -| [27e637fbe](https://github.com/apache/airflow/commit/27e637fbe3f17737e898774ff151448f4f0aa129) | 2020-10-09 | Bugfix: Error in SSHOperator when command is None (#11361) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [b6d5d1e98](https://github.com/apache/airflow/commit/b6d5d1e985ffc19867647ea0b35fa14c2cdfb59a) | 2020-10-01 | Strict type checking for SSH (#11216) | -| [68fa29bff](https://github.com/apache/airflow/commit/68fa29bff0203bc02b85ef93b7617770219c260a) | 2020-09-25 | Added support for encrypted private keys in SSHHook (#11097) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [2248a5da1](https://github.com/apache/airflow/commit/2248a5da1d83ca901ec24d5809e718bbbd2c3894) | 2020-06-29 | Expose option: look_for_keys in ssh_hook via extras (#8793) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [21cc7d729](https://github.com/apache/airflow/commit/21cc7d729827e9f3af0698bf647b2d41fc87b11c) | 2020-05-10 | Document default timeout value for SSHOperator (#8744) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [74c2a6ded](https://github.com/apache/airflow/commit/74c2a6ded4d615de8e1b1c04a25146344138e920) | 2020-03-23 | Add call to Super class in 'ftp' & 'ssh' providers (#7822) | -| [df24b4337](https://github.com/apache/airflow/commit/df24b43370ca5812273ecd91d35104e023a407e6) | 2020-02-14 | [AIRFLOW-6800] Close file object after parsing ssh config (#7415) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | diff --git a/airflow/providers/ssh/README.md b/airflow/providers/ssh/README.md deleted file mode 100644 index 5f9a868151775..0000000000000 --- a/airflow/providers/ssh/README.md +++ /dev/null @@ -1,149 +0,0 @@ - - - -# Package apache-airflow-providers-ssh - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for `ssh` provider. All classes for this provider package -are in `airflow.providers.ssh` python package. - - - -## Installation - -NOTE! - -On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option -`--use-deprecated legacy-resolver` to your pip install command. - -You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-ssh` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| paramiko | >=2.6.0 | -| pysftp | >=0.2.9 | -| sshtunnel | >=0.1.4,<0.2 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `ssh` provider -are in the `airflow.providers.ssh` package. 
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.ssh` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.ssh.SSHOperator](https://github.com/apache/airflow/blob/master/airflow/providers/ssh/operators/ssh.py) | [contrib.operators.ssh_operator.SSHOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/ssh_operator.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.ssh` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------| -| [hooks.ssh.SSHHook](https://github.com/apache/airflow/blob/master/airflow/providers/ssh/hooks/ssh.py) | [contrib.hooks.ssh_hook.SSHHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/ssh_hook.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [dd2095f4a](https://github.com/apache/airflow/commit/dd2095f4a8b07c9b1a4c279a3578cd1e23b71a1b) | 2020-11-10 | Simplify string expressions & Use f-string (#12216) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| 
[b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [41bf172c1](https://github.com/apache/airflow/commit/41bf172c1dc75099f4f9d8b3f3350b4b1f523ef9) | 2020-11-04 | Simplify string expressions (#12093) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [27e637fbe](https://github.com/apache/airflow/commit/27e637fbe3f17737e898774ff151448f4f0aa129) | 2020-10-09 | Bugfix: Error in SSHOperator when command is None (#11361) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [b6d5d1e98](https://github.com/apache/airflow/commit/b6d5d1e985ffc19867647ea0b35fa14c2cdfb59a) | 2020-10-01 | Strict type checking for SSH (#11216) | -| [68fa29bff](https://github.com/apache/airflow/commit/68fa29bff0203bc02b85ef93b7617770219c260a) | 2020-09-25 | Added support for encrypted private keys in SSHHook (#11097) | -| [f3e87c503](https://github.com/apache/airflow/commit/f3e87c503081a3085dff6c7352640d7f08beb5bc) | 2020-09-22 | Add D202 pydocstyle check (#11032) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [2248a5da1](https://github.com/apache/airflow/commit/2248a5da1d83ca901ec24d5809e718bbbd2c3894) | 
2020-06-29 | Expose option: look_for_keys in ssh_hook via extras (#8793) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [21cc7d729](https://github.com/apache/airflow/commit/21cc7d729827e9f3af0698bf647b2d41fc87b11c) | 2020-05-10 | Document default timeout value for SSHOperator (#8744) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [74c2a6ded](https://github.com/apache/airflow/commit/74c2a6ded4d615de8e1b1c04a25146344138e920) | 2020-03-23 | Add call to Super class in 'ftp' & 'ssh' providers (#7822) | -| [df24b4337](https://github.com/apache/airflow/commit/df24b43370ca5812273ecd91d35104e023a407e6) | 2020-02-14 | [AIRFLOW-6800] Close file object after parsing ssh config (#7415) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [9a04013b0](https://github.com/apache/airflow/commit/9a04013b0e40b0d744ff4ac9f008491806d60df2) | 2020-01-27 | [AIRFLOW-6646][AIP-21] Move protocols classes to providers package (#7268) | diff --git a/airflow/providers/ssh/hooks/ssh.py b/airflow/providers/ssh/hooks/ssh.py index d420b1bf271d7..1b35db31fd54a 100644 --- a/airflow/providers/ssh/hooks/ssh.py +++ b/airflow/providers/ssh/hooks/ssh.py @@ -19,6 +19,7 @@ import getpass import os import warnings +from base64 import decodebytes from io import StringIO from typing import Dict, Optional, Tuple, Union @@ -30,7 +31,7 @@ from airflow.hooks.base import BaseHook -class SSHHook(BaseHook): +class SSHHook(BaseHook): # pylint: disable=too-many-instance-attributes """ Hook for ssh remote execution using Paramiko. 
ref: https://github.com/paramiko/paramiko @@ -72,7 +73,7 @@ def get_ui_field_behaviour() -> Dict: }, } - def __init__( + def __init__( # pylint: disable=too-many-statements self, ssh_conn_id: Optional[str] = None, remote_host: Optional[str] = None, @@ -99,6 +100,7 @@ def __init__( self.no_host_key_check = True self.allow_host_key_change = False self.host_proxy = None + self.host_key = None self.look_for_keys = True # Placeholder for deprecated __enter__ @@ -149,7 +151,9 @@ def __init__( and str(extra_options["look_for_keys"]).lower() == 'false' ): self.look_for_keys = False - + if "host_key" in extra_options and self.no_host_key_check is False: + decoded_host_key = decodebytes(extra_options["host_key"].encode('utf-8')) + self.host_key = paramiko.RSAKey(data=decoded_host_key) if self.pkey and self.key_file: raise AirflowException( "Params key_file and private_key both provided. Must provide no more than one." @@ -198,10 +202,18 @@ def get_conn(self) -> paramiko.SSHClient: 'This wont protect against Man-In-The-Middle attacks' ) client.load_system_host_keys() + if self.no_host_key_check: self.log.warning('No Host Key Verification. This wont protect against Man-In-The-Middle attacks') # Default is RejectPolicy client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) + else: + if self.host_key is not None: + client_host_keys = client.get_host_keys() + client_host_keys.add(self.remote_host, 'ssh-rsa', self.host_key) + else: + pass # will fall back to system host keys if none explicitly specified in conn extra + connect_kwargs = dict( hostname=self.remote_host, username=self.username,
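The net effect of the host_key change above: when host key checking is enabled, a connection can carry its own pinned server key instead of relying on auto-adding unknown hosts. A hedged sketch of such a connection, not part of this commit; the connection id, host, and key value are hypothetical placeholders, and the extras must be valid JSON:

# Hypothetical connection pinning a known server key via the new `host_key` extra.
from airflow.models.connection import Connection

ssh_pinned = Connection(
    conn_id='ssh_pinned_host',  # placeholder id
    conn_type='ssh',
    host='example.com',  # placeholder host
    login='airflow',
    extra='{"no_host_key_check": "false", "host_key": "<base64-encoded public key>"}',
)
# With no_host_key_check false, SSHHook base64-decodes the extra into a
# paramiko.RSAKey and registers it through client.get_host_keys().add(...),
# so paramiko's default RejectPolicy accepts only the pinned key.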
diff --git a/airflow/providers/tableau/CHANGELOG.rst b/airflow/providers/tableau/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/tableau/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. diff --git a/airflow/providers/tableau/__init__.py b/airflow/providers/tableau/__init__.py new file mode 100644 index 0000000000000..217e5db960782 --- /dev/null +++ b/airflow/providers/tableau/__init__.py @@ -0,0 +1,17 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/airflow/providers/tableau/example_dags/__init__.py b/airflow/providers/tableau/example_dags/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/airflow/providers/tableau/example_dags/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/airflow/providers/salesforce/example_dags/example_tableau_refresh_workbook.py b/airflow/providers/tableau/example_dags/example_tableau_refresh_workbook.py similarity index 92% rename from airflow/providers/salesforce/example_dags/example_tableau_refresh_workbook.py rename to airflow/providers/tableau/example_dags/example_tableau_refresh_workbook.py index 32b347ce62451..da1cc8be411cd 100644 --- a/airflow/providers/salesforce/example_dags/example_tableau_refresh_workbook.py +++ b/airflow/providers/tableau/example_dags/example_tableau_refresh_workbook.py @@ -23,8 +23,8 @@ from datetime import timedelta from airflow import DAG -from airflow.providers.salesforce.operators.tableau_refresh_workbook import TableauRefreshWorkbookOperator -from airflow.providers.salesforce.sensors.tableau_job_status import TableauJobStatusSensor +from airflow.providers.tableau.operators.tableau_refresh_workbook import TableauRefreshWorkbookOperator +from airflow.providers.tableau.sensors.tableau_job_status import TableauJobStatusSensor from airflow.utils.dates import days_ago DEFAULT_ARGS = { diff --git a/airflow/providers/tableau/hooks/__init__.py b/airflow/providers/tableau/hooks/__init__.py new file mode 100644 index 0000000000000..217e5db960782 --- /dev/null +++ b/airflow/providers/tableau/hooks/__init__.py @@ -0,0 +1,17 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied.
See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/airflow/providers/tableau/hooks/tableau.py b/airflow/providers/tableau/hooks/tableau.py new file mode 100644 index 0000000000000..51c2f98a2f8d6 --- /dev/null +++ b/airflow/providers/tableau/hooks/tableau.py @@ -0,0 +1,115 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +from enum import Enum +from typing import Any, Optional + +from tableauserverclient import Pager, PersonalAccessTokenAuth, Server, TableauAuth +from tableauserverclient.server import Auth + +from airflow.hooks.base import BaseHook + + +class TableauJobFinishCode(Enum): + """ + The finish code indicates the status of the job. + + .. seealso:: https://help.tableau.com/current/api/rest_api/en-us/REST/rest_api_ref.htm#query_job + + """ + + PENDING = -1 + SUCCESS = 0 + ERROR = 1 + CANCELED = 2 + + +class TableauHook(BaseHook): + """ + Connects to the Tableau Server instance and allows communication with it. + + .. seealso:: https://tableau.github.io/server-client-python/docs/ + + :param site_id: The id of the site the workbook belongs to. + It will connect to the default site if you don't provide an id. + :type site_id: Optional[str] + :param tableau_conn_id: The Tableau Connection id containing the credentials + to authenticate to the Tableau Server. + :type tableau_conn_id: str + """ + + conn_name_attr = 'tableau_conn_id' + default_conn_name = 'tableau_default' + conn_type = 'tableau' + hook_name = 'Tableau' + + def __init__(self, site_id: Optional[str] = None, tableau_conn_id: str = default_conn_name) -> None: + super().__init__() + self.tableau_conn_id = tableau_conn_id + self.conn = self.get_connection(self.tableau_conn_id) + self.site_id = site_id or self.conn.extra_dejson.get('site_id', '') + self.server = Server(self.conn.host, use_server_version=True) + self.tableau_conn = None + + def __enter__(self): + if not self.tableau_conn: + self.tableau_conn = self.get_conn() + return self + + def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None: + self.server.auth.sign_out() + + def get_conn(self) -> Auth.contextmgr: + """ + Signs in to the Tableau Server and automatically signs out when used as a context manager. + + :return: an authorized Tableau Server Context Manager object.
+ :rtype: tableauserverclient.server.Auth.contextmgr + """ + if self.conn.login and self.conn.password: + return self._auth_via_password() + if 'token_name' in self.conn.extra_dejson and 'personal_access_token' in self.conn.extra_dejson: + return self._auth_via_token() + raise NotImplementedError('No Authentication method found for given Credentials!') + + def _auth_via_password(self) -> Auth.contextmgr: + tableau_auth = TableauAuth( + username=self.conn.login, password=self.conn.password, site_id=self.site_id + ) + return self.server.auth.sign_in(tableau_auth) + + def _auth_via_token(self) -> Auth.contextmgr: + tableau_auth = PersonalAccessTokenAuth( + token_name=self.conn.extra_dejson['token_name'], + personal_access_token=self.conn.extra_dejson['personal_access_token'], + site_id=self.site_id, + ) + return self.server.auth.sign_in_with_personal_access_token(tableau_auth) + + def get_all(self, resource_name: str) -> Pager: + """ + Get all items of the given resource. + + .. seealso:: https://tableau.github.io/server-client-python/docs/page-through-results + + :param resource_name: The name of the resource to paginate. + For example: jobs or workbooks + :type resource_name: str + :return: all items by returning a Pager. + :rtype: tableauserverclient.Pager + """ + resource = getattr(self.server, resource_name) + return Pager(resource.get) diff --git a/airflow/providers/tableau/operators/__init__.py b/airflow/providers/tableau/operators/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/airflow/providers/tableau/operators/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/airflow/providers/tableau/operators/tableau_refresh_workbook.py b/airflow/providers/tableau/operators/tableau_refresh_workbook.py new file mode 100644 index 0000000000000..25ca77b7dafd4 --- /dev/null +++ b/airflow/providers/tableau/operators/tableau_refresh_workbook.py @@ -0,0 +1,97 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
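Before the operator below, a quick illustration of how the TableauHook defined above is meant to be used on its own: entering the context manager signs in with either password or personal-access-token credentials, and exiting signs out. A hedged sketch under the assumption that a 'tableau_default' connection is configured; the print is purely illustrative:

# Illustrative standalone use of TableauHook's context-manager protocol.
from airflow.providers.tableau.hooks.tableau import TableauHook

with TableauHook(site_id=None, tableau_conn_id='tableau_default') as tableau_hook:
    # get_all() returns a Pager that lazily fetches pages of WorkbookItem objects.
    for workbook in tableau_hook.get_all(resource_name='workbooks'):
        print(workbook.id, workbook.name)
# __exit__ calls server.auth.sign_out(), so no explicit cleanup is needed.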
+from typing import Optional + +from tableauserverclient import WorkbookItem + +from airflow.exceptions import AirflowException +from airflow.models import BaseOperator +from airflow.providers.tableau.hooks.tableau import TableauHook +from airflow.utils.decorators import apply_defaults + + +class TableauRefreshWorkbookOperator(BaseOperator): + """ + Refreshes a Tableau Workbook/Extract + + .. seealso:: https://tableau.github.io/server-client-python/docs/api-ref#workbooks + + :param workbook_name: The name of the workbook to refresh. + :type workbook_name: str + :param site_id: The id of the site the workbook belongs to. + :type site_id: Optional[str] + :param blocking: By default the extract refresh is blocking, meaning the operator waits until the refresh has finished. + :type blocking: bool + :param tableau_conn_id: The Tableau Connection id containing the credentials + to authenticate to the Tableau Server. + :type tableau_conn_id: str + """ + + @apply_defaults + def __init__( + self, + *, + workbook_name: str, + site_id: Optional[str] = None, + blocking: bool = True, + tableau_conn_id: str = 'tableau_default', + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.workbook_name = workbook_name + self.site_id = site_id + self.blocking = blocking + self.tableau_conn_id = tableau_conn_id + + def execute(self, context: dict) -> str: + """ + Executes the Tableau Extract Refresh and pushes the job id to XCom. + + :param context: The task context during execution. + :type context: dict + :return: the id of the job that executes the extract refresh + :rtype: str + """ + with TableauHook(self.site_id, self.tableau_conn_id) as tableau_hook: + workbook = self._get_workbook_by_name(tableau_hook) + + job_id = self._refresh_workbook(tableau_hook, workbook.id) + if self.blocking: + from airflow.providers.tableau.sensors.tableau_job_status import TableauJobStatusSensor + + TableauJobStatusSensor( + job_id=job_id, + site_id=self.site_id, + tableau_conn_id=self.tableau_conn_id, + task_id='wait_until_succeeded', + dag=None, + ).execute(context={}) + self.log.info('Workbook %s has been successfully refreshed.', self.workbook_name) + return job_id + + def _get_workbook_by_name(self, tableau_hook: TableauHook) -> WorkbookItem: + for workbook in tableau_hook.get_all(resource_name='workbooks'): + if workbook.name == self.workbook_name: + self.log.info('Found matching workbook with id %s', workbook.id) + return workbook + + raise AirflowException(f'Workbook {self.workbook_name} not found!') + + def _refresh_workbook(self, tableau_hook: TableauHook, workbook_id: str) -> str: + job = tableau_hook.server.workbooks.refresh(workbook_id) + self.log.info('Refreshing Workbook %s...', self.workbook_name) + return job.id diff --git a/airflow/providers/tableau/provider.yaml b/airflow/providers/tableau/provider.yaml new file mode 100644 index 0000000000000..e777947b9382b --- /dev/null +++ b/airflow/providers/tableau/provider.yaml @@ -0,0 +1,49 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License.
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +--- +package-name: apache-airflow-providers-tableau +name: Tableau +description: | + `Tableau <https://www.tableau.com/>`__ + +versions: + - 1.0.0 + +integrations: + - integration-name: Tableau + external-doc-url: https://www.tableau.com/ + logo: /integration-logos/tableau/tableau.png + tags: [service] + +operators: + - integration-name: Tableau + python-modules: + - airflow.providers.tableau.operators.tableau_refresh_workbook + +sensors: + - integration-name: Tableau + python-modules: + - airflow.providers.tableau.sensors.tableau_job_status + +hooks: + - integration-name: Tableau + python-modules: + - airflow.providers.tableau.hooks.tableau + +hook-class-names: + - airflow.providers.tableau.hooks.tableau.TableauHook diff --git a/airflow/providers/tableau/sensors/__init__.py b/airflow/providers/tableau/sensors/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/airflow/providers/tableau/sensors/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/airflow/providers/tableau/sensors/tableau_job_status.py b/airflow/providers/tableau/sensors/tableau_job_status.py new file mode 100644 index 0000000000000..518e2f087bd08 --- /dev/null +++ b/airflow/providers/tableau/sensors/tableau_job_status.py @@ -0,0 +1,76 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License.
+from typing import Optional + +from airflow.exceptions import AirflowException +from airflow.providers.tableau.hooks.tableau import TableauHook, TableauJobFinishCode +from airflow.sensors.base import BaseSensorOperator +from airflow.utils.decorators import apply_defaults + + +class TableauJobFailedException(AirflowException): + """An exception that indicates that a Job failed to complete.""" + + +class TableauJobStatusSensor(BaseSensorOperator): + """ + Watches the status of a Tableau Server Job. + + .. seealso:: https://tableau.github.io/server-client-python/docs/api-ref#jobs + + :param job_id: The job to watch. + :type job_id: str + :param site_id: The id of the site the workbook belongs to. + :type site_id: Optional[str] + :param tableau_conn_id: The Tableau Connection id containing the credentials + to authenticate to the Tableau Server. + :type tableau_conn_id: str + """ + + template_fields = ('job_id',) + + @apply_defaults + def __init__( + self, + *, + job_id: str, + site_id: Optional[str] = None, + tableau_conn_id: str = 'tableau_default', + **kwargs, + ) -> None: + super().__init__(**kwargs) + self.tableau_conn_id = tableau_conn_id + self.job_id = job_id + self.site_id = site_id + + def poke(self, context: dict) -> bool: + """ + Pokes until the job has successfully finished. + + :param context: The task context during execution. + :type context: dict + :return: True if it succeeded and False if not. + :rtype: bool + """ + with TableauHook(self.site_id, self.tableau_conn_id) as tableau_hook: + finish_code = TableauJobFinishCode( + int(tableau_hook.server.jobs.get_by_id(self.job_id).finish_code) + ) + self.log.info('Current finishCode is %s (%s)', finish_code.name, finish_code.value) + if finish_code in [TableauJobFinishCode.ERROR, TableauJobFinishCode.CANCELED]: + raise TableauJobFailedException('The Tableau Refresh Workbook Job failed!') + return finish_code == TableauJobFinishCode.SUCCESS diff --git a/airflow/providers/telegram/CHANGELOG.rst b/airflow/providers/telegram/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/telegram/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider.
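Putting the new Tableau operator and sensor together: with blocking=False the operator returns the job id immediately (pushed as its XCom return value), and a templated sensor can poll it, which is the same pattern as the example DAG moved into the tableau package above. A hedged sketch with illustrative dag, task, and workbook names:

from airflow import DAG
from airflow.providers.tableau.operators.tableau_refresh_workbook import TableauRefreshWorkbookOperator
from airflow.providers.tableau.sensors.tableau_job_status import TableauJobStatusSensor
from airflow.utils.dates import days_ago

with DAG(dag_id='tableau_refresh_example', start_date=days_ago(1), schedule_interval=None) as dag:
    refresh = TableauRefreshWorkbookOperator(
        task_id='refresh_workbook',
        workbook_name='MyWorkbook',  # hypothetical workbook name
        blocking=False,  # return the job id instead of waiting inline
    )
    wait = TableauJobStatusSensor(
        task_id='wait_for_refresh',
        # job_id is a template field, so the operator's XCom can be pulled here.
        job_id="{{ task_instance.xcom_pull(task_ids='refresh_workbook') }}",
    )
    refresh >> wait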
diff --git a/airflow/providers/telegram/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/telegram/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index ae01547693cb7..0000000000000 --- a/airflow/providers/telegram/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,8 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [cd66450b4](https://github.com/apache/airflow/commit/cd66450b4ee2a219ddc847970255e420ed679700) | 2020-12-05 | Add Telegram hook and operator (#11850) | diff --git a/airflow/providers/trino/CHANGELOG.rst b/airflow/providers/trino/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/trino/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. diff --git a/airflow/providers/trino/__init__.py b/airflow/providers/trino/__init__.py new file mode 100644 index 0000000000000..217e5db960782 --- /dev/null +++ b/airflow/providers/trino/__init__.py @@ -0,0 +1,17 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/airflow/providers/trino/hooks/__init__.py b/airflow/providers/trino/hooks/__init__.py new file mode 100644 index 0000000000000..217e5db960782 --- /dev/null +++ b/airflow/providers/trino/hooks/__init__.py @@ -0,0 +1,17 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/airflow/providers/trino/hooks/trino.py b/airflow/providers/trino/hooks/trino.py new file mode 100644 index 0000000000000..0914d04b32e4b --- /dev/null +++ b/airflow/providers/trino/hooks/trino.py @@ -0,0 +1,191 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import os +from typing import Any, Iterable, Optional + +import trino +from trino.exceptions import DatabaseError +from trino.transaction import IsolationLevel + +from airflow import AirflowException +from airflow.configuration import conf +from airflow.hooks.dbapi import DbApiHook +from airflow.models import Connection + + +class TrinoException(Exception): + """Trino exception""" + + +def _boolify(value): + if isinstance(value, bool): + return value + if isinstance(value, str): + if value.lower() == 'false': + return False + elif value.lower() == 'true': + return True + return value + + +class TrinoHook(DbApiHook): + """ + Interact with Trino through trino package. 
+ + >>> ph = TrinoHook() + >>> sql = "SELECT count(1) AS num FROM airflow.static_babynames" + >>> ph.get_records(sql) + [[340698]] + """ + + conn_name_attr = 'trino_conn_id' + default_conn_name = 'trino_default' + conn_type = 'trino' + hook_name = 'Trino' + + def get_conn(self) -> Connection: + """Returns a connection object""" + db = self.get_connection( + self.trino_conn_id # type: ignore[attr-defined] # pylint: disable=no-member + ) + extra = db.extra_dejson + auth = None + if db.password and extra.get('auth') == 'kerberos': + raise AirflowException("Kerberos authorization doesn't support password.") + elif db.password: + auth = trino.auth.BasicAuthentication(db.login, db.password) + elif extra.get('auth') == 'kerberos': + auth = trino.auth.KerberosAuthentication( + config=extra.get('kerberos__config', os.environ.get('KRB5_CONFIG')), + service_name=extra.get('kerberos__service_name'), + mutual_authentication=_boolify(extra.get('kerberos__mutual_authentication', False)), + force_preemptive=_boolify(extra.get('kerberos__force_preemptive', False)), + hostname_override=extra.get('kerberos__hostname_override'), + sanitize_mutual_error_response=_boolify( + extra.get('kerberos__sanitize_mutual_error_response', True) + ), + principal=extra.get('kerberos__principal', conf.get('kerberos', 'principal')), + delegate=_boolify(extra.get('kerberos__delegate', False)), + ca_bundle=extra.get('kerberos__ca_bundle'), + ) + + trino_conn = trino.dbapi.connect( + host=db.host, + port=db.port, + user=db.login, + source=db.extra_dejson.get('source', 'airflow'), + http_scheme=db.extra_dejson.get('protocol', 'http'), + catalog=db.extra_dejson.get('catalog', 'hive'), + schema=db.schema, + auth=auth, + isolation_level=self.get_isolation_level(), # type: ignore[func-returns-value] + ) + if extra.get('verify') is not None: + # Unfortunately the verify parameter is not yet available via the public API. + # The PR is merged in the trino library, but has not been released. + # See: https://github.com/trinodb/trino-python-client/pull/31 + trino_conn._http_session.verify = _boolify(extra['verify']) # pylint: disable=protected-access + + return trino_conn + + def get_isolation_level(self) -> Any: + """Returns an isolation level""" + db = self.get_connection( + self.trino_conn_id # type: ignore[attr-defined] # pylint: disable=no-member + ) + isolation_level = db.extra_dejson.get('isolation_level', 'AUTOCOMMIT').upper() + return getattr(IsolationLevel, isolation_level, IsolationLevel.AUTOCOMMIT) + + @staticmethod + def _strip_sql(sql: str) -> str: + return sql.strip().rstrip(';') + + def get_records(self, hql, parameters: Optional[dict] = None): + """Get a set of records from Trino""" + try: + return super().get_records(self._strip_sql(hql), parameters) + except DatabaseError as e: + raise TrinoException(e) + + def get_first(self, hql: str, parameters: Optional[dict] = None) -> Any: + """Returns only the first row, regardless of how many rows the query returns.""" + try: + return super().get_first(self._strip_sql(hql), parameters) + except DatabaseError as e: + raise TrinoException(e) + + def get_pandas_df(self, hql, parameters=None, **kwargs): + """Get a pandas dataframe from a sql query.""" + import pandas + + cursor = self.get_cursor() + try: + cursor.execute(self._strip_sql(hql), parameters) + data = cursor.fetchall() + except DatabaseError as e: + raise TrinoException(e) + column_descriptions = cursor.description + if data: + df = pandas.DataFrame(data, **kwargs) + df.columns = [c[0] for c in column_descriptions] + else: + df = pandas.DataFrame(**kwargs) + return df + + def run( + self, + hql, + autocommit: bool = False, + parameters: Optional[dict] = None, + ) -> None: + """Execute the statement against Trino. Can be used to create views.""" + return super().run(sql=self._strip_sql(hql), parameters=parameters) + + def insert_rows( + self, + table: str, + rows: Iterable[tuple], + target_fields: Optional[Iterable[str]] = None, + commit_every: int = 0, + replace: bool = False, + **kwargs, + ) -> None: + """ + A generic way to insert a set of tuples into a table. + + :param table: Name of the target table + :type table: str + :param rows: The rows to insert into the table + :type rows: iterable of tuples + :param target_fields: The names of the columns to fill in the table + :type target_fields: iterable of strings + :param commit_every: The maximum number of rows to insert in one + transaction. Set to 0 to insert all rows in one transaction. + :type commit_every: int + :param replace: Whether to replace instead of insert + :type replace: bool + """ + if self.get_isolation_level() == IsolationLevel.AUTOCOMMIT: + self.log.info( + 'Transactions are not enabled in trino connection. ' + 'Please use the isolation_level property to enable it. ' + 'Falling back to insert all rows in one transaction.' + ) + commit_every = 0 + + super().insert_rows(table, rows, target_fields, commit_every)
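One behavior of the hook above worth spelling out: get_isolation_level() reads the isolation_level connection extra (defaulting to AUTOCOMMIT), and insert_rows collapses to a single transaction whenever autocommit is in effect. A hedged sketch of a connection that opts into transactional inserts; the host and credential values are placeholders:

# Hypothetical Trino connection enabling a real isolation level for insert_rows.
from airflow.models.connection import Connection

trino_conn = Connection(
    conn_id='trino_default',
    conn_type='trino',
    host='trino.example.com',  # placeholder host
    port=8080,
    login='airflow',
    schema='default',
    extra='{"catalog": "hive", "isolation_level": "READ_COMMITTED"}',
)
# get_isolation_level() upper-cases the extra and resolves it against
# trino.transaction.IsolationLevel, falling back to AUTOCOMMIT if unknown.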
diff --git a/airflow/providers/trino/provider.yaml b/airflow/providers/trino/provider.yaml new file mode 100644 index 0000000000000..a59aaae6abbe1 --- /dev/null +++ b/airflow/providers/trino/provider.yaml @@ -0,0 +1,39 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +--- +package-name: apache-airflow-providers-trino +name: Trino +description: | + `Trino <https://trino.io/>`__ + +versions: + - 1.0.0 + +integrations: + - integration-name: Trino + external-doc-url: https://trino.io/docs/ + logo: /integration-logos/trino/trino-og.png + tags: [software] + +hooks: + - integration-name: Trino + python-modules: + - airflow.providers.trino.hooks.trino + +hook-class-names: + - airflow.providers.trino.hooks.trino.TrinoHook diff --git a/airflow/providers/vertica/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/vertica/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index 36c8f5ed3bca5..0000000000000 --- a/airflow/providers/vertica/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,18 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators.
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/vertica/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/vertica/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 78a8a3e24fffa..0000000000000 --- a/airflow/providers/vertica/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,15 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [867bc44ac](https://github.com/apache/airflow/commit/867bc44acaae71a8121310931b75cab7423fc8b0) | 2020-07-23 | Add type annotations to providers/vertica (#9936) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/vertica/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/vertica/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 528a7e085f91f..0000000000000 --- 
a/airflow/providers/vertica/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,10 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/vertica/BACKPORT_PROVIDER_README.md b/airflow/providers/vertica/BACKPORT_PROVIDER_README.md deleted file mode 100644 index d5c8fa62bb2b7..0000000000000 --- a/airflow/providers/vertica/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,135 +0,0 @@ - - - -# Package apache-airflow-backport-providers-vertica - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for `vertica` provider. All classes for this provider package -are in `airflow.providers.vertica` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-vertica` - -## PIP requirements - -| PIP package | Version required | -|:---------------|:-------------------| -| vertica-python | >=0.5.1 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `vertica` provider -are in the `airflow.providers.vertica` package. 
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.vertica` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.vertica.VerticaOperator](https://github.com/apache/airflow/blob/master/airflow/providers/vertica/operators/vertica.py) | [contrib.operators.vertica_operator.VerticaOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/vertica_operator.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.vertica` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------| -| [hooks.vertica.VerticaHook](https://github.com/apache/airflow/blob/master/airflow/providers/vertica/hooks/vertica.py) | [contrib.hooks.vertica_hook.VerticaHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/vertica_hook.py) | - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 
2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [867bc44ac](https://github.com/apache/airflow/commit/867bc44acaae71a8121310931b75cab7423fc8b0) | 2020-07-23 | Add type annotations to providers/vertica (#9936) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/vertica/CHANGELOG.rst b/airflow/providers/vertica/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/vertica/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. 
The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. diff --git a/airflow/providers/vertica/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/vertica/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index 29734bd609152..0000000000000 --- a/airflow/providers/vertica/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,45 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [3a72fc824](https://github.com/apache/airflow/commit/3a72fc82475df3b745a00a7b5e34eef9d27b3329) | 2020-11-14 | Fix Description of Provider Docs (#12361) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 
0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [867bc44ac](https://github.com/apache/airflow/commit/867bc44acaae71a8121310931b75cab7423fc8b0) | 2020-07-23 | Add type annotations to providers/vertica (#9936) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/vertica/README.md b/airflow/providers/vertica/README.md deleted file mode 100644 index 376cbf43bdd1a..0000000000000 --- a/airflow/providers/vertica/README.md +++ /dev/null @@ -1,138 +0,0 @@ - - - -# Package apache-airflow-providers-vertica - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [Moved operators](#moved-operators) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for `vertica` provider. All classes for this provider package -are in `airflow.providers.vertica` python package. - - - -## Installation - -NOTE! - -On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option -`--use-deprecated legacy-resolver` to your pip install command. - -You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-vertica` - -## PIP requirements - -| PIP package | Version required | -|:---------------|:-------------------| -| vertica-python | >=0.5.1 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `vertica` provider -are in the `airflow.providers.vertica` package. 
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - - -### Moved operators - -| Airflow 2.0 operators: `airflow.providers.vertica` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.vertica.VerticaOperator](https://github.com/apache/airflow/blob/master/airflow/providers/vertica/operators/vertica.py) | [contrib.operators.vertica_operator.VerticaOperator](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/operators/vertica_operator.py) | - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.vertica` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------| -| [hooks.vertica.VerticaHook](https://github.com/apache/airflow/blob/master/airflow/providers/vertica/hooks/vertica.py) | [contrib.hooks.vertica_hook.VerticaHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/contrib/hooks/vertica_hook.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [2037303ee](https://github.com/apache/airflow/commit/2037303eef93fd36ab13746b045d1c1fee6aa143) | 2020-11-29 | Adds support for Connection/Hook discovery from providers (#12466) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [3a72fc824](https://github.com/apache/airflow/commit/3a72fc82475df3b745a00a7b5e34eef9d27b3329) | 2020-11-14 | Fix Description of Provider Docs (#12361) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | 
Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [867bc44ac](https://github.com/apache/airflow/commit/867bc44acaae71a8121310931b75cab7423fc8b0) | 2020-07-23 | Add type annotations to providers/vertica (#9936) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing 
for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/yandex/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/yandex/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index 167fe113ce085..0000000000000 --- a/airflow/providers/yandex/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,22 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [40bf8f28f](https://github.com/apache/airflow/commit/40bf8f28f97f17f40d993d207ea740eba54593ee) | 2020-06-18 | Detect automatically the lack of reference to the guide in the operator descriptions (#9290) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [59a4f2669](https://github.com/apache/airflow/commit/59a4f26699125b1594496940d62be78d7732b4be) | 2020-04-17 | stop rendering some class docs in wrong place (#8095) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18 | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) | -| [ee1ab7697](https://github.com/apache/airflow/commit/ee1ab7697c6106b7107b285d8fe9ad01766dc19e) | 2020-02-14 | [AIRFLOW-6531] Initial Yandex.Cloud Dataproc support (#7252) | diff --git a/airflow/providers/yandex/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/yandex/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 30005e1b94d9f..0000000000000 --- a/airflow/providers/yandex/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,18 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [5093245d6](https://github.com/apache/airflow/commit/5093245d6f77a370fbd2f9e3df35ac6acf46a1c4) | 2020-09-30 | Strict type coverage for Oracle and Yandex provider (#11198) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces 
in Providers README (#10483) | -| [f6734b3b8](https://github.com/apache/airflow/commit/f6734b3b850d33d3712763f93c114e80f5af9ffb) | 2020-08-12 | Enable Sphinx spellcheck for doc generation (#10280) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/yandex/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/yandex/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 528a7e085f91f..0000000000000 --- a/airflow/providers/yandex/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,10 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/yandex/BACKPORT_PROVIDER_README.md b/airflow/providers/yandex/BACKPORT_PROVIDER_README.md deleted file mode 100644 index b0661de2d0291..0000000000000 --- a/airflow/providers/yandex/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,148 +0,0 @@ - - - -# Package apache-airflow-backport-providers-yandex - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [New operators](#new-operators) - - [Hooks](#hooks) - - [New hooks](#new-hooks) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for `yandex` provider. All classes for this provider package -are in `airflow.providers.yandex` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. 
- - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-yandex` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| yandexcloud | >=0.22.0 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `yandex` provider -are in the `airflow.providers.yandex` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - -### New operators - -| New Airflow 2.0 operators: `airflow.providers.yandex` package | -|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.yandexcloud_dataproc.DataprocCreateClusterOperator](https://github.com/apache/airflow/blob/master/airflow/providers/yandex/operators/yandexcloud_dataproc.py) | -| [operators.yandexcloud_dataproc.DataprocCreateHiveJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/yandex/operators/yandexcloud_dataproc.py) | -| [operators.yandexcloud_dataproc.DataprocCreateMapReduceJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/yandex/operators/yandexcloud_dataproc.py) | -| [operators.yandexcloud_dataproc.DataprocCreatePysparkJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/yandex/operators/yandexcloud_dataproc.py) | -| [operators.yandexcloud_dataproc.DataprocCreateSparkJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/yandex/operators/yandexcloud_dataproc.py) | -| [operators.yandexcloud_dataproc.DataprocDeleteClusterOperator](https://github.com/apache/airflow/blob/master/airflow/providers/yandex/operators/yandexcloud_dataproc.py) | - - - -## Hooks - - -### New hooks - -| New Airflow 2.0 hooks: `airflow.providers.yandex` package | -|:------------------------------------------------------------------------------------------------------------------------------------------------| -| [hooks.yandex.YandexCloudBaseHook](https://github.com/apache/airflow/blob/master/airflow/providers/yandex/hooks/yandex.py) | -| [hooks.yandexcloud_dataproc.DataprocHook](https://github.com/apache/airflow/blob/master/airflow/providers/yandex/hooks/yandexcloud_dataproc.py) | - - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | 
-|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [5093245d6](https://github.com/apache/airflow/commit/5093245d6f77a370fbd2f9e3df35ac6acf46a1c4) | 2020-09-30 | Strict type coverage for Oracle and Yandex provider (#11198) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [f6734b3b8](https://github.com/apache/airflow/commit/f6734b3b850d33d3712763f93c114e80f5af9ffb) | 2020-08-12 | Enable Sphinx spellcheck for doc generation (#10280) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [40bf8f28f](https://github.com/apache/airflow/commit/40bf8f28f97f17f40d993d207ea740eba54593ee) | 2020-06-18 | Detect automatically the lack of reference to the guide in the operator descriptions (#9290) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining 
wrongly named operators. (#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [59a4f2669](https://github.com/apache/airflow/commit/59a4f26699125b1594496940d62be78d7732b4be) | 2020-04-17 | stop rendering some class docs in wrong place (#8095) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18 | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) | -| [ee1ab7697](https://github.com/apache/airflow/commit/ee1ab7697c6106b7107b285d8fe9ad01766dc19e) | 2020-02-14 | [AIRFLOW-6531] Initial Yandex.Cloud Dataproc support (#7252) | diff --git a/airflow/providers/yandex/CHANGELOG.rst b/airflow/providers/yandex/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/yandex/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. 
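
The READMEs removed in this diff were the only usage-oriented docs shipped inside the yandex provider package, and their operator/hook tables describe the Yandex.Cloud Dataproc API surface without showing it in use. For orientation, here is a minimal sketch of how those operators wire into a DAG. The connection defaults, availability zone, bucket name, and file URI below are illustrative assumptions, not values taken from this change, and constructor defaults should be checked against the provider's own example DAG.

```python
# Hedged usage sketch for the Dataproc operators listed in the deleted README above.
# Zone, bucket, and job-file values are assumed placeholders.
from datetime import datetime

from airflow import DAG
from airflow.providers.yandex.operators.yandexcloud_dataproc import (
    DataprocCreateClusterOperator,
    DataprocCreatePysparkJobOperator,
    DataprocDeleteClusterOperator,
)

with DAG(
    dag_id="example_yandexcloud_dataproc",
    start_date=datetime(2021, 1, 1),
    schedule_interval=None,
    tags=["example"],
) as dag:
    # Spin up a temporary Data Proc cluster; most arguments have provider-side
    # defaults, and the two given here are assumed placeholders.
    create_cluster = DataprocCreateClusterOperator(
        task_id="create_cluster",
        zone="ru-central1-b",          # assumed availability zone
        s3_bucket="my-dataproc-logs",  # assumed bucket for job logs
    )

    # Run a PySpark job on the cluster created above.
    pyspark_job = DataprocCreatePysparkJobOperator(
        task_id="run_pyspark_job",
        main_python_file_uri="s3a://my-dataproc-bucket/jobs/job.py",  # assumed script location
    )

    # Tear the cluster down whether or not the job succeeded.
    delete_cluster = DataprocDeleteClusterOperator(
        task_id="delete_cluster",
        trigger_rule="all_done",
    )

    create_cluster >> pyspark_job >> delete_cluster
```

In this provider the create-cluster task pushes its cluster id to XCom for downstream job operators to pick up, which is why the sketch leaves `cluster_id` unset; that behavior is an assumption worth verifying against the hook before relying on it.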
diff --git a/airflow/providers/yandex/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/yandex/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index 250ca6aeafd83..0000000000000 --- a/airflow/providers/yandex/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,53 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [bd90136aa](https://github.com/apache/airflow/commit/bd90136aaf5035e3234fe545b79a3e4aad21efe2) | 2020-11-30 | Move operator guides to provider documentation packages (#12681) | -| [ef4af2135](https://github.com/apache/airflow/commit/ef4af2135171c6e451f1407ea1a280ea875f2175) | 2020-11-22 | Move providers docs to separate package + Spell-check in a common job with docs-build (#12527) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| 
[16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [5093245d6](https://github.com/apache/airflow/commit/5093245d6f77a370fbd2f9e3df35ac6acf46a1c4) | 2020-09-30 | Strict type coverage for Oracle and Yandex provider (#11198) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [f6734b3b8](https://github.com/apache/airflow/commit/f6734b3b850d33d3712763f93c114e80f5af9ffb) | 2020-08-12 | Enable Sphinx spellcheck for doc generation (#10280) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [40bf8f28f](https://github.com/apache/airflow/commit/40bf8f28f97f17f40d993d207ea740eba54593ee) | 2020-06-18 | Detect automatically the lack of reference to the guide in the operator descriptions (#9290) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [59a4f2669](https://github.com/apache/airflow/commit/59a4f26699125b1594496940d62be78d7732b4be) | 2020-04-17 | stop rendering some class docs in wrong place (#8095) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18 | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) | -| [ee1ab7697](https://github.com/apache/airflow/commit/ee1ab7697c6106b7107b285d8fe9ad01766dc19e) | 2020-02-14 | [AIRFLOW-6531] Initial Yandex.Cloud Dataproc support (#7252) | diff --git a/airflow/providers/yandex/README.md b/airflow/providers/yandex/README.md deleted file mode 100644 index 858b33317955c..0000000000000 --- a/airflow/providers/yandex/README.md +++ /dev/null @@ -1,152 +0,0 @@ - - - -# Package apache-airflow-providers-yandex - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Operators](#operators) - - [New operators](#new-operators) - - [Hooks](#hooks) - - [New hooks](#new-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for `yandex` provider. All classes for this provider package -are in `airflow.providers.yandex` python package. - - - -## Installation - -NOTE! - -On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option -`--use-deprecated legacy-resolver` to your pip install command. 
- -You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-yandex` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| yandexcloud | >=0.22.0 | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `yandex` provider -are in the `airflow.providers.yandex` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Operators - - -### New operators - -| New Airflow 2.0 operators: `airflow.providers.yandex` package | -|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [operators.yandexcloud_dataproc.DataprocCreateClusterOperator](https://github.com/apache/airflow/blob/master/airflow/providers/yandex/operators/yandexcloud_dataproc.py) | -| [operators.yandexcloud_dataproc.DataprocCreateHiveJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/yandex/operators/yandexcloud_dataproc.py) | -| [operators.yandexcloud_dataproc.DataprocCreateMapReduceJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/yandex/operators/yandexcloud_dataproc.py) | -| [operators.yandexcloud_dataproc.DataprocCreatePysparkJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/yandex/operators/yandexcloud_dataproc.py) | -| [operators.yandexcloud_dataproc.DataprocCreateSparkJobOperator](https://github.com/apache/airflow/blob/master/airflow/providers/yandex/operators/yandexcloud_dataproc.py) | -| [operators.yandexcloud_dataproc.DataprocDeleteClusterOperator](https://github.com/apache/airflow/blob/master/airflow/providers/yandex/operators/yandexcloud_dataproc.py) | - - - -## Hooks - - -### New hooks - -| New Airflow 2.0 hooks: `airflow.providers.yandex` package | -|:------------------------------------------------------------------------------------------------------------------------------------------------| -| [hooks.yandex.YandexCloudBaseHook](https://github.com/apache/airflow/blob/master/airflow/providers/yandex/hooks/yandex.py) | -| [hooks.yandexcloud_dataproc.DataprocHook](https://github.com/apache/airflow/blob/master/airflow/providers/yandex/hooks/yandexcloud_dataproc.py) | - - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [9b39f2478](https://github.com/apache/airflow/commit/9b39f24780e85f859236672e9060b2fbeee81b36) | 2020-12-08 | Add support for dynamic connection form fields per provider (#12558) | -| [bd90136aa](https://github.com/apache/airflow/commit/bd90136aaf5035e3234fe545b79a3e4aad21efe2) | 2020-11-30 | Move operator guides to provider documentation packages (#12681) | -| [ef4af2135](https://github.com/apache/airflow/commit/ef4af2135171c6e451f1407ea1a280ea875f2175) | 2020-11-22 | Move providers docs to separate package + Spell-check 
in a common job with docs-build (#12527) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [8c42cf1b0](https://github.com/apache/airflow/commit/8c42cf1b00c90f0d7f11b8a3a455381de8e003c5) | 2020-11-03 | Use PyUpgrade to use Python 3.6 features (#11447) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [5093245d6](https://github.com/apache/airflow/commit/5093245d6f77a370fbd2f9e3df35ac6acf46a1c4) | 2020-09-30 | Strict type coverage for Oracle and Yandex provider (#11198) | -| [9549274d1](https://github.com/apache/airflow/commit/9549274d110f689a0bd709db829a4d69e274eed9) | 2020-09-09 | Upgrade black to 20.8b1 (#10818) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" 
(#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [f6734b3b8](https://github.com/apache/airflow/commit/f6734b3b850d33d3712763f93c114e80f5af9ffb) | 2020-08-12 | Enable Sphinx spellcheck for doc generation (#10280) | -| [cdec30125](https://github.com/apache/airflow/commit/cdec3012542b45d23a05f62d69110944ba542e2a) | 2020-08-07 | Add correct signature to all operators and sensors (#10205) | -| [aeea71274](https://github.com/apache/airflow/commit/aeea71274d4527ff2351102e94aa38bda6099e7f) | 2020-08-02 | Remove `args` parameter from provider operator constructors (#10097) | -| [7d24b088c](https://github.com/apache/airflow/commit/7d24b088cd736cfa18f9214e4c9d6ce2d5865f3d) | 2020-07-25 | Stop using start_date in default_args in example_dags (2) (#9985) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [40bf8f28f](https://github.com/apache/airflow/commit/40bf8f28f97f17f40d993d207ea740eba54593ee) | 2020-06-18 | Detect automatically the lack of reference to the guide in the operator descriptions (#9290) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [1d36b0303](https://github.com/apache/airflow/commit/1d36b0303b8632fce6de78ca4e782ae26ee06fea) | 2020-05-23 | Fix references in docs (#8984) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [59a4f2669](https://github.com/apache/airflow/commit/59a4f26699125b1594496940d62be78d7732b4be) | 2020-04-17 | stop rendering some class docs in wrong place (#8095) | -| [3320e432a](https://github.com/apache/airflow/commit/3320e432a129476dbc1c55be3b3faa3326a635bc) | 2020-02-24 | [AIRFLOW-6817] Lazy-load `airflow.DAG` to keep user-facing API untouched (#7517) | -| [4d03e33c1](https://github.com/apache/airflow/commit/4d03e33c115018e30fa413c42b16212481ad25cc) | 2020-02-22 | [AIRFLOW-6817] remove imports from `airflow/__init__.py`, replaced implicit imports with explicit imports, added entry to `UPDATING.MD` - squashed/rebased (#7456) | -| [9cbd7de6d](https://github.com/apache/airflow/commit/9cbd7de6d115795aba8bfb8addb060bfdfbdf87b) | 2020-02-18 | [AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412) | -| [ee1ab7697](https://github.com/apache/airflow/commit/ee1ab7697c6106b7107b285d8fe9ad01766dc19e) | 2020-02-14 | [AIRFLOW-6531] Initial Yandex.Cloud Dataproc support (#7252) | diff --git a/airflow/providers/zendesk/BACKPORT_PROVIDER_CHANGES_2020.06.24.md b/airflow/providers/zendesk/BACKPORT_PROVIDER_CHANGES_2020.06.24.md deleted file mode 100644 index 3fbe69dbbdbeb..0000000000000 --- a/airflow/providers/zendesk/BACKPORT_PROVIDER_CHANGES_2020.06.24.md +++ /dev/null @@ -1,19 +0,0 @@ - - -### Release 2020.6.24 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/zendesk/BACKPORT_PROVIDER_CHANGES_2020.10.05.md b/airflow/providers/zendesk/BACKPORT_PROVIDER_CHANGES_2020.10.05.md deleted file mode 100644 index 487f0e843e9fe..0000000000000 --- a/airflow/providers/zendesk/BACKPORT_PROVIDER_CHANGES_2020.10.05.md +++ /dev/null @@ -1,13 +0,0 @@ - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [720912f67](https://github.com/apache/airflow/commit/720912f67b3af0bdcbac64d6b8bf6d51c6247e26) | 2020-10-02 | Strict type check for multiple providers (#11229) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | diff --git a/airflow/providers/zendesk/BACKPORT_PROVIDER_CHANGES_2020.10.29.md b/airflow/providers/zendesk/BACKPORT_PROVIDER_CHANGES_2020.10.29.md deleted file mode 100644 index 099a3c3966bc7..0000000000000 --- a/airflow/providers/zendesk/BACKPORT_PROVIDER_CHANGES_2020.10.29.md +++ /dev/null @@ -1,11 +0,0 @@ - - -### Release 2020.10.29 - -| Commit | Committed | Subject | 
-|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [03ff06715](https://github.com/apache/airflow/commit/03ff067152ed3202b7d4beb0fe9b371a0ef51058) | 2020-10-06 | Add type annotations to ZendeskHook, update unit test (#10888) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | diff --git a/airflow/providers/zendesk/BACKPORT_PROVIDER_README.md b/airflow/providers/zendesk/BACKPORT_PROVIDER_README.md deleted file mode 100644 index ab20e8c5f4080..0000000000000 --- a/airflow/providers/zendesk/BACKPORT_PROVIDER_README.md +++ /dev/null @@ -1,122 +0,0 @@ - - - -# Package apache-airflow-backport-providers-zendesk - -Release: 2020.10.29 - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class summary](#provider-classes-summary) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 2020.10.29](#release-20201029) - - [Release 2020.10.5](#release-2020105) - - [Release 2020.6.24](#release-2020624) - -## Backport package - -This is a backport providers package for `zendesk` provider. All classes for this provider package -are in `airflow.providers.zendesk` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. - - - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install apache-airflow-backport-providers-zendesk` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| zdesk | | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `zendesk` provider -are in the `airflow.providers.zendesk` package. 
You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.zendesk` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------| -| [hooks.zendesk.ZendeskHook](https://github.com/apache/airflow/blob/master/airflow/providers/zendesk/hooks/zendesk.py) | [hooks.zendesk_hook.ZendeskHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/zendesk_hook.py) | - - - -## Releases - -### Release 2020.10.29 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:---------------------------------------------------------------| -| [b680bbc0b](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-24 | Generated backport providers readmes/setup for 2020.10.29 | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [03ff06715](https://github.com/apache/airflow/commit/03ff067152ed3202b7d4beb0fe9b371a0ef51058) | 2020-10-06 | Add type annotations to ZendeskHook, update unit test (#10888) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | - - -### Release 2020.10.5 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-----------------------------------------------------------| -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [720912f67](https://github.com/apache/airflow/commit/720912f67b3af0bdcbac64d6b8bf6d51c6247e26) | 2020-10-02 | Strict type check for multiple providers (#11229) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | - - -### Release 2020.6.24 - -| Commit | Committed | Subject | 
-|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------| -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 release of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/zendesk/CHANGELOG.rst b/airflow/providers/zendesk/CHANGELOG.rst new file mode 100644 index 0000000000000..cef7dda80708a --- /dev/null +++ b/airflow/providers/zendesk/CHANGELOG.rst @@ -0,0 +1,25 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. 
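The "Moved hooks" tables above reduce to a one-line import change for Zendesk users. A minimal sketch of that migration, assuming Airflow 2.0 with `apache-airflow-providers-zendesk` installed (the connection id is illustrative):

```python
# Airflow 1.10.* (contrib-era) location -- removed in Airflow 2.0:
# from airflow.hooks.zendesk_hook import ZendeskHook

# Airflow 2.0 provider-package location, per the "Moved hooks" table:
from airflow.providers.zendesk.hooks.zendesk import ZendeskHook

hook = ZendeskHook(zendesk_conn_id="zendesk_default")  # hypothetical conn id
```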
diff --git a/airflow/providers/zendesk/PROVIDER_CHANGES_1.0.0.md b/airflow/providers/zendesk/PROVIDER_CHANGES_1.0.0.md deleted file mode 100644 index 1a7def6a5d459..0000000000000 --- a/airflow/providers/zendesk/PROVIDER_CHANGES_1.0.0.md +++ /dev/null @@ -1,42 +0,0 @@ - - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| [59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [03ff06715](https://github.com/apache/airflow/commit/03ff067152ed3202b7d4beb0fe9b371a0ef51058) | 2020-10-06 | Add type annotations to ZendeskHook, update unit test (#10888) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport 
release 2020.09.07 (#11238) | -| [720912f67](https://github.com/apache/airflow/commit/720912f67b3af0bdcbac64d6b8bf6d51c6247e26) | 2020-10-02 | Strict type check for multiple providers (#11229) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. (#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers/zendesk/README.md b/airflow/providers/zendesk/README.md deleted file mode 100644 index 8e21457cf39bb..0000000000000 --- a/airflow/providers/zendesk/README.md +++ /dev/null @@ -1,122 +0,0 @@ - - - -# Package apache-airflow-providers-zendesk - -Release: 1.0.0 - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -- [PIP requirements](#pip-requirements) -- [Provider class 
summary](#provider-classes-summary) - - [Hooks](#hooks) - - [Moved hooks](#moved-hooks) -- [Releases](#releases) - - [Release 1.0.0](#release-100) - -## Provider package - -This is a provider package for `zendesk` provider. All classes for this provider package -are in `airflow.providers.zendesk` python package. - - - -## Installation - -NOTE! - -On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option -`--use-deprecated legacy-resolver` to your pip install command. - -You can install this package on top of an existing airflow 2.* installation via -`pip install apache-airflow-providers-zendesk` - -## PIP requirements - -| PIP package | Version required | -|:--------------|:-------------------| -| zdesk | | - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `zendesk` provider -are in the `airflow.providers.zendesk` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - - -## Hooks - - - -### Moved hooks - -| Airflow 2.0 hooks: `airflow.providers.zendesk` package | Airflow 1.10.* previous location (usually `airflow.contrib`) | -|:----------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------| -| [hooks.zendesk.ZendeskHook](https://github.com/apache/airflow/blob/master/airflow/providers/zendesk/hooks/zendesk.py) | [hooks.zendesk_hook.ZendeskHook](https://github.com/apache/airflow/blob/v1-10-stable/airflow/hooks/zendesk_hook.py) | - - - -## Releases - -### Release 1.0.0 - -| Commit | Committed | Subject | -|:-----------------------------------------------------------------------------------------------|:------------|:-------------------------------------------------------------------------------| -| [b40dffa08](https://github.com/apache/airflow/commit/b40dffa08547b610162f8cacfa75847f3c4ca364) | 2020-12-08 | Rename remaing modules to match AIP-21 (#12917) | -| [c34ef853c](https://github.com/apache/airflow/commit/c34ef853c890e08f5468183c03dc8f3f3ce84af2) | 2020-11-20 | Separate out documentation building per provider (#12444) | -| [008035450](https://github.com/apache/airflow/commit/00803545023b096b8db4fbd6eb473843096d7ce4) | 2020-11-18 | Update provider READMEs for 1.0.0b2 batch release (#12449) | -| [ae7cb4a1e](https://github.com/apache/airflow/commit/ae7cb4a1e2a96351f1976cf5832615e24863e05d) | 2020-11-17 | Update wrong commit hash in backport provider changes (#12390) | -| [6889a333c](https://github.com/apache/airflow/commit/6889a333cff001727eb0a66e375544a28c9a5f03) | 2020-11-15 | Improvements for operators and hooks ref docs (#12366) | -| [7825e8f59](https://github.com/apache/airflow/commit/7825e8f59034645ab3247229be83a3aa90baece1) | 2020-11-13 | Docs installation improvements (#12304) | -| [85a18e13d](https://github.com/apache/airflow/commit/85a18e13d9dec84275283ff69e34704b60d54a75) | 2020-11-09 | Point at pypi project pages for cross-dependency of provider packages (#12212) | -| 
[59eb5de78](https://github.com/apache/airflow/commit/59eb5de78c70ee9c7ae6e4cba5c7a2babb8103ca) | 2020-11-09 | Update provider READMEs for up-coming 1.0.0beta1 releases (#12206) | -| [b2a28d159](https://github.com/apache/airflow/commit/b2a28d1590410630d66966aa1f2b2a049a8c3b32) | 2020-11-09 | Moves provider packages scripts to dev (#12082) | -| [4e8f9cc8d](https://github.com/apache/airflow/commit/4e8f9cc8d02b29c325b8a5a76b4837671bdf5f68) | 2020-11-03 | Enable Black - Python Auto Formmatter (#9550) | -| [5a439e84e](https://github.com/apache/airflow/commit/5a439e84eb6c0544dc6c3d6a9f4ceeb2172cd5d0) | 2020-10-26 | Prepare providers release 0.0.2a1 (#11855) | -| [872b1566a](https://github.com/apache/airflow/commit/872b1566a11cb73297e657ff325161721b296574) | 2020-10-25 | Generated backport providers readmes/setup for 2020.10.29 (#11826) | -| [349b0811c](https://github.com/apache/airflow/commit/349b0811c3022605426ba57d30936240a7c2848a) | 2020-10-20 | Add D200 pydocstyle check (#11688) | -| [16e712971](https://github.com/apache/airflow/commit/16e7129719f1c0940aef2a93bed81368e997a746) | 2020-10-13 | Added support for provider packages for Airflow 2.0 (#11487) | -| [03ff06715](https://github.com/apache/airflow/commit/03ff067152ed3202b7d4beb0fe9b371a0ef51058) | 2020-10-06 | Add type annotations to ZendeskHook, update unit test (#10888) | -| [0a0e1af80](https://github.com/apache/airflow/commit/0a0e1af80038ef89974c3c8444461fe867945daa) | 2020-10-03 | Fix Broken Markdown links in Providers README TOC (#11249) | -| [ca4238eb4](https://github.com/apache/airflow/commit/ca4238eb4d9a2aef70eb641343f59ee706d27d13) | 2020-10-02 | Fixed month in backport packages to October (#11242) | -| [5220e4c38](https://github.com/apache/airflow/commit/5220e4c3848a2d2c81c266ef939709df9ce581c5) | 2020-10-02 | Prepare Backport release 2020.09.07 (#11238) | -| [720912f67](https://github.com/apache/airflow/commit/720912f67b3af0bdcbac64d6b8bf6d51c6247e26) | 2020-10-02 | Strict type check for multiple providers (#11229) | -| [fdd9b6f65](https://github.com/apache/airflow/commit/fdd9b6f65b608c516b8a062b058972d9a45ec9e3) | 2020-08-25 | Enable Black on Providers Packages (#10543) | -| [3696c34c2](https://github.com/apache/airflow/commit/3696c34c28c6bc7b442deab999d9ecba24ed0e34) | 2020-08-24 | Fix typo in the word "release" (#10528) | -| [ee7ca128a](https://github.com/apache/airflow/commit/ee7ca128a17937313566f2badb6cc569c614db94) | 2020-08-22 | Fix broken Markdown refernces in Providers README (#10483) | -| [d0e7db402](https://github.com/apache/airflow/commit/d0e7db4024806af35e3c9a2cae460fdeedd4d2ec) | 2020-06-19 | Fixed release number for fresh release (#9408) | -| [12af6a080](https://github.com/apache/airflow/commit/12af6a08009b8776e00d8a0aab92363eb8c4e8b1) | 2020-06-19 | Final cleanup for 2020.6.23rc1 release preparation (#9404) | -| [c7e5bce57](https://github.com/apache/airflow/commit/c7e5bce57fe7f51cefce4f8a41ce408ac5675d13) | 2020-06-19 | Prepare backport release candidate for 2020.6.23rc1 (#9370) | -| [f6bd817a3](https://github.com/apache/airflow/commit/f6bd817a3aac0a16430fc2e3d59c1f17a69a15ac) | 2020-06-16 | Introduce 'transfers' packages (#9320) | -| [0b0e4f7a4](https://github.com/apache/airflow/commit/0b0e4f7a4cceff3efe15161fb40b984782760a34) | 2020-05-26 | Preparing for RC3 relase of backports (#9026) | -| [00642a46d](https://github.com/apache/airflow/commit/00642a46d019870c4decb3d0e47c01d6a25cb88c) | 2020-05-26 | Fixed name of 20 remaining wrongly named operators. 
(#8994) | -| [375d1ca22](https://github.com/apache/airflow/commit/375d1ca229464617780623c61c6e8a1bf570c87f) | 2020-05-19 | Release candidate 2 for backport packages 2020.05.20 (#8898) | -| [12c5e5d8a](https://github.com/apache/airflow/commit/12c5e5d8ae25fa633efe63ccf4db389e2b796d79) | 2020-05-17 | Prepare release candidate for backport packages (#8891) | -| [f3521fb0e](https://github.com/apache/airflow/commit/f3521fb0e36733d8bd356123e56a453fd37a6dca) | 2020-05-16 | Regenerate readme files for backport package release (#8886) | -| [92585ca4c](https://github.com/apache/airflow/commit/92585ca4cb375ac879f4ab331b3a063106eb7b92) | 2020-05-15 | Added automated release notes generation for backport operators (#8807) | -| [4bde99f13](https://github.com/apache/airflow/commit/4bde99f1323d72f6c84c1548079d5e98fc0a2a9a) | 2020-03-23 | Make airflow/providers pylint compatible (#7802) | -| [05443c6dc](https://github.com/apache/airflow/commit/05443c6dc8100e791446bbcc0df04de6e34017bb) | 2020-03-23 | Add missing call to Super class in remaining providers (#7828) | -| [97a429f9d](https://github.com/apache/airflow/commit/97a429f9d0cf740c5698060ad55f11e93cb57b55) | 2020-02-02 | [AIRFLOW-6714] Remove magic comments about UTF-8 (#7338) | -| [c42a375e7](https://github.com/apache/airflow/commit/c42a375e799e5adb3f9536616372dc90ff47e6c8) | 2020-01-27 | [AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265) | diff --git a/airflow/providers_manager.py b/airflow/providers_manager.py index d3b7ffbaa5c64..d29ec70952026 100644 --- a/airflow/providers_manager.py +++ b/airflow/providers_manager.py @@ -25,9 +25,9 @@ from typing import Any, Dict, NamedTuple, Set import jsonschema -import yaml from wtforms import Field +import airflow.utils.yaml as yaml from airflow.utils.entry_points import entry_points_with_dist try: diff --git a/airflow/secrets/local_filesystem.py b/airflow/secrets/local_filesystem.py index c63bb91e10cbc..3ec20e12d41f3 100644 --- a/airflow/secrets/local_filesystem.py +++ b/airflow/secrets/local_filesystem.py @@ -25,8 +25,7 @@ from json import JSONDecodeError from typing import TYPE_CHECKING, Any, Dict, List, Optional, Set, Tuple -import yaml - +import airflow.utils.yaml as yaml from airflow.exceptions import ( AirflowException, AirflowFileParseException, diff --git a/airflow/sensors/sql.py b/airflow/sensors/sql.py index 573c7cd81bc99..2a76ea175b97a 100644 --- a/airflow/sensors/sql.py +++ b/airflow/sensors/sql.py @@ -84,6 +84,7 @@ def _get_hook(self): 'presto', 'snowflake', 'sqlite', + 'trino', 'vertica', } if conn.conn_type not in allowed_conn_type: diff --git a/airflow/sentry.py b/airflow/sentry.py index 8dc9091513845..62eac9abf7610 100644 --- a/airflow/sentry.py +++ b/airflow/sentry.py @@ -21,7 +21,7 @@ from functools import wraps from airflow.configuration import conf -from airflow.utils.session import provide_session +from airflow.utils.session import find_session_idx, provide_session from airflow.utils.state import State log = logging.getLogger(__name__) @@ -149,14 +149,21 @@ def add_breadcrumbs(self, task_instance, session=None): def enrich_errors(self, func): """Wrap TaskInstance._run_raw_task to support task specific tags and breadcrumbs.""" + session_args_idx = find_session_idx(func) @wraps(func) - def wrapper(task_instance, *args, session=None, **kwargs): + def wrapper(task_instance, *args, **kwargs): # Wrapping the _run_raw_task function with push_scope to contain # tags and breadcrumbs to a specific Task Instance + + try: + session = kwargs.get('session', args[session_args_idx]) 
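+                # The session may arrive positionally or as a keyword;
+                # find_session_idx() (see airflow/utils/session.py below)
+                # supplies its positional slot in the wrapped signature,
+                # replacing the old keyword-only `session=None` parameter.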
+ except IndexError: + session = None + with sentry_sdk.push_scope(): try: - return func(task_instance, *args, session=session, **kwargs) + return func(task_instance, *args, **kwargs) except Exception as e: self.add_tagging(task_instance) self.add_breadcrumbs(task_instance, session=session) diff --git a/airflow/serialization/schema.json b/airflow/serialization/schema.json index c831334b9d60c..3bc11ee9a9cee 100644 --- a/airflow/serialization/schema.json +++ b/airflow/serialization/schema.json @@ -145,6 +145,7 @@ "execution_timeout": { "$ref": "#/definitions/timedelta" }, "retry_delay": { "$ref": "#/definitions/timedelta" }, "retry_exponential_backoff": { "type": "boolean" }, + "max_retry_delay": { "$ref": "#/definitions/timedelta" }, "params": { "$ref": "#/definitions/dict" }, "priority_weight": { "type": "number" }, "weight_rule": { "type": "string" }, @@ -167,7 +168,12 @@ "type": "array", "items": { "type": "string" }, "uniqueItems": true - } + }, + "doc": { "type": "string" }, + "doc_md": { "type": "string" }, + "doc_json": { "type": "string" }, + "doc_yaml": { "type": "string" }, + "doc_rst": { "type": "string" } }, "additionalProperties": true }, diff --git a/airflow/serialization/serialized_objects.py b/airflow/serialization/serialized_objects.py index 38df10a5a0a4d..b6cfdf26bd06e 100644 --- a/airflow/serialization/serialized_objects.py +++ b/airflow/serialization/serialized_objects.py @@ -240,7 +240,10 @@ def _serialize(cls, var: Any) -> Any: # Unfortunately there is no support for r return str(get_python_source(var)) elif isinstance(var, set): # FIXME: casts set to list in customized serialization in future. - return cls._encode([cls._serialize(v) for v in var], type_=DAT.SET) + try: + return cls._encode(sorted(cls._serialize(v) for v in var), type_=DAT.SET) + except TypeError: + return cls._encode([cls._serialize(v) for v in var], type_=DAT.SET) elif isinstance(var, tuple): # FIXME: casts tuple to list in customized serialization in future. 
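+            # Unlike the set branch above -- which now sorts for a stable
+            # serialized form, falling back to raw iteration order when the
+            # elements are not mutually comparable -- tuples are ordered
+            # already, so they are encoded as-is.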
return cls._encode([cls._serialize(v) for v in var], type_=DAT.TUPLE) @@ -290,6 +293,7 @@ def _deserialize(cls, encoded_var: Any) -> Any: # pylint: disable=too-many-retu elif type_ == DAT.SET: return {cls._deserialize(v) for v in var} elif type_ == DAT.TUPLE: + # pylint: disable=consider-using-generator return tuple([cls._deserialize(v) for v in var]) else: raise TypeError(f'Invalid type {type_!s} in deserialization.') @@ -451,7 +455,7 @@ def deserialize_operator(cls, encoded_op: Dict[str, Any]) -> BaseOperator: v = set(v) elif k == "subdag": v = SerializedDAG.deserialize_dag(v) - elif k in {"retry_delay", "execution_timeout", "sla"}: + elif k in {"retry_delay", "execution_timeout", "sla", "max_retry_delay"}: v = cls._deserialize_timedelta(v) elif k in encoded_op["template_fields"]: pass diff --git a/airflow/stats.py b/airflow/stats.py index 2797afd6159a4..34677daff2362 100644 --- a/airflow/stats.py +++ b/airflow/stats.py @@ -243,6 +243,7 @@ class AllowListValidator: def __init__(self, allow_list=None): if allow_list: + # pylint: disable=consider-using-generator self.allow_list = tuple([item.strip().lower() for item in allow_list.split(',')]) else: self.allow_list = None @@ -342,30 +343,33 @@ def timer(self, stat=None, *args, tags=None, **kwargs): """Timer metric that can be cancelled""" if stat and self.allow_list_validator.test(stat): tags = tags or [] - return Timer(self.dogstatsd.timer(stat, *args, tags=tags, **kwargs)) + return Timer(self.dogstatsd.timed(stat, *args, tags=tags, **kwargs)) return Timer() class _Stats(type): + factory = None instance: Optional[StatsLogger] = None def __getattr__(cls, name): + if not cls.instance: + try: + cls.instance = cls.factory() + except (socket.gaierror, ImportError) as e: + log.error("Could not configure StatsClient: %s, using DummyStatsLogger instead.", e) + cls.instance = DummyStatsLogger() return getattr(cls.instance, name) def __init__(cls, *args, **kwargs): super().__init__(cls) - if cls.__class__.instance is None: - try: - is_datadog_enabled_defined = conf.has_option('metrics', 'statsd_datadog_enabled') - if is_datadog_enabled_defined and conf.getboolean('metrics', 'statsd_datadog_enabled'): - cls.__class__.instance = cls.get_dogstatsd_logger() - elif conf.getboolean('metrics', 'statsd_on'): - cls.__class__.instance = cls.get_statsd_logger() - else: - cls.__class__.instance = DummyStatsLogger() - except (socket.gaierror, ImportError) as e: - log.error("Could not configure StatsClient: %s, using DummyStatsLogger instead.", e) - cls.__class__.instance = DummyStatsLogger() + if cls.__class__.factory is None: + is_datadog_enabled_defined = conf.has_option('metrics', 'statsd_datadog_enabled') + if is_datadog_enabled_defined and conf.getboolean('metrics', 'statsd_datadog_enabled'): + cls.__class__.factory = cls.get_dogstatsd_logger + elif conf.getboolean('metrics', 'statsd_on'): + cls.__class__.factory = cls.get_statsd_logger + else: + cls.__class__.factory = DummyStatsLogger @classmethod def get_statsd_logger(cls): diff --git a/airflow/task/task_runner/standard_task_runner.py b/airflow/task/task_runner/standard_task_runner.py index 505b2252de0f4..bb566b275f2a1 100644 --- a/airflow/task/task_runner/standard_task_runner.py +++ b/airflow/task/task_runner/standard_task_runner.py @@ -121,3 +121,11 @@ def terminate(self): if self._rc is None: # Something else reaped it before we had a chance, so let's just "guess" at an error code. 
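+            # (-9 follows the "killed by SIGKILL" return-code convention,
+            # so the OOM log message added below covers this guess too.)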
self._rc = -9 + + if self._rc == -9: + # If either we or psutil gives out a -9 return code, it likely means + # an OOM happened + self.log.error( + 'Job %s was killed before it finished (likely due to running out of memory)', + self._task_instance.job_id, + ) diff --git a/airflow/utils/cli.py b/airflow/utils/cli.py index 68a0b448d7868..fc73dfc8db729 100644 --- a/airflow/utils/cli.py +++ b/airflow/utils/cli.py @@ -110,17 +110,19 @@ def _build_metrics(func_name, namespace): """ from airflow.models import Log + sub_commands_to_check = {'users', 'connections'} sensitive_fields = {'-p', '--password', '--conn-password'} full_command = list(sys.argv) - for idx, command in enumerate(full_command): # pylint: disable=too-many-nested-blocks - if command in sensitive_fields: - # For cases when password is passed as "--password xyz" (with space between key and value) - full_command[idx + 1] = "*" * 8 - else: - # For cases when password is passed as "--password=xyz" (with '=' between key and value) - for sensitive_field in sensitive_fields: - if command.startswith(f'{sensitive_field}='): - full_command[idx] = f'{sensitive_field}={"*" * 8}' + if full_command[1] in sub_commands_to_check: # pylint: disable=too-many-nested-blocks + for idx, command in enumerate(full_command): + if command in sensitive_fields: + # For cases when password is passed as "--password xyz" (with space between key and value) + full_command[idx + 1] = "*" * 8 + else: + # For cases when password is passed as "--password=xyz" (with '=' between key and value) + for sensitive_field in sensitive_fields: + if command.startswith(f'{sensitive_field}='): + full_command[idx] = f'{sensitive_field}={"*" * 8}' metrics = { 'sub_command': func_name, diff --git a/airflow/utils/dag_processing.py b/airflow/utils/dag_processing.py index 7e98c111a0d24..5ba44e3cf239b 100644 --- a/airflow/utils/dag_processing.py +++ b/airflow/utils/dag_processing.py @@ -30,7 +30,7 @@ from datetime import datetime, timedelta from importlib import import_module from multiprocessing.connection import Connection as MultiprocessingConnection -from typing import Any, Callable, Dict, List, NamedTuple, Optional, Tuple, Union, cast +from typing import TYPE_CHECKING, Any, Callable, Dict, List, NamedTuple, Optional, Tuple, Union, cast from setproctitle import setproctitle # pylint: disable=no-name-in-module from sqlalchemy import or_ @@ -52,6 +52,9 @@ from airflow.utils.session import provide_session from airflow.utils.state import State +if TYPE_CHECKING: + import pathlib + class AbstractDagFileProcessorProcess(metaclass=ABCMeta): """Processes a DAG file. See SchedulerJob.process_file() for more details.""" @@ -136,7 +139,6 @@ def waitable_handle(self): class DagParsingStat(NamedTuple): """Information on processing progress""" - file_paths: List[str] done: bool all_files_processed: bool @@ -489,7 +491,7 @@ class DagFileProcessorManager(LoggingMixin): # pylint: disable=too-many-instanc def __init__( self, - dag_directory: str, + dag_directory: Union[str, "pathlib.Path"], max_runs: int, processor_factory: Callable[[str, List[CallbackRequest]], AbstractDagFileProcessorProcess], processor_timeout: timedelta, @@ -510,6 +512,15 @@ def __init__( self._async_mode = async_mode self._parsing_start_time: Optional[int] = None + # Set the signal conn in to non-blocking mode, so that attempting to + # send when the buffer is full errors, rather than hangs for-ever + # attempting to send (this is to avoid deadlocks!) 
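+        # (With the pipe non-blocking, a full buffer makes send() raise
+        # BlockingIOError, which the parsing loop below catches and simply
+        # retries on its next pass.)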
+ # + # Don't do this in sync_mode, as we _need_ the DagParsingStat sent to + # continue the scheduler + if self._async_mode: + os.set_blocking(self._signal_conn.fileno(), False) + self._parallelism = conf.getint('scheduler', 'parsing_processes') if 'sqlite' in conf.get('core', 'sql_alchemy_conn') and self._parallelism > 1: self.log.warning( @@ -618,6 +629,7 @@ def _run_parsing_loop(self): ready = multiprocessing.connection.wait(self.waitables.keys(), timeout=poll_time) if self._signal_conn in ready: agent_signal = self._signal_conn.recv() + self.log.debug("Received %s signal from DagFileProcessorAgent", agent_signal) if agent_signal == DagParsingSignal.TERMINATE_MANAGER: self.terminate() @@ -690,12 +702,21 @@ def _run_parsing_loop(self): all_files_processed = all(self.get_last_finish_time(x) is not None for x in self.file_paths) max_runs_reached = self.max_runs_reached() - dag_parsing_stat = DagParsingStat( - self._file_paths, - max_runs_reached, - all_files_processed, - ) - self._signal_conn.send(dag_parsing_stat) + try: + self._signal_conn.send( + DagParsingStat( + max_runs_reached, + all_files_processed, + ) + ) + except BlockingIOError: + # Try again next time around the loop! + + # It is better to fail, than it is deadlock. This should + # "almost never happen" since the DagParsingStat object is + # small, and in async mode this stat is not actually _required_ + # for normal operation (It only drives "max runs") + self.log.debug("BlockingIOError recived trying to send DagParsingStat, ignoring") if max_runs_reached: self.log.info( diff --git a/airflow/utils/db.py b/airflow/utils/db.py index 20b4b0bf9aa2a..4a9816c68a376 100644 --- a/airflow/utils/db.py +++ b/airflow/utils/db.py @@ -508,6 +508,16 @@ def create_default_connections(session=None): ), session, ) + merge_conn( + Connection( + conn_id="trino_default", + conn_type="trino", + host="localhost", + schema="hive", + port=3400, + ), + session, + ) merge_conn( Connection( conn_id="vertica_default", diff --git a/airflow/utils/dot_renderer.py b/airflow/utils/dot_renderer.py index 990c7a7d126fb..4123f99303b9e 100644 --- a/airflow/utils/dot_renderer.py +++ b/airflow/utils/dot_renderer.py @@ -17,13 +17,17 @@ # specific language governing permissions and limitations # under the License. 
"""Renderer DAG (tasks and dependencies) to the graphviz object.""" -from typing import List, Optional +from typing import Dict, List, Optional import graphviz from airflow.models import TaskInstance +from airflow.models.baseoperator import BaseOperator from airflow.models.dag import DAG +from airflow.models.taskmixin import TaskMixin from airflow.utils.state import State +from airflow.utils.task_group import TaskGroup +from airflow.www.views import dag_edges def _refine_color(color: str): @@ -42,6 +46,88 @@ def _refine_color(color: str): return color +def _draw_task(task: BaseOperator, parent_graph: graphviz.Digraph, states_by_task_id: Dict[str, str]) -> None: + """Draw a single task on the given parent_graph""" + if states_by_task_id: + state = states_by_task_id.get(task.task_id, State.NONE) + color = State.color_fg(state) + fill_color = State.color(state) + else: + color = task.ui_fgcolor + fill_color = task.ui_color + + parent_graph.node( + task.task_id, + _attributes={ + "label": task.label, + "shape": "rectangle", + "style": "filled,rounded", + "color": _refine_color(color), + "fillcolor": _refine_color(fill_color), + }, + ) + + +def _draw_task_group( + task_group: TaskGroup, parent_graph: graphviz.Digraph, states_by_task_id: Dict[str, str] +) -> None: + """Draw the given task_group and its children on the given parent_graph""" + # Draw joins + if task_group.upstream_group_ids or task_group.upstream_task_ids: + parent_graph.node( + task_group.upstream_join_id, + _attributes={ + "label": "", + "shape": "circle", + "style": "filled,rounded", + "color": _refine_color(task_group.ui_fgcolor), + "fillcolor": _refine_color(task_group.ui_color), + "width": "0.2", + "height": "0.2", + }, + ) + + if task_group.downstream_group_ids or task_group.downstream_task_ids: + parent_graph.node( + task_group.downstream_join_id, + _attributes={ + "label": "", + "shape": "circle", + "style": "filled,rounded", + "color": _refine_color(task_group.ui_fgcolor), + "fillcolor": _refine_color(task_group.ui_color), + "width": "0.2", + "height": "0.2", + }, + ) + + # Draw children + for child in sorted(task_group.children.values(), key=lambda t: t.label): + _draw_nodes(child, parent_graph, states_by_task_id) + + +def _draw_nodes(node: TaskMixin, parent_graph: graphviz.Digraph, states_by_task_id: Dict[str, str]) -> None: + """Draw the node and its children on the given parent_graph recursively.""" + if isinstance(node, BaseOperator): + _draw_task(node, parent_graph, states_by_task_id) + else: + # Draw TaskGroup + if node.is_root: + # No need to draw background for root TaskGroup. + _draw_task_group(node, parent_graph, states_by_task_id) + else: + with parent_graph.subgraph(name=f"cluster_{node.group_id}") as sub: + sub.attr( + shape="rectangle", + style="filled", + color=_refine_color(node.ui_fgcolor), + # Partially transparent CornflowerBlue + fillcolor="#6495ed7f", + label=node.label, + ) + _draw_task_group(node, sub, states_by_task_id) + + def render_dag(dag: DAG, tis: Optional[List[TaskInstance]] = None) -> graphviz.Digraph: """ Renders the DAG object to the DOT object. 
@@ -66,30 +152,10 @@ def render_dag(dag: DAG, tis: Optional[List[TaskInstance]] = None) -> graphviz.D states_by_task_id = None if tis is not None: states_by_task_id = {ti.task_id: ti.state for ti in tis} - for task in dag.tasks: - node_attrs = { - "shape": "rectangle", - "style": "filled,rounded", - } - if states_by_task_id is None: - node_attrs.update( - { - "color": _refine_color(task.ui_fgcolor), - "fillcolor": _refine_color(task.ui_color), - } - ) - else: - state = states_by_task_id.get(task.task_id, State.NONE) - node_attrs.update( - { - "color": State.color_fg(state), - "fillcolor": State.color(state), - } - ) - dot.node( - task.task_id, - _attributes=node_attrs, - ) - for downstream_task_id in task.downstream_task_ids: - dot.edge(task.task_id, downstream_task_id) + + _draw_nodes(dag.task_group, dot, states_by_task_id) + + for edge in dag_edges(dag): + dot.edge(edge["source_id"], edge["target_id"]) + return dot diff --git a/airflow/utils/file.py b/airflow/utils/file.py index 553c506696e5b..03343cd9097c5 100644 --- a/airflow/utils/file.py +++ b/airflow/utils/file.py @@ -20,10 +20,13 @@ import re import zipfile from pathlib import Path -from typing import Dict, Generator, List, Optional, Pattern +from typing import TYPE_CHECKING, Dict, Generator, List, Optional, Pattern, Union from airflow.configuration import conf +if TYPE_CHECKING: + import pathlib + log = logging.getLogger(__name__) @@ -130,7 +133,7 @@ def find_path_from_directory(base_dir_path: str, ignore_file_name: str) -> Gener def list_py_file_paths( - directory: str, + directory: Union[str, "pathlib.Path"], safe_mode: bool = conf.getboolean('core', 'DAG_DISCOVERY_SAFE_MODE', fallback=True), include_examples: Optional[bool] = None, include_smart_sensor: Optional[bool] = conf.getboolean('smart_sensor', 'use_smart_sensor'), @@ -158,7 +161,7 @@ def list_py_file_paths( if directory is None: file_paths = [] elif os.path.isfile(directory): - file_paths = [directory] + file_paths = [str(directory)] elif os.path.isdir(directory): find_dag_file_paths(directory, file_paths, safe_mode) if include_examples: @@ -174,9 +177,9 @@ def list_py_file_paths( return file_paths -def find_dag_file_paths(directory: str, file_paths: list, safe_mode: bool): +def find_dag_file_paths(directory: Union[str, "pathlib.Path"], file_paths: list, safe_mode: bool): """Finds file paths of all DAG files.""" - for file_path in find_path_from_directory(directory, ".airflowignore"): + for file_path in find_path_from_directory(str(directory), ".airflowignore"): try: if not os.path.isfile(file_path): continue diff --git a/airflow/utils/helpers.py b/airflow/utils/helpers.py index 69ac5a07818d4..7fce1774a321a 100644 --- a/airflow/utils/helpers.py +++ b/airflow/utils/helpers.py @@ -24,6 +24,7 @@ from typing import Any, Callable, Dict, Generator, Iterable, List, Optional, TypeVar from urllib import parse +from flask import url_for from jinja2 import Template from airflow.configuration import conf @@ -213,4 +214,5 @@ def build_airflow_url_with_query(query: Dict[str, Any]) -> str: 'http://0.0.0.0:8000/base/graph?dag_id=my-task&root=&execution_date=2020-10-27T10%3A59%3A25.615587 """ view = conf.get('webserver', 'dag_default_view').lower() - return f"/{view}?{parse.urlencode(query)}" + url = url_for(f"Airflow.{view}") + return f"{url}?{parse.urlencode(query)}" diff --git a/airflow/utils/json.py b/airflow/utils/json.py index 45dda757afb02..8e22408adf123 100644 --- a/airflow/utils/json.py +++ b/airflow/utils/json.py @@ -66,7 +66,7 @@ def _default(obj): obj, (np.float_, 
np.float16, np.float32, np.float64, np.complex_, np.complex64, np.complex128) ): return float(obj) - elif k8s is not None and isinstance(obj, k8s.V1Pod): + elif k8s is not None and isinstance(obj, (k8s.V1Pod, k8s.V1ResourceRequirements)): from airflow.kubernetes.pod_generator import PodGenerator return PodGenerator.serialize_pod(obj) diff --git a/airflow/utils/session.py b/airflow/utils/session.py index 4001a0f454387..f8b9bcd071eb4 100644 --- a/airflow/utils/session.py +++ b/airflow/utils/session.py @@ -40,6 +40,18 @@ def create_session(): RT = TypeVar("RT") # pylint: disable=invalid-name +def find_session_idx(func: Callable[..., RT]) -> int: + """Find session index in function call parameter.""" + func_params = signature(func).parameters + try: + # func_params is an ordered dict -- this is the "recommended" way of getting the position + session_args_idx = tuple(func_params).index("session") + except ValueError: + raise ValueError(f"Function {func.__qualname__} has no `session` argument") from None + + return session_args_idx + + def provide_session(func: Callable[..., RT]) -> Callable[..., RT]: """ Function decorator that provides a session if it isn't provided. @@ -47,14 +59,7 @@ def provide_session(func: Callable[..., RT]) -> Callable[..., RT]: database transaction, you pass it to the function, if not this wrapper will create one and close it for you. """ - func_params = signature(func).parameters - try: - # func_params is an ordered dict -- this is the "recommended" way of getting the position - session_args_idx = tuple(func_params).index("session") - except ValueError: - raise ValueError(f"Function {func.__qualname__} has no `session` argument") from None - # We don't need this anymore -- ensure we don't keep a reference to it by mistake - del func_params + session_args_idx = find_session_idx(func) @wraps(func) def wrapper(*args, **kwargs) -> RT: diff --git a/airflow/utils/state.py b/airflow/utils/state.py index 681cbc5cb7fee..d5300e11b6a9f 100644 --- a/airflow/utils/state.py +++ b/airflow/utils/state.py @@ -57,6 +57,7 @@ class State: NONE, SCHEDULED, SENSING, + REMOVED, ) dag_states = ( diff --git a/airflow/utils/timezone.py b/airflow/utils/timezone.py index d302cbe1a7c6c..09736e5b4736d 100644 --- a/airflow/utils/timezone.py +++ b/airflow/utils/timezone.py @@ -56,7 +56,7 @@ def utcnow() -> dt.datetime: :return: """ # pendulum utcnow() is not used as that sets a TimezoneInfo object - # instead of a Timezone. This is not pickable and also creates issues + # instead of a Timezone. This is not picklable and also creates issues # when using replace() result = dt.datetime.utcnow() result = result.replace(tzinfo=utc) @@ -71,7 +71,7 @@ def utc_epoch() -> dt.datetime: :return: """ # pendulum utcnow() is not used as that sets a TimezoneInfo object - # instead of a Timezone. This is not pickable and also creates issues + # instead of a Timezone. This is not picklable and also creates issues # when using replace() result = dt.datetime(1970, 1, 1) result = result.replace(tzinfo=utc) diff --git a/airflow/utils/yaml.py b/airflow/utils/yaml.py new file mode 100644 index 0000000000000..e3be61c315014 --- /dev/null +++ b/airflow/utils/yaml.py @@ -0,0 +1,76 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +"""Use libyaml for YAML dump/load operations where possible. + +If libyaml is available we will use it -- it is significantly faster. + +This module delegates all other properties to the yaml module, so it can be used as: + +.. code-block:: python + import airflow.utils.yaml as yaml + +And then be used directly in place of the normal python module. +""" +import sys +from typing import TYPE_CHECKING, Any, BinaryIO, TextIO, Union, cast + +if TYPE_CHECKING: + from yaml.error import MarkedYAMLError # noqa + + +def safe_load(stream: Union[bytes, str, BinaryIO, TextIO]) -> Any: + """Like yaml.safe_load, but use the C libyaml for speed where we can""" + # delay import until use. + from yaml import load as orig + + try: + from yaml import CSafeLoader as SafeLoader + except ImportError: + from yaml import SafeLoader # type: ignore[no-redef] + + return orig(stream, SafeLoader) + + +def dump(data: Any, **kwargs) -> str: + """Like yaml.safe_dump, but use the C libyaml for speed where we can""" + # delay import until use. + from yaml import dump as orig + + try: + from yaml import CSafeDumper as SafeDumper + except ImportError: + from yaml import SafeDumper # type: ignore[no-redef] + + return cast(str, orig(data, Dumper=SafeDumper, **kwargs)) + + +def __getattr__(name): + # Delegate anything else to the yaml module + import yaml + + if name == "FullLoader": + # Try to use CFullLoader by default + return getattr(yaml, "CFullLoader", yaml.FullLoader) + + return getattr(yaml, name) + + +if sys.version_info < (3, 7): + from pep562 import Pep562 + + Pep562(__name__) diff --git a/airflow/www/app.py b/airflow/www/app.py index 77b5a5175c38b..aa9b5ed7af5fe 100644 --- a/airflow/www/app.py +++ b/airflow/www/app.py @@ -16,6 +16,7 @@ # specific language governing permissions and limitations # under the License. # +import warnings from datetime import timedelta from typing import Optional @@ -79,7 +80,16 @@ def create_app(config=None, testing=False, app_name="Airflow"): flask_app.config['SESSION_COOKIE_HTTPONLY'] = True flask_app.config['SESSION_COOKIE_SECURE'] = conf.getboolean('webserver', 'COOKIE_SECURE') - flask_app.config['SESSION_COOKIE_SAMESITE'] = conf.get('webserver', 'COOKIE_SAMESITE') + + cookie_samesite_config = conf.get('webserver', 'COOKIE_SAMESITE') + if cookie_samesite_config == "": + warnings.warn( + "Old deprecated value found for `cookie_samesite` option in `[webserver]` section. " + "Using `Lax` instead. 
Change the value to `Lax` in airflow.cfg to remove this warning.", + DeprecationWarning, + ) + cookie_samesite_config = "Lax" + flask_app.config['SESSION_COOKIE_SAMESITE'] = cookie_samesite_config if config: flask_app.config.from_mapping(config) diff --git a/airflow/www/ask_for_recompile_assets_if_needed.sh b/airflow/www/ask_for_recompile_assets_if_needed.sh index ecd675f1e4998..0d8f7800db9eb 100755 --- a/airflow/www/ask_for_recompile_assets_if_needed.sh +++ b/airflow/www/ask_for_recompile_assets_if_needed.sh @@ -23,6 +23,7 @@ cd "$( dirname "${BASH_SOURCE[0]}" )" MD5SUM_FILE="static/dist/sum.md5" readonly MD5SUM_FILE +GREEN='\033[1;32m' YELLOW='\033[1;33m' NO_COLOR='\033[0m' @@ -36,5 +37,7 @@ if [[ ${old_md5sum} != "${md5sum}" ]]; then echo " ./airflow/www/compile_assets.sh" echo "" else - echo "No need to recompile www assets" + echo + echo -e "${GREEN}No need for www assets recompilation.${NO_COLOR}" + echo fi diff --git a/airflow/www/extensions/init_views.py b/airflow/www/extensions/init_views.py index f9736e6825473..0dcf8c7f0a072 100644 --- a/airflow/www/extensions/init_views.py +++ b/airflow/www/extensions/init_views.py @@ -121,13 +121,8 @@ def init_plugins(app): appbuilder.add_view_no_menu(view["view"]) for menu_link in sorted(plugins_manager.flask_appbuilder_menu_links, key=lambda x: x["name"]): - log.debug("Adding menu link %s", menu_link["name"]) - appbuilder.add_link( - menu_link["name"], - href=menu_link["href"], - category=menu_link["category"], - category_icon=menu_link["category_icon"], - ) + log.debug("Adding menu link %s to %s", menu_link["name"], menu_link["href"]) + appbuilder.add_link(**menu_link) for blue_print in plugins_manager.flask_blueprints: log.debug("Adding blueprint %s:%s", blue_print["name"], blue_print["blueprint"].import_name) diff --git a/airflow/www/package.json b/airflow/www/package.json index 5bf6530f7fab9..730f1423be0d0 100644 --- a/airflow/www/package.json +++ b/airflow/www/package.json @@ -68,11 +68,9 @@ "datatables.net-bs": "^1.10.23", "eonasdan-bootstrap-datetimepicker": "^4.17.47", "jquery": ">=3.4.0", - "js-yaml": "^3.14.0", - "lodash": "^4.17.20", "moment-timezone": "^0.5.28", "nvd3": "^1.8.6", - "redoc": "^2.0.0-rc.30", + "redoc": "^2.0.0-rc.48", "url-search-params-polyfill": "^8.1.0" } } diff --git a/airflow/www/security.py b/airflow/www/security.py index 09af167acba0b..a6db620b50737 100644 --- a/airflow/www/security.py +++ b/airflow/www/security.py @@ -17,7 +17,8 @@ # under the License. # -from typing import Optional, Sequence, Set, Tuple +import warnings +from typing import Dict, Optional, Sequence, Set, Tuple from flask import current_app, g from flask_appbuilder.security.sqla import models as sqla_models @@ -174,16 +175,34 @@ def __init__(self, appbuilder): def init_role(self, role_name, perms): """ Initialize the role with the permissions and related view-menus. - :param role_name: :param perms: :return: """ - role = self.find_role(role_name) - if not role: - role = self.add_role(role_name) + warnings.warn( + "`init_role` has been deprecated. 
Please use `bulk_sync_roles` instead.", + DeprecationWarning, + stacklevel=2, + ) + self.bulk_sync_roles([{'role': role_name, 'perms': perms}]) - self.add_permissions(role, set(perms)) + def bulk_sync_roles(self, roles): + """Sync the provided roles and permissions.""" + existing_roles = self._get_all_roles_with_permissions() + pvs = self._get_all_non_dag_permissionviews() + + for config in roles: + role_name = config['role'] + perms = config['perms'] + role = existing_roles.get(role_name) or self.add_role(role_name) + + for perm_name, view_name in perms: + perm_view = pvs.get((perm_name, view_name)) or self.add_permission_view_menu( + perm_name, view_name + ) + + if perm_view not in role.permissions: + self.add_permission_role(role, perm_view) def add_permissions(self, role, perms): """Adds resource permissions to a given role.""" @@ -457,33 +476,62 @@ def add_homepage_access_to_custom_roles(self): self.get_session.commit() - def get_all_permissions(self): + def get_all_permissions(self) -> Set[Tuple[str, str]]: """Returns all permissions as a set of tuples with the perm name and view menu name""" - perms = set() - for permission_view in self.get_session.query(self.permissionview_model).all(): - if permission_view.permission and permission_view.view_menu: - perms.add((permission_view.permission.name, permission_view.view_menu.name)) + return set( + self.get_session.query(self.permissionview_model) + .join(self.permission_model) + .join(self.viewmenu_model) + .with_entities(self.permission_model.name, self.viewmenu_model.name) + .all() + ) - return perms + def _get_all_non_dag_permissionviews(self) -> Dict[Tuple[str, str], PermissionView]: + """ + Returns a dict with a key of (perm name, view menu name) and value of perm view + with all perm views except those that are for specific DAGs. + """ + return { + (perm_name, viewmodel_name): viewmodel + for perm_name, viewmodel_name, viewmodel in ( + self.get_session.query(self.permissionview_model) + .join(self.permission_model) + .join(self.viewmenu_model) + .filter(~self.viewmenu_model.name.like(f"{permissions.RESOURCE_DAG_PREFIX}%")) + .with_entities( + self.permission_model.name, self.viewmenu_model.name, self.permissionview_model + ) + .all() + ) + } + + def _get_all_roles_with_permissions(self) -> Dict[str, Role]: + """Returns a dict with a key of role name and value of role with eagerly loaded permissions""" + return { + r.name: r + for r in ( + self.get_session.query(self.role_model).options(joinedload(self.role_model.permissions)).all() + ) + } - @provide_session - def create_dag_specific_permissions(self, session=None): + def create_dag_specific_permissions(self) -> None: """ Creates 'can_read' and 'can_edit' permissions for all active and paused DAGs. :return: None. 
""" perms = self.get_all_permissions() - dag_models = ( - session.query(models.DagModel) + rows = ( + self.get_session.query(models.DagModel.dag_id) .filter(or_(models.DagModel.is_active, models.DagModel.is_paused)) .all() ) - for dag in dag_models: + for row in rows: + dag_id = row[0] for perm_name in self.DAG_PERMS: - dag_resource_name = self.prefixed_dag_id(dag.dag_id) - if dag_resource_name and perm_name and (dag_resource_name, perm_name) not in perms: + dag_resource_name = self.prefixed_dag_id(dag_id) + if (perm_name, dag_resource_name) not in perms: self._merge_perm(perm_name, dag_resource_name) def update_admin_perm_view(self): @@ -525,11 +573,9 @@ def sync_roles(self): self.create_perm_vm_for_all_dag() self.create_dag_specific_permissions() - # Create default user role. - for config in self.ROLE_CONFIGS: - role = config['role'] - perms = config['perms'] - self.init_role(role, perms) + # Sync the default roles (Admin, Viewer, User, Op, public) with related permissions + self.bulk_sync_roles(self.ROLE_CONFIGS) + self.add_homepage_access_to_custom_roles() # init existing roles, the rest role could be created through UI. self.update_admin_perm_view() diff --git a/airflow/www/static/js/task-instances.js b/airflow/www/static/js/task-instances.js index 1f71fa1c09fbe..be9df244e9676 100644 --- a/airflow/www/static/js/task-instances.js +++ b/airflow/www/static/js/task-instances.js @@ -35,7 +35,7 @@ function generateTooltipDateTimes(startDate, endDate, dagTZ) { } const tzFormat = 'z (Z)'; - const localTZ = moment.defaultZone.name; + const localTZ = moment.defaultZone.name.toUpperCase(); startDate = moment.utc(startDate); endDate = moment.utc(endDate); dagTZ = dagTZ.toUpperCase(); diff --git a/airflow/www/templates/airflow/graph.html b/airflow/www/templates/airflow/graph.html index 844ce38e99bd6..44b0e0152ac3a 100644 --- a/airflow/www/templates/airflow/graph.html +++ b/airflow/www/templates/airflow/graph.html @@ -235,7 +235,7 @@ }); d3.selectAll("g.node").on("mouseout", function (d) { - d3.select(this).selectAll("rect").style("stroke", null); + d3.select(this).selectAll("rect,circle").style("stroke", null); highlight_nodes(g.predecessors(d), null, initialStrokeWidth) highlight_nodes(g.successors(d), null, initialStrokeWidth) d3.selectAll("g.node") @@ -244,6 +244,7 @@ .style("stroke-width", initialStrokeWidth); d3.selectAll("g.edgePath") .style("opacity", 1); + localStorage.removeItem(focused_group_key(dag_id)); }); updateNodesStates(task_instances); setUpZoomSupport(); @@ -417,6 +418,8 @@ .style("opacity", 1); d3.selectAll('.js-state-legend-item') .style("background-color", null); + + localStorage.removeItem(focused_group_key(dag_id)); } function focusState(state, node, color){ @@ -591,6 +594,22 @@ return children } + // Return list of all task group ids in the given task group including the given group itself. 
+ function get_all_group_ids(group) { + var children = [group.id]; + + for (const [key, val] of Object.entries(group.children)) { + if (val.children != undefined) { + // group + const sub_group_children = get_all_group_ids(val) + for (const id of sub_group_children) { + children.push(id); + } + } + } + return children; + } + // Return the state for the node based on the state of its taskinstance or that of its // children if it's a group node @@ -626,6 +645,16 @@ return "no_status" } + // Returns the key used to store expanded task group ids in localStorage + function expanded_groups_key(dag_id) { + return `expanded_groups_${dag_id}`; + } + + // Returns the key used to store the focused task group id in localStorage + function focused_group_key(dag_id) { + return `focused_group_${dag_id}`; + } + // Focus the graph on the expanded/collapsed node function focus_group(node_id) { if(node_id != null && zoom != null) { @@ -644,7 +673,7 @@ // Is there a better way to get node_width and node_height ? const [node_width, node_height] = [rect[0][0].attributes.width.value, rect[0][0].attributes.height.value]; - // Calculate zoom scale to fill most of the canvas with the the node/cluster in focus. + // Calculate zoom scale to fill most of the canvas with the node/cluster in focus. const scale = Math.min( Math.min(width / node_width, height / node_height), 1.5, // cap zoom level to 1.5 so nodes are not too large @@ -668,11 +697,13 @@ .style("opacity", 0.2).duration(duration) } }); + + localStorage.setItem(focused_group_key(dag_id), node_id); } } // Expands a group node - function expand_group(node_id, node) { + function expand_group(node_id, node, focus=true) { node.children.forEach(function (val) { // Set children nodes g.setNode(val.id, val.value) @@ -706,17 +737,22 @@ }) draw() - focus_group(node_id) + + if (focus) { + focus_group(node_id); + } + + save_expanded_group(node_id) } // Remove the node with this node_id from g. function remove_node(node_id) { - if(g.hasNode(node_id)) { + if (g.hasNode(node_id)) { node = g.node(node_id) if(node.children != undefined) { // If the child is an expanded group node, remove children too. node.children.forEach(function (child) { - remove_node(child.id) + remove_node(child.id); }) } } @@ -745,10 +781,77 @@ draw() focus_group(node_id) + + remove_expanded_group(node_id, node); } - expand_group(null, nodes) + function get_saved_groups(dag_id) { + // expanded_groups is a Set + try { + var expanded_groups = new Set(JSON.parse(localStorage.getItem(expanded_groups_key(dag_id)))); + } catch { + var expanded_groups = new Set(); + } + + return expanded_groups; + } + + // Clean up invalid group_ids from saved_group_ids (e.g. due to DAG changes) + function prune_invalid_saved_group_ids() { + // All the group_ids in the whole DAG + const all_group_ids = new Set(get_all_group_ids(nodes)); + var expanded_groups = get_saved_groups(dag_id); + expanded_groups = Array.from(expanded_groups).filter(group_id => all_group_ids.has(group_id)); + localStorage.setItem(expanded_groups_key(dag_id), JSON.stringify(expanded_groups)); + } + + // Remember the expanded groups in local storage so that it can be used to restore the expanded state + // of task groups. 
+ function save_expanded_group(node_id) { + // expanded_groups is a Set + var expanded_groups = get_saved_groups(dag_id); + expanded_groups.add(node_id) + localStorage.setItem(expanded_groups_key(dag_id), JSON.stringify(Array.from(expanded_groups))); + } + + // Remove the node_id from the expanded state + function remove_expanded_group(node_id, node) { + var expanded_groups = get_saved_groups(dag_id); + const child_group_ids = get_all_group_ids(node); + child_group_ids.forEach(child_id => expanded_groups.delete(child_id)); + localStorage.setItem(expanded_groups_key(dag_id), JSON.stringify(Array.from(expanded_groups))); + } + + // Restore previously expanded task groups + function expand_saved_groups(expanded_groups, node) { + if (node.children == undefined) { + return; + } + + node.children.forEach(function (child_node) { + if(expanded_groups.has(child_node.id)) { + expand_group(child_node.id, g.node(child_node.id), false); + + expand_saved_groups(expanded_groups, child_node); + } + }); + } + + prune_invalid_saved_group_ids(); + const focus_node_id = localStorage.getItem(focused_group_key(dag_id)); + const expanded_groups = get_saved_groups(dag_id); + + // Always expand the root node + expand_group(null, nodes); + + // Expand the nodes that were previously expanded + expand_saved_groups(expanded_groups, nodes); + + // Restore focus (if available) + if(g.hasNode(focus_node_id)) { + focus_group(focus_node_id); + } - initRefresh(); + initRefresh(); {% endblock %} diff --git a/airflow/www/utils.py b/airflow/www/utils.py index 265a12f7904b6..ad53436f38d1d 100644 --- a/airflow/www/utils.py +++ b/airflow/www/utils.py @@ -321,11 +321,13 @@ def render(obj, lexer): return out -def wrapped_markdown(s, css_class=None): +def wrapped_markdown(s, css_class='rich_doc'): """Convert a Markdown string to HTML.""" if s is None: return None + s = '\n'.join(line.lstrip() for line in s.split('\n')) + + return Markup(f'
<div class="{css_class}" >' + markdown.markdown(s, extensions=['tables']) + "</div>
") diff --git a/airflow/www/views.py b/airflow/www/views.py index 4560197a93a74..a38b9cc3b6c24 100644 --- a/airflow/www/views.py +++ b/airflow/www/views.py @@ -34,7 +34,6 @@ import lazy_object_proxy import nvd3 import sqlalchemy as sqla -import yaml from flask import ( Markup, Response, @@ -66,6 +65,7 @@ from wtforms.validators import InputRequired import airflow +import airflow.utils.yaml as yaml from airflow import models, plugins_manager, settings from airflow.api.common.experimental.mark_tasks import ( set_dag_run_state_to_failed, @@ -111,6 +111,14 @@ FILTER_STATUS_COOKIE = 'dag_status_filter' +def truncate_task_duration(task_duration): + """ + Cast the task_duration to an int was for optimization for large/huge dags if task_duration > 10s + otherwise we keep it as a float with 3dp + """ + return int(task_duration) if task_duration > 10.0 else round(task_duration, 3) + + def get_safe_url(url): """Given a user-supplied URL, ensure it points to our web server""" valid_schemes = ['http', 'https', ''] @@ -121,7 +129,13 @@ def get_safe_url(url): parsed = urlparse(url) + # If the url contains semicolon, redirect it to homepage to avoid + # potential XSS. (Similar to https://github.com/python/cpython/pull/24297/files (bpo-42967)) + if ';' in unquote(url): + return url_for('Airflow.index') + query = parse_qsl(parsed.query, keep_blank_values=True) + url = parsed._replace(query=urlencode(query)).geturl() if parsed.scheme in valid_schemes and parsed.netloc in valid_netlocs: @@ -1206,7 +1220,7 @@ def task(self): # Color coding the special attributes that are code special_attrs_rendered = {} for attr_name in wwwutils.get_attr_renderer(): - if hasattr(task, attr_name): + if getattr(task, attr_name, None) is not None: source = getattr(task, attr_name) special_attrs_rendered[attr_name] = wwwutils.get_attr_renderer()[attr_name](source) @@ -1363,6 +1377,7 @@ def run(self): ) return redirect(origin) + executor.job_id = "manual" executor.start() executor.queue_task_instance( ti, @@ -1921,7 +1936,7 @@ def encode_ti(task_instance: Optional[models.TaskInstance]) -> Optional[List]: # round to seconds to reduce payload size task_instance_data[2] = int(task_instance.start_date.timestamp()) if task_instance.duration is not None: - task_instance_data[3] = int(task_instance.duration) + task_instance_data[3] = truncate_task_duration(task_instance.duration) return task_instance_data @@ -2969,6 +2984,11 @@ class PluginView(AirflowBaseView): ] @expose('/plugin') + @auth.has_access( + [ + (permissions.ACTION_CAN_READ, permissions.RESOURCE_PLUGIN), + ] + ) def list(self): """List loaded plugins.""" plugins_manager.ensure_plugins_loaded() @@ -3288,9 +3308,8 @@ class DagRunModelView(AirflowModelView): 'start_date', 'end_date', 'external_trigger', - 'conf', ] - edit_columns = ['state', 'dag_id', 'execution_date', 'run_id', 'conf'] + edit_columns = ['state', 'dag_id', 'execution_date', 'start_date', 'end_date', 'run_id', 'conf'] base_order = ('execution_date', 'desc') @@ -3567,6 +3586,7 @@ class TaskInstanceModelView(AirflowModelView): 'operator', 'start_date', 'end_date', + 'queued_dttm', ] edit_columns = [ @@ -3641,6 +3661,7 @@ def action_clear(self, task_instances, session=None): return redirect(self.get_redirect()) except Exception: # noqa pylint: disable=broad-except flash('Failed to clear task instances', 'error') + return None @provide_session def set_task_instance_state(self, tis, target_state, session=None): @@ -3728,7 +3749,7 @@ class DagModelView(AirflowModelView): list_columns = [ 'dag_id', 'is_paused', - 
'last_scheduler_run', + 'last_parsed_time', 'last_expired', 'scheduler_lock', 'fileloc', diff --git a/airflow/www/yarn.lock b/airflow/www/yarn.lock index 8625cd5149bcf..9573141915fbc 100644 --- a/airflow/www/yarn.lock +++ b/airflow/www/yarn.lock @@ -96,13 +96,6 @@ dependencies: "@babel/types" "^7.8.3" -"@babel/helper-module-imports@^7.0.0": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.10.4.tgz#4c5c54be04bd31670a7382797d75b9fa2e5b5620" - integrity sha512-nEQJHqYavI217oD9+s5MUBzk6x1IlvoS9WTPfgG43CbMEeStE0v+r+TucWdx8KFGowPGvyOkDT9+7DHedIDnVw== - dependencies: - "@babel/types" "^7.10.4" - "@babel/helper-module-imports@^7.8.3": version "7.8.3" resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.8.3.tgz#7fe39589b39c016331b6b8c3f441e8f0b1419498" @@ -200,13 +193,20 @@ resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.9.4.tgz#68a35e6b0319bbc014465be43828300113f2f2e8" integrity sha512-bC49otXX6N0/VYhgOMh4gnP26E9xnDZK3TmbNpxYzzz9BQLBosQwfyOe9/cXUU3txYhTzLCbcqd5c8y/OmCjHA== -"@babel/runtime@^7.0.0", "@babel/runtime@^7.7.2", "@babel/runtime@^7.9.2": +"@babel/runtime@^7.0.0": version "7.10.5" resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.10.5.tgz#303d8bd440ecd5a491eae6117fd3367698674c5c" integrity sha512-otddXKhdNn7d0ptoFRHtMLa8LqDxLYwTjB4nYgM1yy5N6gU/MUf8zqyyLltCH3yAVitBzmwK4us+DD0l/MauAg== dependencies: regenerator-runtime "^0.13.4" +"@babel/runtime@^7.12.5": + version "7.13.9" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.13.9.tgz#97dbe2116e2630c489f22e0656decd60aaa1fcee" + integrity sha512-aY2kU+xgJ3dJ1eU6FMB9EH8dIe8dmusF1xEku52joLvw6eAFN0AI+WxCLDnpev2LEejWBAy2sBvBOBAjI3zmvA== + dependencies: + regenerator-runtime "^0.13.4" + "@babel/runtime@^7.8.7": version "7.9.2" resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.9.2.tgz#d90df0583a3a252f09aaa619665367bae518db06" @@ -280,52 +280,10 @@ lodash "^4.17.13" to-fast-properties "^2.0.0" -"@emotion/babel-utils@^0.6.4": - version "0.6.10" - resolved "https://registry.yarnpkg.com/@emotion/babel-utils/-/babel-utils-0.6.10.tgz#83dbf3dfa933fae9fc566e54fbb45f14674c6ccc" - integrity sha512-/fnkM/LTEp3jKe++T0KyTszVGWNKPNOUJfjNKLO17BzQ6QPxgbg3whayom1Qr2oLFH3V92tDymU+dT5q676uow== - dependencies: - "@emotion/hash" "^0.6.6" - "@emotion/memoize" "^0.6.6" - "@emotion/serialize" "^0.9.1" - convert-source-map "^1.5.1" - find-root "^1.1.0" - source-map "^0.7.2" - -"@emotion/hash@^0.6.2", "@emotion/hash@^0.6.6": - version "0.6.6" - resolved "https://registry.yarnpkg.com/@emotion/hash/-/hash-0.6.6.tgz#62266c5f0eac6941fece302abad69f2ee7e25e44" - integrity sha512-ojhgxzUHZ7am3D2jHkMzPpsBAiB005GF5YU4ea+8DNPybMk01JJUM9V9YRlF/GE95tcOm8DxQvWA2jq19bGalQ== - -"@emotion/memoize@^0.6.1", "@emotion/memoize@^0.6.6": - version "0.6.6" - resolved "https://registry.yarnpkg.com/@emotion/memoize/-/memoize-0.6.6.tgz#004b98298d04c7ca3b4f50ca2035d4f60d2eed1b" - integrity sha512-h4t4jFjtm1YV7UirAFuSuFGyLa+NNxjdkq6DpFLANNQY5rHueFZHVY+8Cu1HYVP6DrheB0kv4m5xPjo7eKT7yQ== - -"@emotion/serialize@^0.9.1": - version "0.9.1" - resolved "https://registry.yarnpkg.com/@emotion/serialize/-/serialize-0.9.1.tgz#a494982a6920730dba6303eb018220a2b629c145" - integrity sha512-zTuAFtyPvCctHBEL8KZ5lJuwBanGSutFEncqLn/m9T1a6a93smBStK+bZzcNPgj4QS8Rkw9VTwJGhRIUVO8zsQ== - dependencies: - "@emotion/hash" "^0.6.6" - "@emotion/memoize" "^0.6.6" - "@emotion/unitless" "^0.6.7" - "@emotion/utils" "^0.8.2" - -"@emotion/stylis@^0.7.0": - version 
"0.7.1" - resolved "https://registry.yarnpkg.com/@emotion/stylis/-/stylis-0.7.1.tgz#50f63225e712d99e2b2b39c19c70fff023793ca5" - integrity sha512-/SLmSIkN13M//53TtNxgxo57mcJk/UJIDFRKwOiLIBEyBHEcipgR6hNMQ/59Sl4VjCJ0Z/3zeAZyvnSLPG/1HQ== - -"@emotion/unitless@^0.6.2", "@emotion/unitless@^0.6.7": - version "0.6.7" - resolved "https://registry.yarnpkg.com/@emotion/unitless/-/unitless-0.6.7.tgz#53e9f1892f725b194d5e6a1684a7b394df592397" - integrity sha512-Arj1hncvEVqQ2p7Ega08uHLr1JuRYBuO5cIvcA+WWEQ5+VmkOE3ZXzl04NbQxeQpWX78G7u6MqxKuNX3wvYZxg== - -"@emotion/utils@^0.8.2": - version "0.8.2" - resolved "https://registry.yarnpkg.com/@emotion/utils/-/utils-0.8.2.tgz#576ff7fb1230185b619a75d258cbc98f0867a8dc" - integrity sha512-rLu3wcBWH4P5q1CGoSSH/i9hrXs7SlbRLkoq9IGuoPYNGQvDJ3pt/wmOM+XgYjIDRMVIdkUWt0RsfzF50JfnCw== +"@exodus/schemasafe@^1.0.0-rc.2": + version "1.0.0-rc.3" + resolved "https://registry.yarnpkg.com/@exodus/schemasafe/-/schemasafe-1.0.0-rc.3.tgz#dda2fbf3dafa5ad8c63dadff7e01d3fdf4736025" + integrity sha512-GoXw0U2Qaa33m3eUcxuHnHpNvHjNlLo0gtV091XBpaRINaB4X6FGCG5XKxSFNFiPpugUDqNruHzaqpTdDm4AOg== "@nodelib/fs.scandir@2.1.3": version "2.1.3" @@ -355,6 +313,11 @@ dependencies: mkdirp "^1.0.4" +"@redocly/react-dropdown-aria@^2.0.11": + version "2.0.11" + resolved "https://registry.yarnpkg.com/@redocly/react-dropdown-aria/-/react-dropdown-aria-2.0.11.tgz#532b864b329237e646abe45d0f8edc923e77370a" + integrity sha512-rmuSC2JFFl4DkPDdGVrmffT9KcbG2AB5jvhxPIrOc1dO9mHRMUUftQY35KZlvWqqSSqVn+AM+J9dhiTo1ZqR8A== + "@stylelint/postcss-css-in-js@^0.37.1": version "0.37.2" resolved "https://registry.yarnpkg.com/@stylelint/postcss-css-in-js/-/postcss-css-in-js-0.37.2.tgz#7e5a84ad181f4234a2480803422a47b8749af3d2" @@ -631,11 +594,6 @@ resolved "https://registry.yarnpkg.com/@xtuc/long/-/long-4.2.2.tgz#d291c6a4e97989b5c61d9acf396ae4fe133a718d" integrity sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ== -abbrev@1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8" - integrity sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q== - acorn-jsx@^5.2.0: version "5.2.0" resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.2.0.tgz#4c66069173d6fdd68ed85239fc256226182b2ebe" @@ -704,11 +662,6 @@ ansi-regex@^2.0.0: resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-2.1.1.tgz#c3b33ab5ee360d86e0e628f0468ae7ef27d654df" integrity sha1-w7M6te42DYbg5ijwRorn7yfWVN8= -ansi-regex@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-3.0.0.tgz#ed0317c322064f79466c02966bddb605ab37d998" - integrity sha1-7QMXwyIGT3lGbAKWa922Bas32Zg= - ansi-regex@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-4.1.0.tgz#8b9f8f08cf1acb843756a839ca8c7e3168c51997" @@ -975,38 +928,6 @@ babel-plugin-css-modules-transform@^1.6.1: css-modules-require-hook "^4.0.6" mkdirp "^0.5.1" -babel-plugin-emotion@^9.2.11: - version "9.2.11" - resolved "https://registry.yarnpkg.com/babel-plugin-emotion/-/babel-plugin-emotion-9.2.11.tgz#319c005a9ee1d15bb447f59fe504c35fd5807728" - integrity sha512-dgCImifnOPPSeXod2znAmgc64NhaaOjGEHROR/M+lmStb3841yK1sgaDYAYMnlvWNz8GnpwIPN0VmNpbWYZ+VQ== - dependencies: - "@babel/helper-module-imports" "^7.0.0" - "@emotion/babel-utils" "^0.6.4" - "@emotion/hash" "^0.6.2" - "@emotion/memoize" "^0.6.1" - "@emotion/stylis" "^0.7.0" - babel-plugin-macros "^2.0.0" - babel-plugin-syntax-jsx "^6.18.0" 
- convert-source-map "^1.5.0" - find-root "^1.1.0" - mkdirp "^0.5.1" - source-map "^0.5.7" - touch "^2.0.1" - -babel-plugin-macros@^2.0.0: - version "2.8.0" - resolved "https://registry.yarnpkg.com/babel-plugin-macros/-/babel-plugin-macros-2.8.0.tgz#0f958a7cc6556b1e65344465d99111a1e5e10138" - integrity sha512-SEP5kJpfGYqYKpBrj5XU3ahw5p5GOHJ0U5ssOSQ/WBVdwkD2Dzlce95exQTs3jOVWPPKLBN2rlEWkCK7dSmLvg== - dependencies: - "@babel/runtime" "^7.7.2" - cosmiconfig "^6.0.0" - resolve "^1.12.0" - -babel-plugin-syntax-jsx@^6.18.0: - version "6.18.0" - resolved "https://registry.yarnpkg.com/babel-plugin-syntax-jsx/-/babel-plugin-syntax-jsx-6.18.0.tgz#0af32a9a6e13ca7a3fd5069e62d7b0f58d0d8946" - integrity sha1-CvMqmm4Tyno/1QaeYtew9Y0NiUY= - babel-polyfill@^6.26.0: version "6.26.0" resolved "https://registry.yarnpkg.com/babel-polyfill/-/babel-polyfill-6.26.0.tgz#379937abc67d7895970adc621f284cd966cf2153" @@ -1606,15 +1527,6 @@ clipboard@^2.0.0: select "^1.1.2" tiny-emitter "^2.0.0" -cliui@^4.0.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/cliui/-/cliui-4.1.0.tgz#348422dbe82d800b3022eef4f6ac10bf2e4d1b49" - integrity sha512-4FG+RSG9DL7uEwRUZXZn3SS34DiDPfzP0VOiEwtUWlE+AR2EIg+hSyvrIgUUfhdgR/UkAeW2QHgeP+hWrXs7jQ== - dependencies: - string-width "^2.1.1" - strip-ansi "^4.0.0" - wrap-ansi "^2.0.0" - cliui@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/cliui/-/cliui-5.0.0.tgz#deefcfdb2e800784aa34f46fa08e06851c7bbbc5" @@ -1633,6 +1545,15 @@ cliui@^6.0.0: strip-ansi "^6.0.0" wrap-ansi "^6.2.0" +cliui@^7.0.2: + version "7.0.4" + resolved "https://registry.yarnpkg.com/cliui/-/cliui-7.0.4.tgz#a0265ee655476fc807aea9df3df8df7783808b4f" + integrity sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ== + dependencies: + string-width "^4.2.0" + strip-ansi "^6.0.0" + wrap-ansi "^7.0.0" + clone-regexp@^2.1.0: version "2.2.0" resolved "https://registry.yarnpkg.com/clone-regexp/-/clone-regexp-2.2.0.tgz#7d65e00885cd8796405c35a737e7a86b7429e36f" @@ -1664,11 +1585,6 @@ code-error-fragment@0.0.230: resolved "https://registry.yarnpkg.com/code-error-fragment/-/code-error-fragment-0.0.230.tgz#d736d75c832445342eca1d1fedbf17d9618b14d7" integrity sha512-cadkfKp6932H8UkhzE/gcUqhRMNf8jHzkAN7+5Myabswaghu4xABTgPHDCjW+dBAJxj/SpkTYokpzDqY4pCzQw== -code-point-at@^1.0.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77" - integrity sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c= - collapse-white-space@^1.0.2: version "1.0.6" resolved "https://registry.yarnpkg.com/collapse-white-space/-/collapse-white-space-1.0.6.tgz#e63629c0016665792060dbbeb79c42239d2c5287" @@ -1777,7 +1693,7 @@ contains-path@^0.1.0: resolved "https://registry.yarnpkg.com/contains-path/-/contains-path-0.1.0.tgz#fe8cf184ff6670b6baef01a9d4861a5cbec4120a" integrity sha1-/ozxhP9mcLa67wGp1IYaXL7EEgo= -convert-source-map@^1.5.0, convert-source-map@^1.5.1, convert-source-map@^1.7.0: +convert-source-map@^1.5.1, convert-source-map@^1.7.0: version "1.7.0" resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.7.0.tgz#17a2cb882d7f77d3490585e2ce6c524424a3a442" integrity sha512-4FJkXzKXEDB1snCFZlLP4gpC3JILicCpGbzG9f9G7tGqGCzETQ2hWPrcinA9oU4wtf2biUaEH5065UnMeR33oA== @@ -1862,19 +1778,6 @@ create-ecdh@^4.0.0: bn.js "^4.1.0" elliptic "^6.0.0" -create-emotion@^9.2.12: - version "9.2.12" - resolved "https://registry.yarnpkg.com/create-emotion/-/create-emotion-9.2.12.tgz#0fc8e7f92c4f8bb924b0fef6781f66b1d07cb26f" - integrity 
sha512-P57uOF9NL2y98Xrbl2OuiDQUZ30GVmASsv5fbsjF4Hlraip2kyAvMm+2PoYUvFFw03Fhgtxk3RqZSm2/qHL9hA== - dependencies: - "@emotion/hash" "^0.6.2" - "@emotion/memoize" "^0.6.1" - "@emotion/stylis" "^0.7.0" - "@emotion/unitless" "^0.6.2" - csstype "^2.5.2" - stylis "^3.5.0" - stylis-rule-sheet "^0.0.10" - create-hash@^1.1.0, create-hash@^1.1.2, create-hash@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/create-hash/-/create-hash-1.2.0.tgz#889078af11a63756bcfb59bd221996be3a9ef196" @@ -1898,7 +1801,7 @@ create-hmac@^1.1.0, create-hmac@^1.1.4, create-hmac@^1.1.7: safe-buffer "^5.0.1" sha.js "^2.4.8" -cross-spawn@^6.0.0, cross-spawn@^6.0.5: +cross-spawn@^6.0.5: version "6.0.5" resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4" integrity sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ== @@ -2115,11 +2018,6 @@ csso@^4.0.2: dependencies: css-tree "1.0.0-alpha.39" -csstype@^2.5.2: - version "2.6.11" - resolved "https://registry.yarnpkg.com/csstype/-/csstype-2.6.11.tgz#452f4d024149ecf260a852b025e36562a253ffc5" - integrity sha512-l8YyEC9NBkSm783PFTvh0FmJy7s5pFKrDp49ZL7zBGX3fWkO+N4EEyan1qqp8cwPLDcD0OSdyY6hAMoxp34JFw== - cyclist@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/cyclist/-/cyclist-1.0.1.tgz#596e9698fd0c80e12038c2b82d6eb1b35b6224d9" @@ -2596,7 +2494,7 @@ domhandler@^3.0.0: dependencies: domelementtype "^2.0.1" -dompurify@^2.0.8: +dompurify@^2.0.12: version "2.2.6" resolved "https://registry.yarnpkg.com/dompurify/-/dompurify-2.2.6.tgz#54945dc5c0b45ce5ae228705777e8e59d7b2edc4" integrity sha512-7b7ZArhhH0SP6W2R9cqK6RjaU82FZ2UPM7RO8qN1b1wyvC/NY1FNWcX1Pu00fFOAnzEORtwXe4bPaClg6pUybQ== @@ -2678,14 +2576,6 @@ emojis-list@^3.0.0: resolved "https://registry.yarnpkg.com/emojis-list/-/emojis-list-3.0.0.tgz#5570662046ad29e2e916e71aae260abdff4f6a78" integrity sha512-/kyM18EfinwXZbno9FyUGeFh87KC8HRQBQGildHZbEuRyWFOmv1U10o9BBp8XVZDVNNuQKyIGIu5ZYAAXJ0V2Q== -emotion@^9.2.6: - version "9.2.12" - resolved "https://registry.yarnpkg.com/emotion/-/emotion-9.2.12.tgz#53925aaa005614e65c6e43db8243c843574d1ea9" - integrity sha512-hcx7jppaI8VoXxIWEhxpDW7I+B4kq9RNzQLmsrF6LY8BGKqe2N+gFAQr0EfuFucFlPs2A9HM4+xNj4NeqEWIOQ== - dependencies: - babel-plugin-emotion "^9.2.11" - create-emotion "^9.2.12" - end-of-stream@^1.0.0, end-of-stream@^1.1.0: version "1.4.4" resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" @@ -2831,7 +2721,7 @@ escalade@^3.0.1: resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.0.2.tgz#6a580d70edb87880f22b4c91d0d56078df6962c4" integrity sha512-gPYAU37hYCUhW5euPeR+Y74F7BL+IBsV93j5cvGriSaD1aG6MGsqsV1yamRdrWrb2j3aiZvb0X+UBOWpx3JWtQ== -escalade@^3.1.0: +escalade@^3.1.0, escalade@^3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== @@ -3040,10 +2930,10 @@ esutils@^2.0.2: resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== -eventemitter3@^4.0.0: - version "4.0.4" - resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.4.tgz#b5463ace635a083d018bdc7c917b4c5f10a85384" - integrity 
sha512-rlaVLnVxtxvoyLsQQFBx53YmXHDxRIzzTLbdfxqi4yocpSjAxXwkU0cScM5JgSKMqEhrZpnvQ2D9gjylR0AimQ== +eventemitter3@^4.0.4: + version "4.0.7" + resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" + integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== events@^3.0.0: version "3.2.0" @@ -3058,19 +2948,6 @@ evp_bytestokey@^1.0.0, evp_bytestokey@^1.0.3: md5.js "^1.3.4" safe-buffer "^5.1.1" -execa@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/execa/-/execa-1.0.0.tgz#c6236a5bb4df6d6f15e88e7f017798216749ddd8" - integrity sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA== - dependencies: - cross-spawn "^6.0.0" - get-stream "^4.0.0" - is-stream "^1.1.0" - npm-run-path "^2.0.0" - p-finally "^1.0.0" - signal-exit "^3.0.0" - strip-eof "^1.0.0" - execall@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/execall/-/execall-2.0.0.tgz#16a06b5fe5099df7d00be5d9c06eecded1663b45" @@ -3253,11 +3130,6 @@ find-cache-dir@^3.3.1: make-dir "^3.0.2" pkg-dir "^4.1.0" -find-root@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/find-root/-/find-root-1.1.0.tgz#abcfc8ba76f708c42a97b3d685b7e9450bfb9ce4" - integrity sha512-NKfW6bec6GfKc0SGx1e07QZY9PE99u0Bft/0rzSD5k3sO/vwkVUpDUKVm5Gpp5Ue3YfShPFTX2070tDs5kB9Ng== - find-up@^2.0.0, find-up@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/find-up/-/find-up-2.1.0.tgz#45d1b7e506c717ddd482775a2b77920a3c0c57a7" @@ -3408,12 +3280,7 @@ gensync@^1.0.0-beta.1: resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.1.tgz#58f4361ff987e5ff6e1e7a210827aa371eaac269" integrity sha512-r8EC6NO1sngH/zdD9fiRDLdcgnbayXah+mLgManTaIZJqEC1MZstmnox8KpnI2/fxQwrp5OpCOYWLp4rBl4Jcg== -get-caller-file@^1.0.1: - version "1.0.3" - resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-1.0.3.tgz#f978fa4c90d1dfe7ff2d6beda2a515e713bdcf4a" - integrity sha512-3t6rVToeoZfYSGd8YoLFR2DJkiQrIiUrGcjvFX2mDw3bn6k2OtwHN0TNCLbBO+w8qTvimhDkv+LSscbJY1vE6w== - -get-caller-file@^2.0.1: +get-caller-file@^2.0.1, get-caller-file@^2.0.5: version "2.0.5" resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== @@ -3423,13 +3290,6 @@ get-stdin@^8.0.0: resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-8.0.0.tgz#cbad6a73feb75f6eeb22ba9e01f89aa28aa97a53" integrity sha512-sY22aA6xchAzprjyqmSEQv4UbAAzRN0L2dQB0NlN5acTTK9Don6nhoc3eAbUnpZiCANAMfd/+40kVdKfFygohg== -get-stream@^4.0.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5" - integrity sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w== - dependencies: - pump "^3.0.0" - get-value@^2.0.3, get-value@^2.0.6: version "2.0.6" resolved "https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28" @@ -3914,11 +3774,6 @@ invariant@^2.2.2: dependencies: loose-envify "^1.0.0" -invert-kv@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/invert-kv/-/invert-kv-2.0.0.tgz#7393f5afa59ec9ff5f67a27620d11c226e3eec02" - integrity sha512-wPVv/y/QQ/Uiirj/vh3oP+1Ww+AWehmi1g5fFWGPF6IpCBCDVrhgHRMvrLfdYcwDh3QJbGXDW4JAuzxElLSqKA== - is-absolute-url@^2.0.0: version "2.1.0" resolved 
"https://registry.yarnpkg.com/is-absolute-url/-/is-absolute-url-2.1.0.tgz#50530dfb84fcc9aa7dbe7852e83a37b93b9f2aa6" @@ -4088,13 +3943,6 @@ is-finite@^1.0.0: dependencies: number-is-nan "^1.0.0" -is-fullwidth-code-point@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz#ef9e31386f031a7f0d643af82fde50c457ef00cb" - integrity sha1-754xOG8DGn8NZDr4L95QxFfvAMs= - dependencies: - number-is-nan "^1.0.0" - is-fullwidth-code-point@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" @@ -4213,11 +4061,6 @@ is-resolvable@^1.0.0: resolved "https://registry.yarnpkg.com/is-resolvable/-/is-resolvable-1.1.0.tgz#fb18f87ce1feb925169c9a407c19318a3206ed88" integrity sha512-qgDYXFSR5WvEfuS5dMj6oTMEbrrSaM0CrFk2Yiq/gXnBvD9pMa2jGXxyhGLfvhZpuMZe18CJpFxAt3CRs42NMg== -is-stream@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" - integrity sha1-EtSj3U5o4Lec6428hBc66A2RykQ= - is-string@^1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/is-string/-/is-string-1.0.5.tgz#40493ed198ef3ff477b8c7f92f644ec82a5cd3a6" @@ -4299,7 +4142,7 @@ js-tokens@^3.0.2: resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-3.0.2.tgz#9866df395102130e38f7f996bceb65443209c25b" integrity sha1-mGbfOVECEw449/mWvOtlRDIJwls= -js-yaml@^3.12.1, js-yaml@^3.13.1, js-yaml@^3.14.0: +js-yaml@^3.12.1, js-yaml@^3.13.1: version "3.14.0" resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.0.tgz#a7a34170f26a21bb162424d8adacb4113a69e482" integrity sha512-/4IbIeHcD9VMHFqDR/gQ7EdZdLimOvW2DdcxFjdyyZ9NsbS+ccrXqVWDtab/lRl5AlUqmpBx8EhPaWR+OtY17A== @@ -4307,6 +4150,14 @@ js-yaml@^3.12.1, js-yaml@^3.13.1, js-yaml@^3.14.0: argparse "^1.0.7" esprima "^4.0.0" +js-yaml@^3.14.0: + version "3.14.1" + resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.1.tgz#dae812fdb3825fa306609a8717383c50c36a0537" + integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== + dependencies: + argparse "^1.0.7" + esprima "^4.0.0" + jsesc@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-1.3.0.tgz#46c3fec8c1892b12b0833db9bc7622176dbab34b" @@ -4434,13 +4285,6 @@ last-call-webpack-plugin@^3.0.0: lodash "^4.17.5" webpack-sources "^1.1.0" -lcid@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/lcid/-/lcid-2.0.0.tgz#6ef5d2df60e52f82eb228a4c373e8d1f397253cf" - integrity sha512-avPEb8P8EGnwXKClwsNUgryVjllcRqtMYa49NTsbQagYuT1DcXnl1915oxWjoyGrXR6zH/Y0Zc96xWsPcoDKeA== - dependencies: - invert-kv "^2.0.0" - leven@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2" @@ -4624,13 +4468,6 @@ make-dir@^3.0.2: dependencies: semver "^6.0.0" -map-age-cleaner@^0.1.1: - version "0.1.3" - resolved "https://registry.yarnpkg.com/map-age-cleaner/-/map-age-cleaner-0.1.3.tgz#7d583a7306434c055fe474b0f45078e6e1b4b92a" - integrity sha512-bJzx6nMoP6PDLPBFmg7+xRKeFZvFboMrGlxmNj9ClvX53KrmvM5bXFXEWjbz4cz1AFn+jWJ9z/DJSz7hrs0w3w== - dependencies: - p-defer "^1.0.0" - map-cache@^0.2.2: version "0.2.2" resolved "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf" @@ -4706,15 +4543,6 @@ mdn-data@2.0.6: resolved 
"https://registry.yarnpkg.com/mdn-data/-/mdn-data-2.0.6.tgz#852dc60fcaa5daa2e8cf6c9189c440ed3e042978" integrity sha512-rQvjv71olwNHgiTbfPZFkJtjNMciWgswYeciZhtvWLO8bmX3TnhyA62I6sTWOyZssWHJJjY6/KiWwqQsWWsqOA== -mem@^4.0.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/mem/-/mem-4.3.0.tgz#461af497bc4ae09608cdb2e60eefb69bff744178" - integrity sha512-qX2bG48pTqYRVmDB37rn/6PT7LcR8T7oAX3bf99u1Tt1nzxYfxkgqDwUwolPlXweM0XzBOBFzSx4kfp7KP1s/w== - dependencies: - map-age-cleaner "^0.1.1" - mimic-fn "^2.0.0" - p-is-promise "^2.0.0" - memoize-one@~5.1.1: version "5.1.1" resolved "https://registry.yarnpkg.com/memoize-one/-/memoize-one-5.1.1.tgz#047b6e3199b508eaec03504de71229b8eb1d75c0" @@ -4807,11 +4635,6 @@ mime-types@^2.1.26: dependencies: mime-db "1.44.0" -mimic-fn@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" - integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== - min-indent@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/min-indent/-/min-indent-1.0.0.tgz#cfc45c37e9ec0d8f0a0ec3dd4ef7f7c3abe39256" @@ -4930,17 +4753,17 @@ mkdirp@^1.0.3, mkdirp@^1.0.4: resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e" integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw== -mobx-react-lite@^1.4.2: - version "1.5.2" - resolved "https://registry.yarnpkg.com/mobx-react-lite/-/mobx-react-lite-1.5.2.tgz#c4395b0568b9cb16f07669d8869cc4efa1b8656d" - integrity sha512-PyZmARqqWtpuQaAoHF5pKX7h6TKNLwq6vtovm4zZvG6sEbMRHHSqioGXSeQbpRmG8Kw8uln3q/W1yMO5IfL5Sg== +mobx-react-lite@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/mobx-react-lite/-/mobx-react-lite-3.2.0.tgz#331d7365a6b053378dfe9c087315b4e41c5df69f" + integrity sha512-q5+UHIqYCOpBoFm/PElDuOhbcatvTllgRp3M1s+Hp5j0Z6XNgDbgqxawJ0ZAUEyKM8X1zs70PCuhAIzX1f4Q/g== -mobx-react@6.1.5: - version "6.1.5" - resolved "https://registry.yarnpkg.com/mobx-react/-/mobx-react-6.1.5.tgz#66a6f67bfe845216abc05d3aea47ceec8e31e2dd" - integrity sha512-EfWoXmGE2CfozH4Xirb65+il1ynHFCmxBSUabMSf+511YfjVs6QRcCrHkiVw+Il8iWp1gIyfa9qKkUgbDA9/2w== +mobx-react@^7.0.5: + version "7.1.0" + resolved "https://registry.yarnpkg.com/mobx-react/-/mobx-react-7.1.0.tgz#d947cada3cfad294b4b6f692e969c242b9c16eaf" + integrity sha512-DxvA6VXmnZ+N9f/UTtolWtdRnAAQY2iHWTSPLktfpj8NKlXUe4dabBAjuXrBcZUM8GjLWnxD1ZEjssXq1M0RAw== dependencies: - mobx-react-lite "^1.4.2" + mobx-react-lite "^3.2.0" moment-locales-webpack-plugin@^1.2.0: version "1.2.0" @@ -5085,13 +4908,6 @@ node-releases@^1.1.61: resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-1.1.63.tgz#db6dbb388544c31e888216304e8fd170efee3ff5" integrity sha512-ukW3iCfQaoxJkSPN+iK7KznTeqDGVJatAEuXsJERYHa9tn/KaT5lBdIyxQjLEVTzSkyjJEuQ17/vaEjrOauDkg== -nopt@~1.0.10: - version "1.0.10" - resolved "https://registry.yarnpkg.com/nopt/-/nopt-1.0.10.tgz#6ddd21bd2a31417b92727dd585f8a6f37608ebee" - integrity sha1-bd0hvSoxQXuScn3Vhfim83YI6+4= - dependencies: - abbrev "1" - normalize-package-data@^2.3.2, normalize-package-data@^2.5.0: version "2.5.0" resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8" @@ -5139,13 +4955,6 @@ normalize-url@^3.0.0: resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-3.3.0.tgz#b2e1c4dc4f7c6d57743df733a4f5978d18650559" integrity 
sha512-U+JJi7duF1o+u2pynbp2zXDW2/PADgC30f0GsHZtRh+HOcXHnw137TrNlyxxRvWW5fjKd3bcLHPxofWuCjaeZg== -npm-run-path@^2.0.0: - version "2.0.2" - resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f" - integrity sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8= - dependencies: - path-key "^2.0.0" - nth-check@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-1.0.2.tgz#b2bd295c37e3dd58a3bf0700376663ba4d9cf05c" @@ -5168,50 +4977,51 @@ nvd3@^1.8.6: resolved "https://registry.yarnpkg.com/nvd3/-/nvd3-1.8.6.tgz#2d3eba74bf33363b5101ebf1d093c59a53ae73c4" integrity sha1-LT66dL8zNjtRAevx0JPFmlOuc8Q= -oas-kit-common@^1.0.7, oas-kit-common@^1.0.8: +oas-kit-common@^1.0.8: version "1.0.8" resolved "https://registry.yarnpkg.com/oas-kit-common/-/oas-kit-common-1.0.8.tgz#6d8cacf6e9097967a4c7ea8bcbcbd77018e1f535" integrity sha512-pJTS2+T0oGIwgjGpw7sIRU8RQMcUoKCDWFLdBqKB2BNmGpbBMH2sdqAaOXUg8OzonZHU0L7vfJu1mJFEiYDWOQ== dependencies: fast-safe-stringify "^2.0.7" -oas-linter@^3.1.0: - version "3.1.3" - resolved "https://registry.yarnpkg.com/oas-linter/-/oas-linter-3.1.3.tgz#1526b3da32a1bbf124d720f27fd4eb9971cebfff" - integrity sha512-jFWBHjSoqODGo7cKA/VWqqWSLbHNtnyCEpa2nMMS64SzCUbZDk63Oe7LqQZ2qJA0K2VRreYLt6cVkYy6MqNRDg== +oas-linter@^3.1.3: + version "3.2.1" + resolved "https://registry.yarnpkg.com/oas-linter/-/oas-linter-3.2.1.tgz#1a6d9117d146805b58e56df479861de0293b6e5b" + integrity sha512-e5G6bbq3Nrfxm+SDPR5AiZ6n2smVUmhLA1OgI2/Bl8e2ywfWsKw/yuqrwiXXiNHb1wdM/GyPMX6QjCGJODlaaA== dependencies: + "@exodus/schemasafe" "^1.0.0-rc.2" should "^13.2.1" - yaml "^1.8.3" + yaml "^1.10.0" -oas-resolver@^2.3.0: - version "2.4.1" - resolved "https://registry.yarnpkg.com/oas-resolver/-/oas-resolver-2.4.1.tgz#46948226f73e514ac6733f166cc559e800e4389b" - integrity sha512-rRmUv9mDTKPtsB2OGaoNMK4BC1Q/pL+tWRPKRjXJEBoLmfegJhecOZPBtIR0gKEVQb9iAA0MqulkgY43EiCFDg== +oas-resolver@^2.4.3: + version "2.5.4" + resolved "https://registry.yarnpkg.com/oas-resolver/-/oas-resolver-2.5.4.tgz#81fa1aaa7e2387ab2dba1045827e9d7b79822326" + integrity sha512-1vIj5Wkjmi+kZj5sFamt95LkuXoalmoKUohtaUQoCQZjLfPFaY8uZ7nw6IZaWuE6eLON2b6xrXhxD4hiTdYl0g== dependencies: node-fetch-h2 "^2.3.0" oas-kit-common "^1.0.8" - reftools "^1.1.3" - yaml "^1.8.3" - yargs "^15.3.1" + reftools "^1.1.8" + yaml "^1.10.0" + yargs "^16.1.1" -oas-schema-walker@^1.1.3: - version "1.1.4" - resolved "https://registry.yarnpkg.com/oas-schema-walker/-/oas-schema-walker-1.1.4.tgz#4b9d090c3622039741334d3e138510ff38197618" - integrity sha512-foVDDS0RJYMfhQEDh/WdBuCzydTcsCnGo9EeD8SpWq1uW10JXiz+8SfYVDA7LO87kjmlnTRZle/2gr5qxabaEA== +oas-schema-walker@^1.1.5: + version "1.1.5" + resolved "https://registry.yarnpkg.com/oas-schema-walker/-/oas-schema-walker-1.1.5.tgz#74c3cd47b70ff8e0b19adada14455b5d3ac38a22" + integrity sha512-2yucenq1a9YPmeNExoUa9Qwrt9RFkjqaMAA1X+U7sbb0AqBeTIdMHky9SQQ6iN94bO5NW0W4TRYXerG+BdAvAQ== -oas-validator@^3.4.0: - version "3.4.0" - resolved "https://registry.yarnpkg.com/oas-validator/-/oas-validator-3.4.0.tgz#7633b02e495af4a4e0224b249288b0928748476d" - integrity sha512-l/SxykuACi2U51osSsBXTxdsFc8Fw41xI7AsZkzgVgWJAzoEFaaNptt35WgY9C3757RUclsm6ye5GvSyYoozLQ== +oas-validator@^4.0.8: + version "4.0.8" + resolved "https://registry.yarnpkg.com/oas-validator/-/oas-validator-4.0.8.tgz#4f1a4d6bd9e030ad07db03fd7a7bc3a91aabcc7d" + integrity sha512-bIt8erTyclF7bkaySTtQ9sppqyVc+mAlPi7vPzCLVHJsL9nrivQjc/jHLX/o+eGbxHd6a6YBwuY/Vxa6wGsiuw== dependencies: ajv "^5.5.2" better-ajv-errors "^0.6.7" call-me-maybe "^1.0.1" 
- oas-kit-common "^1.0.7" - oas-linter "^3.1.0" - oas-resolver "^2.3.0" - oas-schema-walker "^1.1.3" - reftools "^1.1.0" + oas-kit-common "^1.0.8" + oas-linter "^3.1.3" + oas-resolver "^2.4.3" + oas-schema-walker "^1.1.5" + reftools "^1.1.5" should "^13.2.1" yaml "^1.8.3" @@ -5329,10 +5139,10 @@ ono@^4.0.11: dependencies: format-util "^1.0.3" -openapi-sampler@^1.0.0-beta.16: - version "1.0.0-beta.16" - resolved "https://registry.yarnpkg.com/openapi-sampler/-/openapi-sampler-1.0.0-beta.16.tgz#7813524d5b88d222efb772ceb5a809075d6d9174" - integrity sha512-05+GvwMagTY7GxoDQoWJfmAUFlxfebciiEzqKmu4iq6+MqBEn62AMUkn0CTxyKhnUGIaR2KXjTeslxIeJwVIOw== +openapi-sampler@^1.0.0-beta.18: + version "1.0.0-beta.18" + resolved "https://registry.yarnpkg.com/openapi-sampler/-/openapi-sampler-1.0.0-beta.18.tgz#9e0845616a669e048860625ea5c10d0f554f1b53" + integrity sha512-nG/0kvvSY5FbrU5A+Dbp1xTQN++7pKIh87/atryZlxrzDuok5Y6TCbpxO1jYqpUKLycE4ReKGHCywezngG6xtQ== dependencies: json-pointer "^0.6.0" @@ -5366,35 +5176,11 @@ os-homedir@^1.0.0: resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3" integrity sha1-/7xJiDNuDoM94MFox+8VISGqf7M= -os-locale@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-3.1.0.tgz#a802a6ee17f24c10483ab9935719cef4ed16bf1a" - integrity sha512-Z8l3R4wYWM40/52Z+S265okfFj8Kt2cC2MKY+xNi3kFs+XGI7WXu/I309QQQYbRW4ijiZ+yxs9pqEhJh0DqW3Q== - dependencies: - execa "^1.0.0" - lcid "^2.0.0" - mem "^4.0.0" - os-tmpdir@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" integrity sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ= -p-defer@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/p-defer/-/p-defer-1.0.0.tgz#9f6eb182f6c9aa8cd743004a7d4f96b196b0fb0c" - integrity sha1-n26xgvbJqozXQwBKfU+WsZaw+ww= - -p-finally@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae" - integrity sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4= - -p-is-promise@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/p-is-promise/-/p-is-promise-2.1.0.tgz#918cebaea248a62cf7ffab8e3bca8c5f882fc42e" - integrity sha512-Y3W0wlRPK8ZMRbNq97l4M5otioeA5lm1z7bkNkxCka8HSPjR0xRWmpCmc9utiaLP9Jb1eD8BgeIxTW4AIF45Pg== - p-limit@^1.1.0: version "1.3.0" resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-1.3.0.tgz#b86bd5f0c25690911c7590fcbfc2010d54b3ccb8" @@ -5569,7 +5355,7 @@ path-is-inside@^1.0.2: resolved "https://registry.yarnpkg.com/path-is-inside/-/path-is-inside-1.0.2.tgz#365417dede44430d1c11af61027facf074bdfc53" integrity sha1-NlQX3t5EQw0cEa9hAn+s8HS9/FM= -path-key@^2.0.0, path-key@^2.0.1: +path-key@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40" integrity sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A= @@ -5660,12 +5446,12 @@ pkg-dir@^4.1.0: dependencies: find-up "^4.0.0" -polished@^3.4.4: - version "3.6.5" - resolved "https://registry.yarnpkg.com/polished/-/polished-3.6.5.tgz#dbefdde64c675935ec55119fe2a2ab627ca82e9c" - integrity sha512-VwhC9MlhW7O5dg/z7k32dabcAFW1VI2+7fSe8cE/kXcfL7mVdoa5UxciYGW2sJU78ldDLT6+ROEKIZKFNTnUXQ== +polished@^3.6.5: + version "3.7.1" + resolved "https://registry.yarnpkg.com/polished/-/polished-3.7.1.tgz#d1addc87ee16eb5b413c6165eda37600cccb9c11" + integrity sha512-/QgHrNGYwIA4mwxJ/7FSvalUJsm7KNfnXiScVSEG2Xa5qxDeBn4nmdjN2pW00mkM2Tts64ktc47U8F7Ed1BRAA== dependencies: - 
"@babel/runtime" "^7.9.2" + "@babel/runtime" "^7.12.5" posix-character-classes@^0.1.0: version "0.1.1" @@ -6127,10 +5913,10 @@ prepend-http@^1.0.0: resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-1.0.4.tgz#d4f4562b0ce3696e41ac52d0e002e57a635dc6dc" integrity sha1-1PRWKwzjaW5BrFLQ4ALlemNdxtw= -prismjs@^1.19.0: - version "1.21.0" - resolved "https://registry.yarnpkg.com/prismjs/-/prismjs-1.21.0.tgz#36c086ec36b45319ec4218ee164c110f9fc015a3" - integrity sha512-uGdSIu1nk3kej2iZsLyDoJ7e9bnPzIgY0naW/HdknGj61zScaprVEVGHrPoXqI+M9sP0NDnTK2jpkvmldpuqDw== +prismjs@^1.22.0: + version "1.23.0" + resolved "https://registry.yarnpkg.com/prismjs/-/prismjs-1.23.0.tgz#d3b3967f7d72440690497652a9d40ff046067f33" + integrity sha512-c29LVsqOaLbBHuIbsTxaKENh1N2EQBOHaWv7gkHN4dgRbxSREqDnDbtFJYdpPauS4YCplMSNCABQ6Eeor69bAA== optionalDependencies: clipboard "^2.0.0" @@ -6268,22 +6054,15 @@ randomfill@^1.0.3: randombytes "^2.0.5" safe-buffer "^5.1.0" -react-dropdown-aria@^2.0.6: - version "2.0.6" - resolved "https://registry.yarnpkg.com/react-dropdown-aria/-/react-dropdown-aria-2.0.6.tgz#40cec5edd97a591d2f29e8c05aa8c53230e2aa6e" - integrity sha512-/9NlFopChlSKmuGL2P6S3oDwl9ddXcbNLnd1a7POov4f5/oGtSc3qBFmS4wH5xmLJe/38MhPOKF3e2q3laRi1g== - dependencies: - emotion "^9.2.6" - react-is@^16.8.1: version "16.13.1" resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4" integrity sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ== -react-tabs@^3.1.0: - version "3.1.1" - resolved "https://registry.yarnpkg.com/react-tabs/-/react-tabs-3.1.1.tgz#b363a239f76046bb2158875a1e5921b11064052f" - integrity sha512-HpySC29NN1BkzBAnOC+ajfzPbTaVZcSWzMSjk56uAhPC/rBGtli8lTysR4CfPAyEE/hfweIzagOIoJ7nu80yng== +react-tabs@^3.1.1: + version "3.2.0" + resolved "https://registry.yarnpkg.com/react-tabs/-/react-tabs-3.2.0.tgz#0fd8d595ef26d3684da876c27a3cc90392dffb40" + integrity sha512-q7oNapNRoYTQq8gDhApXwdBheuuN5qQ4YvUaQUAkb6OSSttJulBAvxJ0FS6W5uojvMxbbIZKu1f2I+GXISoLjw== dependencies: clsx "^1.1.0" prop-types "^15.5.0" @@ -6379,40 +6158,40 @@ redent@^3.0.0: indent-string "^4.0.0" strip-indent "^3.0.0" -redoc@^2.0.0-rc.30: - version "2.0.0-rc.33" - resolved "https://registry.yarnpkg.com/redoc/-/redoc-2.0.0-rc.33.tgz#df43f533bb0cc283cc209d69d2a91404a24bd8d1" - integrity sha512-1KLdnOU1aBIddgNBcEIU29h3VqXoTT493gT5hjyHg6sE91x9qEVWPYM2A+eETQFz5ygTwkBCp6xZDxVs+HIA9w== +redoc@^2.0.0-rc.48: + version "2.0.0-rc.48" + resolved "https://registry.yarnpkg.com/redoc/-/redoc-2.0.0-rc.48.tgz#5303cff67af5cba8a2b48dc1347a9854d45be835" + integrity sha512-shArJWhNG2gQ0XKxW8WcfG8peNOtxbZ86CqxgrR9P7MnE5ESAo559CH/PSlezePeVLpcC0C9tcimOfSN5MaAvA== dependencies: + "@redocly/react-dropdown-aria" "^2.0.11" "@types/node" "^13.11.1" classnames "^2.2.6" decko "^1.2.0" - dompurify "^2.0.8" - eventemitter3 "^4.0.0" + dompurify "^2.0.12" + eventemitter3 "^4.0.4" json-pointer "^0.6.0" json-schema-ref-parser "^6.1.0" lunr "2.3.8" mark.js "^8.11.1" marked "^0.7.0" memoize-one "~5.1.1" - mobx-react "6.1.5" - openapi-sampler "^1.0.0-beta.16" + mobx-react "^7.0.5" + openapi-sampler "^1.0.0-beta.18" perfect-scrollbar "^1.4.0" - polished "^3.4.4" - prismjs "^1.19.0" + polished "^3.6.5" + prismjs "^1.22.0" prop-types "^15.7.2" - react-dropdown-aria "^2.0.6" - react-tabs "^3.1.0" - slugify "^1.4.0" + react-tabs "^3.1.1" + slugify "^1.4.4" stickyfill "^1.1.1" - swagger2openapi "^5.3.4" - tslib "^1.11.1" + swagger2openapi "^6.2.1" + tslib "^2.0.0" url-template "^2.0.8" 
-reftools@^1.1.0, reftools@^1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/reftools/-/reftools-1.1.3.tgz#f430d11677d81ae97b8dbb3836713bb52b1cd0a7" - integrity sha512-JTlhKmSzqE/gt5Z5RX25yZDq67MlRRtTz1gLy/NY+wPDx1e1vEJsv1PoNrpKZBwitcEMXs2k7pzmbmraP1ZMAQ== +reftools@^1.1.5, reftools@^1.1.8: + version "1.1.8" + resolved "https://registry.yarnpkg.com/reftools/-/reftools-1.1.8.tgz#cc08fd67eb913d779fd330657d010cc080c7d643" + integrity sha512-Yvz9NH8uFHzD/AXX82Li1GdAP6FzDBxEZw+njerNBBQv/XHihqsWAjNfXtaq4QD2l4TEZVnp4UbktdYSegAM3g== regenerate@^1.2.1: version "1.4.0" @@ -6551,11 +6330,6 @@ require-directory@^2.1.1: resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I= -require-main-filename@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-1.0.1.tgz#97f717b69d48784f5f526a6c5aa8ffdda055a4d1" - integrity sha1-l/cXtp1IeE9fUmpsWqj/3aBVpNE= - require-main-filename@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b" @@ -6863,11 +6637,6 @@ should@^13.2.1: should-type-adaptors "^1.0.1" should-util "^1.0.0" -signal-exit@^3.0.0: - version "3.0.3" - resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.3.tgz#a1410c2edd8f077b08b4e253c8eacfcaf057461c" - integrity sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA== - signal-exit@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d" @@ -6899,10 +6668,10 @@ slice-ansi@^2.1.0: astral-regex "^1.0.0" is-fullwidth-code-point "^2.0.0" -slugify@^1.4.0: - version "1.4.4" - resolved "https://registry.yarnpkg.com/slugify/-/slugify-1.4.4.tgz#2f032ffa52b1e1ca2a27737c1ce47baae3d0883a" - integrity sha512-N2+9NJ8JzfRMh6PQLrBeDEnVDQZSytE/W4BTC4fNNPmO90Uu58uNwSlIJSs+lmPgWsaAF79WLhVPe5tuy7spjw== +slugify@^1.4.4: + version "1.4.7" + resolved "https://registry.yarnpkg.com/slugify/-/slugify-1.4.7.tgz#e42359d505afd84a44513280868e31202a79a628" + integrity sha512-tf+h5W1IrjNm/9rKKj0JU2MDMruiopx0jjVA5zCdBtcGjfp0+c5rHw/zADLC3IeKlGHtVbHtpfzvYA0OYT+HKg== snapdragon-node@^2.0.1: version "2.1.1" @@ -6987,7 +6756,7 @@ source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.1: resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== -source-map@^0.7.2, source-map@^0.7.3: +source-map@^0.7.3: version "0.7.3" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.7.3.tgz#5302f8169031735226544092e64981f751750383" integrity sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ== @@ -7109,23 +6878,6 @@ strict-uri-encode@^1.0.0: resolved "https://registry.yarnpkg.com/strict-uri-encode/-/strict-uri-encode-1.1.0.tgz#279b225df1d582b1f54e65addd4352e18faa0713" integrity sha1-J5siXfHVgrH1TmWt3UNS4Y+qBxM= -string-width@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-1.0.2.tgz#118bdf5b8cdc51a2a7e70d211e07e2b0b9b107d3" - integrity sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M= - dependencies: - code-point-at "^1.0.0" - is-fullwidth-code-point "^1.0.0" - strip-ansi "^3.0.0" - -string-width@^2.0.0, string-width@^2.1.1: - 
version "2.1.1" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e" - integrity sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw== - dependencies: - is-fullwidth-code-point "^2.0.0" - strip-ansi "^4.0.0" - string-width@^3.0.0, string-width@^3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961" @@ -7201,20 +6953,13 @@ stringify-entities@^3.0.0: is-decimal "^1.0.2" is-hexadecimal "^1.0.0" -strip-ansi@^3.0.0, strip-ansi@^3.0.1: +strip-ansi@^3.0.0: version "3.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-3.0.1.tgz#6a385fb8853d952d5ff05d0e8aaf94278dc63dcf" integrity sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8= dependencies: ansi-regex "^2.0.0" -strip-ansi@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-4.0.0.tgz#a8479022eb1ac368a871389b635262c505ee368f" - integrity sha1-qEeQIusaw2iocTibY1JixQXuNo8= - dependencies: - ansi-regex "^3.0.0" - strip-ansi@^5.0.0, strip-ansi@^5.1.0, strip-ansi@^5.2.0: version "5.2.0" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-5.2.0.tgz#8c9a536feb6afc962bdfa5b104a5091c1ad9c0ae" @@ -7239,11 +6984,6 @@ strip-comments@^2.0.1: resolved "https://registry.yarnpkg.com/strip-comments/-/strip-comments-2.0.1.tgz#4ad11c3fbcac177a67a40ac224ca339ca1c1ba9b" integrity sha512-ZprKx+bBLXv067WTCALv8SSz5l2+XhpYCsVtSqlMnkAXMWDq+/ekVbl1ghqP9rUHTzv6sm/DwCOiYutU/yp1fw== -strip-eof@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf" - integrity sha1-u0P/VZim6wXYm1n80SnJgzE2Br8= - strip-indent@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/strip-indent/-/strip-indent-3.0.0.tgz#c32e1cee940b6b3432c771bc2c54bcce73cd3001" @@ -7344,16 +7084,6 @@ stylelint@^13.6.1: v8-compile-cache "^2.1.1" write-file-atomic "^3.0.3" -stylis-rule-sheet@^0.0.10: - version "0.0.10" - resolved "https://registry.yarnpkg.com/stylis-rule-sheet/-/stylis-rule-sheet-0.0.10.tgz#44e64a2b076643f4b52e5ff71efc04d8c3c4a430" - integrity sha512-nTbZoaqoBnmK+ptANthb10ZRZOGC+EmTLLUxeYIuHNkEKcmKgXX1XWKkUBT2Ac4es3NybooPe0SmvKdhKJZAuw== - -stylis@^3.5.0: - version "3.5.4" - resolved "https://registry.yarnpkg.com/stylis/-/stylis-3.5.4.tgz#f665f25f5e299cf3d64654ab949a57c768b73fbe" - integrity sha512-8/3pSmthWM7lsPBKv7NXkzn2Uc9W7NotcwGNpJaa3k7WMM1XDCA4MgT5k/8BIexd5ydZdboXtU90XH9Ec4Bv/Q== - sugarss@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/sugarss/-/sugarss-2.0.0.tgz#ddd76e0124b297d40bf3cca31c8b22ecb43bc61d" @@ -7411,22 +7141,22 @@ svgo@^1.0.0: unquote "~1.1.1" util.promisify "~1.0.0" -swagger2openapi@^5.3.4: - version "5.4.0" - resolved "https://registry.yarnpkg.com/swagger2openapi/-/swagger2openapi-5.4.0.tgz#1e1c8909f7966b1f455bf1b66490093ac1c0029c" - integrity sha512-f5QqfXawiVijhjMtYqWZ55ESHPZFqrPC8L9idhIiuSX8O2qsa1i4MVGtCM3TQF+Smzr/6WfT/7zBuzG3aTgPAA== +swagger2openapi@^6.2.1: + version "6.2.3" + resolved "https://registry.yarnpkg.com/swagger2openapi/-/swagger2openapi-6.2.3.tgz#4a8059f89d851aee4c9ab178f9b7190debd904e2" + integrity sha512-cUUktzLpK69UwpMbcTzjMw2ns9RZChfxh56AHv6+hTx3StPOX2foZjPgds3HlJcINbxosYYBn/D3cG8nwcCWwQ== dependencies: better-ajv-errors "^0.6.1" call-me-maybe "^1.0.1" node-fetch-h2 "^2.3.0" node-readfiles "^0.2.0" - oas-kit-common "^1.0.7" - oas-resolver "^2.3.0" - oas-schema-walker "^1.1.3" - oas-validator "^3.4.0" - 
reftools "^1.1.0" + oas-kit-common "^1.0.8" + oas-resolver "^2.4.3" + oas-schema-walker "^1.1.5" + oas-validator "^4.0.8" + reftools "^1.1.5" yaml "^1.8.3" - yargs "^12.0.5" + yargs "^15.3.1" table@^5.2.3, table@^5.4.6: version "5.4.6" @@ -7556,13 +7286,6 @@ to-regex@^3.0.1, to-regex@^3.0.2: regex-not "^1.0.2" safe-regex "^1.1.0" -touch@^2.0.1: - version "2.0.2" - resolved "https://registry.yarnpkg.com/touch/-/touch-2.0.2.tgz#ca0b2a3ae3211246a61b16ba9e6cbf1596287164" - integrity sha512-qjNtvsFXTRq7IuMLweVgFxmEuQ6gLbRs2jQxL80TtZ31dEKWYIxRXquij6w6VimyDek5hD3PytljHmEtAs2u0A== - dependencies: - nopt "~1.0.10" - trim-newlines@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/trim-newlines/-/trim-newlines-3.0.0.tgz#79726304a6a898aa8373427298d54c2ee8b1cb30" @@ -7598,11 +7321,16 @@ tsconfig-paths@^3.9.0: minimist "^1.2.0" strip-bom "^3.0.0" -tslib@^1.11.1, tslib@^1.9.0: +tslib@^1.9.0: version "1.13.0" resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.13.0.tgz#c881e13cc7015894ed914862d276436fa9a47043" integrity sha512-i/6DQjL8Xf3be4K/E6Wgpekn5Qasl1usyw++dAA35Ue5orEn65VIxOA+YvNNl9HV3qv70T7CNwjODHZrLwvd1Q== +tslib@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.1.0.tgz#da60860f1c2ecaa5703ab7d39bc05b6bf988b97a" + integrity sha512-hcVC3wYEziELGGmEEXue7D75zbwIIVUMWAVbHItGPx0ziyXxrOMQx4rQEVEV45Ut/1IotuEvwqPopzIOkDMf0A== + tty-browserify@0.0.0: version "0.0.0" resolved "https://registry.yarnpkg.com/tty-browserify/-/tty-browserify-0.0.0.tgz#a157ba402da24e9bf957f9aa69d524eed42901a6" @@ -8008,14 +7736,6 @@ worker-farm@^1.7.0: dependencies: errno "~0.1.7" -wrap-ansi@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-2.1.0.tgz#d8fc3d284dd05794fe84973caecdd1cf824fdd85" - integrity sha1-2Pw9KE3QV5T+hJc8rs3Rz4JP3YU= - dependencies: - string-width "^1.0.1" - strip-ansi "^3.0.1" - wrap-ansi@^5.1.0: version "5.1.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-5.1.0.tgz#1fd1f67235d5b6d0fee781056001bfb694c03b09" @@ -8034,6 +7754,15 @@ wrap-ansi@^6.2.0: string-width "^4.1.0" strip-ansi "^6.0.0" +wrap-ansi@^7.0.0: + version "7.0.0" + resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" + integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== + dependencies: + ansi-styles "^4.0.0" + string-width "^4.1.0" + strip-ansi "^6.0.0" + wrappy@1: version "1.0.2" resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" @@ -8061,11 +7790,16 @@ xtend@^4.0.0, xtend@^4.0.1, xtend@~4.0.1: resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== -"y18n@^3.2.1 || ^4.0.0", y18n@^4.0.0: +y18n@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/y18n/-/y18n-4.0.0.tgz#95ef94f85ecc81d007c264e190a120f0a3c8566b" integrity sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w== +y18n@^5.0.5: + version "5.0.5" + resolved "https://registry.yarnpkg.com/y18n/-/y18n-5.0.5.tgz#8769ec08d03b1ea2df2500acef561743bbb9ab18" + integrity sha512-hsRUr4FFrvhhRH12wOdfs38Gy7k2FFzB9qgN9v3aLykRq0dRcdcpz5C9FxdS2NuhOrI/628b/KSTJ3rwHysYSg== + yallist@^3.0.2: version "3.1.1" resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" @@ -8076,6 +7810,11 @@ 
yallist@^4.0.0: resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== +yaml@^1.10.0, yaml@^1.8.3: + version "1.10.0" + resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.10.0.tgz#3b593add944876077d4d683fee01081bd9fff31e" + integrity sha512-yr2icI4glYaNG+KWONODapy2/jDdMSDnrONSjblABjD9B4Z5LgiircSt8m8sRZFNi08kG9Sm0uSHtEmP3zaEGg== + yaml@^1.7.2: version "1.8.3" resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.8.3.tgz#2f420fca58b68ce3a332d0ca64be1d191dd3f87a" @@ -8083,19 +7822,6 @@ yaml@^1.7.2: dependencies: "@babel/runtime" "^7.8.7" -yaml@^1.8.3: - version "1.10.0" - resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.10.0.tgz#3b593add944876077d4d683fee01081bd9fff31e" - integrity sha512-yr2icI4glYaNG+KWONODapy2/jDdMSDnrONSjblABjD9B4Z5LgiircSt8m8sRZFNi08kG9Sm0uSHtEmP3zaEGg== - -yargs-parser@^11.1.1: - version "11.1.1" - resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-11.1.1.tgz#879a0865973bca9f6bab5cbdf3b1c67ec7d3bcf4" - integrity sha512-C6kB/WJDiaxONLJQnF8ccx9SEeoTTLek8RVbaOIsrAUS8VrBEXfmeSnCZxygc+XC2sNMBIwOOnfcxiynjHsVSQ== - dependencies: - camelcase "^5.0.0" - decamelize "^1.2.0" - yargs-parser@^13.1.2: version "13.1.2" resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-13.1.2.tgz#130f09702ebaeef2650d54ce6e3e5706f7a4fb38" @@ -8112,23 +7838,10 @@ yargs-parser@^18.1.2, yargs-parser@^18.1.3: camelcase "^5.0.0" decamelize "^1.2.0" -yargs@^12.0.5: - version "12.0.5" - resolved "https://registry.yarnpkg.com/yargs/-/yargs-12.0.5.tgz#05f5997b609647b64f66b81e3b4b10a368e7ad13" - integrity sha512-Lhz8TLaYnxq/2ObqHDql8dX8CJi97oHxrjUcYtzKbbykPtVW9WB+poxI+NM2UIzsMgNCZTIf0AQwsjK5yMAqZw== - dependencies: - cliui "^4.0.0" - decamelize "^1.2.0" - find-up "^3.0.0" - get-caller-file "^1.0.1" - os-locale "^3.0.0" - require-directory "^2.1.1" - require-main-filename "^1.0.1" - set-blocking "^2.0.0" - string-width "^2.0.0" - which-module "^2.0.0" - y18n "^3.2.1 || ^4.0.0" - yargs-parser "^11.1.1" +yargs-parser@^20.2.2: + version "20.2.6" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.6.tgz#69f920addf61aafc0b8b89002f5d66e28f2d8b20" + integrity sha512-AP1+fQIWSM/sMiET8fyayjx/J+JmTPt2Mr0FkrgqB4todtfa53sOsrSAcIrJRD5XS20bKUwaDIuMkWKCEiQLKA== yargs@^13.3.2: version "13.3.2" @@ -8162,3 +7875,16 @@ yargs@^15.3.1: which-module "^2.0.0" y18n "^4.0.0" yargs-parser "^18.1.2" + +yargs@^16.1.1: + version "16.2.0" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-16.2.0.tgz#1c82bf0f6b6a66eafce7ef30e376f49a12477f66" + integrity sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw== + dependencies: + cliui "^7.0.2" + escalade "^3.1.1" + get-caller-file "^2.0.5" + require-directory "^2.1.1" + string-width "^4.2.0" + y18n "^5.0.5" + yargs-parser "^20.2.2" diff --git a/breeze b/breeze index 820418454a8bb..df24b145e4eb1 100755 --- a/breeze +++ b/breeze @@ -18,10 +18,25 @@ # under the License. 
set -euo pipefail + AIRFLOW_SOURCES="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" + +if [[ ${BREEZE_REDIRECT=} == "" ]]; then + set +u + mkdir -p "${AIRFLOW_SOURCES}"/logs + export BREEZE_REDIRECT="true" + if [[ "$(uname)" == "Darwin" ]]; then + exec script -q "${AIRFLOW_SOURCES}"/logs/breeze.out "$(command -v bash)" -c "$(printf "%q " "${0}" "${@}")" + else + exec script --return --quiet "${AIRFLOW_SOURCES}"/logs/breeze.out -c "$(printf "%q " "${0}" "${@}")" + fi + set -u +fi + export AIRFLOW_SOURCES readonly AIRFLOW_SOURCES + # Bash arrays need to be defined outside of functions unfortunately :( # Because on Mac OS Bash 3.4 defining arrays inside functions does not work # Array with extra options for Docker compose @@ -56,7 +71,7 @@ export EXTRA_STATIC_CHECK_OPTIONS # SUPPRESS_ASCIIART_FILE # MAX_SCREEN_WIDTH # SCREEN_WIDTH -# MOUNT_LOCAL_SOURCES +# MOUNT_SELECTED_LOCAL_SOURCES # FORCE_PULL_IMAGES # FORWARD_CREDENTIALS # DB_RESET @@ -93,13 +108,20 @@ function breeze::setup_default_breeze_constants() { export MAX_SCREEN_WIDTH=100 readonly MAX_SCREEN_WIDTH - # By default we mount local Airflow sources - export MOUNT_LOCAL_SOURCES="true" + # By default we mount selected local Airflow sources + export MOUNT_SELECTED_LOCAL_SOURCES="true" + + # By default we do not mount all local Airflow sources + export MOUNT_ALL_LOCAL_SOURCES="false" # By default we only pull images if we do not have them locally. # This can be overridden by '--force-pull-images' flag export FORCE_PULL_IMAGES="false" + # By default we do not pull python base image. We should do that only when we run upgrade check in + # CI master and when we manually refresh the images to latest versions + export FORCE_PULL_BASE_PYTHON_IMAGE="false" + # Forward common host credentials to docker (gcloud, aws etc.). export FORWARD_CREDENTIALS="false" @@ -119,9 +141,6 @@ function breeze::setup_default_breeze_constants() { # Which means that you do not have to start from scratch export PRESERVE_VOLUMES="false" - # If set to true, Backport packages are prepared not the Regular ones - export BACKPORT_PACKAGES="false" - # If set to true, RBAC UI will not be used for 1.10 version export DISABLE_RBAC="false" @@ -239,7 +258,7 @@ function breeze::initialize_virtualenv() { set +e # We need to export this one to speed up Cassandra driver installation in virtualenv CASS_DRIVER_NO_CYTHON="1" pip install -e ".[devel]" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/${DEFAULT_CONSTRAINTS_BRANCH}/constraints-${PYTHON_MAJOR_MINOR_VERSION}.txt" + --constraint "https://raw.githubusercontent.com/${CONSTRAINTS_GITHUB_REPOSITORY}/${DEFAULT_CONSTRAINTS_BRANCH}/constraints-${PYTHON_MAJOR_MINOR_VERSION}.txt" res=$? 
set -e popd @@ -466,7 +485,7 @@ EOF Branch name: ${BRANCH_NAME} Docker image: ${AIRFLOW_PROD_IMAGE} - Github cache prefix: ${GITHUB_REGISTRY}/$(get_github_container_registry_image_prefix) + GitHub cache prefix: ${GITHUB_REGISTRY}/$(get_github_container_registry_image_prefix) Airflow source version: $(build_images::get_airflow_version_from_production_image) EOF else @@ -476,7 +495,7 @@ EOF Branch name: ${BRANCH_NAME} Docker image: ${AIRFLOW_CI_IMAGE} - Github cache prefix: ${GITHUB_REGISTRY}/$(get_github_container_registry_image_prefix) + GitHub cache prefix: ${GITHUB_REGISTRY}/$(get_github_container_registry_image_prefix) Airflow source version: ${AIRFLOW_VERSION} EOF fi @@ -501,7 +520,7 @@ EOF Branch name: ${BRANCH_NAME} Docker image: ${AIRFLOW_PROD_IMAGE} - Github cache prefix: ${GITHUB_REGISTRY}/$(get_github_container_registry_image_prefix) + GitHub cache prefix: ${GITHUB_REGISTRY}/$(get_github_container_registry_image_prefix) EOF else cat <] ${CMDNAME} build-image [FLAGS] Builds docker image (CI or production) without entering the container. You can pass - additional options to this command, such as '--force-build-image', - '--force-pull-image', '--python', '--build-cache-local' or '-build-cache-pulled' - in order to modify build behaviour. + additional options to this command, such as: + + Choosing python version: + '--python' + + Choosing cache option: + '--build-cache-local', '--build-cache-pulled', or '--build-cache-none' + + Choosing whether to force pull images or force build the image: + '--force-build-image', + '--force-pull-image', '--force-pull-base-python-image' You can also pass '--production-image' flag to build production image rather than CI image. - For DockerHub pull --dockerhub-user and --dockerhub-repo flags can be used to specify - the repository to pull from. For GitHub repository, the --github-repository + For DockerHub pull, '--dockerhub-user' and '--dockerhub-repo' flags can be used to specify + the repository to pull from. For GitHub repository, the '--github-repository' + flag can be used for the same purpose. You can also use - --github-image-id <COMMIT_SHA>|<RUN_ID> in case you want to pull the image with + '--github-image-id <COMMIT_SHA>|<RUN_ID>' in case you want to pull the image with specific COMMIT_SHA tag or RUN_ID. Flags: @@ -1719,49 +1787,51 @@ $(breeze::flag_verbosity) Explains in detail all the flags that can be used with breeze. " readonly DETAILED_USAGE_FLAGS - export DETAILED_USAGE_PREPARE_PROVIDER_README=" -${CMDNAME} prepare-provider-readme [FLAGS] [YYYY.MM.DD] [PACKAGE_ID ...] + export DETAILED_USAGE_PREPARE_PROVIDER_DOCUMENTATION=" +${CMDNAME} prepare-provider-documentation [FLAGS] [PACKAGE_ID ...] + + Prepares documentation files for provider packages. - Prepares README.md files for backport packages. You can provide (after --) optional version - in the form of YYYY.MM.DD, optionally followed by the list of packages to generate readme for. + The command is optionally followed by the list of packages to generate readme for. If no packages are specified, readme files for all packages are generated.
Examples: - '${CMDNAME} prepare-provider-readme' or - '${CMDNAME} prepare-provider-readme 2020.05.10' or - '${CMDNAME} prepare-provider-readme 2020.05.10 https google amazon' + '${CMDNAME} prepare-provider-documentation' or + '${CMDNAME} prepare-provider-documentation --version-suffix-for-pypi rc1' General form: - '${CMDNAME} prepare-provider-readme YYYY.MM.DD <PACKAGE_ID> ...' - - * YYYY.MM.DD - is the CALVER version of the package to prepare. Note that this date - cannot be earlier than the already released version (the script will fail if it - will be). It can be set in the future anticipating the future release date. + '${CMDNAME} prepare-provider-documentation <PACKAGE_ID> ...' * <PACKAGE_ID> is usually a directory in the airflow/providers folder (for example 'google', but in several cases it might be one level deeper, separated with '.', for example 'apache.hive') Flags: +$(breeze::flag_version_suffix) +$(breeze::flag_packages) $(breeze::flag_verbosity) " - readonly DETAILED_USAGE_PREPARE_PROVIDER_README + readonly DETAILED_USAGE_PREPARE_PROVIDER_DOCUMENTATION export DETAILED_USAGE_GENERATE_CONSTRAINTS=" ${CMDNAME} generate-constraints [FLAGS] - Generates pinned constraint files from setup.py. Those files are generated in files folder - - separate files for different python version. Those constraint files when pushed to orphan - constraints-master, constraints-2-0 and constraints-1-10 branches are used to generate - repeatable CI builds as well as run repeatable production image builds. You can use those + Generates pinned constraint files with all extras from setup.py. Those files are generated in + files folder - separate files for different python versions. Those constraint files when + pushed to orphan constraints-master, constraints-2-0 and constraints-1-10 branches are used + to generate repeatable CI builds as well as run repeatable production image builds and + upgrades when you want to include installing or updating some of the providers + released at the time a particular airflow version was released. You can use those constraints to predictably install released Airflow versions. This is mainly used to test - the constraint generation - constraints are pushed to the orphan branches by a - successful scheduled CRON job in CI automatically. + the constraint generation or manually fix them - constraints are pushed to the orphan + branches by a successful scheduled CRON job in CI automatically, but sometimes a manual fix + might be needed. Flags: +$(breeze::flag_generate_constraints) $(breeze::flag_airflow_variants) $(breeze::flag_verbosity) " @@ -1799,7 +1869,7 @@ $(breeze::flag_verbosity) export DETAILED_USAGE_PREPARE_PROVIDER_PACKAGES=" ${CMDNAME} prepare-provider-packages [FLAGS] [PACKAGE_ID ...] - Prepares backport packages. You can provide (after --) optional list of packages to prepare. + Prepares provider packages. You can provide (after --) an optional list of packages to prepare. If no packages are specified, readme files for all packages are generated. You can specify optional --version-suffix-for-svn flag to generate rc candidate packages to upload to SVN or --version-suffix-for-pypi flag to generate rc candidates for PyPI packages. You can also @@ -1813,7 +1883,7 @@ ${CMDNAME} prepare-provider-packages [FLAGS] [PACKAGE_ID ...]
'${CMDNAME} prepare-provider-packages' or '${CMDNAME} prepare-provider-packages google' or - '${CMDNAME} prepare-provider-packages --package-format both google' or + '${CMDNAME} prepare-provider-packages --package-format wheel google' or '${CMDNAME} prepare-provider-packages --version-suffix-for-svn rc1 http google amazon' or '${CMDNAME} prepare-provider-packages --version-suffix-for-pypi rc1 http google amazon' '${CMDNAME} prepare-provider-packages --version-suffix-for-pypi a1 @@ -1945,7 +2015,7 @@ ${CMDNAME} static-check [FLAGS] static_check [-- <EXTRA_ARGS>] ${FORMATTED_STATIC_CHECKS} - You can pass extra arguments including options to to the pre-commit framework as + You can pass extra arguments including options to the pre-commit framework as passed after --. For example: '${CMDNAME} static-check mypy' or @@ -2084,7 +2154,7 @@ Commands with arguments: " # shellcheck disable=SC2154 for subcommand in ${_breeze_extra_arg_commands}; do - printf " %-30s%-10s %s\n" "${subcommand}" "" "$(breeze::get_usage "${subcommand}")" + printf " %-35s%-10s %s\n" "${subcommand}" "" "$(breeze::get_usage "${subcommand}")" done echo " Help commands: @@ -2321,8 +2391,13 @@ function breeze::flag_local_file_mounting() { function breeze::flag_choose_different_airflow_version() { echo " -a, --install-airflow-version INSTALL_AIRFLOW_VERSION - If specified, installs Airflow directly from PIP released version. This happens at - image building time in production image and at container entering time for CI image. One of: + In CI image, installs Airflow (in entrypoint) from PIP released version or using + the installation method specified (sdist, wheel, none). + + In PROD image, the installation of the selected method or version happens during image building. + For PROD image, the 'none' option is not valid. + + One of: ${FORMATTED_INSTALL_AIRFLOW_VERSIONS} @@ -2337,8 +2412,9 @@ ${FORMATTED_INSTALL_AIRFLOW_VERSIONS} This can be a GitHub branch like master or v1-10-test, or a tag like 2.0.0a1. --installation-method INSTALLATION_METHOD - Method of installing airflow - either from the sources ('.') or from package - 'apache-airflow' to install from PyPI. Default in Breeze is to install from sources. One of: + Method of installing airflow for production image - either from the sources ('.') + or from package 'apache-airflow' to install from PyPI. + Default in Breeze is to install from sources. One of: ${FORMATTED_INSTALLATION_METHOD} @@ -2359,6 +2435,31 @@ ${FORMATTED_INSTALLATION_METHOD} " } +####################################################################################################### +# +# Prints flags that allow to choose variants of constraint generation +# +# Global constants used: +# GENERATE_CONSTRAINTS_MODES +# +# Outputs: +# Flag information. +####################################################################################################### +function breeze::flag_generate_constraints() { + echo " +--generate-constraints-mode GENERATE_CONSTRAINTS_MODE + Mode of generating constraints - determines whether providers are installed when generating + constraints and which version of them (either the ones from sources are used or the ones + from pypi).
+ + One of: + +${FORMATTED_GENERATE_CONSTRAINTS_MODE} + +" +} + + ####################################################################################################### # # Prints flags that allow to set assumed answers to questions @@ -2433,6 +2534,11 @@ function breeze::flag_verbosity() { Note that you can further increase verbosity and see all the commands executed by breeze by running 'export VERBOSE_COMMANDS=\"true\"' before running breeze. + +--dry-run-docker + Only show docker commands to execute instead of actually executing them. The docker + commands are printed in yellow color. + " } @@ -2473,6 +2579,13 @@ function breeze::flag_build_docker_images() { images are pulled by default only for the first time you run the environment, later the locally built images are used as cache. +--force-pull-base-python-image + Forces pulling of Python base image from DockerHub before building to + populate cache. This should only be run in case we need to update to the latest available + Python base image. This should be a rare and manually triggered event. Also this flag + is used in the scheduled run in CI when we rebuild all the images from scratch + and run the tests to verify that the latest python images do not fail our tests. + Customization options: -E, --extras EXTRAS @@ -2622,7 +2735,7 @@ function breeze::flag_pull_push_docker_images() { and you need to be a committer to push to Apache Airflow's GitHub registry. --github-registry GITHUB_REGISTRY - Github registry used. GitHub has legacy Packages registry and Public Beta Container + GitHub registry used. GitHub has legacy Packages registry and Public Beta Container registry. Default: ${_breeze_default_github_registry:=}. @@ -2662,7 +2775,7 @@ ${FORMATTED_GITHUB_REGISTRY} function breeze::flag_version_suffix() { echo " -S, --version-suffix-for-pypi SUFFIX - Adds optional suffix to the version in the generated backport package. It can be used + Adds optional suffix to the version in the generated provider package. It can be used to generate rc1/rc2 ... versions of the packages to be uploaded to PyPI. -N, --version-suffix-for-svn SUFFIX @@ -2795,7 +2908,7 @@ $(breeze::print_star_line) $(breeze::flag_tests) $(breeze::print_star_line) - Flags for generation of the backport packages + Flags for generation of the provider packages $(breeze::flag_version_suffix) $(breeze::print_star_line) @@ -3102,11 +3215,19 @@ function breeze::make_sure_precommit_is_installed() { # ####################################################################################################### function breeze::remove_images() { - set +e - docker rmi "${PYTHON_BASE_IMAGE}" 2>/dev/null >/dev/null - docker rmi "${AIRFLOW_CI_IMAGE}" 2>/dev/null >/dev/null - docker rmi "${AIRFLOW_PROD_IMAGE}" 2>/dev/null >/dev/null - set -e + # shellcheck disable=SC2086 + docker rmi --force ${PYTHON_BASE_IMAGE} \ + ${GITHUB_REGISTRY_PYTHON_BASE_IMAGE} \ + ${AIRFLOW_PYTHON_BASE_IMAGE} \ + ${AIRFLOW_CI_IMAGE} \ + ${DEFAULT_CI_IMAGE} \ + ${AIRFLOW_CI_LOCAL_MANIFEST_IMAGE} \ + ${GITHUB_REGISTRY_AIRFLOW_CI_IMAGE} \ + ${AIRFLOW_PROD_IMAGE} \ + ${GITHUB_REGISTRY_AIRFLOW_PROD_IMAGE} \ + ${AIRFLOW_PROD_BUILD_IMAGE} \ + ${GITHUB_REGISTRY_AIRFLOW_PROD_BUILD_IMAGE} \ + 2>/dev/null >/dev/null && true echo echo "###################################################################" echo "NOTE!! Removed Airflow images for Python version ${PYTHON_MAJOR_MINOR_VERSION}."
@@ -3180,7 +3301,7 @@ function breeze::run_build_command() { fi ;; build_docs | perform_static_checks | perform_generate_constraints | \ - perform_prepare_provider_readme | perform_prepare_provider_packages | \ + perform_prepare_provider_documentation | perform_prepare_provider_packages | \ perform_prepare_airflow_packages) build_images::prepare_ci_build build_images::rebuild_ci_image_if_needed @@ -3261,6 +3382,20 @@ function breeze::run_build_command() { esac } +# executes command +function breeze::run_command() { + "${@}" +} + + +# print command instead of executing +function breeze::print_command() { + echo + echo "${COLOR_YELLOW}" "${@}" "${COLOR_RESET}" + echo +} + + ####################################################################################################### # # Runs the actual command - depending on the command chosen it will use the right @@ -3282,25 +3417,28 @@ function breeze::run_build_command() { function breeze::run_breeze_command() { set +u local dc_run_file + local run_command="breeze::run_command" + if [[ ${DRY_RUN_DOCKER=} != "false" ]]; then + run_command="breeze::print_command" + fi if [[ ${PRODUCTION_IMAGE} == "true" ]]; then dc_run_file="${BUILD_CACHE_DIR}/${DOCKER_COMPOSE_RUN_SCRIPT_FOR_PROD}" else dc_run_file="${BUILD_CACHE_DIR}/${DOCKER_COMPOSE_RUN_SCRIPT_FOR_CI}" fi + case "${command_to_run}" in enter_breeze) - if [[ " ${INTEGRATIONS[*]} " =~ " kerberos " ]]; then - kerberos::create_kerberos_network - fi - + docker_engine_resources::check_all_resources if [[ ${PRODUCTION_IMAGE} == "true" ]]; then - "${dc_run_file}" run --service-ports --rm airflow "${@}" - "${SCRIPTS_CI_DIR}/tools/ci_fix_ownership.sh" + ${run_command} "${dc_run_file}" run --service-ports --rm airflow "${@}" + ${run_command} "${SCRIPTS_CI_DIR}/tools/ci_fix_ownership.sh" else - "${dc_run_file}" run --service-ports --rm airflow "${@}" + ${run_command} "${dc_run_file}" run --service-ports --rm airflow "${@}" fi ;; run_exec) + docker_engine_resources::check_all_resources # Unfortunately `docker-compose exec` does not support exec'ing into containers started with run :( # so we have to find it manually set +e @@ -3312,28 +3450,19 @@ function breeze::run_breeze_command() { "/opt/airflow/scripts/in_container/entrypoint_exec.sh" "${@}" ;; run_tests) - if [[ " ${INTEGRATIONS[*]} " =~ " kerberos " ]]; then - kerberos::create_kerberos_network - fi - + docker_engine_resources::check_all_resources export RUN_TESTS="true" readonly RUN_TESTS - "${BUILD_CACHE_DIR}/${DOCKER_COMPOSE_RUN_SCRIPT_FOR_CI}" run --service-ports --rm airflow "$@" + ${run_command} "${BUILD_CACHE_DIR}/${DOCKER_COMPOSE_RUN_SCRIPT_FOR_CI}" run --service-ports --rm airflow "$@" ;; run_docker_compose) + docker_engine_resources::check_all_resources set +u - if [[ ${PRODUCTION_IMAGE} == "true" ]]; then - dc_run_file="${BUILD_CACHE_DIR}/${DOCKER_COMPOSE_RUN_SCRIPT_FOR_PROD}" - else - dc_run_file="${BUILD_CACHE_DIR}/${DOCKER_COMPOSE_RUN_SCRIPT_FOR_CI}" - fi - "${dc_run_file}" "${docker_compose_command}" "${EXTRA_DC_OPTIONS[@]}" "$@" + ${run_command} "${dc_run_file}" "${docker_compose_command}" "${EXTRA_DC_OPTIONS[@]}" "$@" set -u - if [[ "${docker_compose_command}" = "down" ]]; then - kerberos::delete_kerberos_network - fi ;; perform_static_checks) + docker_engine_resources::check_all_resources breeze::make_sure_precommit_is_installed breeze::run_static_checks "${@}" ;; @@ -3343,16 +3472,20 @@ function breeze::run_breeze_command() { breeze::remove_images ;; perform_generate_constraints) + docker_engine_resources::check_all_resources 
runs::run_generate_constraints ;; perform_prepare_airflow_packages) + docker_engine_resources::check_all_resources build_airflow_packages::build_airflow_packages ;; perform_prepare_provider_packages) + docker_engine_resources::check_all_resources runs::run_prepare_provider_packages "${@}" ;; - perform_prepare_provider_readme) - runs::run_prepare_provider_readme "${@}" + perform_prepare_provider_documentation) + docker_engine_resources::check_all_resources + runs::run_prepare_provider_documentation "${@}" ;; perform_push_image) if [[ ${PRODUCTION_IMAGE} == "true" ]]; then @@ -3368,11 +3501,13 @@ function breeze::run_breeze_command() { breeze::setup_autocomplete ;; manage_kind_cluster) + docker_engine_resources::check_all_resources kind::make_sure_kubernetes_tools_are_installed kind::get_kind_cluster_name kind::perform_kind_cluster_operation "${KIND_CLUSTER_OPERATION}" ;; build_docs) + docker_engine_resources::check_all_resources runs::run_docs "${@}" ;; toggle_suppress_cheatsheet) @@ -3439,6 +3574,8 @@ breeze::setup_default_breeze_constants initialization::initialize_common_environment +initialization::get_environment_for_builds_on_ci + breeze::determine_python_version_to_use_in_breeze sanity_checks::basic_sanity_checks diff --git a/breeze-complete b/breeze-complete index 39413b8524b7d..ff13c27fd43cf 100644 --- a/breeze-complete +++ b/breeze-complete @@ -25,18 +25,19 @@ _breeze_allowed_python_major_minor_versions="2.7 3.5 3.6 3.7 3.8" _breeze_allowed_backends="sqlite mysql postgres" -_breeze_allowed_integrations="cassandra kerberos mongo openldap pinot presto rabbitmq redis all" +_breeze_allowed_integrations="cassandra kerberos mongo openldap pinot rabbitmq redis statsd trino all" +_breeze_allowed_generate_constraints_modes="source-providers pypi-providers no-providers" # registrys is good here even if it is not correct english. We are adding s automatically to all variables _breeze_allowed_github_registrys="docker.pkg.github.com ghcr.io" _breeze_allowed_kubernetes_modes="image" -_breeze_allowed_kubernetes_versions="v1.18.6 v1.17.5 v1.16.9" +_breeze_allowed_kubernetes_versions="v1.20.2 v1.19.7 v1.18.15" _breeze_allowed_helm_versions="v3.2.4" -_breeze_allowed_kind_versions="v0.8.0" +_breeze_allowed_kind_versions="v0.10.0" _breeze_allowed_mysql_versions="5.7 8" _breeze_allowed_postgres_versions="9.6 10 11 12 13" _breeze_allowed_kind_operations="start stop restart status deploy test shell k9s" -_breeze_allowed_test_types="All Core Providers API CLI Integration Other WWW Heisentests Postgres MySQL Helm" -_breeze_allowed_package_formats="wheel sdist both" +_breeze_allowed_test_types="All Core Providers API CLI Integration Other WWW Postgres MySQL Helm Quarantined" +_breeze_allowed_package_formats="both sdist wheel" _breeze_allowed_installation_methods=". apache-airflow" # shellcheck disable=SC2034 @@ -44,6 +45,7 @@ _breeze_allowed_installation_methods=". 
apache-airflow" # Default values for the commands & flags used _breeze_default_backend=$(echo "${_breeze_allowed_backends}" | awk '{print $1}') _breeze_default_github_registry=$(echo "${_breeze_allowed_github_registrys}" | awk '{print $1}') + _breeze_default_generate_providers_mode=$(echo "${_breeze_allowed_generate_constraints_modes}" | awk '{print $1}') _breeze_default_kubernetes_mode=$(echo "${_breeze_allowed_kubernetes_modes}" | awk '{print $1}') _breeze_default_kubernetes_version=$(echo "${_breeze_allowed_kubernetes_versions}" | awk '{print $1}') _breeze_default_helm_version=$(echo "${_breeze_allowed_helm_versions}" | awk '{print $1}') @@ -55,7 +57,9 @@ _breeze_allowed_installation_methods=". apache-airflow" } _breeze_allowed_install_airflow_versions=$(cat <<-EOF +2.0.1 2.0.0 +1.10.15 1.10.14 1.10.12 1.10.11 @@ -129,6 +133,7 @@ setup-order setup-extra-packages shellcheck sort-in-the-wild +sort-spelling-wordlist stylelint trailing-whitespace update-breeze-file @@ -159,13 +164,13 @@ S: N: _breeze_long_options=" help python: backend: integration: kubernetes-mode: kubernetes-version: helm-version: kind-version: -skip-mounting-local-sources install-airflow-version: install-airflow-reference: db-reset +skip-mounting-local-sources mount-all-local-sources install-airflow-version: install-airflow-reference: db-reset verbose assume-yes assume-no assume-quit forward-credentials init-script: -force-build-images force-pull-images production-image extras: force-clean-images skip-rebuild-check +force-build-images force-pull-images force-pull-base-python-image production-image extras: force-clean-images skip-rebuild-check build-cache-local build-cache-pulled build-cache-disabled disable-pip-cache -dockerhub-user: dockerhub-repo: use-github-registry github-registry: github-repository: github-image-id: +dockerhub-user: dockerhub-repo: use-github-registry github-registry: github-repository: github-image-id: generate-constraints-mode: postgres-version: mysql-version: -version-suffix-for-pypi: version-suffix-for-svn: backports +version-suffix-for-pypi: version-suffix-for-svn: additional-extras: additional-python-deps: additional-dev-deps: additional-runtime-deps: image-tag: disable-mysql-client-installation constraints-location: disable-pip-cache install-from-docker-context-files additional-extras: additional-python-deps: disable-pypi-when-building skip-installing-airflow-providers-from-sources @@ -173,8 +178,7 @@ dev-apt-deps: additional-dev-apt-deps: dev-apt-command: additional-dev-apt-comma runtime-apt-deps: additional-runtime-apt-deps: runtime-apt-command: additional-runtime-apt-command: additional-runtime-apt-env: load-default-connections load-example-dags install-packages-from-dist no-rbac-ui package-format: upgrade-to-newer-dependencies installation-method: continue-on-pip-check-failure -test-type: -preserve-volumes +test-type: preserve-volumes dry-run-docker " _breeze_commands=" @@ -197,7 +201,7 @@ toggle-suppress-asciiart" _breeze_extra_arg_commands=" docker-compose kind-cluster -prepare-provider-readme +prepare-provider-documentation prepare-provider-packages static-check tests @@ -301,6 +305,9 @@ function breeze_complete::get_known_values_breeze() { --github-registry) _breeze_known_values="${_breeze_allowed_github_registrys}" ;; + --generate-constraints-mode) + _breeze_known_values="${_breeze_allowed_generate_constraints_modes}" + ;; *) _breeze_known_values="" ;; diff --git a/chart/templates/create-user-job.yaml b/chart/templates/create-user-job.yaml index 30b6c258f3983..3a3aa3f042bc8 100644 
--- a/chart/templates/create-user-job.yaml +++ b/chart/templates/create-user-job.yaml @@ -67,7 +67,7 @@ spec: args: - "bash" - "-c" - # Support running against 1.10.x and 2.0.0dev/master + # Support running against 1.10.x and 2.x - 'airflow users create "$@" || airflow create_user "$@"' - -- - "-r" diff --git a/chart/templates/migrate-database-job.yaml b/chart/templates/migrate-database-job.yaml index 975c46b11b163..bbcc67a99d770 100644 --- a/chart/templates/migrate-database-job.yaml +++ b/chart/templates/migrate-database-job.yaml @@ -63,7 +63,7 @@ spec: - name: run-airflow-migrations image: {{ template "airflow_image" . }} imagePullPolicy: {{ .Values.images.airflow.pullPolicy }} - # Support running against 1.10.x and 2.0.0dev/master + # Support running against 1.10.x and 2.x args: ["bash", "-c", "airflow db upgrade || airflow upgradedb"] envFrom: {{- include "custom_airflow_environment_from" . | default "\n []" | indent 10 }} diff --git a/chart/values.yaml b/chart/values.yaml index 30ff4dcdfb246..851698050a003 100644 --- a/chart/values.yaml +++ b/chart/values.yaml @@ -21,7 +21,7 @@ # User and group of airflow user uid: 50000 -gid: 50000 +gid: 0 # Airflow home directory # Used for mount paths @@ -166,7 +166,7 @@ secret: [] # Extra secrets that will be managed by the chart # (You can use them with extraEnv or extraEnvFrom or some of the extraVolumes values). # The format is "key/value" where -# * key (can be templated) is the the name the secret that will be created +# * key (can be templated) is the name of the secret that will be created # * value: an object with the standard 'data' or 'stringData' key (or both). # The value associated with those keys must be a string (can be templated) extraSecrets: {} @@ -185,7 +185,7 @@ extraSecrets: {} # Extra ConfigMaps that will be managed by the chart # (You can use them with extraEnv or extraEnvFrom or some of the extraVolumes values). # The format is "key/value" where -# * key (can be templated) is the the name the configmap that will be created +# * key (can be templated) is the name of the configmap that will be created # * value: an object with the standard 'data' key. 
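# For illustration, a hypothetical extraConfigMaps entry following the format
# described here might look like the sketch below (the name and data are made
# up for this example, not chart defaults):
# extraConfigMaps:
#   '{{ .Release.Name }}-airflow-variables':
#     data: |
#       AIRFLOW_VAR_HELLO_MESSAGE: "Hi!"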
# The value associated with these keys must be a string (can be templated) extraConfigMaps: {} diff --git a/codecov.yml b/codecov.yml index 518e4fbabd4cf..d68d7a8f644c2 100644 --- a/codecov.yml +++ b/codecov.yml @@ -37,6 +37,7 @@ coverage: # advanced branches: - master + - main - v1-10-stable - v1-10-test - v2-0-test @@ -53,6 +54,7 @@ coverage: # advanced branches: - master + - main - v1-10-stable - v1-10-test - v2-0-test diff --git a/dev/PROVIDER_PACKAGE_DETAILS.md b/dev/PROVIDER_PACKAGE_DETAILS.md index 7d71384b3a864..fde448e505ccc 100644 --- a/dev/PROVIDER_PACKAGE_DETAILS.md +++ b/dev/PROVIDER_PACKAGE_DETAILS.md @@ -23,15 +23,9 @@ - [Overview](#overview) - [Deciding when to release](#deciding-when-to-release) - - [Backport Packages](#backport-packages) - - [Regular Provider packages](#regular-provider-packages) -- [Generating release notes](#generating-release-notes) - - [Backport providers](#backport-providers) - - [Regular providers](#regular-providers) +- [Generating provider documentation](#generating-provider-documentation) - [Content of the release notes](#content-of-the-release-notes) - [Preparing packages](#preparing-packages) - - [Backport provider packages](#backport-provider-packages) - - [Regular provider packages](#regular-provider-packages) @@ -39,33 +33,14 @@ # Overview This document describes the process of preparing provider packages for release and releasing them. -The provider packages are packages (per provider) that are not part of the core Airflow. - -They are prepared in two variants: - -* Backport Provider Packages - those are the packages that can be installed in Airflow 1.10 line. They provide an easy migration path to Airflow 2.0 for anyone that still uses Airflow 1.10. - -* Regular Provider Packages - those are the packages that can be installed in Airflow 2.0. Basic Airflow release contains just core packages and operators. All the remaining providers have to be installed separately. When you install an extra, the right provider package should be installed automatically. Regular Provider Packages are Work In Progress and some details and processes are going to be hashed-out during Alpha and Beta releases of Airflow 2.0. +The provider packages are packages (per `provider`) that are not part of the core Airflow. Once you release the packages, you can simply install them with: -``` -pip install apache-airflow-backport-providers-<PROVIDER>[<EXTRAS>] -``` - -for backport provider packages, or - ``` pip install apache-airflow-providers-<PROVIDER>[<EXTRAS>] ``` -for regular provider packages. - Where `<PROVIDER>` is the provider id and `<EXTRAS>` are optional extra packages to install. You can find the provider packages dependencies and extras in the README.md files in each provider package (in `airflow/providers/` folder) as well as in the PyPI installation page. @@ -76,71 +51,18 @@ airflow to 2.0 line. # Deciding when to release -## Backport Packages - -You can release backport packages separately on an ad-hoc basis, whenever we find that a given provider needs -to be released - due to new features or due to bug fixes. You can release each backport package -separately - although we decided to release all backport packages together in one go 2020.05.10. - -We are using the [CALVER](https://calver.org/) versioning scheme for the backport packages. We also have an -automated way to prepare and build the packages, so it should be very easy to release the packages often and -separately.
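For illustration of the `pip install apache-airflow-providers-<PROVIDER>[<EXTRAS>]` pattern shown above, installing the `google` provider without and with an extra might look like this sketch (the package name is real, but the `amazon` extra is only an example of the syntax and may not exist for every provider or version):

```
pip install apache-airflow-providers-google
pip install "apache-airflow-providers-google[amazon]"
```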
- -## Regular Provider packages - Each provider package has its own version maintained separately when contributors implement changes, marking those as patches/features/backwards incompatible changes. Details to be hashed out in [the related issue](https://github.com/apache/airflow/issues/11425) -# Generating release notes - -## Backport providers +# Generating provider documentation When you want to prepare release notes for a package, you need to run: ``` -./breeze --backports prepare-provider-readme [YYYY.MM.DD] <PACKAGE_ID> ... -``` - - -* YYYY.MM.DD - is the CALVER version of the package to prepare. Note that this date cannot be earlier - than the already released version (the script will fail if it will be). It can be set in the future - anticipating the future release date. If you do not specify date, the date will be taken from the last - generated readme - the last generated CHANGES file will be updated. - -* <PACKAGE_ID> is usually directory in the `airflow/providers` folder (for example `google` but in several - cases, it might be one level deeper separated with `.` for example `apache.hive` - -You can run the script with multiple package names if you want to prepare several packages at the same time. -Before you specify a new version, the last released version is update in case you have any bug fixes -merged in the master recently, they will be automatically taken into account. - -Typically, the first time you run release before release, you run it with target release.date: - -``` -./breeze --backports prepare-provider-readme 2020.05.20 google -``` - -Then while you iterate with merges and release candidates you update the release date without providing -the date (to update the existing release notes) - -``` -./breeze --backports prepare-provider-readme google -``` - - -Whenever you are satisfied with the release notes generated you can commit generated changes/new files -to the repository. - - -## Regular providers - -When you want to prepare release notes for a package, you need to run: - -``` -./breeze --backports prepare-provider-readme <PACKAGE_ID> ... +./breeze prepare-provider-documentation <PACKAGE_ID> ... ``` The version for each package is going to be updated separately for each package when we agree to the @@ -158,11 +80,11 @@ release date without providing the date (to update the existing release notes) ``` -./breeze --backports prepare-provider-readme google +./breeze prepare-provider-documentation google ``` When you are satisfied with the release notes generated you can commit generated changes/new files to the repository. @@ -170,107 +92,20 @@ to the repository. The script generates all the necessary information: -* summary of requirements for each backport package +* summary of requirements for each provider package * list of dependencies (including extras to install them) when package depends on other providers packages -* table of new hooks/operators/sensors/protocols/secrets -* table of moved hooks/operators/sensors/protocols/secrets with the - information where they were moved from -* changelog of all the changes to the provider package (this will be - automatically updated with an incremental changelog whenever we decide to - release separate packages.
- -The script generates two types of files: +* link to the changelog of all the changes to the provider package -* BACKPORT_PROVIDERS_CHANGES_YYYY.MM.DD.md which keeps information about changes (commits) in a particular - version of the provider package. The file for latest release gets updated when you iterate with - the same new date/version, but it never changes automatically for already released packages. - This way - just before the final release, you can manually correct the changes file if you - want to remove some changes from the file. +The information is placed in README.rst which is regenerated every time you run the script. -* README.md which is regenerated every time you run the script (unless there are no changes since - the last time you generated the release notes - -Note that our CI system builds the release notes for backport packages automatically with every build and +Note that our CI system builds the release notes for provider packages automatically with every build and current date - this way you might be sure the automated generation of the release notes continues to work. You can also preview the generated readme files (by downloading artifacts from GitHub Actions). -The script does not modify the README and CHANGES files if there is no change in the repo for that provider. +The script does not modify the README files if there is no change in the repo for that provider. # Preparing packages -## Backport provider packages - -As part of preparation to Airflow 2.0 we decided to prepare backport of providers package that will be -possible to install in the Airflow 1.10.*, Python 3.6+ environment. - -You can build those packages in the breeze environment, so you do not have to worry about common environment. - -Note that readme release notes have to be generated first, so that the package preparation script reads -the latest version from the latest version of release notes prepared. - -* The provider package ids PACKAGE_ID are subdirectories in the ``providers`` directory. Sometimes they -are one level deeper (`apache/hive` folder for example, in which case PACKAGE_ID uses "." to separate -the folders (for example Apache Hive's PACKAGE_ID is `apache.hive` ). You can see the list of all available -providers by running: - -```bash -./breeze --backports prepare-provider-packages -- --help -``` - -The examples below show how you can build selected packages, but you can also build all packages by -omitting the package ids altogether. - -* To build the release candidate packages for SVN Apache upload run the following command: - -```bash -./breeze --backports prepare-provider-packages package-format both --version-suffix-for-svn=rc1 [PACKAGE_ID] ... -``` - -for example: - -```bash -./breeze --backports prepare-provider-packages package-format both --version-suffix-for-svn=rc1 http ... -``` - -* To build the release candidate packages for PyPI upload run the following command: - -```bash -./breeze --backports prepare-provider-packages package-format both --version-suffix-for-pypi=rc1 [PACKAGE_ID] ... -``` - -for example: - -```bash -./breeze --backports prepare-provider-packages package-format both --version-suffix-for-pypi=rc1 http ... -``` - - -* To build the final release packages run the following command: - -```bash -./breeze --backports prepare-provider-packages package-format both [PACKAGE_ID] ... -``` - -for example: - -```bash -./breeze --backports prepare-provider-packages package-format both http ... 
-``` - -* For each package, this creates a wheel package and source distribution package in your `dist` folder with - names following the patterns: - - * `apache_airflow_backport_providers__YYYY.[M]M.[D]D[suffix]-py3-none-any.whl` - * `apache-airflow-backport-providers--YYYY.[M]M.[D]D[suffix].tar.gz` - -Note! Even if we always use the two-digit month and day when generating the readme files, -the version in PyPI does not contain the leading 0s in version name - therefore the artifacts generated -also do not container the leading 0s. - -* You can install the .whl packages with `pip install ` - -## Regular provider packages - Airflow 2.0 is released as separate core package and separate set of provider packages. You can build those packages in the breeze environment, so you do not have to worry about common environment. @@ -337,4 +172,4 @@ Where ``MAJOR.MINOR.PATCHLEVEL`` is the semver version of the packages. * You can install the .whl packages with `pip install ` -Releasing the packages is described in [README.md](README.md) +Releasing the packages is described in [README_RELEASE_PROVIDER_PACKAGES.md](README_RELEASE_PROVIDER_PACKAGES.md) diff --git a/dev/README.md b/dev/README.md index d969c1ec99e38..f67dc727a5831 100644 --- a/dev/README.md +++ b/dev/README.md @@ -35,7 +35,7 @@ The Apache Airflow releases are one of the two types: * Releases of the Apache Airflow package -* Releases of the Backport Providers Packages +* Releases of the Providers Packages ## Apache Airflow Package @@ -47,10 +47,10 @@ They contain sources for: * Dockerfile and corresponding scripts that build and use an official DockerImage * Breeze development environment that helps with building images and testing locally apache airflow built from sources + * Provider packages - containing Airflow's providers - separate package per each service Airflow integrates + with. -In the future (Airflow 2.0) this package will be split into separate "core" and "providers" packages that -will be distributed separately, following the mechanisms introduced in Backport Package Providers. We also -plan to release the official Helm Chart sources that will allow the user to install Apache Airflow +We also plan to release the official Helm Chart sources that will allow the user to install Apache Airflow via helm 3.0 chart in a distributed fashion. The Source releases are the only "official" Apache Software Foundation releases, and they are distributed @@ -74,46 +74,36 @@ Detailed instruction of releasing Provider Packages can be found in the The Provider packages are packages (per provider) that make it possible to easily install Hooks, Operators, Sensors, and Secrets for different providers (external services used by Airflow). -There are also Backport Provider Packages that allow to use the Operators, Hooks, Secrets from the 2.0 -version of Airflow in the 1.10.* series. - Once you release the packages, you can simply install them with: ``` pip install apache-airflow-providers-[] ``` -for regular providers and - -``` -pip install apache-airflow-backport-providers-[] -``` - -for backport providers. - Where `` is the provider id and `` are optional extra packages to install. You can find the provider packages dependencies and extras in the README.md files in each provider package (in `airflow/providers/` folder) as well as in the PyPI installation page. -Backport providers are a great way to migrate your DAGs to Airflow-2.0 compatible DAGs. 
You can -switch to the new Airflow-2.0 packages in your DAGs, long before you attempt to migrate -airflow to 2.0 line. - The sources released in SVN allow the user to build all the provider packages, following the instructions and scripts provided. Those are also "official source releases" as described in the [ASF Release Policy](http://www.apache.org/legal/release-policy.html) and they are available -via [Official Apache Download for providers](https://downloads.apache.org/airflow/providers/) and -[Official Apache Download for backport-providers](https://downloads.apache.org/airflow/backport-providers/) +via [Official Apache Download for providers](https://downloads.apache.org/airflow/providers/). The full list of providers can be found here: [Provider Packages Reference](https://s.apache.org/airflow-docs) -There are also convenience packages released as "apache-airflow-providers" and -"apache-airflow-backport-providers" separately in PyPI. -You can find all backport providers via: -[PyPI query for providers](https://pypi.org/search/?q=apache-airflow-providers) and +There are also convenience packages released as "apache-airflow-providers" separately in PyPI. +[PyPI query for providers](https://pypi.org/search/?q=apache-airflow-providers) + +We also have legacy backport providers available for Airflow 1.10.* series: +[Official Apache Download for backport-providers](https://downloads.apache.org/airflow/backport-providers/) + +And available in PyPI: [PyPI query for backport providers](https://pypi.org/search/?q=apache-airflow-backport-providers). +Note that Backport Providers for Airflow 1.10.* series are not released any more. The last release +of Backport Providers was done on March 17, 2021. + Detailed instructions for releasing Provider Packages can be found in [README_RELEASE_PROVIDER_PACKAGES.md](README_RELEASE_PROVIDER_PACKAGES.md) diff --git a/dev/README_RELEASE_PROVIDER_PACKAGES.md b/dev/README_RELEASE_PROVIDER_PACKAGES.md index 488c5c860fdb1..887209e2abc96 100644 --- a/dev/README_RELEASE_PROVIDER_PACKAGES.md +++ b/dev/README_RELEASE_PROVIDER_PACKAGES.md @@ -20,40 +20,38 @@ **Table of contents** -- [Provider Packages](#provider-packages) +- [Provider packages](#provider-packages) - [Decide when to release](#decide-when-to-release) -- [Backport provider packages versioning](#backport-provider-packages-versioning) -- [Regular provider packages versioning](#regular-provider-packages-versioning) -- [Prepare Backport Provider Packages (RC)](#prepare-backport-provider-packages-rc) +- [Provider packages versioning](#provider-packages-versioning) +- [Prepare Regular Provider packages (RC)](#prepare-regular-provider-packages-rc) - [Generate release notes](#generate-release-notes) - - [Build an RC release for SVN apache upload](#build-an-rc-release-for-svn-apache-upload) + - [Build regular provider packages for SVN apache upload](#build-regular-provider-packages-for-svn-apache-upload) - [Build and sign the source and convenience packages](#build-and-sign-the-source-and-convenience-packages) - [Commit the source packages to Apache SVN repo](#commit-the-source-packages-to-apache-svn-repo) - - [Publish the RC convenience package to PyPI](#publish-the-rc-convenience-package-to-pypi) - - [Prepare voting email for Backport Providers release candidate](#prepare-voting-email-for-backport-providers-release-candidate) + - [Publish the Regular convenience package to PyPI](#publish-the-regular-convenience-package-to-pypi) + - [Add tags in git](#add-tags-in-git) + - [Prepare
documentation](#prepare-documentation) + - [Prepare voting email for Providers release candidate](#prepare-voting-email-for-providers-release-candidate) - [Verify the release by PMC members](#verify-the-release-by-pmc-members) - [Verify by Contributors](#verify-by-contributors) - - [Publish the final releases of backport packages](#publish-the-final-releases-of-backport-packages) - - [Update Announcements page](#update-announcements-page) -- [Prepare Regular Providers (Alphas/Betas)](#prepare-regular-providers-alphasbetas) - - [Generate release notes](#generate-release-notes-1) - - [Build regular provider packages for SVN apache upload](#build-regular-provider-packages-for-svn-apache-upload) - - [Build and sign the source and convenience packages](#build-and-sign-the-source-and-convenience-packages-1) - - [Commit the source packages to Apache SVN repo](#commit-the-source-packages-to-apache-svn-repo-1) - - [Publish the Regular convenience package to PyPI](#publish-the-regular-convenience-package-to-pypi) - - [Publish documentation](#publish-documentation) +- [Publish release](#publish-release) + - [Summarize the voting for the Apache Airflow release](#summarize-the-voting-for-the-apache-airflow-release) + - [Publish release to SVN](#publish-release-to-svn) + - [Publish the Regular convenience package to PyPI](#publish-the-regular-convenience-package-to-pypi-1) + - [Publish documentation prepared before](#publish-documentation-prepared-before) + - [Add tags in git](#add-tags-in-git-1) - [Notify developers of release](#notify-developers-of-release) ------------------------------------------------------------------------------------------------------------ -# Provider Packages +# Provider packages The prerequisites to release Apache Airflow are described in [README.md](README.md). -You can read more about the command line tools used to generate the packages and the two types of -packages we have (Backport and Regular Provider Packages) in [Provider packages](PROVIDER_PACKAGES.md). +You can read more about the command line tools used to generate the packages in the +[Provider packages](PROVIDER_PACKAGES.md). # Decide when to release @@ -62,90 +60,79 @@ a given provider needs to be released - due to new features or due to bug fixes. You can release each provider package separately, but due to voting and release overhead we try to group releases of provider packages together. -# Backport provider packages versioning - -We are using the [CALVER](https://calver.org/) versioning scheme for the backport packages. We also have an -automated way to prepare and build the packages, so it should be very easy to release the packages often and -separately. Backport packages will be maintained for three months after 2.0.0 version of Airflow, and it is -really a bridge, allowing people to migrate to Airflow 2.0 in stages, so the overhead of maintaining -semver versioning does not apply there - subsequent releases might be backward-incompatible, and it is -not indicated by the version of the packages. - -# Regular provider packages versioning +# Provider packages versioning -We are using the [SEMVER](https://semver.org/) versioning scheme for the regular packages. This is in order +We are using the [SEMVER](https://semver.org/) versioning scheme for the provider packages. This is in order to give the users confidence about maintaining backwards compatibility in the new releases of those packages. 
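For example, under SEMVER a provider moving from `1.0.0` to `1.1.0` signals new backwards-compatible features, while a move to `2.0.0` signals breaking changes. A quick way to check which version of a provider is installed locally (a sketch, using the `google` provider as an assumed example):

```shell script
pip show apache-airflow-providers-google | grep ^Version
```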
Details about maintaining the SEMVER version are going to be discussed and implemented in
[the related issue](https://github.com/apache/airflow/issues/11425)

-# Prepare Backport Provider Packages (RC)
+# Prepare Regular Provider packages (RC)

## Generate release notes

-Prepare release notes for all the packages you plan to release. Where YYYY.MM.DD is the CALVER
-date for the packages.
+Prepare release notes for all the packages you plan to release. When the provider package version
+has not been updated since the latest version, the release notes are not generated. Release notes
+are only generated when the latest version of the package does not yet have a corresponding TAG.
+The tags for providers are of the form ``providers-<PROVIDER_NAME>/<VERSION>``, for example
+``providers-amazon/1.0.0``. During releasing, the RC1/RC2 tags are created (for example
+``providers-amazon/1.0.0rc1``).

-```shell script
-./breeze --backports prepare-provider-readme YYYY.MM.DD [packages]
-```
+Details about maintaining the SEMVER version are going to be discussed and implemented in
+[the related issue](https://github.com/apache/airflow/issues/11425)

-If you iterate with merges and release candidates you can update the release date without providing
-the date (to update the existing release notes)

```shell script
-./breeze --backports prepare-provider-readme google
+./breeze prepare-provider-documentation [packages]
```

-Generated readme files should be eventually committed to the repository.
+This command will not only prepare documentation but will also allow the release manager to review
+changes implemented in all providers, and determine which of the providers should be released. For each
+provider, details of the changes implemented since the last release will be printed, including
+links to the particular commits. This should help to determine which version of the provider should be released:
+
+* increased patch-level for bugfix-only changes
+* increased minor version if new features are added
+* increased major version if breaking changes are added

-## Build an RC release for SVN apache upload
+It also allows the release manager to update CHANGELOG.rst, where a high-level overview of the changes should be
+documented for the released providers.

-The Release Candidate artifacts we vote upon should be the exact ones we vote against, without any
-modification than renaming i.e. the contents of the files must be the same between voted
-release candidate and final release. Because of this the version in the built artifacts
-that will become the official Apache releases must not include the rcN suffix. They also need
-to be signed and have checksum files. You can generate the checksum/signature files by running
+You can iterate and re-generate the same readme content as many times as you want.
+The generated files should be added and committed to the repository.
+
+
+## Build regular provider packages for SVN apache upload
+
+Those packages might get promoted to "final" packages by just renaming the files, so internally they
+should keep the final version number without the rc suffix, even if they are rc1/rc2/... candidates.
+
+They also need to be signed and have checksum files. You can generate the checksum/signature files by running
the "dev/sign.sh" script (assuming you have the right PGP key set-up for signing). The script
generates corresponding .asc and .sha512 files for each file to sign.
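As an illustration, signing everything that was just built and spot-checking the results could look
like this (a sketch that assumes your PGP key is already set up as described in [README.md](README.md)):

```shell script
# Sign all built packages in dist/ and list the generated companion files
pushd dist
../dev/sign.sh *
ls -1 ./*.asc ./*.sha512
popd
```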
## Build and sign the source and convenience packages

-* Set environment variables (version and root of airflow repo)
+* Clean up the dist folder:

```shell script
-export VERSION=2020.5.20rc2
export AIRFLOW_REPO_ROOT=$(pwd)
-
+rm -rf ${AIRFLOW_REPO_ROOT}/dist/*
```

-* Build the source package:
-```shell script
-${AIRFLOW_REPO_ROOT}/dev/provider_packages/build_source_package.sh --backports
-```
-
-It will generate `apache-airflow-backport-providers-${VERSION}-source.tar.gz`
-
-* Generate the packages - since we are preparing packages for SVN repo, we should use the right switch. Note
-  that this will clean up dist folder before generating the packages, so it will only contain the packages
-  you intended to build.
+* Release candidate packages:

```shell script
-./breeze --backports prepare-provider-packages --package-format both --version-suffix-for-svn rc1
+./breeze prepare-provider-packages --version-suffix-for-svn rc1 --package-format both
```

if you only build a few packages, run:

```shell script
-./breeze --backports prepare-provider-packages --package-format both --version-suffix-for-svn rc1 PACKAGE PACKAGE ....
-```
-
-* Move the source tarball to dist folder
-
-```shell script
-mv apache-airflow-backport-providers-${VERSION}-source.tar.gz dist
+./breeze prepare-provider-packages --version-suffix-for-svn rc1 --package-format both PACKAGE PACKAGE ....
```

* Sign all your packages

@@ -156,12 +143,6 @@ pushd dist
popd
```

-* Push tags to Apache repository (assuming that you have apache remote pointing to apache/airflow repo)
-
-```shell script
-git push apache backport-providers-${VERSION}
-```
-
## Commit the source packages to Apache SVN repo

* Push the artifacts to ASF dev dist repo

```shell script
# First clone the repo if you do not have it
svn checkout https://dist.apache.org/repos/dist/dev/airflow airflow-dev

# update the repo in case you have it already
cd airflow-dev
svn update

# Create a new folder for the release.
-cd airflow-dev/backport-providers
-svn mkdir ${VERSION}
+cd providers

# Move the artifacts to svn folder
-mv ${AIRFLOW_REPO_ROOT}/dist/* ${VERSION}/
+mv ${AIRFLOW_REPO_ROOT}/dist/* .

# Add and commit
-svn add ${VERSION}/*
-svn commit -m "Add artifacts for Airflow ${VERSION}"
+svn add *
+svn commit -m "Add artifacts for Airflow Providers $(date "+%Y-%m-%d%n")"

cd ${AIRFLOW_REPO_ROOT}
```

Verify that the files are available at
-[backport-providers](https://dist.apache.org/repos/dist/dev/airflow/backport-providers/)
+[providers](https://dist.apache.org/repos/dist/dev/airflow/providers/)
+
+## Publish the Regular convenience package to PyPI

-## Publish the RC convenience package to PyPI
+In case of pre-release versions you build the same packages for both PyPI and SVN so you can simply use
+packages generated in the previous step, and you can skip the "prepare" step below.

-In order to publish to PyPI you just need to build and release packages. The packages should however
-contain the rcN suffix in the version name as well, so you need to use `--version-suffix-for-pypi` switch
-to prepare those packages. Note that these are different packages than the ones used for SVN upload
+In order to publish a release candidate to PyPI you just need to build and release packages.
+The packages should however contain the rcN suffix in the version file name but not internally in the package,
+so you need to use the `--version-suffix-for-pypi` switch to prepare those packages.
+Note that these are different packages than the ones used for SVN upload
though they should be generated from the same sources.

* Generate the packages with the right RC version (specify the version suffix with PyPI switch).
Note that this will clean up the dist folder before generating the packages, so you will only have the right packages there.

```shell script
-./breeze --backports prepare-provider-packages --package-format both --version-suffix-for-pypi rc1
+rm -rf ${AIRFLOW_REPO_ROOT}/dist/*
+
+./breeze prepare-provider-packages --version-suffix-for-pypi rc1 --package-format both
```

if you only build a few packages, run:

```shell script
-./breeze --backports prepare-provider-packages --package-format both --version-suffix-for-pypi rc1 PACKAGE PACKAGE ....
+./breeze prepare-provider-packages --version-suffix-for-pypi rc1 --package-format both \
+    PACKAGE PACKAGE ....
```

* Verify the artifacts that would be uploaded:

```shell script
-twine check dist/*
+twine check ${AIRFLOW_REPO_ROOT}/dist/*
```

* Upload the package to PyPi's test environment:

```shell script
-twine upload -r pypitest dist/*
+twine upload -r pypitest ${AIRFLOW_REPO_ROOT}/dist/*
```

* Verify that the test packages look good by downloading them and installing them into a virtual environment.

@@ -229,26 +216,125 @@ Twine prints the package links as output - separately for each package.

* Upload the package to PyPi's production environment:

```shell script
-twine upload -r pypi dist/*
+twine upload -r pypi ${AIRFLOW_REPO_ROOT}/dist/*
```

-* Copy the list of links to the uploaded packages - they will be useful in preparing VOTE email.
-
* Again, confirm that the packages are available under the links printed.

-## Prepare voting email for Backport Providers release candidate
-
-Make sure the packages are in https://dist.apache.org/repos/dist/dev/airflow/backport-providers/
+## Add tags in git
+
+Assuming that your remote for the apache repository is called `apache`, you should now
+set tags for the providers in the repo.
+
+```shell script
+./dev/provider_packages/tag_providers.sh
+```
+
+## Prepare documentation
+
+Documentation is an essential part of the product and should be made available to users.
+In our case, documentation for the released versions is published in a separate repository -
+[`apache/airflow-site`](https://github.com/apache/airflow-site), but the documentation source code
+and build tools are available in the `apache/airflow` repository, so you have to coordinate between
+the two repositories to be able to build the documentation.
+
+Documentation for providers can be found in the `/docs/apache-airflow-providers` directory
+and the `/docs/apache-airflow-providers-*/` directory. The first directory contains the package contents
+lists and should be updated every time a new version of provider packages is released.
+
+- First, clone the airflow-site repository and set the environment variable ``AIRFLOW_SITE_DIRECTORY``.
+
+```shell script
+git clone https://github.com/apache/airflow-site.git airflow-site
+cd airflow-site
+export AIRFLOW_SITE_DIRECTORY="$(pwd)"
+```
+
+- Then you can go to the directory and build the necessary documentation packages
+
+```shell script
+cd "${AIRFLOW_REPO_ROOT}"
+./breeze build-docs -- \
+  --for-production \
+  --package-filter apache-airflow-providers \
+  --package-filter 'apache-airflow-providers-*'
+```
+
+for all providers, or if you have just a few providers:
+
+```shell script
+cd "${AIRFLOW_REPO_ROOT}"
+./breeze build-docs -- \
+  --for-production \
+  --package-filter apache-airflow-providers \
+  --package-filter 'apache-airflow-providers-PACKAGE1' \
+  --package-filter 'apache-airflow-providers-PACKAGE2' \
+  ...
+```
+
+If you have the providers as a list of provider ids, because you just released them, you can build them with:
+
+```shell script
+./dev/provider_packages/build_provider_documentation.sh amazon apache.beam google ....
+```
+
+- Now you can preview the documentation.
+
+```shell script
+./docs/start_doc_server.sh
+```
+
+- Copy the documentation to the ``airflow-site`` repository
+
+**NOTE** In order to publish the documentation you need to activate a virtualenv where you installed
+apache-airflow with the doc extra:
+
+* `pip install apache-airflow[doc]`
+
+All providers:
+
+```shell script
+./docs/publish_docs.py \
+    --package-filter apache-airflow-providers \
+    --package-filter 'apache-airflow-providers-*'
+
+cd "${AIRFLOW_SITE_DIRECTORY}"
+```
+
+If you have the providers as a list of provider ids, because you just released them, you can publish them with:
+
+```shell script
+./dev/provider_packages/publish_provider_documentation.sh amazon apache.beam google ....
+```
+
+
+- If you publish a new package, you must add it to
+  [the docs index](https://github.com/apache/airflow-site/blob/master/landing-pages/site/content/en/docs/_index.md):
+
+- Create the commit and push changes.
+
+```shell script
+branch="add-documentation-$(date "+%Y-%m-%d%n")"
+git checkout -b "${branch}"
+git add .
+git commit -m "Add documentation for packages - $(date "+%Y-%m-%d%n")"
+git push --set-upstream origin "${branch}"
+```
+
+## Prepare voting email for Providers release candidate
+
+Make sure the packages are in https://dist.apache.org/repos/dist/dev/airflow/providers/

Send out a vote to the dev@airflow.apache.org mailing list. Here you can prepare text of the
-email using the ${VERSION} variable you already set in the command line.
+email.

subject:

```shell script
cat <<EOF

+*apache-airflow-providers-<PROVIDER>-*-bin.tar.gz* are the binary
+ Python "sdist" release - they are also official "sources" for the provider packages.
+
+*apache_airflow_providers_<PROVIDER>-*.whl are the binary
+ Python "wheel" release.

-*apache-airflow-backport-providers-${VERSION}-source.tar.gz* is a source release that comes
-  with INSTALL instructions.
+The test procedure for PMC members who would like to test the RC candidates is described in
+https://github.com/apache/airflow/blob/master/dev/README_RELEASE_PROVIDER_PACKAGES.md#verify-the-release-by-pmc-members

-*apache-airflow-backport-providers-<PROVIDER>-${VERSION}-bin.tar.gz* are the binary
-  Python "sdist" release.
+and for Contributors:
+
-The test procedure for PMCs and Contributors who would like to test the RC candidates are described in
-https://github.com/apache/airflow/blob/master/dev/README.md#vote-and-verify-the-backport-providers-release-candidate
+https://github.com/apache/airflow/blob/master/dev/README_RELEASE_PROVIDER_PACKAGES.md#verify-by-contributors

Public keys are available at:

@@ -287,12 +377,19 @@ Please vote accordingly:

Only votes from PMC members are binding, but members of the community are encouraged to test
the release and vote with "(non-binding)".

-Please note that the version number excludes the 'rcX' string, so it's now
-simply ${VERSION%rc?}. This will allow us to rename the artifact without modifying
+Please note that the version number excludes the 'rcX' string.
+This will allow us to rename the artifact without modifying
the artifact checksums when we actually release.

-Each of the packages contains detailed changelog. Here is the list of links to
-the released packages and changelogs:
+
+Each of the packages contains a link to the detailed changelog.
The changelogs are moved to the official airflow documentation:
+https://github.com/apache/airflow-site/
+
+
+
+
+Note that the links to documentation from PyPI packages are not working until we merge
+the changes to the airflow site after releasing the packages officially.

@@ -302,7 +399,7 @@ Cheers,

EOF
```

-Due to the nature of backport packages, not all packages have to be released as convenience
+Due to the nature of provider packages, not all packages have to be released as convenience
packages in the final release. During the voting process
the voting PMCs might decide to exclude certain packages from the release if some critical
problems have been found in some packages.

@@ -313,8 +410,8 @@ Please modify the message above accordingly to clearly exclude those packages.

### SVN check

-The files should be present in the sub-folder of
-[Airflow dist](https://dist.apache.org/repos/dist/dev/airflow/backport-providers/)
+The files should be present in
+[Airflow dist](https://dist.apache.org/repos/dist/dev/airflow/providers/)

The following files should be present (9 files):

@@ -334,19 +431,28 @@ Or update it if you already checked it out:

svn update .
```

+Optionally you can use the `check_files.py` script to verify that all expected files are
+present in SVN. This script may also help with verifying the installation of the packages.
+
+```shell script
+python check_files.py -v {VERSION} -t providers -p {PATH_TO_SVN}
+```
+
### Licences check

This can be done with the Apache RAT tool.

-* Download the latest jar from https://creadur.apache.org/rat/download_rat.cgi (unpack the sources,
+* Download the latest jar from https://creadur.apache.org/rat/download_rat.cgi (unpack the binary,
  the jar is inside)
-* Unpack the -source.tar.gz to a folder
+* Unpack the binary (`-bin.tar.gz`) to a folder
* Enter the folder and run the check (point to the place where you extracted the .jar)

```shell script
java -jar ../../apache-rat-0.13/apache-rat-0.13.jar -E .rat-excludes -d .
```

+where `.rat-excludes` is the file in the root of the Airflow source code.
+
### Signature check

Make sure you have imported the key of the person who signed the packages into your GPG. You can find the valid keys in

@@ -433,9 +539,8 @@ done

You should get output similar to:

```
-Checking apache-airflow-1.10.12rc4-bin.tar.gz.sha512
-Checking apache_airflow-1.10.12rc4-py2.py3-none-any.whl.sha512
-Checking apache-airflow-1.10.12rc4-source.tar.gz.sha512
+Checking apache-airflow-providers-google-1.0.0rc1-bin.tar.gz.sha512
+Checking apache_airflow-providers-google-1.0.0rc1-py3-none-any.whl.sha512
```

## Verify by Contributors

@@ -443,19 +548,18 @@

This can be done (and we encourage it) by any of the Contributors. In fact, it's best if the
actual users of Apache Airflow test it in their own staging/test installations. Each release candidate
is available on PyPI apart from SVN packages, so everyone should be able to install
-the release candidate version of Airflow via simply (<VERSION> is 1.10.12 for example, and <X> is
-release candidate number 1,2,3,....).
+the release candidate version.

You can use any of the installation methods you prefer (you can even install it via the binary wheels
downloaded from the SVN).

### Installing in your local virtualenv

-You have to make sure you have Airflow 1.10.* installed in your PIP virtualenv
+You have to make sure you have Airflow 2.* installed in your PIP virtualenv
(the version you want to install providers with).
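For example, a throwaway environment for this verification might be prepared like this (a sketch
only - the Airflow 2.0.0 pin is just an illustrative version):

```shell script
# Create a clean virtualenv with an Airflow 2.* base installation
python3 -m venv /tmp/provider-rc-venv
source /tmp/provider-rc-venv/bin/activate
pip install "apache-airflow==2.0.0"
```

Then install the release candidate package into it: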
```shell script
-pip install apache-airflow-backport-providers-<provider>==<VERSION>rc<X>
+pip install apache-airflow-providers-<provider>==<VERSION>rc<X>
```

### Installing with Breeze

@@ -470,34 +574,16 @@

First copy all the provider packages .whl files to the `dist` folder.

    --python 3.7 --backend postgres --install-packages-from-dist
```

-For 1.10 releases you can also use the `--no-rbac-ui` flag to disable the RBAC UI of Airflow:
-
-```shell script
-./breeze start-airflow --install-airflow-version <VERSION>rc<X> \
-    --python 3.7 --backend postgres --install-packages-from-dist --no-rbac-ui
-```
-
### Building your own docker image

-If you prefer to build your own image, you can also use the official image and PyPI packages to test
-backport packages. This is especially helpful when you want to test integrations, but you need to install
-additional tools. Below is an example Dockerfile, which installs backport providers for Google and
-additional third-party tools:
+If you prefer to build your own image, you can also use the official image and PyPI packages to test
+provider packages. This is especially helpful when you want to test integrations, but you need to install
+additional tools. Below is an example Dockerfile, which installs providers for Google:

```dockerfile
-FROM apache/airflow:1.10.12
-
-RUN pip install --user apache-airflow-backport-providers-google==2020.10.5.rc1
+FROM apache/airflow:2.0.0

-RUN curl https://sdk.cloud.google.com | bash \
-    && echo "source /home/airflow/google-cloud-sdk/path.bash.inc" >> /home/airflow/.bashrc \
-    && echo "source /home/airflow/google-cloud-sdk/completion.bash.inc" >> /home/airflow/.bashrc
-
-USER 0
-RUN KUBECTL_VERSION="$(curl -s https://storage.googleapis.com/kubernetes-release/release/stable.txt)" \
-    && KUBECTL_URL="https://storage.googleapis.com/kubernetes-release/release/${KUBECTL_VERSION}/bin/linux/amd64/kubectl" \
-    && curl -L "${KUBECTL_URL}" --output /usr/local/bin/kubectl \
-    && chmod +x /usr/local/bin/kubectl
+RUN pip install --upgrade --user apache-airflow-providers-google==2.0.0.rc1

USER ${AIRFLOW_UID}
```

@@ -511,7 +597,6 @@ docker run -ti \
  -v "$PWD/data:/opt/airflow/" \
  -v "$PWD/keys/:/keys/" \
  -p 8080:8080 \
-  -e GOOGLE_APPLICATION_CREDENTIALS=/keys/sa.json \
  -e AIRFLOW__CORE__LOAD_EXAMPLES=True \
  my-airflow bash
```

@@ -521,50 +606,44 @@ docker run -ti \

Once you install and run Airflow, you can perform any verification you see as necessary to check
that Airflow works as you expect.

-## Publish the final releases of backport packages

-### Summarize the voting for the Backport Providers Release
+# Publish release
+
+## Summarize the voting for the Apache Airflow release

Once the vote has been passed, you will need to send a result vote to dev@airflow.apache.org:

Subject:

-```shell script
-cat <@%3Cdev.airflow.apache.org%3E
+https://lists.apache.org/thread.html/736404ca3d2b2143b296d0910630b9bd0f8b56a0c54e3a05f4c8b5fe@%3Cdev.airflow.apache.org%3E

-I'll continue with the release process and the release announcement will follow shortly.
+I'll continue with the release process, and the release announcement will follow shortly.

Cheers,
-
+
+```

-EOF
-```

-### Publish release to SVN
+## Publish release to SVN

The best way of doing this is to svn cp between the two repos (this avoids having to upload the binaries
again, and gives a clearer history in the svn commit logs).
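As an illustration only (the scripted procedure follows below), promoting a single artifact with a
direct URL-to-URL copy would look like this - the file name is hypothetical:

```shell script
# Copy one signed artifact from the dev area to the release area;
# the server records it as a cheap copy with full history
svn cp -m "Release Airflow Providers" \
  https://dist.apache.org/repos/dist/dev/airflow/providers/apache-airflow-providers-google-1.0.0rc1-bin.tar.gz \
  https://dist.apache.org/repos/dist/release/airflow/providers/apache-airflow-providers-google-1.0.0-bin.tar.gz
```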
@@ -573,16 +652,12 @@ We also need to archive older releases before copying the new ones
[Release policy](http://www.apache.org/legal/release-policy.html#when-to-archive)

```shell script
-# Set the variables
-export VERSION_RC=2020.5.20rc2
-export VERSION=${VERSION_RC/rc?/}
-
# Set AIRFLOW_REPO_ROOT to the path of your git repo
export AIRFLOW_REPO_ROOT=$(pwd)

# Go to the directory where you have checked out the dev svn release
# And go to the sub-folder with RC candidates
-cd "/backport-providers/${VERSION_RC}"
+cd "/providers/"
export SOURCE_DIR=$(pwd)

# Go to the folder where you have checked out the release repo
@@ -592,16 +667,16 @@ svn checkout https://dist.apache.org/repos/dist/release/airflow airflow-release
# Update to latest version
svn update

-# Create backport-providers folder if it does not exist
+# Create providers folder if it does not exist
# All latest releases are kept in this one folder without version sub-folder
-mkdir -pv backport-providers
-cd backport-providers
+mkdir -pv providers
+cd providers

# Move the artifacts to svn folder & remove the rc postfix
-for file in ${SOURCE_DIR}/*${VERSION_RC}*
+for file in ${SOURCE_DIR}/*
do
-  base_file=$(basename ${file})
-  svn cp "${file}" "${base_file/${VERSION_RC}/${VERSION}}"
+  base_file=$(basename ${file})
+  # strip the rcN suffix from the file name when promoting to the final release
+  svn mv "${file}" "${base_file//rc[0-9]/}"
done


@@ -621,321 +696,75 @@ python ${AIRFLOW_REPO_ROOT}/dev/provider_packages/remove_old_releases.py \

# Commit to SVN
-svn commit -m "Release Airflow Backport Providers ${VERSION} from ${VERSION_RC}"
+svn commit -m "Release Airflow Providers on $(date)"
```

Verify that the packages appear in
-[backport-providers](https://dist.apache.org/repos/dist/release/airflow/backport-providers)
-
-### Publish the final version convenience package to PyPI
-
-Checkout the RC Version:
-
-```shell script
-git checkout backport-providers-${VERSION_RC}
-```
-
-Tag and push the final version (providing that your apache remote is named 'apache'):
-
-```shell script
-git tag backport-providers-${VERSION}
-git push apache backport-providers-${VERSION}
-```
-
-In order to publish to PyPI you just need to build and release packages.
-
-* Generate the packages.
+[providers](https://dist.apache.org/repos/dist/release/airflow/providers)

-```shell script
-./breeze --backports prepare-provider-packages --package-format both
-```
-
-if you ony build few packages, run:
-
-```shell script
-./breeze --backports prepare-provider-packages --package-format both ...
-```
-
-In case you decided to remove some of the packages. remove them from dist folder now:
-
-```shell script
-ls dist/**
-rm dist/**
-```
-
-
-* Verify the artifacts that would be uploaded:
-
-```shell script
-twine check dist/*
-```
-
-* Upload the package to PyPi's test environment:
-
-```shell script
-twine upload -r pypitest dist/*
-```
-
-* Verify that the test packages look good by downloading it and installing them into a virtual environment.
-Twine prints the package links as output - separately for each package.
- -* Upload the package to PyPi's production environment: - -```shell script -twine upload -r pypi dist/* -``` - -### Notify developers of release - -- Notify users@airflow.apache.org (cc'ing dev@airflow.apache.org and announce@apache.org) that -the artifacts have been published: - -Subject: - -```shell script -cat < - - -Cheers, - -EOF -``` - -## Update Announcements page - -Update "Announcements" page at the [Official Airflow website](https://airflow.apache.org/announcements/) - ------------------------------------------------------------------------------------------------------------- - -# Prepare Regular Providers (Alphas/Betas) - -## Generate release notes - -Prepare release notes for all the packages you plan to release. Note that for now version number is -hard-coded to 0.0.1 for all packages. Later on we are going to update the versions according -to SEMVER versioning. - -Details about maintaining the SEMVER version are going to be discussed and implemented in -[the related issue](https://github.com/apache/airflow/issues/11425) - - -```shell script -./breeze prepare-provider-readme [packages] -``` - -You can iterate and re-generate the same readme content as many times as you want. -Generated readme files should be eventually committed to the repository. - -## Build regular provider packages for SVN apache upload - -There is a slightly different procedure if you build pre-release (alpha/beta) packages and the -release candidates. For the Alpha artifacts there is no voting and signature/checksum check, so -we do not need to care about this part. For release candidates - those packages might get promoted -to "final" packages by just renaming the files, so internally they should keep the final version -number without the rc suffix, even if they are rc1/rc2/... candidates. - -They also need to be signed and have checksum files. You can generate the checksum/signature files by running -the "dev/sign.sh" script (assuming you have the right PGP key set-up for signing). The script -generates corresponding .asc and .sha512 files for each file to sign. - -## Build and sign the source and convenience packages - -Currently, we are releasing alpha provider packages together with the main sources of Airflow. In the future -we are going to add procedure to release the sources of released provider packages separately. -Details are in [the related issue](https://github.com/apache/airflow/issues/11425) - -For alpha/beta releases you need to specify both - svn and pyp i - suffixes, and they have to match. This is -verified by the breeze script. Note that the script will clean up dist folder before generating the -packages, so it will only contain the packages you intended to build. - -* Pre-release packages: - -```shell script -export VERSION=0.0.1alpha1 - -./breeze prepare-provider-packages --package-format both --version-suffix-for-svn a1 --version-suffix-for-pypi a1 -``` - -if you ony build few packages, run: - -```shell script -./breeze prepare-provider-packages --package-format both --version-suffix-for-svn a1 --version-suffix-for-pypi a1 \ - PACKAGE PACKAGE .... -``` - -* Release candidate packages: - -```shell script -export VERSION=0.0.1alpha1 - -./breeze prepare-provider-packages --package-format both --version-suffix-for-svn rc1 -``` - -if you ony build few packages, run: - -```shell script -./breeze prepare-provider-packages --package-format both --version-suffix-for-svn rc1 PACKAGE PACKAGE .... 
-```
-
-* Sign all your packages
-
-```shell script
-pushd dist
-../dev/sign.sh *
-popd
-```
-
-## Commit the source packages to Apache SVN repo
-
-* Push the artifacts to ASF dev dist repo
-
-```shell script
-# First clone the repo if you do not have it
-svn checkout https://dist.apache.org/repos/dist/dev/airflow airflow-dev
-
-# update the repo in case you have it already
-cd airflow-dev
-svn update
-
-# Create a new folder for the release.
-cd airflow-dev/providers
-svn mkdir ${VERSION}
-
-# Move the artifacts to svn folder
-mv ${AIRFLOW_REPO_ROOT}/dist/* ${VERSION}/
-
-# Add and commit
-svn add ${VERSION}/*
-svn commit -m "Add artifacts for Airflow Providers ${VERSION}"
-
-cd ${AIRFLOW_REPO_ROOT}
-```
-
-Verify that the files are available at
-[backport-providers](https://dist.apache.org/repos/dist/dev/airflow/backport-providers/)

## Publish the Regular convenience package to PyPI

-In case of pre-release versions you build the same packages for both PyPI and SVN so you can simply use
-packages generated in the previous step, and you can skip the "prepare" step below.
+* Check out the RC version tag for the released RC (there is a batch of providers - any one of them is enough):

-In order to publish release candidate to PyPI you just need to build and release packages.
-The packages should however contain the rcN suffix in the version file name but not internally in the package,
-so you need to use `--version-suffix-for-pypi` switch to prepare those packages.
-Note that these are different packages than the ones used for SVN upload
-though they should be generated from the same sources.
+  ```shell script
+  git checkout providers-<PROVIDER>/<VERSION>rc<X>
+  ```

-* Generate the packages with the right RC version (specify the version suffix with PyPI switch). Note that
-this will clean up dist folder before generating the packages, so you will only have the right packages there.
+* Generate the packages with the final version. Note that
+  this will clean up the dist folder before generating the packages, so you will only have the right packages there.

```shell script
-./breeze prepare-provider-packages --package-format both --version-suffix-for-pypi a1 --version-suffix-for-SVN a1
+rm -rf ${AIRFLOW_REPO_ROOT}/dist/*
+./breeze prepare-provider-packages --package-format both
```

if you only build a few packages, run:

```shell script
-./breeze prepare-provider-packages --package-format both --version-suffix-for-pypi a1 \
-    PACKAGE PACKAGE ....
+rm -rf ${AIRFLOW_REPO_ROOT}/dist/*
+./breeze prepare-provider-packages --package-format both PACKAGE PACKAGE ....
```

* Verify the artifacts that would be uploaded:

```shell script
-twine check dist/*
+twine check ${AIRFLOW_REPO_ROOT}/dist/*
```

* Upload the package to PyPi's test environment:

```shell script
-twine upload -r pypitest dist/*
+twine upload -r pypitest ${AIRFLOW_REPO_ROOT}/dist/*
```

* Verify that the test packages look good by downloading them and installing them into a virtual environment.
-Twine prints the package links as output - separately for each package.
+  Twine prints the package links as output - separately for each package.

* Upload the package to PyPi's production environment:

```shell script
-twine upload -r pypi dist/*
+twine upload -r pypi ${AIRFLOW_REPO_ROOT}/dist/*
```

* Again, confirm that the packages are available under the links printed.

-## Publish documentation
-
-Documentation is an essential part of the product and should be made available to users.
-In our cases, documentation for the released versions is published in a separate repository -
-[`apache/airflow-site`](https://github.com/apache/airflow-site), but the documentation source code
-and build tools are available in the `apache/airflow` repository, so you have to coordinate between
-the two repositories to be able to build the documentation.
-
-Documentation for providers can be found in the `/docs/apache-airflow-providers` directory
-and the `/docs/apache-airflow-providers-*/` directory. The first directory contains the package contents
-lists and should be updated every time a new version of provider packages is released.
-
-- First, copy the airflow-site repository and set the environment variable ``AIRFLOW_SITE_DIRECTORY``.
-
-  ```shell script
-  git clone https://github.com/apache/airflow-site.git airflow-site
-  cd airflow-site
-  export AIRFLOW_SITE_DIRECTORY="$(pwd)"
-  ```
-
-- Then you can go to the directory and build the necessary documentation packages
-
-  ```shell script
-  cd "${AIRFLOW_REPO_ROOT}"
-  ./breeze build-docs -- \
-    --package-filter apache-airflow-providers \
-    --package-filter apache-airflow-providers-apache-airflow \
-    --package-filter apache-airflow-providers-telegram \
-    --for-production
-  ```
-
-- Now you can preview the documentation.
-
-  ```shell script
-  ./docs/start_doc_server.sh
-  ```

## Publish documentation prepared before

-- Copy the documentation to the ``airflow-site`` repository
+Merge the PR with the documentation that you prepared before. If you removed some of the providers
+from the release, remove their versions from the prepared documentation and update stable.txt with the
+previous version for those providers before merging the PR.

-  ```shell script
-  ./docs/publish_docs.py \
-    --package-filter apache-airflow-providers \
-    --package-filter apache-airflow-providers-apache-airflow \
-    --package-filter apache-airflow-providers-telegram \
-
-  cd "${AIRFLOW_SITE_DIRECTORY}"
-  ```

## Add tags in git

-- If you publish a new package, you must add it to [the docs index](https://github.com/apache/airflow-site/blob/master/landing-pages/site/content/en/docs/_index.md):
+Assuming that your remote for the apache repository is called `apache`, you should now
+set tags for the providers in the repo.

-- Create commit and push changes.
+```shell script +./dev/provider_packages/tag_providers.sh +``` - ```shell script - git commit -m "Add documentation for backport packages - $(date "+%Y-%m-%d%n")" - git push - ``` ## Notify developers of release @@ -946,7 +775,7 @@ Subject: ```shell script cat < diff --git a/dev/import_all_classes.py b/dev/import_all_classes.py old mode 100644 new mode 100755 index da52621e8490c..2a1e688cd2700 --- a/dev/import_all_classes.py +++ b/dev/import_all_classes.py @@ -21,7 +21,9 @@ import sys import traceback from inspect import isclass -from typing import List +from typing import List, Set + +from rich import print def import_all_classes( @@ -45,6 +47,7 @@ def import_all_classes( """ imported_classes = [] tracebacks = [] + printed_packages: Set[str] = set() def mk_prefix(provider_id): return f'{prefix}{provider_id}' @@ -66,15 +69,16 @@ def onerror(_): print(f"Skipping module: {modinfo.name}") continue if print_imports: - print(f"Importing module: {modinfo.name}") + package_to_print = ".".join(modinfo.name.split(".")[:-1]) + if package_to_print not in printed_packages: + printed_packages.add(package_to_print) + print(f"Importing package: {package_to_print}") try: _module = importlib.import_module(modinfo.name) for attribute_name in dir(_module): class_name = modinfo.name + "." + attribute_name attribute = getattr(_module, attribute_name) if isclass(attribute): - if print_imports: - print(f"Imported {class_name}") imported_classes.append(class_name) except Exception: # noqa exception_str = traceback.format_exc() @@ -82,14 +86,14 @@ def onerror(_): if tracebacks: print( """ -ERROR: There were some import errors +[red]ERROR: There were some import errors[/] """, file=sys.stderr, ) for trace in tracebacks: - print("----------------------------------------", file=sys.stderr) + print("[red]----------------------------------------[/]", file=sys.stderr) print(trace, file=sys.stderr) - print("----------------------------------------", file=sys.stderr) + print("[red]----------------------------------------[/]", file=sys.stderr) sys.exit(1) else: return imported_classes @@ -107,7 +111,8 @@ def onerror(_): print() classes = import_all_classes(print_imports=True, print_skips=True, paths=args.path, prefix=args.prefix) if len(classes) == 0: - raise Exception("Something is seriously wrong - no classes imported") + print("[red]Something is seriously wrong - no classes imported[/]") + sys.exit(1) print() - print(f"SUCCESS: All provider packages are importable! Imported {len(classes)} classes.") + print(f"[green]SUCCESS: All provider packages are importable! Imported {len(classes)} classes.[/]") print() diff --git a/dev/provider_packages/BACKPORT_PROVIDER_CHANGES_TEMPLATE.md.jinja2 b/dev/provider_packages/BACKPORT_PROVIDER_CHANGES_TEMPLATE.md.jinja2 deleted file mode 100644 index cf700c5f89e53..0000000000000 --- a/dev/provider_packages/BACKPORT_PROVIDER_CHANGES_TEMPLATE.md.jinja2 +++ /dev/null @@ -1,22 +0,0 @@ -{# - Licensed to the Apache Software Foundation (ASF) under one - or more contributor license agreements. See the NOTICE file - distributed with this work for additional information - regarding copyright ownership. The ASF licenses this file - to you under the Apache License, Version 2.0 (the - "License"); you may not use this file except in compliance - with the License. 
You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, - software distributed under the License is distributed on an - "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - KIND, either express or implied. See the License for the - specific language governing permissions and limitations - under the License. -#} - -### Release {{RELEASE_NO_LEADING_ZEROS}}{{ VERSION_SUFFIX }} - -{{CURRENT_CHANGES_TABLE}} diff --git a/dev/provider_packages/BACKPORT_PROVIDER_CLASSES_TEMPLATE.md.jinja2 b/dev/provider_packages/BACKPORT_PROVIDER_CLASSES_TEMPLATE.md.jinja2 deleted file mode 100644 index e433ab174a4a6..0000000000000 --- a/dev/provider_packages/BACKPORT_PROVIDER_CLASSES_TEMPLATE.md.jinja2 +++ /dev/null @@ -1,46 +0,0 @@ -{# - Licensed to the Apache Software Foundation (ASF) under one - or more contributor license agreements. See the NOTICE file - distributed with this work for additional information - regarding copyright ownership. The ASF licenses this file - to you under the Apache License, Version 2.0 (the - "License"); you may not use this file except in compliance - with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, - software distributed under the License is distributed on an - "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - KIND, either express or implied. See the License for the - specific language governing permissions and limitations - under the License. -#} - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `{{PROVIDER_PACKAGE_ID}}` provider -are in the `{{FULL_PACKAGE_NAME}}` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - -{% for entity_type in ENTITY_TYPES %} -{%- set entity_type_string = entity_type.value %} -{%- set entity_name = ENTITY_NAMES.get(entity_type) %} -{%- set entity_summary = ENTITIES.get(entity_type) %} -{%- if entity_summary.new_entities or entity_summary.moved_entities %} -## {{ entity_name.capitalize() }} - -{% if entity_summary.new_entities %} -### New {{ entity_name.lower() }} - -{{ entity_summary.new_entities_table }} -{% endif %} -{% if entity_summary.moved_entities %} -### Moved {{ entity_name.lower() }} - -{{ entity_summary.moved_entities_table }} -{% endif %} -{% endif %} -{%- endfor %} - -## Releases diff --git a/dev/provider_packages/BACKPORT_PROVIDER_README_TEMPLATE.md.jinja2 b/dev/provider_packages/BACKPORT_PROVIDER_README_TEMPLATE.md.jinja2 deleted file mode 100644 index b3b6a025d1e1b..0000000000000 --- a/dev/provider_packages/BACKPORT_PROVIDER_README_TEMPLATE.md.jinja2 +++ /dev/null @@ -1,93 +0,0 @@ -{# - Licensed to the Apache Software Foundation (ASF) under one - or more contributor license agreements. See the NOTICE file - distributed with this work for additional information - regarding copyright ownership. The ASF licenses this file - to you under the Apache License, Version 2.0 (the - "License"); you may not use this file except in compliance - with the License. 
You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, - software distributed under the License is distributed on an - "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - KIND, either express or implied. See the License for the - specific language governing permissions and limitations - under the License. -#} - -# Package {{ PACKAGE_PIP_NAME }} - -Release: {{ RELEASE_NO_LEADING_ZEROS }}{{ VERSION_SUFFIX }} - -**Table of contents** - -- [Backport package](#backport-package) -- [Installation](#installation) -{%- if PIP_REQUIREMENTS %} -- [PIP requirements](#pip-requirements) -{%- endif %} -{%- if CROSS_PROVIDERS_DEPENDENCIES %} -- [Cross provider package dependencies](#cross-provider-package-dependencies) -{%- endif %} -- [Provider class summary](#provider-classes-summary) -{%- for entity_type in ENTITY_TYPES %} -{%- set entity_name = ENTITY_NAMES.get(entity_type) %} -{%- set entity_type_string = entity_name.replace(" ", "-") %} -{%- set entity_summary = ENTITIES.get(entity_type) %} -{%- if entity_summary.new_entities or entity_summary.moved_entities %} - - [{{ entity_name.capitalize() }}](#{{ entity_type_string.lower() }}) - {%- if entity_summary.new_entities %} - - [New {{ entity_name.lower() }}](#new-{{ entity_type_string.lower() }}) - {%- endif %} - {%- if entity_summary.moved_entities %} - - [Moved {{ entity_name.lower() }}](#moved-{{ entity_type_string.lower() }}) - {%- endif %} -{%- endif %} -{%- endfor %} -- [Releases](#releases) - {%- for release in RELEASES %} - - [Release {{release.release_version_no_leading_zeros}}](#release-{{release.release_version_no_leading_zeros.replace(".","")}}) - {%- endfor %} - -## Backport package - -This is a backport providers package for `{{PROVIDER_PACKAGE_ID}}` provider. All classes for this provider package -are in `{{FULL_PACKAGE_NAME}}` python package. - -**Only Python 3.6+ is supported for this backport package.** - -While Airflow 1.10.* continues to support Python 2.7+ - you need to upgrade python to 3.6+ if you -want to use this backport package. - -{{ ADDITIONAL_INFO }} - -## Installation - -You can install this package on top of an existing airflow 1.10.* installation via -`pip install {{PACKAGE_PIP_NAME}}` - -{%- if PIP_REQUIREMENTS %} - -## PIP requirements - -{{ PIP_REQUIREMENTS_TABLE }} - -{%- endif %} -{%- if CROSS_PROVIDERS_DEPENDENCIES %} - -## Cross provider package dependencies - -Those are dependencies that might be needed in order to use all the features of the package. -You need to install the specified backport providers package in order to use them. - -You can install such cross-provider dependencies when installing from PyPI. For example: - -```bash -pip install {{ PACKAGE_PIP_NAME }}[{{ CROSS_PROVIDERS_DEPENDENCIES[0] }}] -``` - -{{ CROSS_PROVIDERS_DEPENDENCIES_TABLE }} - -{%- endif %} diff --git a/dev/provider_packages/MANIFEST_TEMPLATE.in.jinja2 b/dev/provider_packages/MANIFEST_TEMPLATE.in.jinja2 index 4fd2e90309a06..cb156219dc1cf 100644 --- a/dev/provider_packages/MANIFEST_TEMPLATE.in.jinja2 +++ b/dev/provider_packages/MANIFEST_TEMPLATE.in.jinja2 @@ -16,6 +16,13 @@ # specific language governing permissions and limitations # under the License. +# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE +# OVERWRITTEN WHEN PREPARING PACKAGES. 
+ +# IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE +# `MANIFEST_TEMPLATE.py.jinja2` IN the `provider_packages` DIRECTORY + + {% if PROVIDER_PACKAGE_ID == 'amazon' %} include airflow/providers/amazon/aws/hooks/batch_waiters.json {% elif PROVIDER_PACKAGE_ID == 'cncf.kubernetes' %} diff --git a/dev/provider_packages/PROVIDER_CLASSES_TEMPLATE.md.jinja2 b/dev/provider_packages/PROVIDER_CLASSES_TEMPLATE.md.jinja2 deleted file mode 100644 index e433ab174a4a6..0000000000000 --- a/dev/provider_packages/PROVIDER_CLASSES_TEMPLATE.md.jinja2 +++ /dev/null @@ -1,46 +0,0 @@ -{# - Licensed to the Apache Software Foundation (ASF) under one - or more contributor license agreements. See the NOTICE file - distributed with this work for additional information - regarding copyright ownership. The ASF licenses this file - to you under the Apache License, Version 2.0 (the - "License"); you may not use this file except in compliance - with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, - software distributed under the License is distributed on an - "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - KIND, either express or implied. See the License for the - specific language governing permissions and limitations - under the License. -#} - -# Provider classes summary - -In Airflow 2.0, all operators, transfers, hooks, sensors, secrets for the `{{PROVIDER_PACKAGE_ID}}` provider -are in the `{{FULL_PACKAGE_NAME}}` package. You can read more about the naming conventions used -in [Naming conventions for provider packages](https://github.com/apache/airflow/blob/master/CONTRIBUTING.rst#naming-conventions-for-provider-packages) - -{% for entity_type in ENTITY_TYPES %} -{%- set entity_type_string = entity_type.value %} -{%- set entity_name = ENTITY_NAMES.get(entity_type) %} -{%- set entity_summary = ENTITIES.get(entity_type) %} -{%- if entity_summary.new_entities or entity_summary.moved_entities %} -## {{ entity_name.capitalize() }} - -{% if entity_summary.new_entities %} -### New {{ entity_name.lower() }} - -{{ entity_summary.new_entities_table }} -{% endif %} -{% if entity_summary.moved_entities %} -### Moved {{ entity_name.lower() }} - -{{ entity_summary.moved_entities_table }} -{% endif %} -{% endif %} -{%- endfor %} - -## Releases diff --git a/dev/provider_packages/PROVIDER_COMMITS_TEMPLATE.rst.jinja2 b/dev/provider_packages/PROVIDER_COMMITS_TEMPLATE.rst.jinja2 new file mode 100644 index 0000000000000..8997248cbbdab --- /dev/null +++ b/dev/provider_packages/PROVIDER_COMMITS_TEMPLATE.rst.jinja2 @@ -0,0 +1,55 @@ +{# + Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE + OVERWRITTEN WHEN PREPARING PACKAGES. 
+
+  IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
+  `PROVIDER_COMMITS_TEMPLATE.rst.jinja2` IN the `dev/provider_packages` DIRECTORY
+
+#}
+ .. Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements.  See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership.  The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License.  You may obtain a copy of the License at
+
+ .. http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied.  See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+
+Package {{ PACKAGE_PIP_NAME }}
+------------------------------------------------------
+
+{{ PROVIDER_DESCRIPTION | safe }}
+
+This is the detailed commit list of changes for versions of the provider package: ``{{PROVIDER_PACKAGE_ID}}``.
+For the high-level changelog, see :doc:`package information including changelog <index>`.
+
+{%- if DETAILED_CHANGES_PRESENT %}
+
+{{ DETAILED_CHANGES_RST | safe }}
+{%- endif %}
diff --git a/dev/provider_packages/PROVIDER_INDEX_TEMPLATE.rst.jinja2 b/dev/provider_packages/PROVIDER_INDEX_TEMPLATE.rst.jinja2
new file mode 100644
index 0000000000000..21e379931bba6
--- /dev/null
+++ b/dev/provider_packages/PROVIDER_INDEX_TEMPLATE.rst.jinja2
@@ -0,0 +1,87 @@
+{#
+  Licensed to the Apache Software Foundation (ASF) under one
+  or more contributor license agreements.  See the NOTICE file
+  distributed with this work for additional information
+  regarding copyright ownership.  The ASF licenses this file
+  to you under the Apache License, Version 2.0 (the
+  "License"); you may not use this file except in compliance
+  with the License.  You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing,
+  software distributed under the License is distributed on an
+  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  KIND, either express or implied.  See the License for the
+  specific language governing permissions and limitations
+  under the License.
+
+  NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE
+  OVERWRITTEN WHEN PREPARING PACKAGES.
+
+  IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE
+  `PROVIDER_INDEX_TEMPLATE.rst.jinja2` IN the `dev/provider_packages` DIRECTORY
+
+#}
+
+.. toctree::
+    :maxdepth: 1
+    :caption: Commits
+
+    Detailed list of commits <commits>
+
+
+Package {{ PACKAGE_PIP_NAME }}
+------------------------------------------------------
+
+{{ PROVIDER_DESCRIPTION | safe }}
+
+Release: {{ RELEASE }}{{ VERSION_SUFFIX }}
+
+Provider package
+----------------
+
+This is a provider package for ``{{PROVIDER_PACKAGE_ID}}`` provider. All classes for this provider package
+are in ``{{FULL_PACKAGE_NAME}}`` python package.
+
+Installation
+------------
+
+.. note::
+
+    In November 2020, a new version of pip (20.3) was released with a new, 2020 resolver. This resolver
+    does not yet work with Apache Airflow and might lead to errors in installation - depending on your choice
+    of extras.
In order to install Airflow you need to either downgrade pip to version 20.2.4 + ``pip install --upgrade pip==20.2.4`` or, in case you use Pip 20.3, you need to add option + ``--use-deprecated legacy-resolver`` to your pip install command. + + +You can install this package on top of an existing airflow 2.* installation via +``pip install {{PACKAGE_PIP_NAME}}`` +{%- if PIP_REQUIREMENTS %} + +PIP requirements +---------------- + +{{ PIP_REQUIREMENTS_TABLE_RST | safe}} +{%- endif %} +{%- if CROSS_PROVIDERS_DEPENDENCIES %} + +Cross provider package dependencies +----------------------------------- + +Those are dependencies that might be needed in order to use all the features of the package. +You need to install the specified provider packages in order to use them. + +You can install such cross-provider dependencies when installing from PyPI. For example: + +.. code-block:: bash + + pip install {{ PACKAGE_PIP_NAME }}[{{ CROSS_PROVIDERS_DEPENDENCIES[0] }}] + + +{{ CROSS_PROVIDERS_DEPENDENCIES_TABLE_RST | safe }} + +{%- endif %} + +{{ CHANGELOG | safe }} diff --git a/dev/provider_packages/PROVIDER_README_TEMPLATE.md.jinja2 b/dev/provider_packages/PROVIDER_README_TEMPLATE.md.jinja2 deleted file mode 100644 index fbfaa2957ef5b..0000000000000 --- a/dev/provider_packages/PROVIDER_README_TEMPLATE.md.jinja2 +++ /dev/null @@ -1,96 +0,0 @@ -{# - Licensed to the Apache Software Foundation (ASF) under one - or more contributor license agreements. See the NOTICE file - distributed with this work for additional information - regarding copyright ownership. The ASF licenses this file - to you under the Apache License, Version 2.0 (the - "License"); you may not use this file except in compliance - with the License. You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, - software distributed under the License is distributed on an - "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - KIND, either express or implied. See the License for the - specific language governing permissions and limitations - under the License. 
-#} - -# Package {{ PACKAGE_PIP_NAME }} - -Release: {{ RELEASE_NO_LEADING_ZEROS }}{{ VERSION_SUFFIX }} - -**Table of contents** - -- [Provider package](#provider-package) -- [Installation](#installation) -{%- if PIP_REQUIREMENTS %} -- [PIP requirements](#pip-requirements) -{%- endif %} -{%- if CROSS_PROVIDERS_DEPENDENCIES %} -- [Cross provider package dependencies](#cross-provider-package-dependencies) -{%- endif %} -- [Provider class summary](#provider-classes-summary) -{%- for entity_type in ENTITY_TYPES %} -{%- set entity_name = ENTITY_NAMES.get(entity_type) %} -{%- set entity_type_string = entity_name.replace(" ", "-") %} -{%- set entity_summary = ENTITIES.get(entity_type) %} -{%- if entity_summary.new_entities or entity_summary.moved_entities %} - - [{{ entity_name.capitalize() }}](#{{ entity_type_string.lower() }}) - {%- if entity_summary.new_entities %} - - [New {{ entity_name.lower() }}](#new-{{ entity_type_string.lower() }}) - {%- endif %} - {%- if entity_summary.moved_entities %} - - [Moved {{ entity_name.lower() }}](#moved-{{ entity_type_string.lower() }}) - {%- endif %} -{%- endif %} -{%- endfor %} -- [Releases](#releases) - {%- for release in RELEASES %} - - [Release {{release.release_version_no_leading_zeros}}](#release-{{release.release_version_no_leading_zeros.replace(".","")}}) - {%- endfor %} - -## Provider package - -This is a provider package for `{{PROVIDER_PACKAGE_ID}}` provider. All classes for this provider package -are in `{{FULL_PACKAGE_NAME}}` python package. - -{{ ADDITIONAL_INFO }} - -## Installation - -NOTE! - -On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver -does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice -of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 -`pip install --upgrade pip==20.2.4` or, in case you use Pip 20.3, you need to add option -`--use-deprecated legacy-resolver` to your pip install command. - -You can install this package on top of an existing airflow 2.* installation via -`pip install {{PACKAGE_PIP_NAME}}` - -{%- if PIP_REQUIREMENTS %} - -## PIP requirements - -{{ PIP_REQUIREMENTS_TABLE }} - -{%- endif %} -{%- if CROSS_PROVIDERS_DEPENDENCIES %} - -## Cross provider package dependencies - -Those are dependencies that might be needed in order to use all the features of the package. -You need to install the specified backport providers package in order to use them. - -You can install such cross-provider dependencies when installing from PyPI. For example: - -```bash -pip install {{ PACKAGE_PIP_NAME }}[{{ CROSS_PROVIDERS_DEPENDENCIES[0] }}] -``` - -{{ CROSS_PROVIDERS_DEPENDENCIES_TABLE }} - -{%- endif %} diff --git a/dev/provider_packages/PROVIDER_README_TEMPLATE.rst.jinja2 b/dev/provider_packages/PROVIDER_README_TEMPLATE.rst.jinja2 new file mode 100644 index 0000000000000..9975925481511 --- /dev/null +++ b/dev/provider_packages/PROVIDER_README_TEMPLATE.rst.jinja2 @@ -0,0 +1,83 @@ +{# + Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. 
You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE + OVERWRITTEN WHEN PREPARING PACKAGES. + + IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE + `PROVIDER_INDEX_TEMPLATE.rst.jinja2` IN the `dev/provider_packages` DIRECTORY + +#} + +Package ``{{ PACKAGE_PIP_NAME }}`` + +Release: ``{{ RELEASE }}{{ VERSION_SUFFIX }}`` + + +{{ PROVIDER_DESCRIPTION | safe }} + +Provider package +================ + +This is a provider package for ``{{PROVIDER_PACKAGE_ID}}`` provider. All classes for this provider package +are in ``{{FULL_PACKAGE_NAME}}`` python package. + +You can find package information and changelog for the provider +in the `documentation `_. + + +Installation +============ + +NOTE! + +On November 2020, new version of PIP (20.3) has been released with a new, 2020 resolver. This resolver +does not yet work with Apache Airflow and might lead to errors in installation - depends on your choice +of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 +``pip install --upgrade pip==20.2.4`` or, in case you use Pip 20.3, you need to add option +``--use-deprecated legacy-resolver`` to your pip install command. + +You can install this package on top of an existing airflow 2.* installation via +``pip install {{PACKAGE_PIP_NAME}}`` + +{%- if PIP_REQUIREMENTS %} + +PIP requirements +================ + +{{ PIP_REQUIREMENTS_TABLE_RST | safe }} + +{%- endif %} +{%- if CROSS_PROVIDERS_DEPENDENCIES %} + +Cross provider package dependencies +=================================== + +Those are dependencies that might be needed in order to use all the features of the package. +You need to install the specified provider packages in order to use them. + +You can install such cross-provider dependencies when installing from PyPI. For example: + +.. 
code-block:: bash
+
+    pip install {{ PACKAGE_PIP_NAME }}[{{ CROSS_PROVIDERS_DEPENDENCIES[0] }}]
+
+
+{{ CROSS_PROVIDERS_DEPENDENCIES_TABLE_RST | safe }}
+
+{%- endif %}
diff --git a/dev/provider_packages/README.md b/dev/provider_packages/README.md
index 8ca318611aeb4..7fc3f26db900a 100644
--- a/dev/provider_packages/README.md
+++ b/dev/provider_packages/README.md
@@ -21,111 +21,100 @@
 **Table of Contents**  *generated with [DocToc](https://github.com/thlorenz/doctoc)*
 
-- [Backport packages](#backport-packages)
-- [What the backport packages are](#what-the-backport-packages-are)
-- [Content of the release notes](#content-of-the-release-notes)
+- [Provider packages](#provider-packages)
+- [What the provider packages are](#what-the-provider-packages-are)
+  - [Increasing version number](#increasing-version-number)
+- [Generated release notes](#generated-release-notes)
   - [Generating release notes](#generating-release-notes)
-  - [Preparing backport packages](#preparing-backport-packages)
-- [Testing provider package scripts](#testing-provider-package-scripts)
-  - [Backport packages](#backport-packages-1)
+  - [Preparing provider packages](#preparing-provider-packages)
+- [Testing and debugging provider preparation](#testing-and-debugging-provider-preparation)
+  - [Debugging import check](#debugging-import-check)
+  - [Debugging verifying provider classes](#debugging-verifying-provider-classes)
+  - [Debugging preparing package documentation](#debugging-preparing-package-documentation)
+  - [Debugging preparing setup files](#debugging-preparing-setup-files)
+  - [Debugging preparing the packages](#debugging-preparing-the-packages)
+- [Testing provider packages](#testing-provider-packages)
   - [Regular packages](#regular-packages)
 
-# Backport packages
+# Provider packages
 
-# What the backport packages are
+# What the provider packages are
 
-The Backport Provider packages are packages (per provider) that make it possible to easily use Hooks,
-Operators, Sensors, and Secrets from the 2.0 version of Airflow in the 1.10.* series.
+The Provider packages are separate packages (one package per provider) that implement
+integrations with external services for Airflow in the form of installable Python packages.
 
-The release manager prepares backport packages separately from the main Airflow Release, using
+The Release Manager prepares packages separately from the main Airflow Release, using
 `breeze` commands and accompanying scripts. This document provides an overview of the command line tools
-needed to prepare backport packages.
+needed to prepare the packages.
 
-# Content of the release notes
+## Increasing version number
 
-Each of the backport packages contains Release notes in the form of the README.md file that is
-automatically generated from history of the changes and code of the provider.
+The first thing the release manager has to do is to change the version of the provider to a target
+version. Each provider has a `provider.yaml` file that, among others, stores information
+about provider versions. When you attempt to release a provider you should update that
+information based on the changes for the provider, and its `CHANGELOG.rst`. It might be that
+`CHANGELOG.rst` already contains the right target version. This will be especially true if some
+changes in the provider add new features (then the minor version is increased) or when the changes
+introduce a backwards-incompatible, breaking change in the provider (then the major version is
+incremented).
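+
+The release tooling reads the `versions` list from the provider's `provider.yaml` and treats the
+first entry as the version being released. A minimal illustrative sketch (a hypothetical helper,
+not one of the repository scripts) of checking which version the tooling will pick up:
+
+```python
+# Sketch: read the version the release tooling will use for a provider.
+# Assumes it is run from the Airflow source root; the path layout matches
+# airflow/providers/<provider>/provider.yaml as used by the preparation scripts.
+import yaml
+
+
+def target_release_version(provider_id: str) -> str:
+    path = f"airflow/providers/{provider_id.replace('.', '/')}/provider.yaml"
+    with open(path) as f:
+        provider_yaml = yaml.safe_load(f)
+    # The first entry of the `versions` list is the version being released.
+    return provider_yaml["versions"][0]
+
+
+print(target_release_version("google"))
+```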
+Committers, when approving and merging changes to the providers, should pay attention
+that the `CHANGELOG.rst` is updated whenever anything other than a bugfix is added.
+
+If there are no new features or breaking changes, the release manager should simply increase the
+patch-level version for the provider.
 
-The script generates all the necessary information:
+The new version should be first on the list.
 
-* summary of requirements for each backport package
-* list of dependencies (including extras to install them) when package
-  depends on other providers packages
-* table of new hooks/operators/sensors/protocols/secrets
-* table of moved hooks/operators/sensors/protocols/secrets with the
-  information where they were moved from
-* changelog of all the changes to the provider package. This will be
-  automatically updated with an incremental changelog whenever we decide to
-  release separate packages.
-The script generates two types of files:
+# Generated release notes
+
+Each of the provider packages contains Release notes in the form of the `CHANGELOG.rst` file that is
+automatically generated from the history of the changes and the code of the provider.
+They are stored in the documentation directory. The `README.md` file generated during package
+preparation is not stored anywhere in the repository - it does, however, contain a link to the
+generated changelog.
 
-* BACKPORT_PROVIDERS_CHANGES_YYYY.MM.DD.md which keeps information about changes (commits) in a particular
-  version of the provider package. The file for latest release gets updated when you iterate with
-  the same new date/version, but it never changes automatically for already released packages.
-  This way - just before the final release, you can manually correct the changes file if you
-  want to remove some changes from the file.
+The `README.rst` file contains the following information:
 
-* README.md which is regenerated every time you run the script (unless there are no changes since
-  the last time you generated the release notes
+* summary of requirements for each provider package
+* list of dependencies (including extras to install them) when the package depends on other provider packages
+* link to the detailed changelog: the `index.rst` file with the generated documentation for the package.
 
-Note that our CI system builds the release notes for backport packages automatically with every build and
-current date - this way you might be sure the automated generation of the release notes continues to
-work. You can also preview the generated readme files (by downloading artifacts from GitHub Actions).
-The script does not modify the README and CHANGES files if there is no change in the repo for that provider.
+The `index.rst` stored in the `docs/apache-airflow-providers-` folder contains:
+
+* Contents - this is manually maintained there
+* the general package information (the same for all packages, with only the name changed)
+* summary of requirements for each provider package
+* list of dependencies (including extras to install them) when the package depends on other provider packages
+* Content of the high-level CHANGELOG.rst file that is stored in the provider folder next to
+  the ``provider.yaml`` file.
+* Detailed list of changes generated automatically for all versions of the provider
 
 ## Generating release notes
 
 When you want to prepare release notes for a package, you need to run:
 
 ```
-./breeze prepare-provider-readme [YYYY.MM.DD] ...
+./breeze prepare-provider-documentation ...
 ```
-
-* YYYY.MM.DD - is the CALVER version of the package to prepare.
Note that this date cannot be earlier - than the already released version (the script will fail if it will be). It can be set in the future - anticipating the future release date. If you do not specify date, the date will be taken from the last - generated readme - the last generated CHANGES file will be updated. - * is usually directory in the `airflow/providers` folder (for example `google` but in several cases, it might be one level deeper separated with `.` for example `apache.hive` -You can run the script with multiple package names if you want to prepare several packages at the same time. -Before you specify a new version, the last released version is update in case you have any bug fixes -merged in the master recently, they will be automatically taken into account. - -Typically, the first time you run release before release, you run it with target release.date: - -``` -./breeze prepare-provider-readme 2020.05.20 google -``` - -Then while you iterate with merges and release candidates you update the release date without providing -the date (to update the existing release notes) - -``` -./breeze prepare-provider-readme google -``` +The index.rst is updated automatically in the `docs/apache-airflow-providers-` folder +You can run the script with multiple package names if you want to prepare several packages at the same time. -Whenever you are satisfied with the release notes generated you can commit generated changes/new files +As soon as you are satisfied with the release notes generated you can commit generated changes/new files to the repository. -## Preparing backport packages - -As part of preparation to Airflow 2.0 we decided to prepare backport of providers package that will be -possible to install in the Airflow 1.10.*, Python 3.6+ environment. -Some of those packages will be soon (after testing) officially released via PyPi, but you can build and -prepare such packages on your own easily. +## Preparing provider packages -You build those packages in the breeze environment, so you do not have to worry about common environment. +You build the packages in the breeze environment, so you do not have to worry about common environment. Note that readme release notes have to be generated first, so that the package preparation script reads -the latest version from the latest version of release notes prepared. +the `provider.yaml`. * The provider package ids PACKAGE_ID are subdirectories in the ``providers`` directory. Sometimes they are one level deeper (`apache/hive` folder for example, in which case PACKAGE_ID uses "." to separate @@ -139,31 +128,31 @@ providers by running: The examples below show how you can build selected packages, but you can also build all packages by omitting the package ids altogether. -By default, you build only wheel packages, but you can use `--package-format both` to generate -both wheel and sdist packages, or `--package-format sdist` to only generate sdist packages. +By default, you build `both` packages, but you can use `--package-format wheel` to generate +only wheel package, or `--package-format sdist` to only generate sdist package. * To build the release candidate packages for SVN Apache upload run the following command: ```bash -./breeze prepare-provider-packages --package-format both --version-suffix-for-svn=rc1 [PACKAGE_ID] ... +./breeze prepare-provider-packages --version-suffix-for-svn=rc1 [PACKAGE_ID] ... ``` for example: ```bash -./breeze prepare-provider-packages --package-format both --version-suffix-for-svn=rc1 http ... 
+./breeze prepare-provider-packages --version-suffix-for-svn=rc1 http ...
 ```
 
 * To build the release candidate packages for PyPI upload run the following command:
 
 ```bash
-./breeze prepare-provider-packages --package-format both --version-suffix-for-pypi=rc1 [PACKAGE_ID] ...
+./breeze prepare-provider-packages --version-suffix-for-pypi=rc1 [PACKAGE_ID] ...
 ```
 
 for example:
 
 ```bash
-./breeze prepare-provider-packages --package-format both --version-suffix-for-pypi=rc1 http ...
+./breeze prepare-provider-packages --version-suffix-for-pypi=rc1 http ...
 ```
 
@@ -178,14 +167,14 @@ Where PACKAGE_FORMAT might be one of : `wheel`, `sdist`, `both` (`wheel` is the
 for example:
 
 ```bash
-./breeze prepare-provider-packages --package-format both http ...
+./breeze prepare-provider-packages http ...
 ```
 
 * For each package, this creates a wheel package and source distribution package in your `dist` folder with
   names following the patterns:
 
-  * `apache_airflow_backport_providers__YYYY.[M]M.[D]D[suffix]-py3-none-any.whl`
-  * `apache-airflow-backport-providers--YYYY.[M]M.[D]D[suffix].tar.gz`
+  * `apache_airflow_providers__YYYY.[M]M.[D]D[suffix]-py3-none-any.whl`
+  * `apache-airflow-providers--YYYY.[M]M.[D]D[suffix].tar.gz`
 
 Note! Even if we always use the two-digit month and day when generating the readme files,
 the version in PyPI does not contain the leading 0s in version name - therefore the artifacts generated
@@ -193,67 +182,256 @@ also do not container the leading 0s.
 
 * You can install the .whl packages with `pip install `
 
+You can add the `--verbose` flag if you want to see the detailed commands executed by the script.
 
-# Testing provider package scripts
+# Testing and debugging provider preparation
 
-The backport packages importing and tests execute within the "CI" environment of Airflow -the
-same image that is used by Breeze. They however require special mounts (no
-sources of Airflow mounted to it) and possibility to install all extras and packages in order to test
-importability of all the packages. It is rather simple but requires some semi-automated process:
+The provider preparation is done using the `Breeze` development environment and the CI image. This way we have
+a common environment for package preparation, and we can easily verify that the provider packages are OK and can
+be installed for released versions of Airflow (including the 2.0.0 version).
 
-## Backport packages
+The same scripts and environment are run in our [CI Workflow](../../CI.rst) - the packages are prepared,
+installed and tested using the same CI image. The tests are performed via the Production image, also
+in the CI workflow. Our production images are built using Airflow and Provider packages prepared on the
+CI so that they are as close to what users will be using when they are installing from PyPI. Our scripts
+prepare `wheel` and `sdist` packages for both Airflow and provider packages and install them while
+building the images. This is very helpful for testing new providers that do not yet have a PyPI
+package released, and it also allows checking whether providers' authors introduced breaking changes.
 
-1. Prepare backport packages
+All classes from all providers must be imported - otherwise our CI will fail. Also, verification
+of the image is performed: the expected providers must be installed (for the production image),
+the providers must be discoverable, and `pip check` with all the dependencies has to succeed.
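+
+To get a feel for what "discoverable" means here, you can list the provider packages visible from a
+Python shell inside the image. This is only an illustrative sketch (not one of the repository
+scripts); it assumes the installed packages expose the standard `apache_airflow_provider` entry point:
+
+```python
+# Sketch: enumerate installed provider packages via their entry points.
+import pkg_resources
+
+for entry_point in pkg_resources.iter_entry_points('apache_airflow_provider'):
+    # The entry point is the generated get_provider_info() function.
+    provider_info = entry_point.load()()
+    print(entry_point.dist, '->', provider_info.get('package-name'))
+```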
+You might want to occasionally modify the preparation scripts for providers. They are all present in
+the `dev/provider_packages` folder. The `Breeze` commands above perform the sequence
+of those steps automatically, but you can manually run the scripts as follows to debug them:
+
+The commands are best executed in the Breeze environment, as it has all the dependencies installed;
+the examples below assume that. However, for development you might run them in your local development
+environment, as that makes it easier to debug. Just make sure you install your development environment
+with the 'devel_all' extra (make sure to use the right Python version).
+
+Note that it is best to use `INSTALL_PROVIDERS_FROM_SOURCES` set to `true`, to make sure
+that any newly added providers are not installed as packages (in case they are not yet available in PyPI).
 
 ```shell script
-./breeze --backports prepare-provider-packages --package-format both
+INSTALL_PROVIDERS_FROM_SOURCES="true" pip install -e ".[devel_all]" \
+    --constraint https://raw.githubusercontent.com/apache/airflow/constraints-master/constraints-3.6.txt
 ```
 
-This prepares all backport packages in the "dist" folder
+Note that you might need to add some extra dependencies to your system to install "devel_all" - many
+dependencies are needed to make a clean install - the `Breeze` environment has all the
+dependencies installed in case you have problems with setting up your local virtualenv.
 
-2. Enter the container:
+You can also use `breeze` to prepare your virtualenv (it will print extra information if some
+dependencies are missing/installation fails, and it will also reset your SQLite test db in
+the `${HOME}/airflow` directory):
 
 ```shell script
-export INSTALL_AIRFLOW_VERSION=1.10.12
-export BACKPORT_PACKAGES="true"
+./breeze initialize-local-virtualenv
+```
-./dev/provider_packages/enter_breeze_provider_package_tests.sh
+
+You can find a description of all the commands and more information about the "prepare"
+tool by running it with `--help`:
+
+```shell script
+./dev/provider_packages/prepare_provider_packages.py --help
 ```
 
-(the rest of it is in the container)
+You can see, for example, the list of all provider packages:
+
+```shell script
+./dev/provider_packages/prepare_provider_packages.py list-providers-packages
+```
+
+You can add the `--verbose` flag to the breeze command if you want to see the commands executed.
+
+## Debugging import check
 
-3. \[IN CONTAINER\] Install all remaining dependencies and reinstall airflow 1.10:
+The script verifies that all the providers' classes can be imported.
+
+1) Enter the Breeze environment (needed only if you have no local virtualenv):
 
 ```shell script
-cd /airflow_sources
+./breeze
+```
 
-pip install ".[devel_all]"
+All the rest is in-container in case you use Breeze, but it can be in your local virtualenv if you have
+it installed with the `devel_all` extra.
 
-pip install "apache-airflow==${INSTALL_AIRFLOW_VERSION}"
+2) Install the remaining dependencies. This is needed until we manage to bring in `apache.beam` without
+   conflicting dependencies (requires fixing the Snowflake and Azure providers). It is optional in case you
+   already installed the environment with the `devel_all` extra:
 
-cd
+```shell script
+pip install -e ".[devel_all]"
 ```
 
-4. \[IN CONTAINER\] Install the provider packages from /dist
+3) Run the import check:
 
 ```shell script
-pip install /dist/apache_airflow_backport_providers_*.whl
+./dev/import_all_classes.py --path airflow/providers
 ```
 
-5. \[IN CONTAINER\] Check the installation folder for providers:
+It checks if all classes from provider packages can be imported.
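+
+For a sense of what the import check does, the core of such a walk can be expressed in a few lines.
+This is a simplified sketch of the idea (the real `dev/import_all_classes.py` does more reporting
+and filtering), not a drop-in replacement:
+
+```python
+# Sketch: import every module under a package tree and report failures.
+import importlib
+import pkgutil
+
+import airflow.providers
+
+failures = []
+for module_info in pkgutil.walk_packages(airflow.providers.__path__, prefix="airflow.providers."):
+    try:
+        # Importing a module also imports (and thus smoke-tests) its classes.
+        importlib.import_module(module_info.name)
+    except Exception as e:  # broad except is fine for a smoke test
+        failures.append((module_info.name, e))
+
+print(f"{len(failures)} modules failed to import")
+```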
+
+## Debugging verifying provider classes
+
+The script verifies that all the providers' classes are correctly named.
+
+1) Enter the Breeze environment (needed only if you have no local virtualenv):
 
 ```shell script
-python3 </dev/null
-import airflow.providers;
-path=airflow.providers.__path__
-for p in path._path:
-    print(p)
-EOF
+./breeze
 ```
 
-6. \[IN CONTAINER\] Check if all the providers can be imported
-python3 /opt/airflow/dev/import_all_classes.py --path
+All the rest is in-container in case you use Breeze, but it can be in your local virtualenv if you have
+it installed with the `devel_all` extra.
+
+2) Install the remaining dependencies. This is needed until we manage to bring in `apache.beam` without
+   conflicting dependencies (requires fixing the Snowflake and Azure providers). It is optional in case you
+   already installed the environment with the `devel_all` extra:
+
+```shell script
+pip install -e ".[devel_all]"
+```
+
+3) Run the class name check:
+
+```shell script
+./dev/provider_packages/prepare_provider_packages.py verify-provider-classes
+```
+
+It checks that all provider Operators/Hooks etc. are correctly named.
+
+
+## Debugging preparing package documentation
+
+The script updates the documentation of the provider packages. Note that it uses the airflow git repo and
+pulls the latest tags available in Airflow, so you need to enter Breeze with
+the `--mount-all-local-sources` flag.
+
+1) Enter the Breeze environment (needed only if you have no local virtualenv):
+
+```shell script
+./breeze --mount-all-local-sources
+```
+
+(all the rest is in-container)
+
+2) Install the remaining dependencies. This is needed until we manage to bring in `apache.beam` without
+   conflicting dependencies (requires fixing the Snowflake and Azure providers).
+   This is optional in case you already installed the environment with the `devel_all` extra.
+
+```shell script
+pip install -e ".[devel_all]"
+```
+
+3) Run the documentation update (the version suffix might be empty):
+
+```shell script
+./dev/provider_packages/prepare_provider_packages.py update-package-documentation \
+    --version-suffix \
+
+```
+
+This script will fetch the latest version of airflow from Airflow's repo (it will automatically add
+the `apache-https-for-providers` remote and pull airflow (read-only) from there). There is no need
+to set up any credentials for it.
+
+In case the version being prepared is already tagged in the repo, documentation preparation returns
+immediately and prints a warning.
+
+You can add the `--verbose` flag if you want to see the detailed commands executed by the script.
+
+## Debugging preparing setup files
+
+This script prepares the actual packages.
+
+1) Enter the Breeze environment:
+
+```shell script
+./breeze
+```
+
+(all the rest is in-container)
+
+2) Clean up the artifact directories:
+
+This is needed because setuptools does not clean those files; when generating packages one by one
+without cleanup, artifacts from the previous package might be included in the new one.
+
+```shell script
+rm -rf -- *.egg-info build/
+```
+
+3) Generate the setup.py/setup.cfg/MANIFEST.in/provider_info.py/README files for:
+
+* alpha/beta packages (specify a1,a2,.../b1,b2... suffix)
+* release candidates (specify r1,r2,... suffix) - these are release candidates
+* the official package (to be released in PyPI as the official package)
+
+The version suffix specified here will be appended to the version retrieved from
+`provider.yaml`. Note that this command will fail if the tag denoted by the
+version + suffix already exists. This means that the version was not updated since the
+last time it was generated. In the CI we always add the 'dev' suffix, and we never create
+a TAG for it, so in the CI the setup.py is generated and should never fail.
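+
+The tag check that makes this command fail is essentially a `git rev-parse` of the expected tag:
+the preparation script builds tags of the form `providers-<id>/<version><suffix>`. A rough,
+illustrative equivalent in Python (a sketch, not part of the repository scripts):
+
+```python
+# Sketch: check whether the release tag for a provider version already exists.
+import subprocess
+
+
+def tag_exists(provider_id: str, version: str, suffix: str = "") -> bool:
+    tag = f"providers-{provider_id.replace('.', '-')}/{version}{suffix}"
+    result = subprocess.run(
+        ["git", "rev-parse", tag],
+        stdout=subprocess.DEVNULL,
+        stderr=subprocess.DEVNULL,
+    )
+    # rev-parse succeeds (returncode 0) only if the tag resolves.
+    return result.returncode == 0
+
+
+print(tag_exists("google", "2.0.0"))
+```
+
+The actual command to generate the setup files is: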
+
+```shell script
+./dev/provider_packages/prepare_provider_packages.py generate-setup-files \
+    --version-suffix "" \
+
+```
+
+You can add the `--verbose` flag if you want to see the detailed commands executed by the script.
+
+## Debugging preparing the packages
+
+The script prepares the package after the sources have been copied and the setup files generated.
+Note that it uses the airflow git repo and pulls the latest tags available in Airflow,
+so you need to enter Breeze with
+the `--mount-all-local-sources` flag.
+
+1) Enter the Breeze environment (needed only if you have no local virtualenv):
+
+```shell script
+./breeze --mount-all-local-sources
+```
+
+(all the rest is in-container)
+
+2) Install the remaining dependencies. This is needed until we manage to bring in `apache.beam` without
+   conflicting dependencies (requires fixing the Snowflake and Azure providers).
+   This is optional in case you already installed the environment with the `devel_all` extra.
+
+```shell script
+pip install -e ".[devel_all]"
+```
+
+3) Run the package build (the version suffix might be empty):
+
+```shell script
+./dev/provider_packages/prepare_provider_packages.py build-provider-packages \
+    --version-suffix \
+
+```
+
+In case the version being prepared is already tagged in the repo, the package preparation returns
+immediately and prints an error. You can ignore the error and build the packages even if the tag
+exists, by specifying ``--version-suffix`` (for example ``--version-suffix dev``).
+
+By default, you prepare ``both`` packages, but you can add the ``--package-format`` argument and specify
+``wheel`` or ``sdist`` to build only one of them.
+
+
+# Testing provider packages
+
+Importing and testing the provider packages executes within the "CI" environment of Airflow - the
+same image that is used by Breeze. They however require special mounts (no
+sources of Airflow mounted to it) and the possibility to install all extras and packages in order to test
+if all classes can be imported. It is rather simple but requires a semi-automated process:
 
 ## Regular packages
 
@@ -261,16 +439,15 @@ python3 /opt/airflow/dev/import_all_classes.py --path
-# you will still be required to type in your signing key password
-# or it needs to be available in your keychain
-set -euo pipefail
-
-PROVIDER_ID_PACKAGES_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
-cd "${PROVIDER_ID_PACKAGES_DIR}"/../..
- -function check_version() { - : "${VERSION:?"Please export VERSION variable with the version of source package to prepare"}" -} - -function tag_release() { - echo - echo "Tagging the sources with ${BACKPORT_PREFIX}providers-${VERSION} tag" - echo - - git tag "${BACKPORT_PREFIX}providers-${VERSION}" -} - -function clean_repo() { - ./confirm "Cleaning the repository sources - that might remove some of your unchanged files" - - git clean -fxd -} - - -function prepare_combined_changelog() { - echo - echo "Preparing the changelog" - echo - CHANGELOG_FILE="provider_packages/CHANGELOG.txt" - PATTERN="airflow\/providers\/(.*)\/${BACKPORT_CAPITAL_PREFIX}PROVIDER_CHANGES_.*.md" - echo > "${CHANGELOG_FILE}" - CHANGES_FILES=$(find "airflow/providers/" -name "${BACKPORT_CAPITAL_PREFIX}PROVIDER_CHANGES_*.md" | sort -r) - LAST_PROVIDER_ID="" - for FILE in ${CHANGES_FILES} - do - echo "Adding ${FILE}" - [[ ${FILE} =~ ${PATTERN} ]] - PROVIDER_ID=${BASH_REMATCH[1]//\//.} - { - if [[ ${LAST_PROVIDER_ID} != "${PROVIDER_ID}" ]]; then - echo - echo "Provider: ${BASH_REMATCH[1]//\//.}" - echo - LAST_PROVIDER_ID=${PROVIDER_ID} - else - echo - fi - cat "${FILE}" - echo - } >> "${CHANGELOG_FILE}" - done - - - echo - echo "Changelog prepared in ${CHANGELOG_FILE}" - echo -} - -function prepare_archive(){ - echo - echo "Preparing the archive ${ARCHIVE_FILE_NAME}" - echo - - git archive \ - --format=tar.gz \ - "backport-providers-${VERSION}" \ - "--prefix=apache-airflow-${BACKPORT_PREFIX}providers-${VERSION%rc?}/" \ - -o "${ARCHIVE_FILE_NAME}" - - echo - echo "Prepared the archive ${ARCHIVE_FILE_NAME}" - echo - -} - - -function replace_install_changelog(){ - DIR=$(mktemp -d) - - echo - echo "Replacing INSTALL CHANGELOG.txt in ${ARCHIVE_FILE_NAME} " - echo - tar -f "${ARCHIVE_FILE_NAME}" -xz -C "${DIR}" - - cp "provider_packages/INSTALL" "provider_packages/CHANGELOG.txt" \ - "${DIR}/apache-airflow-${BACKPORT_PREFIX}providers-${VERSION%rc?}/" - - tar -f "${ARCHIVE_FILE_NAME}" -cz -C "${DIR}" \ - "apache-airflow-${BACKPORT_PREFIX}providers-${VERSION%rc?}/" - - echo - echo "Replaced INSTALL CHANGELOG.txt in ${ARCHIVE_FILE_NAME} " - echo - -} - -BACKPORTS="false" -if (( $# > 0 )); then - if [[ "$1" == "--backports" ]]; then - BACKPORTS="true" - else - echo - echo "${COLOR_RED}ERROR: You can run the script with '--backports' flag only ${COLOR_RESET}" - echo - exit 1 - fi -fi -readonly BACKPORTS - -BACKPORT_PREFIX="" -BACKPORT_CAPITAL_PREFIX="" -if [[ ${BACKPORTS} == "true" ]]; then - BACKPORT_PREFIX='backport-' - BACKPORT_CAPITAL_PREFIX="BACKPORT_" -fi - -check_version - -export ARCHIVE_FILE_NAME="apache-airflow-${BACKPORT_PREFIX}providers-${VERSION}-source.tar.gz" - -tag_release -clean_repo -prepare_archive -prepare_combined_changelog -replace_install_changelog diff --git a/dev/provider_packages/enter_breeze_provider_package_tests.sh b/dev/provider_packages/enter_breeze_provider_package_tests.sh index 2359fd4af4504..b67b33f4695b0 100755 --- a/dev/provider_packages/enter_breeze_provider_package_tests.sh +++ b/dev/provider_packages/enter_breeze_provider_package_tests.sh @@ -15,13 +15,13 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -export MOUNT_LOCAL_SOURCES="false" +export MOUNT_SELECTED_LOCAL_SOURCES="false" # shellcheck source=scripts/ci/libraries/_script_init.sh . 
"$(dirname "${BASH_SOURCE[0]}")/../../scripts/ci/libraries/_script_init.sh" function enter_breeze_with_mapped_sources() { - docker run -it "${EXTRA_DOCKER_FLAGS[@]}" \ + docker_v run -it "${EXTRA_DOCKER_FLAGS[@]}" \ -v "${AIRFLOW_SOURCES}/setup.py:/airflow_sources/setup.py:cached" \ -v "${AIRFLOW_SOURCES}/setup.cfg:/airflow_sources/setup.cfg:cached" \ -v "${AIRFLOW_SOURCES}/airflow/__init__.py:/airflow_sources/airflow/__init__.py:cached" \ diff --git a/dev/provider_packages/get_provider_info_TEMPLATE.py.jinja2 b/dev/provider_packages/get_provider_info_TEMPLATE.py.jinja2 index 51644472577f0..b6e50ceeed520 100644 --- a/dev/provider_packages/get_provider_info_TEMPLATE.py.jinja2 +++ b/dev/provider_packages/get_provider_info_TEMPLATE.py.jinja2 @@ -14,5 +14,13 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. + +# NOTE! THIS FILE IS AUTOMATICALLY GENERATED AND WILL BE +# OVERWRITTEN WHEN PREPARING PACKAGES. +# +# IF YOU WANT TO MODIFY IT, YOU SHOULD MODIFY THE TEMPLATE +# `get_provider_info_TEMPLATE.py.jinja2` IN the `provider_packages` DIRECTORY + + def get_provider_info(): return {{ PROVIDER_INFO }} diff --git a/dev/provider_packages/prepare_provider_packages.py b/dev/provider_packages/prepare_provider_packages.py old mode 100644 new mode 100755 index f414cbec4159b..dd8e13fdbc63f --- a/dev/provider_packages/prepare_provider_packages.py +++ b/dev/provider_packages/prepare_provider_packages.py @@ -1,3 +1,4 @@ +#!/usr/bin/env python3 # pylint: disable=wrong-import-order # # Licensed to the Apache Software Foundation (ASF) under one @@ -17,13 +18,14 @@ # specific language governing permissions and limitations # under the License. """Setup.py for the Provider packages of Airflow project.""" -import argparse import collections +import glob import importlib import json import logging import os import re +import shutil import subprocess import sys import tempfile @@ -32,26 +34,65 @@ from copy import deepcopy from datetime import datetime, timedelta from enum import Enum -from os import listdir +from functools import lru_cache from os.path import dirname from shutil import copyfile from typing import Any, Dict, Iterable, List, NamedTuple, Optional, Set, Tuple, Type +import click import jsonpath_ng import jsonschema import yaml from packaging.version import Version from rich import print +from rich.console import Console +from rich.syntax import Syntax -PROVIDER_TEMPLATE_PREFIX = "PROVIDER_" -BACKPORT_PROVIDER_TEMPLATE_PREFIX = "BACKPORT_PROVIDER_" +try: + from yaml import CSafeLoader as SafeLoader +except ImportError: + from yaml import SafeLoader # noqa + +INITIAL_CHANGELOG_CONTENT = """ + + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +1.0.0 +..... 
+ +Initial version of the provider. +""" -BACKPORT_PROVIDER_PREFIX = "backport_provider_" +HTTPS_REMOTE = "apache-https-for-providers" +HEAD_OF_HTTPS_REMOTE = f"{HTTPS_REMOTE}/master" + +PROVIDER_TEMPLATE_PREFIX = "PROVIDER_" MY_DIR_PATH = os.path.dirname(__file__) SOURCE_DIR_PATH = os.path.abspath(os.path.join(MY_DIR_PATH, os.pardir, os.pardir)) AIRFLOW_PATH = os.path.join(SOURCE_DIR_PATH, "airflow") PROVIDERS_PATH = os.path.join(AIRFLOW_PATH, "providers") +DOCUMENTATION_PATH = os.path.join(SOURCE_DIR_PATH, "docs") TARGET_PROVIDER_PACKAGES_PATH = os.path.join(SOURCE_DIR_PATH, "provider_packages") GENERATED_AIRFLOW_PATH = os.path.join(TARGET_PROVIDER_PACKAGES_PATH, "airflow") GENERATED_PROVIDERS_PATH = os.path.join(GENERATED_AIRFLOW_PATH, "providers") @@ -79,6 +120,43 @@ PY3 = sys.version_info[0] == 3 +@click.group(context_settings={'help_option_names': ['-h', '--help'], 'max_content_width': 500}) +def cli(): + ... + + +@cli.resultcallback() +def process_result(result): + # This is special case - when the command executed returns false, it means that we are skipping + # the package + if result is False: + raise click.exceptions.Exit(64) + return result + + +option_git_update = click.option( + '--git-update/--no-git-update', + default=True, + is_flag=True, + help=f"If the git remote {HTTPS_REMOTE} already exists, don't try to update it", +) +option_version_suffix = click.option( + "--version-suffix", + metavar="suffix", + help=textwrap.dedent( + """ + adds version suffix to version of the packages. + only useful when generating rc candidates for pypi.""" + ), +) +option_verbose = click.option( + "--verbose", + is_flag=True, + help="Print verbose information about performed steps", +) +argument_package_id = click.argument('package_id') + + @contextmanager def with_group(title): """ @@ -89,12 +167,11 @@ def with_group(title): https://docs.github.com/en/free-pro-team@latest/actions/reference/workflow-commands-for-github-actions#grouping-log-lines """ if os.environ.get('GITHUB_ACTIONS', 'false') != "true": - print("[blue]", "#" * 20, title, "#" * 20, "[/]") + print("[blue]" + "#" * 10 + ' ' + title + ' ' + "#" * 10 + "[/]") yield return print(f"::group::{title}") yield - print("\033[0m") print("::endgroup::") @@ -120,6 +197,15 @@ class VerifiedEntities(NamedTuple): wrong_entities: List[Tuple[type, str]] +class ProviderPackageDetails(NamedTuple): + provider_package_id: str + full_package_name: str + source_provider_package_path: str + documentation_provider_package_path: str + provider_description: str + versions: List[str] + + ENTITY_NAMES = { EntityType.Operators: "Operators", EntityType.Transfers: "Transfer Operators", @@ -217,31 +303,25 @@ def get_target_providers_package_folder(provider_package_id: str) -> str: } -def get_pip_package_name(provider_package_id: str, backport_packages: bool) -> str: +def get_pip_package_name(provider_package_id: str) -> str: """ Returns PIP package name for the package id. :param provider_package_id: id of the package - :param backport_packages: whether to prepare regular (False) or backport (True) packages :return: the name of pip package """ - return ( - "apache-airflow-backport-providers-" if backport_packages else "apache-airflow-providers-" - ) + provider_package_id.replace(".", "-") + return "apache-airflow-providers-" + provider_package_id.replace(".", "-") -def get_long_description(provider_package_id: str, backport_packages: bool) -> str: +def get_long_description(provider_package_id: str) -> str: """ Gets long description of the package. 
:param provider_package_id: package id - :param backport_packages: whether to prepare regular (False) or backport (True) packages - :return: content of the description (BACKPORT_PROVIDER_README/README file) + :return: content of the description: README file """ package_folder = get_target_providers_package_folder(provider_package_id) - readme_file = os.path.join( - package_folder, "BACKPORT_PROVIDER_README.md" if backport_packages else "README.md" - ) + readme_file = os.path.join(package_folder, "README.md") if not os.path.exists(readme_file): return "" with open(readme_file, encoding='utf-8', mode="r") as file: @@ -252,7 +332,7 @@ def get_long_description(provider_package_id: str, backport_packages: bool) -> s if line.startswith("**Table of contents**"): copying = False continue - header_line = "## Backport package" if backport_packages else "## Provider package" + header_line = "## Provider package" if line.startswith(header_line): copying = True if copying: @@ -260,45 +340,16 @@ def get_long_description(provider_package_id: str, backport_packages: bool) -> s return long_description -def get_package_release_version( - provider_package_id: str, backport_packages: bool, version_suffix: str = "" -) -> str: - """ - Returns release version including optional suffix. - - :param provider_package_id: package id - :param backport_packages: whether to prepare regular (False) or backport (True) packages - :param version_suffix: optional suffix (rc1, rc2 etc). - :return: - """ - return ( - get_latest_release( - get_source_package_path(provider_package_id=provider_package_id), - backport_packages=backport_packages, - ).release_version - + version_suffix - ) - - -def get_install_requirements(provider_package_id: str, backport_packages: bool) -> List[str]: +def get_install_requirements(provider_package_id: str) -> List[str]: """ Returns install requirements for the package. :param provider_package_id: id of the provider package - :param backport_packages: whether we are preparing backport packages :return: install requirements of the package """ dependencies = PROVIDERS_REQUIREMENTS[provider_package_id] - - if backport_packages: - airflow_dependency = ( - 'apache-airflow~=1.10' - if provider_package_id != 'cncf.kubernetes' - else 'apache-airflow>=1.10.12, <2.0.0' - ) - else: - airflow_dependency = 'apache-airflow>=2.0.0' + airflow_dependency = 'apache-airflow>=2.0.0' # Avoid circular dependency for the preinstalled packages install_requires = [airflow_dependency] if provider_package_id not in PREINSTALLED_PROVIDERS else [] install_requires.extend(dependencies) @@ -313,13 +364,11 @@ def get_setup_requirements() -> List[str]: return ['setuptools', 'wheel'] -def get_package_extras(provider_package_id: str, backport_packages: bool) -> Dict[str, List[str]]: +def get_package_extras(provider_package_id: str) -> Dict[str, List[str]]: """ Finds extras for the package specified. 
:param provider_package_id: id of the package - :param backport_packages: whether to prepare regular (False) or backport (True) packages - """ if provider_package_id == 'providers': return {} @@ -327,7 +376,7 @@ def get_package_extras(provider_package_id: str, backport_packages: bool) -> Dic cross_provider_dependencies: Dict[str, List[str]] = json.load(dependencies_file) extras_dict = ( { - module: [get_pip_package_name(module, backport_packages=backport_packages)] + module: [get_pip_package_name(module)] for module in cross_provider_dependencies[provider_package_id] } if cross_provider_dependencies.get(provider_package_id) @@ -375,7 +424,7 @@ def is_from_the_expected_base_package(the_class: Type, expected_package: str) -> return the_class.__module__.startswith(expected_package) -def inherits_from(the_class: Type, expected_ancestor: Type) -> bool: +def inherits_from(the_class: Type, expected_ancestor: Optional[Type] = None) -> bool: """ Returns true if the class inherits (directly or indirectly) from the class specified. :param the_class: The class to check @@ -401,7 +450,7 @@ def is_class(the_class: Type) -> bool: return inspect.isclass(the_class) -def package_name_matches(the_class: Type, expected_pattern: Optional[str]) -> bool: +def package_name_matches(the_class: Type, expected_pattern: Optional[str] = None) -> bool: """ In case expected_pattern is set, it checks if the package name matches the pattern. . @@ -409,7 +458,7 @@ def package_name_matches(the_class: Type, expected_pattern: Optional[str]) -> bo :param expected_pattern: the pattern that should match the package :return: true if the expected_pattern is None or the pattern matches the package """ - return expected_pattern is None or re.match(expected_pattern, the_class.__module__) + return expected_pattern is None or re.match(expected_pattern, the_class.__module__) is not None def find_all_entities( @@ -419,7 +468,7 @@ def find_all_entities( sub_package_pattern_match: str, expected_class_name_pattern: str, unexpected_class_name_patterns: Set[str], - exclude_class_type: Type = None, + exclude_class_type: Optional[Type] = None, false_positive_class_names: Optional[Set[str]] = None, ) -> VerifiedEntities: """ @@ -494,7 +543,9 @@ def convert_new_classes_to_table( def convert_moved_classes_to_table( - entity_type: EntityType, moved_entities: Dict[str, str], full_package_name: str + entity_type: EntityType, + moved_entities: Dict[str, str], + full_package_name: str, ) -> str: """ Converts moved entities to a markdown table @@ -610,8 +661,8 @@ def print_wrong_naming(entity_type: EntityType, wrong_classes: List[Tuple[type, """ if wrong_classes: print(f"\n[red]There are wrongly named entities of type {entity_type}:[/]\n", file=sys.stderr) - for entity_type, message in wrong_classes: - print(f"{entity_type}: {message}", file=sys.stderr) + for wrong_entity_type, message in wrong_classes: + print(f"{wrong_entity_type}: {message}", file=sys.stderr) def get_package_class_summary( @@ -725,7 +776,9 @@ def render_template( return content -def convert_git_changes_to_table(changes: str, base_url: str) -> str: +def convert_git_changes_to_table( + print_version: Optional[str], changes: str, base_url: str, markdown: bool = True +) -> str: """ Converts list of changes from it's string form to markdown table. @@ -734,9 +787,11 @@ def convert_git_changes_to_table(changes: str, base_url: str) -> str: The subject can contain spaces but one of the preceding values can, so we can make split 3 times on spaces to break it up. 
+ :param print_version: Version to print :param changes: list of changes in a form of multiple-line string :param base_url: base url for the commit URL - :return: markdown-formatted table + :param markdown: if True, markdown format is used else rst + :return: formatted table """ from tabulate import tabulate @@ -747,15 +802,33 @@ def convert_git_changes_to_table(changes: str, base_url: str) -> str: if line == "": continue full_hash, short_hash, date, message = line.split(" ", maxsplit=3) - table_data.append((f"[{short_hash}]({base_url}{full_hash})", date, message)) - return tabulate(table_data, headers=headers, tablefmt="pipe") + message_without_backticks = message.replace("`", "'") + table_data.append( + ( + f"[{short_hash}]({base_url}{full_hash})" + if markdown + else f"`{short_hash} <{base_url}{full_hash}>`_", + date, + f"`{message_without_backticks}`" if markdown else f"``{message_without_backticks}``", + ) + ) + header = "" + if not table_data: + return header + table = tabulate(table_data, headers=headers, tablefmt="pipe" if markdown else "rst") + if not markdown: + header += f"\n\n{print_version}\n" + "." * (len(print_version) if print_version else 0) + "\n\n" + release_date = table_data[0][1] + header += f"Latest change: {release_date}\n\n" + return header + table -def convert_pip_requirements_to_table(requirements: Iterable[str]) -> str: +def convert_pip_requirements_to_table(requirements: Iterable[str], markdown: bool = True) -> str: """ Converts PIP requirement list to a markdown table. :param requirements: requirements list - :return: markdown-formatted table + :param markdown: if True, markdown format is used else rst + :return: formatted table """ from tabulate import tabulate @@ -766,40 +839,39 @@ def convert_pip_requirements_to_table(requirements: Iterable[str]) -> str: if found: package = found.group(1) version_required = found.group(2) - table_data.append((package, version_required)) + if version_required != "": + version_required = f"`{version_required}`" if markdown else f'``{version_required}``' + table_data.append((f"`{package}`" if markdown else f"``{package}``", version_required)) else: table_data.append((dependency, "")) - return tabulate(table_data, headers=headers, tablefmt="pipe") + return tabulate(table_data, headers=headers, tablefmt="pipe" if markdown else "rst") def convert_cross_package_dependencies_to_table( cross_package_dependencies: List[str], - backport_packages: bool, + markdown: bool = True, ) -> str: """ Converts cross-package dependencies to a markdown table :param cross_package_dependencies: list of cross-package dependencies - :param backport_packages: whether we are preparing backport packages - :return: markdown-formatted table + :param markdown: if True, markdown format is used else rst + :return: formatted table """ from tabulate import tabulate headers = ["Dependent package", "Extra"] table_data = [] - if backport_packages: - prefix = "apache-airflow-backport-providers" - base_url = "https://github.com/apache/airflow/tree/master/airflow/providers/" - else: - prefix = "apache-airflow-providers" - base_url = f"https://pypi.org/project/{prefix}-" + prefix = "apache-airflow-providers-" + base_url = "https://airflow.apache.org/docs/" for dependency in cross_package_dependencies: - pip_package_name = f"{prefix}-{dependency.replace('.','-')}" - if backport_packages: - url_suffix = f"{dependency.replace('.','/')}" + pip_package_name = f"{prefix}{dependency.replace('.','-')}" + url_suffix = f"{dependency.replace('.','-')}" + if markdown: + url = 
f"[{pip_package_name}]({base_url}{url_suffix})" else: - url_suffix = f"{dependency.replace('.','-')}" - table_data.append((f"[{pip_package_name}]({base_url}{url_suffix})", dependency)) - return tabulate(table_data, headers=headers, tablefmt="pipe") + url = f"`{pip_package_name} <{base_url}{prefix}{url_suffix}>`_" + table_data.append((url, f"`{dependency}`" if markdown else f"``{dependency}``")) + return tabulate(table_data, headers=headers, tablefmt="pipe" if markdown else "rst") LICENCE = """ """ +LICENCE_RST = """ +.. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + +.. http://www.apache.org/licenses/LICENSE-2.0 + +.. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. +""" + """ Keeps information about historical releases. """ @@ -830,91 +921,21 @@ def convert_cross_package_dependencies_to_table( ) -def strip_leading_zeros_in_calver(calver_version: str) -> str: +def strip_leading_zeros(version: str) -> str: """ - Strips leading zeros from calver version number. + Strips leading zeros from version number. This converts 1974.04.03 to 1974.4.3 as the format with leading month and day zeros is not accepted by PIP versioning. - :param calver_version: version number in calver format (potentially with leading 0s in date and month) + :param version: version number in CALVER format (potentially with leading 0s in date and month) :return: string with leading 0s after dot replaced. """ - return calver_version.replace(".0", ".") - - -def get_provider_changes_prefix(backport_packages: bool) -> str: - """ - Returns prefix for provider CHANGES files. - """ - if backport_packages: - return "BACKPORT_PROVIDER_CHANGES_" - else: - return "PROVIDER_CHANGES_" - - -def get_all_releases(provider_package_path: str, backport_packages: bool) -> List[ReleaseInfo]: - """ - Returns information about past releases (retrieved from *changes_*md files stored in the - package folder. - :param provider_package_path: path of the package - :param backport_packages: whether to prepare regular (False) or backport (True) packages - :return: list of releases made so far. - """ - changes_file_prefix = get_provider_changes_prefix(backport_packages=backport_packages) - past_releases: List[ReleaseInfo] = [] - changes_file_names = listdir(provider_package_path) - for file_name in sorted(changes_file_names, reverse=True): - if file_name.startswith(changes_file_prefix) and file_name.endswith(".md"): - changes_file_path = os.path.join(provider_package_path, file_name) - with open(changes_file_path) as changes_file: - content = changes_file.read() - found = re.search(r'/([a-z0-9]*)\)', content, flags=re.MULTILINE) - if not found: - print("[yellow]No commit found. 
This seems to be first time you run it[/]", file=sys.stderr) - else: - last_commit_hash = found.group(1) - release_version = file_name[len(changes_file_prefix) :][:-3] - release_version_no_leading_zeros = ( - strip_leading_zeros_in_calver(release_version) if backport_packages else release_version - ) - past_releases.append( - ReleaseInfo( - release_version=release_version, - release_version_no_leading_zeros=release_version_no_leading_zeros, - last_commit_hash=last_commit_hash, - content=content, - file_name=file_name, - ) - ) - return past_releases - - -def get_latest_release(provider_package_path: str, backport_packages: bool) -> ReleaseInfo: - """ - Gets information about the latest release. - - :param provider_package_path: path of package - :param backport_packages: whether to prepare regular (False) or backport (True) packages - :return: latest release information - """ - releases = get_all_releases( - provider_package_path=provider_package_path, backport_packages=backport_packages - ) - if len(releases) == 0: - return ReleaseInfo( - release_version="0.0.0", - release_version_no_leading_zeros="0.0.0", - last_commit_hash="no_hash", - content="empty", - file_name="no_file", - ) - else: - return releases[0] + return ".".join(str(int(i)) for i in version.split(".")) def get_previous_release_info( - previous_release_version: str, past_releases: List[ReleaseInfo], current_release_version: str + previous_release_version: Optional[str], past_releases: List[ReleaseInfo], current_release_version: str ) -> Optional[str]: """ Find previous release. In case we are re-running current release we assume that last release was @@ -935,13 +956,13 @@ def get_previous_release_info( def check_if_release_version_ok( - past_releases: List[ReleaseInfo], current_release_version: str, backport_packages: bool + past_releases: List[ReleaseInfo], + current_release_version: str, ) -> Tuple[str, Optional[str]]: """ Check if the release version passed is not later than the last release version :param past_releases: all past releases (if there are any) :param current_release_version: release version to check - :param backport_packages: whether to prepare regular (False) or backport (True) packages :return: Tuple of current/previous_release (previous might be None if there are no releases) """ previous_release_version = past_releases[0].release_version if past_releases else None @@ -949,10 +970,7 @@ def check_if_release_version_ok( if previous_release_version: current_release_version = previous_release_version else: - if backport_packages: - current_release_version = (datetime.today() + timedelta(days=5)).strftime('%Y.%m.%d') - else: - current_release_version = "1.0.0" # TODO: replace with maintained version + current_release_version = (datetime.today() + timedelta(days=5)).strftime('%Y.%m.%d') if previous_release_version: if Version(current_release_version) < Version(previous_release_version): print( @@ -960,7 +978,7 @@ def check_if_release_version_ok( f"{previous_release_version} - last release for the package[/]", file=sys.stderr, ) - sys.exit(2) + raise Exception("Bad release version") return current_release_version, previous_release_version @@ -975,72 +993,114 @@ def get_cross_provider_dependent_packages(provider_package_id: str) -> List[str] return dependent_packages -def make_sure_remote_apache_exists_and_fetch(): +def make_sure_remote_apache_exists_and_fetch(git_update: bool, verbose: bool): """ - Make sure that apache remote exist in git. 
We need to take a log from the master of apache - repository - not locally - because when we commit this change and run it, our log will include the - current commit - which is going to have different commit id once we merge. So it is a bit - catch-22. + Make sure that apache remote exist in git. We need to take a log from the apache + repository - not locally. - :return: + Also the local repo might be shallow so we need to unshallow it. + + This will: + * check if the remote exists and add if it does not + * check if the local repo is shallow, mark it to be unshallowed in this case + * fetch from the remote including all tags and overriding local tags in case they are set differently + + :param git_update: If the git remote already exists, should we try to update it + :param verbose: print verbose messages while fetching """ try: + check_remote_command = ["git", "remote", "get-url", HTTPS_REMOTE] + if verbose: + print(f"Running command: '{' '.join(check_remote_command)}'") subprocess.check_call( - ["git", "remote", "add", "apache-https-for-providers", "https://github.com/apache/airflow.git"], + check_remote_command, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, ) - except subprocess.CalledProcessError as e: - if e.returncode == 128: - print( - "[yellow]The remote `apache-https-for-providers` already exists. If you have trouble running " - "git log delete the remote[/]", - file=sys.stderr, - ) + + # Remote already exists, don't update it again! + if not git_update: + return + except subprocess.CalledProcessError as ex: + if ex.returncode == 128: + remote_add_command = [ + "git", + "remote", + "add", + HTTPS_REMOTE, + "https://github.com/apache/airflow.git", + ] + if verbose: + print(f"Running command: '{' '.join(remote_add_command)}'") + try: + subprocess.check_output( + remote_add_command, + stderr=subprocess.STDOUT, + ) + except subprocess.CalledProcessError as ex: + print("[red]Error: when adding remote:[/]", ex) else: raise + if verbose: + print("Fetching full history and tags from remote. ") + print("This might override your local tags!") + is_shallow_repo = ( + subprocess.check_output(["git", "rev-parse", "--is-shallow-repository"], stderr=subprocess.DEVNULL) + == 'true' + ) + fetch_command = ["git", "fetch", "--tags", "--force", HTTPS_REMOTE] + if is_shallow_repo: + if verbose: + print( + "This will also unshallow the repository, " + "making all history available and increasing storage!" + ) + fetch_command.append("--unshallow") + if verbose: + print(f"Running command: '{' '.join(fetch_command)}'") subprocess.check_call( - ["git", "fetch", "apache-https-for-providers"], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL + fetch_command, + stderr=subprocess.DEVNULL, ) -def get_git_command(base_commit: Optional[str]) -> List[str]: +def get_git_log_command( + verbose: bool, from_commit: Optional[str] = None, to_commit: Optional[str] = None +) -> List[str]: """ Get git command to run for the current repo from the current folder (which is the package folder). 
-    :param base_commit: if present - base commit from which to start the log from
+    :param verbose: whether to print verbose info while getting the command
+    :param from_commit: if present - base commit from which to start the log from
+    :param to_commit: if present - final commit which should be the start of the log
     :return: git command to run
     """
     git_cmd = [
         "git",
         "log",
-        "apache-https-for-providers/master",
         "--pretty=format:%H %h %cd %s",
         "--date=short",
     ]
-    if base_commit:
-        git_cmd.append(f"{base_commit}...HEAD")
+    if from_commit and to_commit:
+        git_cmd.append(f"{from_commit}...{to_commit}")
+    elif from_commit:
+        git_cmd.append(from_commit)
     git_cmd.extend(['--', '.'])
+    if verbose:
+        print(f"Command to run: '{' '.join(git_cmd)}'")
     return git_cmd
 
 
-def store_current_changes(
-    provider_package_path: str, current_release_version: str, current_changes: str, backport_packages: bool
-) -> None:
+def get_git_tag_check_command(tag: str) -> List[str]:
     """
-    Stores current changes in the *_changes_YYYY.MM.DD.md file.
-
-    :param provider_package_path: path for the package
-    :param current_release_version: release version to build
-    :param current_changes: list of changes formatted in markdown format
-    :param backport_packages: whether to prepare regular (False) or backport (True) packages
+    Get git command to check if tag exists.
+    :param tag: Tag to check
+    :return: git command to run
     """
-    current_changes_file_path = os.path.join(
-        provider_package_path,
-        get_provider_changes_prefix(backport_packages=backport_packages) + current_release_version + ".md",
-    )
-    with open(current_changes_file_path, "wt") as current_changes_file:
-        current_changes_file.write(current_changes)
-        current_changes_file.write("\n")
+    return [
+        "git",
+        "rev-parse",
+        tag,
+    ]
 
 
 def get_source_package_path(provider_package_id: str) -> str:
@@ -1049,8 +1109,18 @@ def get_source_package_path(provider_package_id: str) -> str:
     :param provider_package_id: id of the package
     :return: path of the providers folder
     """
-    provider_package_path = os.path.join(PROVIDERS_PATH, *provider_package_id.split("."))
-    return provider_package_path
+    return os.path.join(PROVIDERS_PATH, *provider_package_id.split("."))
+
+
+def get_documentation_package_path(provider_package_id: str) -> str:
+    """
+    Retrieves documentation package path from package id.
+    :param provider_package_id: id of the package
+    :return: path of the documentation folder
+    """
+    return os.path.join(
+        DOCUMENTATION_PATH, f"apache-airflow-providers-{provider_package_id.replace('.','-')}"
+    )
 
 
 def get_generated_package_path(provider_package_id: str) -> str:
@@ -1088,6 +1158,32 @@ def get_additional_package_info(provider_package_path: str) -> str:
     return ""
 
 
+def get_changelog_for_package(provider_package_path: str) -> str:
+    """
+    Returns the changelog for the package.
+
+    :param provider_package_path: path for the package
+    :return: content of the CHANGELOG.rst file for the package
+    """
+    changelog_path = os.path.join(provider_package_path, "CHANGELOG.rst")
+    if os.path.isfile(changelog_path):
+        with open(changelog_path) as changelog_file:
+            return changelog_file.read()
+    else:
+        print(f"[red]ERROR: Missing {changelog_path}[/]")
+        print("Please add the file with initial content:")
+        print()
+        syntax = Syntax(
+            INITIAL_CHANGELOG_CONTENT,
+            "rst",
+            theme="ansi_dark",
+        )
+        console = Console(width=100)
+        console.print(syntax)
+        print()
+        raise Exception(f"Missing {changelog_path}")
+
+
 def is_camel_case_with_acronyms(s: str):
     """
     Checks if the string passed is Camel Case (with capitalised acronyms allowed).
@@ -1131,11 +1227,8 @@ def check_if_classes_are_properly_named(
     return total_class_number, badly_named_class_number
 
 
-def get_package_pip_name(provider_package_id: str, backport_packages: bool):
-    if backport_packages:
-        return f"apache-airflow-backport-providers-{provider_package_id.replace('.', '-')}"
-    else:
-        return f"apache-airflow-providers-{provider_package_id.replace('.', '-')}"
+def get_package_pip_name(provider_package_id: str):
+    return f"apache-airflow-providers-{provider_package_id.replace('.', '-')}"
 
 
 def validate_provider_info_with_2_0_0_schema(provider_info: Dict[str, Any]) -> None:
@@ -1150,12 +1243,12 @@ def validate_provider_info_with_2_0_0_schema(provider_info: Dict[str, Any]) -> None:
         schema = json.load(schema_file)
     try:
         jsonschema.validate(provider_info, schema=schema)
-        print("[green]Provider info validated against 2.0.0 schema[/]")
-    except jsonschema.ValidationError as e:
+    except jsonschema.ValidationError as ex:
+        print("[red]Provider info not validated against 2.0.0 schema[/]")
         raise Exception(
             "Error when validating schema. The schema must be Airflow 2.0.0 compatible. "
             "If you added any fields please remove them via 'convert_to_provider_info' method.",
-            e,
+            ex,
         )
 
 
@@ -1172,13 +1265,13 @@ def validate_provider_info_with_runtime_schema(provider_info: Dict[str, Any]) -> None:
         schema = json.load(schema_file)
     try:
         jsonschema.validate(provider_info, schema=schema)
-        print("[green]Provider info validated against runtime schema[/]")
-    except jsonschema.ValidationError as e:
+    except jsonschema.ValidationError as ex:
+        print("[red]Provider info not validated against runtime schema[/]")
         raise Exception(
             "Error when validating schema. The schema must be compatible with "
            + "airflow/provider_info.schema.json. 
" + "If you added any fields please remove them via 'convert_to_provider_info' method.", - e, + ex, ) @@ -1218,191 +1311,330 @@ def get_provider_info_from_provider_yaml(provider_package_id: str) -> Dict[str, if not os.path.exists(provider_yaml_file_name): raise Exception(f"The provider.yaml file is missing: {provider_yaml_file_name}") with open(provider_yaml_file_name) as provider_file: - provider_yaml_dict = yaml.safe_load(provider_file.read()) + provider_yaml_dict = yaml.load(provider_file, SafeLoader) # noqa provider_info = convert_to_provider_info(provider_yaml_dict) validate_provider_info_with_2_0_0_schema(provider_info) validate_provider_info_with_runtime_schema(provider_info) return provider_info -def update_generated_files_for_package( - provider_package_id: str, - current_release_version: str, - version_suffix: str, - imported_classes: List[str], - backport_packages: bool, - update_release_notes: bool, - update_setup: bool, -) -> Tuple[int, int]: - """ - Updates release notes (BACKPORT_PROVIDER_README.md/README.md) for the package. - Returns Tuple of total number of entities and badly named entities. +def get_version_tag(version: str, provider_package_id: str, version_suffix: str = ''): + if version_suffix is None: + version_suffix = '' + return f"providers-{provider_package_id.replace('.','-')}/{version}{version_suffix}" - :param provider_package_id: id of the package - :param current_release_version: release version: - :param version_suffix: version suffix corresponding to the version in the code - :param imported_classes - entities that have been imported from providers - :param backport_packages: whether to prepare regular (False) or backport (True) packages - :param update_release_notes: whether to update release notes - :param update_setup: whether to update setup files - :return: Tuple of total/bad number of entities - """ - verify_provider_package(provider_package_id) - full_package_name = f"airflow.providers.{provider_package_id}" - source_provider_package_path = get_source_package_path(provider_package_id) - entity_summaries = get_package_class_summary(full_package_name, imported_classes) - past_releases = get_all_releases( - provider_package_path=source_provider_package_path, backport_packages=backport_packages +def print_changes_table(changes_table): + syntax = Syntax(changes_table, "rst", theme="ansi_dark") + console = Console(width=200) + console.print(syntax) + + +def get_all_changes_for_regular_packages( + versions: List[str], + provider_package_id: str, + source_provider_package_path: str, + verbose: bool, +) -> Tuple[bool, str]: + current_version = versions[0] + current_tag_no_suffix = get_version_tag(current_version, provider_package_id) + if verbose: + print(f"Checking if tag '{current_tag_no_suffix}' exist.") + if not subprocess.call( + get_git_tag_check_command(current_tag_no_suffix), + cwd=source_provider_package_path, + stderr=subprocess.DEVNULL, + ): + if verbose: + print(f"The tag {current_tag_no_suffix} exists.") + # The tag already exists + changes = subprocess.check_output( + get_git_log_command(verbose, HEAD_OF_HTTPS_REMOTE, current_tag_no_suffix), + cwd=source_provider_package_path, + universal_newlines=True, + ) + if changes: + print( + f"[yellow]The provider {provider_package_id} has changes" + f" since last release but version is not updated[/]" + ) + print() + print( + "[yellow]Please update version in " + f"'airflow/providers/{provider_package_id.replace('-','/')}/'" + "provider.yaml' to prepare release.[/]\n" + ) + changes_table = 
convert_git_changes_to_table( + "UNKNOWN", changes, base_url="https://github.com/apache/airflow/commit/", markdown=False + ) + print_changes_table(changes_table) + return False, changes_table + else: + print(f"No changes for {provider_package_id}") + return False, "" + if verbose: + print("The tag does not exist. ") + if len(versions) == 1: + print(f"The provider '{provider_package_id}' has never been released but it is ready to release!\n") + else: + print(f"New version of the '{provider_package_id}' package is ready to be released!\n") + next_version_tag = HEAD_OF_HTTPS_REMOTE + changes_table = '' + print_version = versions[0] + for version in versions[1:]: + version_tag = get_version_tag(version, provider_package_id) + changes = subprocess.check_output( + get_git_log_command(verbose, next_version_tag, version_tag), + cwd=source_provider_package_path, + universal_newlines=True, + ) + changes_table += convert_git_changes_to_table( + print_version, changes, base_url="https://github.com/apache/airflow/commit/", markdown=False + ) + next_version_tag = version_tag + print_version = version + changes = subprocess.check_output( + get_git_log_command(verbose, next_version_tag), + cwd=source_provider_package_path, + universal_newlines=True, + ) + changes_table += convert_git_changes_to_table( + print_version, changes, base_url="https://github.com/apache/airflow/commit/", markdown=False ) - current_release_version, previous_release = check_if_release_version_ok( - past_releases, current_release_version, backport_packages + if verbose: + print_changes_table(changes_table) + return True, changes_table + + +def get_provider_details(provider_package_id: str) -> ProviderPackageDetails: + provider_info = get_provider_info_from_provider_yaml(provider_package_id) + return ProviderPackageDetails( + provider_package_id=provider_package_id, + full_package_name=f"airflow.providers.{provider_package_id}", + source_provider_package_path=get_source_package_path(provider_package_id), + documentation_provider_package_path=get_documentation_package_path(provider_package_id), + provider_description=provider_info['description'], + versions=provider_info['versions'], ) + + +def get_provider_jinja_context( + provider_details: ProviderPackageDetails, + current_release_version: str, + version_suffix: str, +): + verify_provider_package(provider_details.provider_package_id) cross_providers_dependencies = get_cross_provider_dependent_packages( - provider_package_id=provider_package_id + provider_package_id=provider_details.provider_package_id ) - previous_release = get_previous_release_info( - previous_release_version=previous_release, - past_releases=past_releases, - current_release_version=current_release_version, + release_version_no_leading_zeros = strip_leading_zeros(current_release_version) + pip_requirements_table = convert_pip_requirements_to_table( + PROVIDERS_REQUIREMENTS[provider_details.provider_package_id] + ) + pip_requirements_table_rst = convert_pip_requirements_to_table( + PROVIDERS_REQUIREMENTS[provider_details.provider_package_id], markdown=False + ) + cross_providers_dependencies_table = convert_cross_package_dependencies_to_table( + cross_providers_dependencies ) - release_version_no_leading_zeros = ( - strip_leading_zeros_in_calver(current_release_version) - if backport_packages - else current_release_version + cross_providers_dependencies_table_rst = convert_cross_package_dependencies_to_table( + cross_providers_dependencies, markdown=False ) context: Dict[str, Any] = { "ENTITY_TYPES": 
list(EntityType), - "README_FILE": "BACKPORT_PROVIDER_README.md" if backport_packages else "README.md", - "PROVIDER_PACKAGE_ID": provider_package_id, - "PACKAGE_PIP_NAME": get_pip_package_name(provider_package_id, backport_packages), - "FULL_PACKAGE_NAME": full_package_name, - "PROVIDER_PATH": full_package_name.replace(".", "/"), + "README_FILE": "README.rst", + "PROVIDER_PACKAGE_ID": provider_details.provider_package_id, + "PACKAGE_PIP_NAME": get_pip_package_name(provider_details.provider_package_id), + "FULL_PACKAGE_NAME": provider_details.full_package_name, + "PROVIDER_PATH": provider_details.full_package_name.replace(".", "/"), "RELEASE": current_release_version, "RELEASE_NO_LEADING_ZEROS": release_version_no_leading_zeros, - "VERSION_SUFFIX": version_suffix, - "ADDITIONAL_INFO": get_additional_package_info(provider_package_path=source_provider_package_path), + "VERSION_SUFFIX": version_suffix or '', + "ADDITIONAL_INFO": get_additional_package_info( + provider_package_path=provider_details.source_provider_package_path + ), + "CHANGELOG": get_changelog_for_package( + provider_package_path=provider_details.source_provider_package_path + ), "CROSS_PROVIDERS_DEPENDENCIES": cross_providers_dependencies, - "PIP_REQUIREMENTS": PROVIDERS_REQUIREMENTS[provider_package_id], - "PROVIDER_TYPE": "Backport provider" if BACKPORT_PACKAGES else "Provider", - "PROVIDERS_FOLDER": "backport-providers" if BACKPORT_PACKAGES else "providers", + "PIP_REQUIREMENTS": PROVIDERS_REQUIREMENTS[provider_details.provider_package_id], + "PROVIDER_TYPE": "Provider", + "PROVIDERS_FOLDER": "providers", + "PROVIDER_DESCRIPTION": provider_details.provider_description, "INSTALL_REQUIREMENTS": get_install_requirements( - provider_package_id=provider_package_id, backport_packages=backport_packages + provider_package_id=provider_details.provider_package_id ), "SETUP_REQUIREMENTS": get_setup_requirements(), - "EXTRAS_REQUIREMENTS": get_package_extras( - provider_package_id=provider_package_id, backport_packages=backport_packages - ), - "PROVIDER_INFO": get_provider_info_from_provider_yaml(provider_package_id), + "EXTRAS_REQUIREMENTS": get_package_extras(provider_package_id=provider_details.provider_package_id), + "CROSS_PROVIDERS_DEPENDENCIES_TABLE": cross_providers_dependencies_table, + "CROSS_PROVIDERS_DEPENDENCIES_TABLE_RST": cross_providers_dependencies_table_rst, + "PIP_REQUIREMENTS_TABLE": pip_requirements_table, + "PIP_REQUIREMENTS_TABLE_RST": pip_requirements_table_rst, } - if update_release_notes: - git_cmd = get_git_command(previous_release) - try: - changes = subprocess.check_output( - git_cmd, cwd=source_provider_package_path, universal_newlines=True - ) - changes_table = convert_git_changes_to_table( - changes, base_url="https://github.com/apache/airflow/commit/" - ) - except subprocess.CalledProcessError: - # TODO(potiuk) fix me for both backport/provider package check - changes_table = '' - context["CURRENT_CHANGES_TABLE"] = changes_table - pip_requirements_table = convert_pip_requirements_to_table( - PROVIDERS_REQUIREMENTS[provider_package_id] - ) - cross_providers_dependencies_table = convert_cross_package_dependencies_to_table( - cross_providers_dependencies, - backport_packages=backport_packages, - ) - context["CROSS_PROVIDERS_DEPENDENCIES_TABLE"] = cross_providers_dependencies_table - context["PIP_REQUIREMENTS_TABLE"] = pip_requirements_table + return context - total, bad = check_if_classes_are_properly_named(entity_summaries) + +def prepare_readme_file(context): + readme_content = LICENCE_RST + 
readme_template_name = PROVIDER_TEMPLATE_PREFIX + "README"
+    readme_content += render_template(template_name=readme_template_name, context=context, extension=".rst")
+    readme_file_path = os.path.join(TARGET_PROVIDER_PACKAGES_PATH, "README.rst")
+    with open(readme_file_path, "wt") as readme_file:
+        readme_file.write(readme_content)
+
+
+def update_generated_files_for_regular_package(
+    provider_package_id: str,
+    version_suffix: str,
+    update_release_notes: bool,
+    update_setup: bool,
+    verbose: bool,
+) -> bool:
+    """
+    Updates generated files (readme, changes and/or setup.cfg/setup.py/manifest.in/provider_info)
+
+    :param provider_package_id: id of the package
+    :param version_suffix: version suffix corresponding to the version in the code
+    :param update_release_notes: whether to update release notes
+    :param update_setup: whether to update setup files
+    :param verbose: whether to print verbose messages
+    :return: False if the package should be skipped, True if everything was generated properly
+    """
+    verify_provider_package(provider_package_id)
+    provider_details = get_provider_details(provider_package_id)
+    provider_info = get_provider_info_from_provider_yaml(provider_package_id)
+    current_release_version = provider_details.versions[0]
+    jinja_context = get_provider_jinja_context(
+        provider_details=provider_details,
+        current_release_version=current_release_version,
+        version_suffix=version_suffix,
+    )
+    jinja_context["PROVIDER_INFO"] = provider_info
     if update_release_notes:
-        prepare_readme_and_changes_files(
-            backport_packages,
-            context,
-            current_release_version,
-            entity_summaries,
+        proceed, changes = get_all_changes_for_regular_packages(
+            provider_details.versions,
             provider_package_id,
-            source_provider_package_path,
+            provider_details.source_provider_package_path,
+            verbose,
+        )
+        if not proceed:
+            print()
+            print(
+                f"[yellow]Provider: {provider_package_id} - skipping documentation generation. 
No changes![/]" + ) + print() + return False + jinja_context["DETAILED_CHANGES_RST"] = changes + jinja_context["DETAILED_CHANGES_PRESENT"] = len(changes) > 0 + print() + print(f"Update index.rst for {provider_package_id}") + print() + update_index_rst_for_regular_providers( + jinja_context, provider_package_id, provider_details.documentation_provider_package_path + ) + update_commits_rst_for_regular_providers( + jinja_context, provider_package_id, provider_details.documentation_provider_package_path ) if update_setup: - prepare_setup_py_file(context) - prepare_setup_cfg_file(context) - prepare_get_provider_info_py_file(context, provider_package_id) - prepare_manifest_in_file(context) - - bad = bad + sum([len(entity_summary.wrong_entities) for entity_summary in entity_summaries.values()]) - if bad != 0: print() - print(f"[red]There are {bad} errors of {total} entities for {provider_package_id}[/]") + print(f"Generating setup files for {provider_package_id}") print() - return total, bad + prepare_setup_py_file(jinja_context) + prepare_setup_cfg_file(jinja_context) + prepare_get_provider_info_py_file(jinja_context, provider_package_id) + prepare_manifest_in_file(jinja_context) + prepare_readme_file(jinja_context) + return True -def get_template_name(backport_packages: bool, template_suffix: str) -> str: - """ - Returns name of the template +def replace_content(file_path, old_text, new_text, provider_package_id): + if new_text != old_text: + _, temp_file_path = tempfile.mkstemp() + try: + if os.path.isfile(file_path): + copyfile(file_path, temp_file_path) + with open(file_path, "wt") as readme_file: + readme_file.write(new_text) + print() + print(f"Generated {file_path} file for the {provider_package_id} provider") + print() + if old_text != "": + subprocess.call(["diff", "--color=always", temp_file_path, file_path]) + finally: + os.remove(temp_file_path) - :param backport_packages: whether to generate backport packages - :param template_suffix: suffix to add - :return template name - """ - return ( - BACKPORT_PROVIDER_TEMPLATE_PREFIX if backport_packages else PROVIDER_TEMPLATE_PREFIX - ) + template_suffix + +AUTOMATICALLY_GENERATED_CONTENT = ( + ".. THE REMINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME!" 
+) -def prepare_readme_and_changes_files( - backport_packages, +def update_index_rst_for_regular_providers( context, - current_release_version, - entity_summaries, provider_package_id, - provider_package_path, + target_path, ): - changes_template_name = get_template_name(backport_packages, "CHANGES") - current_changes = render_template(template_name=changes_template_name, context=context, extension='.md') - store_current_changes( - provider_package_path=provider_package_path, - current_release_version=current_release_version, - current_changes=current_changes, - backport_packages=backport_packages, + index_template_name = PROVIDER_TEMPLATE_PREFIX + "INDEX" + index_update = render_template( + template_name=index_template_name, context=context, extension='.rst', keep_trailing_newline=False ) - context['ENTITIES'] = entity_summaries - context['ENTITY_NAMES'] = ENTITY_NAMES - all_releases = get_all_releases(provider_package_path, backport_packages=backport_packages) - context["RELEASES"] = all_releases - readme = LICENCE - readme_template_name = get_template_name(backport_packages, "README") - readme += render_template(template_name=readme_template_name, context=context, extension='.md') - classes_template_name = get_template_name(backport_packages, "CLASSES") - readme += render_template(template_name=classes_template_name, context=context, extension='.md') - for a_release in all_releases: - readme += a_release.content - readme_file_path = os.path.join( - provider_package_path, "BACKPORT_PROVIDER_README.md" if backport_packages else "README.md" + index_file_path = os.path.join(target_path, "index.rst") + old_text = "" + if os.path.isfile(index_file_path): + with open(index_file_path) as readme_file_read: + old_text = readme_file_read.read() + new_text = deepcopy(old_text) + lines = old_text.splitlines(keepends=False) + for index, line in enumerate(lines): + if line == AUTOMATICALLY_GENERATED_CONTENT: + new_text = "\n".join(lines[:index]) + new_text += "\n" + AUTOMATICALLY_GENERATED_CONTENT + "\n" + new_text += index_update + replace_content(index_file_path, old_text, new_text, provider_package_id) + + +def update_commits_rst_for_regular_providers( + context, + provider_package_id, + target_path, +): + commits_template_name = PROVIDER_TEMPLATE_PREFIX + "COMMITS" + new_text = render_template( + template_name=commits_template_name, context=context, extension='.rst', keep_trailing_newline=True ) + index_file_path = os.path.join(target_path, "commits.rst") old_text = "" - if os.path.isfile(readme_file_path): - with open(readme_file_path) as readme_file_read: + if os.path.isfile(index_file_path): + with open(index_file_path) as readme_file_read: old_text = readme_file_read.read() - if old_text != readme: - _, temp_file_path = tempfile.mkstemp(".md") - try: - if os.path.isfile(readme_file_path): - copyfile(readme_file_path, temp_file_path) - with open(readme_file_path, "wt") as readme_file: - readme_file.write(readme) - print() - print(f"Generated {readme_file_path} file for the {provider_package_id} provider") - print() - if old_text != "": - subprocess.call(["diff", "--color=always", temp_file_path, readme_file_path]) - finally: - os.remove(temp_file_path) + replace_content(index_file_path, old_text, new_text, provider_package_id) + + +@lru_cache(maxsize=None) +def black_mode(): + from black import Mode, parse_pyproject_toml, target_version_option_callback + + config = parse_pyproject_toml(os.path.join(SOURCE_DIR_PATH, "pyproject.toml")) + + target_versions = set( + 
target_version_option_callback(None, None, config.get('target_version', [])), # noqa + ) + + return Mode( + target_versions=target_versions, + line_length=config.get('line_length', Mode.line_length), + is_pyi=config.get('is_pyi', Mode.is_pyi), + string_normalization=not config.get('skip_string_normalization', not Mode.string_normalization), + experimental_string_processing=config.get( + 'experimental_string_processing', Mode.experimental_string_processing + ), + ) + + +def black_format(content) -> str: + from black import format_str + + return format_str(content, mode=black_mode()) def prepare_setup_py_file(context): @@ -1412,9 +1644,7 @@ def prepare_setup_py_file(context): template_name=setup_py_template_name, context=context, extension='.py', autoescape=False ) with open(setup_py_file_path, "wt") as setup_py_file: - setup_py_file.write(setup_py_content) - # format the generated setup.py - subprocess.run(["black", setup_py_file_path, "--config=./pyproject.toml"], cwd=SOURCE_DIR_PATH) + setup_py_file.write(black_format(setup_py_content)) def prepare_setup_cfg_file(context): @@ -1447,8 +1677,7 @@ def prepare_get_provider_info_py_file(context, provider_package_id: str): keep_trailing_newline=True, ) with open(get_provider_file_path, "wt") as get_provider_file: - get_provider_file.write(get_provider_content) - subprocess.run(["black", get_provider_file_path, "--config=./pyproject.toml"], cwd=SOURCE_DIR_PATH) + get_provider_file.write(black_format(get_provider_content)) def prepare_manifest_in_file(context): @@ -1464,88 +1693,6 @@ def prepare_manifest_in_file(context): fh.write(content) -def update_release_notes_for_packages( - provider_ids: List[str], release_version: str, version_suffix: str, backport_packages: bool -): - """ - Updates release notes for the list of packages specified. - :param provider_ids: list of provider ids - :param release_version: version to release - :param version_suffix: version suffix corresponding to the version stored in the code - :param backport_packages: whether to prepare regular (False) or backport (True) packages - :return: - """ - imported_classes = import_all_classes( - provider_ids=provider_ids, print_imports=True, paths=[PROVIDERS_PATH], prefix="airflow.providers." - ) - if len(imported_classes) == 0: - raise Exception( - "There is something seriously wrong with importing all classes as " - "None of the classes were imported," - ) - make_sure_remote_apache_exists_and_fetch() - if len(provider_ids) == 0: - if backport_packages: - provider_ids = get_all_backportable_providers() - else: - provider_ids = get_all_providers() - total = 0 - bad = 0 - with with_group("Generating README summary"): - print() - print("Generating README files and checking if entities are correctly named.") - print() - print("Providers to generate:") - for provider_id in provider_ids: - print(provider_id) - print() - for package in provider_ids: - with with_group(f"Update generated files for package {package}"): - inc_total, inc_bad = update_generated_files_for_package( - package, - release_version, - version_suffix, - imported_classes, - backport_packages, - update_release_notes=True, - update_setup=False, - ) - total += inc_total - bad += inc_bad - if bad == 0: - print() - print(f"[green]All good! 
All {total} entities are properly named[/]") - print() - print("Totals:") - print() - print("New:") - print() - for entity in EntityType: - print(f"{entity.value}: {TOTALS[entity][0]}") - print() - print("Moved:") - print() - for entity in EntityType: - print(f"{entity.value}: {TOTALS[entity][1]}") - print() - else: - print() - print(f"[red]There are in total: {bad} entities badly named out of {total} entities[/]") - print() - sys.exit(1) - - -def get_all_backportable_providers() -> List[str]: - """ - Returns all providers that should be taken into account when preparing backports. - For now we remove Papermill as it is deeply linked with Lineage in Airflow core and it won't work - with lineage for Airflow 1.10 anyway. - :return: list of providers that are considered for backport provider packages - """ - excluded_providers = ["papermill"] - return [prov for prov in PROVIDERS_REQUIREMENTS.keys() if prov not in excluded_providers] - - def get_all_providers() -> List[str]: """ Returns all providers for regular packages. @@ -1561,176 +1708,260 @@ def verify_provider_package(package: str) -> None: :return: None """ if package not in get_provider_packages(): - raise Exception( - f"The package {package} is not a provider package. " f"Use one of {get_provider_packages()}" - ) + print(f"[red]Wrong package name: {package}[/]") + print("Use one of:") + print(get_provider_packages()) + raise Exception(f"The package {package} is not a provider package.") -def copy_readme_and_changelog(provider_package_id: str, backport_packages: bool) -> None: - """ - Copies the right README.md/CHANGELOG.txt to provider_package directory. - :param provider_package_id: package from which to copy the setup.py - :param backport_packages: whether to prepare regular (False) or backport (True) packages - :return: - """ - readme_source = "BACKPORT_PROVIDER_README.md" if backport_packages else "README.md" - source_provider_package_path = get_source_package_path(provider_package_id) - readme_source = os.path.join(source_provider_package_path, readme_source) - readme_target = os.path.join(TARGET_PROVIDER_PACKAGES_PATH, "README.md") - copyfile(readme_source, readme_target) - changelog_target = os.path.join(TARGET_PROVIDER_PACKAGES_PATH, "CHANGELOG.txt") - with open(readme_source) as infile, open(changelog_target, 'wt') as outfile: - copy = False - for line in infile: - if line.strip() == "## Releases": - copy = True - if copy: - outfile.write(line) - - -def print_provider_packages_list(_): +@cli.command() +def list_providers_packages() -> bool: """List all provider packages.""" - providers = list(PROVIDERS_REQUIREMENTS.keys()) - for provider in providers: - print(provider) - - -def print_backport_packages_list(_): - """Lists all packages that are backportable.""" - providers = get_all_backportable_providers() + providers = get_all_providers() for provider in providers: print(provider) + return True -def get_version_suffix(version_suffix): - return version_suffix if version_suffix is not None else "" +@cli.command() +@option_version_suffix +@option_git_update +@argument_package_id +@option_verbose +def update_package_documentation( + version_suffix: str, + git_update: bool, + package_id: str, + verbose: bool, +): + """ + Updates package documentation. 
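+    This regenerates the index.rst and commits.rst files in the provider's documentation folder.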
+ See `list-providers-packages` subcommand for the possible PACKAGE_ID values + """ + provider_package_id = package_id + verify_provider_package(provider_package_id) + with with_group(f"Update generated files for package '{provider_package_id}' "): + print("Updating documentation for the latest release version.") + make_sure_remote_apache_exists_and_fetch(git_update, verbose) + return update_generated_files_for_regular_package( + provider_package_id, + version_suffix, + update_release_notes=True, + update_setup=False, + verbose=verbose, + ) -def update_package_release_notes(_): - """Updates package release notes.""" - release_ver = "" - suffix = get_version_suffix(args.version_suffix) - if args.release_version and args.release_version not in get_provider_packages(): - release_ver = args.release_version - print() - print() - print(f"Preparing release version: {release_ver}") - package_list = args.package_list - else: - print() - print() - print("Updating latest release version.") - package_list = args.package_list - print() - update_release_notes_for_packages( - package_list, - release_version=release_ver, - version_suffix=suffix, - backport_packages=BACKPORT_PACKAGES, - ) +def tag_exists_for_version(provider_package_id: str, current_tag: str, verbose: bool): + provider_details = get_provider_details(provider_package_id) + if verbose: + print(f"Checking if tag `{current_tag}` exists.") + if not subprocess.call( + get_git_tag_check_command(current_tag), + cwd=provider_details.source_provider_package_path, + stderr=subprocess.DEVNULL, + stdout=subprocess.DEVNULL, + ): + if verbose: + print(f"Tag `{current_tag}` exists.") + return True + if verbose: + print(f"Tag `{current_tag}` does not exist.") + return False + + +@cli.command() +@option_version_suffix +@option_git_update +@argument_package_id +@option_verbose +def generate_setup_files(version_suffix: str, git_update: bool, package_id: str, verbose: bool): + """ + Generates setup files for the package. + + See `list-providers-packages` subcommand for the possible PACKAGE_ID values + """ + provider_package_id = package_id + package_ok = True + with with_group(f"Generate setup files for '{provider_package_id}'"): + current_tag = get_current_tag(provider_package_id, version_suffix, git_update, verbose) + if tag_exists_for_version(provider_package_id, current_tag, verbose): + print(f"[yellow]The tag {current_tag} exists. 
Not preparing the package.[/]")
+            package_ok = False
+        else:
+            if update_generated_files_for_regular_package(
+                provider_package_id,
+                version_suffix,
+                update_release_notes=False,
+                update_setup=True,
+                verbose=verbose,
+            ):
+                print(f"[green]Generated regular package setup files for {provider_package_id}[/]")
+            else:
+                package_ok = False
+    return package_ok


-def generate_setup_files(args: Any):
-    """Generates setup files for the package."""
-    print()
-    print()
-    print("Generate setup files")
-    print()
-    provider = args.provider
-    suffix = get_version_suffix(args.version_suffix)
-    update_generated_files_for_package(
-        provider, "", suffix, [], BACKPORT_PACKAGES, update_release_notes=False, update_setup=True
-    )


+def get_current_tag(provider_package_id: str, suffix: str, git_update: bool, verbose: bool):
+    verify_provider_package(provider_package_id)
+    make_sure_remote_apache_exists_and_fetch(git_update, verbose)
+    provider_info = get_provider_info_from_provider_yaml(provider_package_id)
+    versions: List[str] = provider_info['versions']
+    current_version = versions[0]
+    current_tag = get_version_tag(current_version, provider_package_id, suffix)
+    return current_tag
+
+
+def cleanup_remnants(verbose: bool):
+    if verbose:
+        print("Cleaning remnants (*.egg-info)")
+    files = glob.glob("*.egg-info")
+    for file in files:
+        shutil.rmtree(file, ignore_errors=True)
+
+
+def verify_setup_py_prepared(provider_package):
+    with open("setup.py") as f:
+        setup_content = f.read()
+    search_for = f"providers-{provider_package.replace('.','-')} for Apache Airflow"
+    if search_for not in setup_content:
+        print(
+            f"[red]The setup.py is probably prepared for another package. "
+            f"It does not contain [bold]{search_for}[/bold]![/]"
+        )
+        print(
+            f"\nRun:\n\n[bold]./dev/provider_packages/prepare_provider_packages.py "
+            f"generate-setup-files {provider_package}[/bold]\n"
+        )
+        raise Exception("Wrong setup!")


-def build_provider_package(args):
-    """
-    Provide packages for setup.py.
-    Choose from the above available packages.
+@cli.command()
+@click.option(
+    '--package-format',
+    type=click.Choice(['sdist', 'wheel', 'both']),
+    default='wheel',
+    help='Optional format - only used in case of building packages (default: wheel)',
+)
+@option_git_update
+@option_version_suffix
+@argument_package_id
+@option_verbose
+def build_provider_packages(
+    package_format: str,
+    git_update: bool,
+    version_suffix: str,
+    package_id: str,
+    verbose: bool,
+) -> bool:
     """
-    _provider_package = args.packages
-    verify_provider_package(_provider_package)
-    package_format = os.environ.get("PACKAGE_FORMAT", "wheel")
-    suffix = get_version_suffix(args.version_suffix)
+    Builds provider package.
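+    The package is built from the generated setup.py in TARGET_PROVIDER_PACKAGES_PATH.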
- print(f"Building provider package: {_provider_package} in format ${package_format}") - copy_readme_and_changelog(_provider_package, BACKPORT_PACKAGES) - command = ["python3", "setup.py"] - - if suffix != "": - command.extend(['egg_info', '--tag-build', suffix]) - if package_format in ['sdist', 'both']: - command.append("sdist") - if package_format in ['wheel', 'both']: - command.append("bdist_wheel") - print(f"Executing command: '{command}'") - subprocess.check_call( - command, - ) - print(f"[green]Prepared provider package {_provider_package} in format ${package_format}[/]") - - -def get_parser(): - provider_names = get_provider_packages() - help_text = "Available packages:\n" - out = " ".join(provider_names) - out_array = textwrap.wrap(out, 80) - help_text += "\n".join(out_array) - cli_parser = argparse.ArgumentParser(description=help_text, formatter_class=argparse.RawTextHelpFormatter) - cli_parser.add_argument( - "--packages", - help=textwrap.dedent(build_provider_package.__doc__), - ) - cli_parser.add_argument( - "--version-suffix", - metavar="SUFFIX", - help=textwrap.dedent( - """Adds version suffix to version of the packages. -Only useful when generating RC candidates for PyPI.""" - ), - ) - subparsers = cli_parser.add_subparsers(dest="cmd") - first_param_subparser1 = subparsers.add_parser( - "list-providers-packages", help=print_provider_packages_list.__doc__ - ) - first_param_subparser1.set_defaults(func=print_provider_packages_list) + See `list-providers-packages` subcommand for the possible PACKAGE_ID values + """ - first_param_subparser2 = subparsers.add_parser( - "list-backportable-packages", help=print_backport_packages_list.__doc__ - ) - first_param_subparser2.set_defaults(func=print_backport_packages_list) + import tempfile - first_param_subparser3 = subparsers.add_parser( - "update-package-release-notes", help=update_package_release_notes.__doc__ - ) - first_param_subparser3.set_defaults(func=update_package_release_notes) - first_param_subparser3.add_argument("release_version", metavar="YYYY.MM.DD", nargs="?", default="") - first_param_subparser3.add_argument("package_list", metavar="PACKAGES", nargs="*", type=list) + # we cannot use context managers because if the directory gets deleted (which bdist_wheel does), + # the context manager will throw an exception when trying to delete it again + tmp_build_dir = tempfile.TemporaryDirectory().name + tmp_dist_dir = tempfile.TemporaryDirectory().name + try: + provider_package_id = package_id + with with_group(f"Prepare provider package for '{provider_package_id}'"): + current_tag = get_current_tag(provider_package_id, version_suffix, git_update, verbose) + if tag_exists_for_version(provider_package_id, current_tag, verbose): + print(f"[yellow]The tag {current_tag} exists. 
Skipping the package.[/]")
+                return False
+            print(f"Changing directory to {TARGET_PROVIDER_PACKAGES_PATH}")
+            os.chdir(TARGET_PROVIDER_PACKAGES_PATH)
+            cleanup_remnants(verbose)
+            provider_package = package_id
+            verify_setup_py_prepared(provider_package)
+
+            print(f"Building provider package: {provider_package} in format {package_format}")
+            command = ["python3", "setup.py", "build", "--build-temp", tmp_build_dir]
+            if version_suffix is not None:
+                command.extend(['egg_info', '--tag-build', version_suffix])
+            if package_format in ['sdist', 'both']:
+                command.append("sdist")
+            if package_format in ['wheel', 'both']:
+                command.extend(["bdist_wheel", "--bdist-dir", tmp_dist_dir])
+            print(f"Executing command: '{' '.join(command)}'")
+            try:
+                subprocess.check_call(command, stdout=subprocess.DEVNULL)
+            except subprocess.CalledProcessError as ex:
+                if ex.output:
+                    print(ex.output.decode())
+                raise Exception(f"The command returned an error: {command}")
+            print(f"[green]Prepared provider package {provider_package} in format {package_format}[/]")
+    finally:
+        shutil.rmtree(tmp_build_dir, ignore_errors=True)
+        shutil.rmtree(tmp_dist_dir, ignore_errors=True)
+
+    return True
+
+
+def verify_provider_classes_for_single_provider(imported_classes: List[str], provider_package_id: str):
+    """Verify naming of provider classes for single provider."""
+    full_package_name = f"airflow.providers.{provider_package_id}"
+    entity_summaries = get_package_class_summary(full_package_name, imported_classes)
+    total, bad = check_if_classes_are_properly_named(entity_summaries)
+    bad += sum([len(entity_summary.wrong_entities) for entity_summary in entity_summaries.values()])
+    if bad != 0:
+        print()
+        print(f"[red]There are {bad} errors of {total} entities for {provider_package_id}[/]")
+        print()
+    return total, bad

-    first_param_subparser4 = subparsers.add_parser("generate-setup-files", help=generate_setup_files.__doc__)
-    first_param_subparser4.set_defaults(func=generate_setup_files)
-    first_param_subparser4.add_argument("provider", metavar="PACKAGE")
-    return cli_parser

+def summarise_total_vs_bad(total: int, bad: int):
+    """Summarises Bad/Good class names for providers"""
+    if bad == 0:
+        print()
+        print(f"[green]All good! All {total} entities are properly named[/]")
+        print()
+        print("Totals:")
+        print()
+        print("New:")
+        print()
+        for entity in EntityType:
+            print(f"{entity.value}: {TOTALS[entity][0]}")
+        print()
+        print("Moved:")
+        print()
+        for entity in EntityType:
+            print(f"{entity.value}: {TOTALS[entity][1]}")
+        print()
+    else:
+        print()
+        print(f"[red]There are in total: {bad} entities badly named out of {total} entities[/]")
+        print()
+        raise Exception("Badly named entities")
+
+
+@cli.command()
+def verify_provider_classes() -> bool:
+    """Verifies if all classes in all providers are correctly named."""
+    with with_group("Verifies names for all provider classes"):
+        provider_ids = get_all_providers()
+        imported_classes = import_all_classes(
+            provider_ids=provider_ids,
+            print_imports=False,
+            paths=[PROVIDERS_PATH],
+            prefix="airflow.providers.",
+        )
+        total = 0
+        bad = 0
+        for provider_package_id in provider_ids:
+            inc_total, inc_bad = verify_provider_classes_for_single_provider(
+                imported_classes, provider_package_id
+            )
+            total += inc_total
+            bad += inc_bad
+        summarise_total_vs_bad(total, bad)
+    return True


 if __name__ == "__main__":
-    parser = get_parser()
-    args = parser.parse_args()
-
-    BACKPORT_PACKAGES = os.getenv('BACKPORT_PACKAGES') == "true"
-
-    if len(sys.argv) < 2:
-        parser.print_help(file=sys.stderr)
-        sys.exit(1)
-
-    try:
-        if args.packages is not None:
-            build_provider_package(args)
-        else:
-            args.func(args)
-        sys.exit(0)
-    except Exception as e:
-        print(e, file=sys.stderr)
-        sys.exit(1)
+    cli()
diff --git a/dev/provider_packages/publish_provider_documentation.sh b/dev/provider_packages/publish_provider_documentation.sh
new file mode 100755
index 0000000000000..4ebf7f2800ee2
--- /dev/null
+++ b/dev/provider_packages/publish_provider_documentation.sh
@@ -0,0 +1,34 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+set -euo pipefail
+
+if [[ $# == "0" ]]; then
+    echo "ERROR: Pass provider ids as a list"
+    exit 1
+fi
+
+provider_filters=()
+for provider in "${@}"
+do
+    provider_filters+=("--package-filter" "apache-airflow-providers-${provider//./-}")
+done
+
+./docs/publish_docs.py \
+    --package-filter apache-airflow-providers \
+    "${provider_filters[@]}"
+cd "${AIRFLOW_SITE_DIRECTORY}"
diff --git a/dev/provider_packages/refactor_provider_packages.py b/dev/provider_packages/refactor_provider_packages.py
deleted file mode 100755
index 83240fe5f473e..0000000000000
--- a/dev/provider_packages/refactor_provider_packages.py
+++ /dev/null
@@ -1,780 +0,0 @@
-#!/usr/bin/env python3
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. 
The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -import os -import sys -import token -from os.path import dirname -from shutil import copyfile, copytree, rmtree -from typing import List - -from bowler import LN, TOKEN, Capture, Filename, Query -from fissix.fixer_util import Comma, KeywordArg, Name -from fissix.pytree import Leaf - -from dev.provider_packages.prepare_provider_packages import ( - get_source_airflow_folder, - get_source_providers_folder, - get_target_providers_folder, - get_target_providers_package_folder, -) - - -def copy_provider_sources() -> None: - """ - Copies provider sources to directory where they will be refactored. - """ - - def rm_build_dir() -> None: - """ - Removes build directory. - """ - build_dir = os.path.join(dirname(__file__), "build") - if os.path.isdir(build_dir): - rmtree(build_dir) - - def ignore_google_auth_backend(src: str, names: List[str]) -> List[str]: - del names - if src.endswith("google" + os.path.sep + "common"): - return ["auth_backend"] - return [] - - def ignore_some_files(src: str, names: List[str]) -> List[str]: - ignored_list = [] - ignored_list.extend(ignore_google_auth_backend(src=src, names=names)) - return ignored_list - - rm_build_dir() - package_providers_dir = get_target_providers_folder() - if os.path.isdir(package_providers_dir): - rmtree(package_providers_dir) - copytree(get_source_providers_folder(), get_target_providers_folder(), ignore=ignore_some_files) - - -def copy_helper_py_file(target_file_path: str) -> None: - """ - Copies. airflow/utils/helper.py to a new location within provider package - - The helper has two methods (chain, cross_downstream) that are moved from the original helper to - 'airflow.models.baseoperator'. so in 1.10 they should reimport the original 'airflow.utils.helper' - methods. Those deprecated methods use import with import_string("") so it is easier to - replace them as strings rather than with Bowler - - :param target_file_path: target path name for the helpers.py - """ - - source_helper_file_path = os.path.join(get_source_airflow_folder(), "airflow", "utils", "helpers.py") - - with open(source_helper_file_path) as in_file: - with open(target_file_path, "wt") as out_file: - for line in in_file: - out_file.write(line.replace('airflow.models.baseoperator', 'airflow.utils.helpers')) - - -class RefactorBackportPackages: - """ - Refactors the code of providers, so that it works in 1.10. - - """ - - def __init__(self): - self.qry = Query() - - def remove_class(self, class_name) -> None: - """ - Removes class altogether. Example diff generated: - - .. 
code-block:: diff - - --- ./airflow/providers/qubole/example_dags/example_qubole.py - +++ ./airflow/providers/qubole/example_dags/example_qubole.py - @@ -22,7 +22,7 @@ - - from airflow import DAG - from airflow.operators.dummy_operator import DummyOperator - -from airflow.operators.python import BranchPythonOperator, PythonOperator - +from airflow.operators.python_operator import BranchPythonOperator, PythonOperator - from airflow.providers.qubole.operators.qubole import QuboleOperator - from airflow.providers.qubole.sensors.qubole import QuboleFileSensor, QubolePartitionSensor - from airflow.utils.dates import days_ago - - :param class_name: name to remove - """ - - def _remover(node: LN, capture: Capture, filename: Filename) -> None: - node.remove() - - self.qry.select_class(class_name).modify(_remover) - - def rename_deprecated_modules(self) -> None: - """ - Renames back to deprecated modules imported. Example diff generated: - - .. code-block:: diff - - --- ./airflow/providers/dingding/operators/dingding.py - +++ ./airflow/providers/dingding/operators/dingding.py - @@ -16,7 +16,7 @@ - # specific language governing permissions and limitations - # under the License. - - -from airflow.operators.baseoperator import BaseOperator - +from airflow.operators.bash_operator import BaseOperator - from airflow.providers.dingding.hooks.dingding import DingdingHook - from airflow.utils.decorators import apply_defaults - - """ - changes = [ - ("airflow.hooks.base", "airflow.hooks.base_hook"), - ("airflow.hooks.dbapi", "airflow.hooks.dbapi_hook"), - ("airflow.operators.bash", "airflow.operators.bash_operator"), - ("airflow.operators.branch", "airflow.operators.branch_operator"), - ("airflow.operators.dummy", "airflow.operators.dummy_operator"), - ("airflow.operators.python", "airflow.operators.python_operator"), - ("airflow.operators.trigger_dagrun", "airflow.operators.dagrun_operator"), - ("airflow.sensors.base", "airflow.sensors.base_sensor_operator"), - ("airflow.sensors.date_time", "airflow.sensors.date_time_sensor"), - ("airflow.sensors.external_task", "airflow.sensors.external_task_sensor"), - ("airflow.sensors.sql", "airflow.sensors.sql_sensor"), - ("airflow.sensors.time_delta", "airflow.sensors.time_delta_sensor"), - ("airflow.sensors.weekday", "airflow.contrib.sensors.weekday_sensor"), - ("airflow.utils.session", "airflow.utils.db"), - ] - for new, old in changes: - self.qry.select_module(new).rename(old) - - def is_not_k8spodop(node: LN, capture: Capture, filename: Filename) -> bool: - return not filename.endswith("/kubernetes_pod.py") - - self.qry.select_module("airflow.providers.cncf.kubernetes.backcompat").filter( - callback=is_not_k8spodop - ).rename("airflow.kubernetes") - - self.qry.select_module("airflow.providers.cncf.kubernetes.backcompat.pod_runtime_info_env").rename( - "airflow.kubernetes.pod_runtime_info_env" - ) - - backcompat_target_folder = os.path.join( - get_target_providers_package_folder("cncf.kubernetes"), "backcompat" - ) - # Remove backcompat classes that are imported from "airflow.kubernetes" - for file in ['pod.py', 'pod_runtime_info_env.py', 'volume.py', 'volume_mount.py']: - os.remove(os.path.join(backcompat_target_folder, file)) - - def add_provide_context_to_python_operators(self) -> None: - """ - - Adds provide context to usages of Python/BranchPython Operators - mostly in example_dags. - Note that those changes apply to example DAGs not to the operators/hooks erc. 
- We package the example DAGs together with the provider classes and they should serve as - examples independently on the version of Airflow it will be installed in. - Provide_context feature in Python operators was feature added 2.0.0 and we are still - using the "Core" operators from the Airflow version that the provider packages are installed - in - the "Core" operators do not have (for now) their own provider package. - - The core operators are: - - * Python - * BranchPython - * Bash - * Branch - * Dummy - * LatestOnly - * ShortCircuit - * PythonVirtualEnv - - - Example diff generated: - - .. code-block:: diff - - --- ./airflow/providers/amazon/aws/example_dags/example_google_api_to_s3_transfer_advanced.py - +++ ./airflow/providers/amazon/aws/example_dags/example_google_api_to_s3_transfer_advanced.py - @@ -105,7 +105,8 @@ - task_video_ids_to_s3.google_api_response_via_xcom, - task_video_ids_to_s3.task_id - ], - - task_id='check_and_transform_video_ids' - + task_id='check_and_transform_video_ids', - + provide_context=True - ) - - """ - - def add_provide_context_to_python_operator(node: LN, capture: Capture, filename: Filename) -> None: - fn_args = capture['function_arguments'][0] - if len(fn_args.children) > 0 and ( - not isinstance(fn_args.children[-1], Leaf) or fn_args.children[-1].type != token.COMMA - ): - fn_args.append_child(Comma()) - - provide_context_arg = KeywordArg(Name('provide_context'), Name('True')) - provide_context_arg.prefix = fn_args.children[0].prefix - fn_args.append_child(provide_context_arg) - - (self.qry.select_function("PythonOperator").is_call().modify(add_provide_context_to_python_operator)) - ( - self.qry.select_function("BranchPythonOperator") - .is_call() - .modify(add_provide_context_to_python_operator) - ) - - def remove_super_init_call(self): - r""" - Removes super().__init__() call from Hooks. - - In airflow 1.10 almost none of the Hooks call super().init(). It was always broken in Airflow 1.10 - - the BaseHook() has it's own __init__() which is wrongly implemented and requires source - parameter to be passed:: - - .. code-block:: python - - def __init__(self, source): - pass - - We fixed it in 2.0, but for the entire 1.10 line calling super().init() is not a good idea - - and it basically does nothing even if you do. And it's bad because it does not initialize - LoggingMixin (BaseHook derives from LoggingMixin). And it is the main reason why Hook - logs are not working as they are supposed to sometimes: - - .. code-block:: python - - class LoggingMixin(object): - \"\"\" - Convenience super-class to have a logger configured with the class name - \"\"\" - def __init__(self, context=None): - self._set_context(context) - - - There are two Hooks in 1.10 that call super.__init__ : - - .. code-block:: python - - super(CloudSqlDatabaseHook, self).__init__(source=None) - super(MongoHook, self).__init__(source='mongo') - - Not that it helps with anything because init in BaseHook does nothing. So we remove - the super().init() in Hooks when backporting to 1.10. - - Example diff generated: - - .. 
code-block:: diff - - --- ./airflow/providers/apache/druid/hooks/druid.py - +++ ./airflow/providers/apache/druid/hooks/druid.py - @@ -49,7 +49,7 @@ - timeout=1, - max_ingestion_time=None): - - - super().__init__() - + - self.druid_ingest_conn_id = druid_ingest_conn_id - self.timeout = timeout - self.max_ingestion_time = max_ingestion_time - - """ - - def remove_super_init_call_modifier(node: LN, capture: Capture, filename: Filename) -> None: - for ch in node.post_order(): - if isinstance(ch, Leaf) and ch.value == "super": - if any(c.value for c in ch.parent.post_order() if isinstance(c, Leaf)): - ch.parent.remove() - - self.qry.select_subclass("BaseHook").modify(remove_super_init_call_modifier) - - def remove_tags(self): - """ - Removes tags from execution of the operators (in example_dags). Note that those changes - apply to example DAGs not to the operators/hooks erc. We package the example DAGs together - with the provider classes and they should serve as examples independently on the version - of Airflow it will be installed in. The tags are feature added in 1.10.10 and occasionally - we will want to run example DAGs as system tests in pre-1.10.10 version so we want to - remove the tags here. - - - Example diff generated: - - .. code-block:: diff - - - -- ./airflow/providers/amazon/aws/example_dags/example_datasync_2.py - +++ ./airflow/providers/amazon/aws/example_dags/example_datasync_2.py - @@ -83,8 +83,7 @@ - with models.DAG( - "example_datasync_2", - default_args=default_args, - - schedule_interval=None, # Override to match your needs - - tags=['example'], - + schedule_interval=None, - ) as dag: - - """ - - def remove_tags_modifier(_: LN, capture: Capture, filename: Filename) -> None: - for node in capture['function_arguments'][0].post_order(): - if isinstance(node, Leaf) and node.value == "tags" and node.type == TOKEN.NAME: - if node.parent.next_sibling and node.parent.next_sibling.value == ",": - node.parent.next_sibling.remove() - node.parent.remove() - - # Remove tags - self.qry.select_method("DAG").is_call().modify(remove_tags_modifier) - - def remove_poke_mode_only_decorator(self): - r""" - Removes @poke_mode_only decorator. The decorator is only available in Airflow 2.0. - - Example diff generated: - - .. 
code-block:: diff - - --- ./airflow/providers/google/cloud/sensors/gcs.py - +++ ./airflow/providers/google/cloud/sensors/gcs.py - @@ -189,7 +189,6 @@ - return datetime.now() - - - -@poke_mode_only - class GCSUploadSessionCompleteSensor(BaseSensorOperator): - \"\"\" - Checks for changes in the number of objects at prefix in Google Cloud Storage - - """ - - def find_and_remove_poke_mode_only_import(node: LN): - for child in node.children: - if isinstance(child, Leaf) and child.type == 1 and child.value == 'poke_mode_only': - import_node = child.parent - # remove the import by default - skip_import_remove = False - if isinstance(child.prev_sibling, Leaf) and child.prev_sibling.value == ",": - # remove coma before the whole import - child.prev_sibling.remove() - # do not remove if there are other imports - skip_import_remove = True - if isinstance(child.next_sibling, Leaf) and child.prev_sibling.value == ",": - # but keep the one after and do not remove the whole import - skip_import_remove = True - # remove the import - child.remove() - if not skip_import_remove: - # remove import of there were no sibling - import_node.remove() - else: - find_and_remove_poke_mode_only_import(child) - - def find_root_remove_import(node: LN): - current_node = node - while current_node.parent: - current_node = current_node.parent - find_and_remove_poke_mode_only_import(current_node) - - def is_poke_mode_only_decorator(node: LN) -> bool: - return ( - node.children - and len(node.children) >= 2 - and isinstance(node.children[0], Leaf) - and node.children[0].value == '@' - and isinstance(node.children[1], Leaf) - and node.children[1].value == 'poke_mode_only' - ) - - def remove_poke_mode_only_modifier(node: LN, capture: Capture, filename: Filename) -> None: - for child in capture['node'].parent.children: - if is_poke_mode_only_decorator(child): - find_root_remove_import(child) - child.remove() - - self.qry.select_subclass("BaseSensorOperator").modify(remove_poke_mode_only_modifier) - - def refactor_amazon_package(self): - """ - Fixes to "amazon" providers package. - - Copies some of the classes used from core Airflow to "common.utils" package of - the provider and renames imports to use them from there. - - We copy typing_compat.py and change import as in example diff: - - .. 
code-block:: diff - - --- ./airflow/providers/amazon/aws/operators/ecs.py - +++ ./airflow/providers/amazon/aws/operators/ecs.py - @@ -24,7 +24,7 @@ - from airflow.models import BaseOperator - from airflow.providers.amazon.aws.hooks.base_aws import AwsBaseHook - from airflow.providers.amazon.aws.hooks.logs import AwsLogsHook - -from airflow.typing_compat import Protocol, runtime_checkable - +from airflow.providers.amazon.common.utils.typing_compat import Protocol, runtime_checkable - from airflow.utils.decorators import apply_defaults - - """ - - def amazon_package_filter(node: LN, capture: Capture, filename: Filename) -> bool: - return filename.startswith("./airflow/providers/amazon/") - - os.makedirs( - os.path.join(get_target_providers_package_folder("amazon"), "common", "utils"), exist_ok=True - ) - copyfile( - os.path.join(get_source_airflow_folder(), "airflow", "utils", "__init__.py"), - os.path.join(get_target_providers_package_folder("amazon"), "common", "__init__.py"), - ) - copyfile( - os.path.join(get_source_airflow_folder(), "airflow", "utils", "__init__.py"), - os.path.join(get_target_providers_package_folder("amazon"), "common", "utils", "__init__.py"), - ) - copyfile( - os.path.join(get_source_airflow_folder(), "airflow", "typing_compat.py"), - os.path.join( - get_target_providers_package_folder("amazon"), "common", "utils", "typing_compat.py" - ), - ) - ( - self.qry.select_module("airflow.typing_compat") - .filter(callback=amazon_package_filter) - .rename("airflow.providers.amazon.common.utils.typing_compat") - ) - - copyfile( - os.path.join(get_source_airflow_folder(), "airflow", "utils", "email.py"), - os.path.join(get_target_providers_package_folder("amazon"), "common", "utils", "email.py"), - ) - ( - self.qry.select_module("airflow.utils.email") - .filter(callback=amazon_package_filter) - .rename("airflow.providers.amazon.common.utils.email") - ) - - def refactor_elasticsearch_package(self): - """ - Fixes to "elasticsearch" providers package. - - Copies some of the classes used from core Airflow to "common.utils" package of - the provider and renames imports to use them from there. - - We copy file_task_handler.py and change import as in example diff: - - .. 
code-block:: diff - - --- ./airflow/providers/elasticsearch/log/es_task_handler.py - +++ ./airflow/providers/elasticsearch/log/es_task_handler.py - @@ -24,7 +24,7 @@ - from airflow.configuration import conf - from airflow.models import TaskInstance - from airflow.utils import timezone - from airflow.utils.helpers import parse_template_string - -from airflow.utils.log.file_task_handler import FileTaskHandler - +from airflow.providers.elasticsearch.common.utils.log.file_task_handler import FileTaskHandler - from airflow.utils.log.json_formatter import JSONFormatter - from airflow.utils.log.logging_mixin import LoggingMixin - - """ - - def elasticsearch_package_filter(node: LN, capture: Capture, filename: Filename) -> bool: - return filename.startswith("./airflow/providers/elasticsearch/") - - os.makedirs( - os.path.join(get_target_providers_package_folder("elasticsearch"), "common", "utils", "log"), - exist_ok=True, - ) - copyfile( - os.path.join(get_source_airflow_folder(), "airflow", "utils", "__init__.py"), - os.path.join(get_target_providers_package_folder("elasticsearch"), "common", "__init__.py"), - ) - copyfile( - os.path.join(get_source_airflow_folder(), "airflow", "utils", "__init__.py"), - os.path.join( - get_target_providers_package_folder("elasticsearch"), "common", "utils", "__init__.py" - ), - ) - copyfile( - os.path.join(get_source_airflow_folder(), "airflow", "utils", "log", "__init__.py"), - os.path.join( - get_target_providers_package_folder("elasticsearch"), "common", "utils", "log", "__init__.py" - ), - ) - copyfile( - os.path.join(get_source_airflow_folder(), "airflow", "utils", "log", "file_task_handler.py"), - os.path.join( - get_target_providers_package_folder("elasticsearch"), - "common", - "utils", - "log", - "file_task_handler.py", - ), - ) - ( - self.qry.select_module("airflow.utils.log.file_task_handler") - .filter(callback=elasticsearch_package_filter) - .rename("airflow.providers.elasticsearch.common.utils.log.file_task_handler") - ) - - def refactor_google_package(self): - r""" - Fixes to "google" providers package. - - Copies some of the classes used from core Airflow to "common.utils" package of the - the provider and renames imports to use them from there. Note that in this case we also rename - the imports in the copied files. - - For example we copy python_virtualenv.py, process_utils.py and change import as in example diff: - - .. code-block:: diff - - --- ./airflow/providers/google/cloud/operators/kubernetes_engine.py - +++ ./airflow/providers/google/cloud/operators/kubernetes_engine.py - @@ -28,11 +28,11 @@ - - from airflow.exceptions import AirflowException - from airflow.models import BaseOperator - -from airflow.providers.cncf.kubernetes.operators.kubernetes_pod import KubernetesPodOperator - +from airflow.contrib.operators.kubernetes_pod_operator import KubernetesPodOperator - from airflow.providers.google.cloud.hooks.kubernetes_engine import GKEHook - from airflow.providers.google.common.hooks.base_google import GoogleBaseHook - from airflow.utils.decorators import apply_defaults - -from airflow.utils.process_utils import execute_in_subprocess, patch_environ - +from airflow.providers.google.common.utils.process_utils import execute_in_subprocess - - - And in the copied python_virtualenv.py we also change import to process_utils.py. This happens - automatically and is solved by Pybowler. - - - .. 
code-block:: diff - - --- ./airflow/providers/google/common/utils/python_virtualenv.py - +++ ./airflow/providers/google/common/utils/python_virtualenv.py - @@ -21,7 +21,7 @@ - \"\"\" - from typing import List, Optional - - -from airflow.utils.process_utils import execute_in_subprocess - +from airflow.providers.google.common.utils.process_utils import execute_in_subprocess - - - def _generate_virtualenv_cmd(tmp_dir: str, python_bin: str, system_site_packages: bool) - - - We also rename Base operator links to deprecated names: - - - .. code-block:: diff - - --- ./airflow/providers/google/cloud/operators/mlengine.py - +++ ./airflow/providers/google/cloud/operators/mlengine.py - @@ -24,7 +24,7 @@ - from typing import List, Optional - - from airflow.exceptions import AirflowException - -from airflow.models import BaseOperator, BaseOperatorLink - +from airflow.models.baseoperator import BaseOperator, BaseOperatorLink - from airflow.models.taskinstance import TaskInstance - from airflow.providers.google.cloud.hooks.mlengine import MLEngineHook - from airflow.utils.decorators import apply_defaults - - We also copy helpers.py (to google.common.utils) and rename imports to use the copied helpers: - - .. code-block:: diff - - --- ./airflow/providers/google/cloud/example_dags/example_datacatalog.py - +++ ./airflow/providers/google/cloud/example_dags/example_datacatalog.py - @@ -37,7 +37,7 @@ - CloudDataCatalogUpdateTagTemplateOperator, - ) - from airflow.utils.dates import days_ago - -from airflow.utils.helpers import chain - +from airflow.providers.google.common.utils.helpers import chain - - default_args = {"start_date": days_ago(1)} - - We do the same for module_loading.py, which is used by helpers: - - .. code-block:: diff - - --- ./airflow/providers/google/common/utils/helpers.py - +++ ./airflow/providers/google/common/utils/helpers.py - @@ -26,7 +26,7 @@ - from jinja2 import Template - - from airflow.exceptions import AirflowException - -from airflow.utils.module_loading import import_string - +from airflow.providers.google.common.utils.module_loading import import_string - - KEY_REGEX = re.compile(r'^[\\w.-]+$') - - """ - - def google_package_filter(node: LN, capture: Capture, filename: Filename) -> bool: - return filename.startswith("./airflow/providers/google/") - - def pure_airflow_models_filter(node: LN, capture: Capture, filename: Filename) -> bool: - """Check if select is exactly [airflow, .
, models]""" - return len(list(node.children[1].leaves())) == 3 - - os.makedirs( - os.path.join(get_target_providers_package_folder("google"), "common", "utils"), exist_ok=True - ) - copyfile( - os.path.join(get_source_airflow_folder(), "airflow", "utils", "__init__.py"), - os.path.join(get_target_providers_package_folder("google"), "common", "utils", "__init__.py"), - ) - copyfile( - os.path.join(get_source_airflow_folder(), "airflow", "utils", "python_virtualenv.py"), - os.path.join( - get_target_providers_package_folder("google"), "common", "utils", "python_virtualenv.py" - ), - ) - - copy_helper_py_file( - os.path.join(get_target_providers_package_folder("google"), "common", "utils", "helpers.py") - ) - - copyfile( - os.path.join(get_source_airflow_folder(), "airflow", "utils", "module_loading.py"), - os.path.join( - get_target_providers_package_folder("google"), "common", "utils", "module_loading.py" - ), - ) - ( - self.qry.select_module("airflow.utils.python_virtualenv") - .filter(callback=google_package_filter) - .rename("airflow.providers.google.common.utils.python_virtualenv") - ) - copyfile( - os.path.join(get_source_airflow_folder(), "airflow", "utils", "process_utils.py"), - os.path.join( - get_target_providers_package_folder("google"), "common", "utils", "process_utils.py" - ), - ) - ( - self.qry.select_module("airflow.utils.process_utils") - .filter(callback=google_package_filter) - .rename("airflow.providers.google.common.utils.process_utils") - ) - - ( - self.qry.select_module("airflow.utils.helpers") - .filter(callback=google_package_filter) - .rename("airflow.providers.google.common.utils.helpers") - ) - - ( - self.qry.select_module("airflow.utils.module_loading") - .filter(callback=google_package_filter) - .rename("airflow.providers.google.common.utils.module_loading") - ) - - ( - # Fix BaseOperatorLinks imports - self.qry.select_module("airflow.models") - .is_filename(include=r"bigquery\.py|mlengine\.py") - .filter(callback=google_package_filter) - .filter(pure_airflow_models_filter) - .rename("airflow.models.baseoperator") - ) - - def refactor_odbc_package(self): - """ - Fixes to "odbc" providers package. - - Copies some of the classes used from core Airflow to "common.utils" package of the - provider and renames imports to use them from there. - - We copy helpers.py and change import as in example diff: - - ..
code-block:: diff - - --- ./airflow/providers/google/cloud/example_dags/example_datacatalog.py - +++ ./airflow/providers/google/cloud/example_dags/example_datacatalog.py - @@ -37,7 +37,7 @@ - CloudDataCatalogUpdateTagTemplateOperator, - ) - from airflow.utils.dates import days_ago - -from airflow.utils.helpers import chain - +from airflow.providers.odbc.utils.helpers import chain - - default_args = {"start_date": days_ago(1)} - - - """ - - def odbc_package_filter(node: LN, capture: Capture, filename: Filename) -> bool: - return filename.startswith("./airflow/providers/odbc/") - - os.makedirs(os.path.join(get_target_providers_folder(), "odbc", "utils"), exist_ok=True) - copyfile( - os.path.join(get_source_airflow_folder(), "airflow", "utils", "__init__.py"), - os.path.join(get_target_providers_package_folder("odbc"), "utils", "__init__.py"), - ) - copy_helper_py_file(os.path.join(get_target_providers_package_folder("odbc"), "utils", "helpers.py")) - - ( - self.qry.select_module("airflow.utils.helpers") - .filter(callback=odbc_package_filter) - .rename("airflow.providers.odbc.utils.helpers") - ) - - def refactor_kubernetes_pod_operator(self): - def kubernetes_package_filter(node: LN, capture: Capture, filename: Filename) -> bool: - return filename.startswith("./airflow/providers/cncf/kubernetes") - - ( - self.qry.select_class("KubernetesPodOperator") - .select_method("add_xcom_sidecar") - .filter(callback=kubernetes_package_filter) - .rename("add_sidecar") - ) - - def do_refactor(self, in_process: bool = False) -> None: # noqa - self.rename_deprecated_modules() - self.refactor_amazon_package() - self.refactor_google_package() - self.refactor_elasticsearch_package() - self.refactor_odbc_package() - self.remove_tags() - self.remove_super_init_call() - self.add_provide_context_to_python_operators() - self.remove_poke_mode_only_decorator() - self.refactor_kubernetes_pod_operator() - # In order to debug Bowler - set in_process to True - self.qry.execute(write=True, silent=False, interactive=False, in_process=in_process) - - -if __name__ == '__main__': - BACKPORT_PACKAGES = os.getenv('BACKPORT_PACKAGES') == "true" - in_process = False - if len(sys.argv) > 1: - if sys.argv[1] in ['--help', '-h']: - print() - print("Refactors provider packages to be Airflow 1.10 compatible.") - print() - print(f"Usage: {sys.argv[0]} [--debug] | [-h] | [--help]") - print() - print("You can use --debug flag in order to run bowler refactoring in process.") - print("This allows you to debug bowler process as usual using your IDE debugger") - print("Otherwise it heavily uses multi-processing and is next-to-impossible to debug") - print() - print("Note - Bowler is also a lot slower in this mode.") - print() - sys.exit(0) - if sys.argv[1] == '--debug': - in_process = True - copy_provider_sources() - if BACKPORT_PACKAGES: - RefactorBackportPackages().do_refactor(in_process=in_process)
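All of the renames in the refactor script above are driven by chained Bowler queries (``select_module(...).filter(...).rename(...)``, executed once at the end of ``do_refactor``). As a minimal, standalone sketch of that pattern (assuming ``bowler`` is installed and the query runs from the repository root; the module names and the filter below are illustrative, not part of this change):

.. code-block:: python

    # Minimal sketch of the Bowler rename pattern used by the refactor script.
    # Assumes `pip install bowler`; paths and module names are illustrative only.
    from bowler import LN, Capture, Filename, Query


    def provider_filter(node: LN, capture: Capture, filename: Filename) -> bool:
        # Restrict the rewrite to files that belong to a single provider tree.
        return filename.startswith("./airflow/providers/odbc/")


    (
        Query(".")  # root of the source tree to refactor
        .select_module("airflow.utils.helpers")
        .filter(callback=provider_filter)
        .rename("airflow.providers.odbc.utils.helpers")
        # write=True applies the changes; interactive=False skips per-diff prompts
        .execute(write=True, silent=False, interactive=False)
    )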
diff --git a/dev/provider_packages/remove_old_releases.py b/dev/provider_packages/remove_old_releases.py index fb8643d74853d..2c8c62d6feb7a 100644 --- a/dev/provider_packages/remove_old_releases.py +++ b/dev/provider_packages/remove_old_releases.py @@ -67,7 +67,7 @@ def process_all_files(directory: str, suffix: str, execute: bool): versioned_file = package_types[0] print( "Leaving the only version: " - f"${versioned_file.base + versioned_file.version + versioned_file.suffix}" + f"{versioned_file.base + versioned_file.version + versioned_file.suffix}" ) # Leave only last version from each type for versioned_file in package_types[:-1]: @@ -95,12 +95,9 @@ def parse_args() -> argparse.Namespace: if __name__ == '__main__': args = parse_args() - process_all_files(args.directory, "-bin.tar.gz", args.execute) - process_all_files(args.directory, "-bin.tar.gz.sha512", args.execute) - process_all_files(args.directory, "-bin.tar.gz.asc", args.execute) - process_all_files(args.directory, "-source.tar.gz", args.execute) - process_all_files(args.directory, "-source.tar.gz.sha512", args.execute) - process_all_files(args.directory, "-source.tar.gz.asc", args.execute) + process_all_files(args.directory, ".tar.gz", args.execute) + process_all_files(args.directory, ".tar.gz.sha512", args.execute) + process_all_files(args.directory, ".tar.gz.asc", args.execute) process_all_files(args.directory, "-py3-none-any.whl", args.execute) process_all_files(args.directory, "-py3-none-any.whl.sha512", args.execute) process_all_files(args.directory, "-py3-none-any.whl.asc", args.execute) diff --git a/dev/provider_packages/tag_providers.sh b/dev/provider_packages/tag_providers.sh new file mode 100755 index 0000000000000..f2909452bad16 --- /dev/null +++ b/dev/provider_packages/tag_providers.sh @@ -0,0 +1,29 @@ +#!/usr/bin/env bash +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +set -euo pipefail +AIRFLOW_SOURCES="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../" && pwd)" + +for file in "${AIRFLOW_SOURCES}/dist/"*.whl do + if [[ ${file} =~ .*airflow_providers_(.*)-(.*)-py3.* ]]; then + provider="providers-${BASH_REMATCH[1]}" + tag="${provider//_/-}/${BASH_REMATCH[2]}" + git tag "${tag}" + git push apache "${tag}" + fi +done
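The new ``tag_providers.sh`` above derives one git tag per provider wheel from the wheel's file name. A hedged illustration of that mapping (the wheel name below is a placeholder; the regular expression is the one from the script):

.. code-block:: python

    # Illustrative only: mirrors the bash regex used by tag_providers.sh above.
    import re

    wheel = "apache_airflow_providers_google-2.0.0-py3-none-any.whl"  # placeholder
    match = re.match(r".*airflow_providers_(.*)-(.*)-py3.*", wheel)
    if match:
        provider = "providers-" + match.group(1)
        tag = provider.replace("_", "-") + "/" + match.group(2)
        print(tag)  # providers-google/2.0.0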
diff --git a/docs/README.rst b/docs/README.rst index 4bb140cb9b514..5ea38ad648fe1 100644 --- a/docs/README.rst +++ b/docs/README.rst @@ -25,7 +25,7 @@ For Helm Chart, see: `/chart/README.md <../chart/READMe.md>`__ Development documentation preview ================================== -Documentation from the ``master`` branch is built and automatically published: `s.apache.org/airflow-docs `_ +Documentation from the development version is built and automatically published: `s.apache.org/airflow-docs `_ Documentation for your PRs is available as downloadable artifact in GitHub Actions after the CI builds your PR. diff --git a/docs/apache-airflow-providers-airbyte/commits.rst b/docs/apache-airflow-providers-airbyte/commits.rst new file mode 100644 index 0000000000000..cae1272d08bbd --- /dev/null +++ b/docs/apache-airflow-providers-airbyte/commits.rst @@ -0,0 +1,27 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Package apache-airflow-providers-airbyte +---------------------------------------- + +`Airbyte `__ + + +This is a detailed commit list of changes for the ``airbyte`` provider package. +For a high-level changelog, see :doc:`package information including changelog `. diff --git a/docs/apache-airflow-providers-airbyte/connections.rst b/docs/apache-airflow-providers-airbyte/connections.rst new file mode 100644 index 0000000000000..31b69c70a6bf8 --- /dev/null +++ b/docs/apache-airflow-providers-airbyte/connections.rst @@ -0,0 +1,36 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + + +Airbyte Connection +================== +The Airbyte connection type uses the HTTP protocol. + +Configuring the Connection +-------------------------- +Host (required) + The host address of the Airbyte server. + +Port (required) + The port for the Airbyte server. + +Login (optional) + Specify the user name to connect. + +Password (optional) + Specify the password to connect.
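The connection fields above follow Airflow's standard HTTP-style connection model, so the connection can also be supplied as a URI through an environment variable. A minimal sketch, assuming Airflow's ``AIRFLOW_CONN_<CONN_ID>`` convention (host, port, and credentials below are placeholders):

.. code-block:: python

    # Sketch: expose an Airbyte connection to Airflow via an environment
    # variable. All values below are placeholders, not part of this change.
    import os

    os.environ["AIRFLOW_CONN_AIRBYTE_CONN_EXAMPLE"] = (
        "http://airbyte-user:airbyte-password@localhost:8001"
    )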
diff --git a/docs/apache-airflow-providers-airbyte/index.rst b/docs/apache-airflow-providers-airbyte/index.rst new file mode 100644 index 0000000000000..d83f5e054c47f --- /dev/null +++ b/docs/apache-airflow-providers-airbyte/index.rst @@ -0,0 +1,121 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-airbyte`` +==================================== + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: Guides + + Operators + Connection types + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/airbyte/index> + +.. toctree:: + :maxdepth: 1 + :caption: Resources + + Example DAGs + PyPI Repository + +.. toctree:: + :maxdepth: 1 + :caption: Commits + + Detailed list of commits + +Package apache-airflow-providers-airbyte +---------------------------------------- + +`Airbyte `__ + + +Release: 1.0.0 + +Provider package +---------------- + +This is a provider package for the ``airbyte`` provider. All classes for this provider package +are in the ``airflow.providers.airbyte`` python package. + +Installation +------------ + +.. note:: + + In November 2020, a new version of pip (20.3) was released with a new, 2020 resolver. This resolver + does not yet work with Apache Airflow and might lead to errors in installation, depending on your choice + of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4 + (``pip install --upgrade pip==20.2.4``) or, in case you use pip 20.3, add the option + ``--use-deprecated legacy-resolver`` to your pip install command. + + +You can install this package on top of an existing Airflow 2.* installation via +``pip install apache-airflow-providers-airbyte`` + +Cross provider package dependencies +----------------------------------- + +These are dependencies that might be needed in order to use all the features of the package. +You need to install the specified provider packages in order to use them. + +You can install such cross-provider dependencies when installing from PyPI. For example: + +.. code-block:: bash + + pip install apache-airflow-providers-airbyte[http] + + +================================================================================================ ======== +Dependent package Extra +================================================================================================ ======== +`apache-airflow-providers-http `_ ``http`` +================================================================================================ ======== + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +Changelog +--------- + +1.0.0 +..... + +Initial version of the provider. diff --git a/docs/apache-airflow-providers-airbyte/operators/airbyte.rst b/docs/apache-airflow-providers-airbyte/operators/airbyte.rst new file mode 100644 index 0000000000000..b67462734e486 --- /dev/null +++ b/docs/apache-airflow-providers-airbyte/operators/airbyte.rst @@ -0,0 +1,58 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership.
The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +.. _howto/operator:AirbyteTriggerSyncOperator: + +AirbyteTriggerSyncOperator +========================== + +Use the :class:`~airflow.providers.airbyte.operators.airbyte.AirbyteTriggerSyncOperator` to +trigger an existing ConnectionId sync job in Airbyte. + +.. warning:: + This operator triggers a synchronization job in Airbyte. + If triggered again, this operator does not guarantee idempotency. + You must be aware of the source (database, API, etc.) you are updating or syncing and + of the method Airbyte applies to perform the operation. + + +Using the Operator +^^^^^^^^^^^^^^^^^^ + +The AirbyteTriggerSyncOperator requires the ``connection_id``: the UUID identifier +created in Airbyte for the synchronization job between a source and a destination. +Use the ``airbyte_conn_id`` parameter to specify the Airbyte connection to use to +connect to your account. + +You can trigger a synchronization job in Airflow with the Operator in two ways. The first +is a synchronous process: the Operator triggers the Airbyte job and manages its status until completion. +The other is to use the flag ``async = True`` so the Operator only triggers the job and +returns the ``job_id``, which should be passed to the AirbyteSensor. + +An example using the synchronous way: + +.. exampleinclude:: /../../airflow/providers/airbyte/example_dags/example_airbyte_trigger_job.py + :language: python + :start-after: [START howto_operator_airbyte_synchronous] + :end-before: [END howto_operator_airbyte_synchronous] + +An example using the async way: + +.. exampleinclude:: /../../airflow/providers/airbyte/example_dags/example_airbyte_trigger_job.py + :language: python + :start-after: [START howto_operator_airbyte_asynchronous] + :end-before: [END howto_operator_airbyte_asynchronous]
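To complement the ``exampleinclude`` snippets referenced above, a minimal standalone sketch of the synchronous mode could look as follows; the DAG id, connection ids, and the UUID are placeholders, and the import path assumes the provider's ``operators.airbyte`` module:

.. code-block:: python

    # Sketch of the synchronous mode described in the guide above.
    from airflow import DAG
    from airflow.providers.airbyte.operators.airbyte import AirbyteTriggerSyncOperator
    from airflow.utils.dates import days_ago

    with DAG(dag_id="example_airbyte_sync", start_date=days_ago(1)) as dag:
        sync_source_destination = AirbyteTriggerSyncOperator(
            task_id="airbyte_sync_source_dest",
            airbyte_conn_id="airbyte_conn_example",  # Airflow connection (placeholder)
            connection_id="15bc3800-82e4-48c3-a32d-620661273f28",  # Airbyte UUID (placeholder)
        )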
diff --git a/docs/apache-airflow-providers-apache-beam/index.rst b/docs/apache-airflow-providers-apache-beam/index.rst new file mode 100644 index 0000000000000..30718f9a01f5f --- /dev/null +++ b/docs/apache-airflow-providers-apache-beam/index.rst @@ -0,0 +1,36 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +``apache-airflow-providers-apache-beam`` +======================================== + +Content +------- + +.. toctree:: + :maxdepth: 1 + :caption: References + + Python API <_api/airflow/providers/apache/beam/index> + PyPI Repository + Example DAGs + +.. toctree:: + :maxdepth: 1 + :caption: Guides + + Operators diff --git a/docs/apache-airflow-providers-apache-beam/operators.rst b/docs/apache-airflow-providers-apache-beam/operators.rst new file mode 100644 index 0000000000000..3c1b2bd296d40 --- /dev/null +++ b/docs/apache-airflow-providers-apache-beam/operators.rst @@ -0,0 +1,116 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +Apache Beam Operators +===================== + +`Apache Beam `__ is an open source, unified model for defining both batch and +streaming data-parallel processing pipelines. Using one of the open source Beam SDKs, you build a program +that defines the pipeline. The pipeline is then executed by one of Beam’s supported distributed processing +back-ends, which include Apache Flink, Apache Spark, and Google Cloud Dataflow. + + +.. _howto/operator:BeamRunPythonPipelineOperator: + +Run Python Pipelines in Apache Beam +=================================== + +The ``py_file`` argument must be specified for +:class:`~airflow.providers.apache.beam.operators.beam.BeamRunPythonPipelineOperator` +as it contains the pipeline to be executed by Beam. The Python file can either be available on GCS +(Airflow is able to download it) or on the local filesystem (provide the absolute path to it). + +The ``py_interpreter`` argument specifies the Python version to be used when executing the pipeline; the default +is ``python3``. If your Airflow instance is running on Python 2, specify ``python2`` and ensure your ``py_file`` is +written in Python 2. For best results, use Python 3. + +If the ``py_requirements`` argument is specified, a temporary Python virtual environment with the specified +requirements will be created, and the pipeline will run inside it. + +The ``py_system_site_packages`` argument specifies whether all the Python packages from your Airflow instance +will be accessible within the virtual environment (if the ``py_requirements`` argument is specified); +avoid this unless the Dataflow job requires it.
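Putting the ``py_file``, ``py_interpreter``, ``py_requirements``, and ``py_system_site_packages`` arguments together, a single task might be declared as in the following sketch (the file path and the requirement pin are placeholders):

.. code-block:: python

    # Sketch of a DirectRunner task using the arguments described above.
    from airflow.providers.apache.beam.operators.beam import BeamRunPythonPipelineOperator

    start_python_pipeline = BeamRunPythonPipelineOperator(
        task_id="start_python_pipeline",
        py_file="/files/pipelines/wordcount.py",  # placeholder local path
        py_interpreter="python3",
        py_requirements=["apache-beam[gcp]==2.26.0"],  # placeholder pin
        py_system_site_packages=False,
    )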
+ +Python Pipelines with DirectRunner +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. exampleinclude:: /../../airflow/providers/apache/beam/example_dags/example_beam.py + :language: python + :dedent: 4 + :start-after: [START howto_operator_start_python_direct_runner_pipeline_local_file] + :end-before: [END howto_operator_start_python_direct_runner_pipeline_local_file] + +.. exampleinclude:: /../../airflow/providers/apache/beam/example_dags/example_beam.py + :language: python + :dedent: 4 + :start-after: [START howto_operator_start_python_direct_runner_pipeline_gcs_file] + :end-before: [END howto_operator_start_python_direct_runner_pipeline_gcs_file] + +Python Pipelines with DataflowRunner +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. exampleinclude:: /../../airflow/providers/apache/beam/example_dags/example_beam.py + :language: python + :dedent: 4 + :start-after: [START howto_operator_start_python_dataflow_runner_pipeline_gcs_file] + :end-before: [END howto_operator_start_python_dataflow_runner_pipeline_gcs_file] + +.. exampleinclude:: /../../airflow/providers/apache/beam/example_dags/example_beam.py + :language: python + :dedent: 4 + :start-after: [START howto_operator_start_python_dataflow_runner_pipeline_async_gcs_file] + :end-before: [END howto_operator_start_python_dataflow_runner_pipeline_async_gcs_file] + +.. _howto/operator:BeamRunJavaPipelineOperator: + +Run Java Pipelines in Apache Beam +================================= + +For Java pipelines, the ``jar`` argument must be specified for +:class:`~airflow.providers.apache.beam.operators.beam.BeamRunJavaPipelineOperator` +as it contains the pipeline to be executed by Apache Beam. The JAR can either be available on GCS +(Airflow is able to download it) or on the local filesystem (provide the absolute path to it). + +Java Pipelines with DirectRunner +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. exampleinclude:: /../../airflow/providers/apache/beam/example_dags/example_beam.py + :language: python + :dedent: 4 + :start-after: [START howto_operator_start_java_direct_runner_pipeline] + :end-before: [END howto_operator_start_java_direct_runner_pipeline] + +Java Pipelines with DataflowRunner +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. exampleinclude:: /../../airflow/providers/apache/beam/example_dags/example_beam.py + :language: python + :dedent: 4 + :start-after: [START howto_operator_start_java_dataflow_runner_pipeline] + :end-before: [END howto_operator_start_java_dataflow_runner_pipeline]
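Analogously to the Python case, a Java pipeline task only needs the ``jar`` pointing at the bundled pipeline. A minimal sketch, where the jar path and the job class are placeholders and ``job_class`` is assumed to select the pipeline's main class:

.. code-block:: python

    # Sketch of a Java pipeline task; the jar must contain the pipeline code.
    from airflow.providers.apache.beam.operators.beam import BeamRunJavaPipelineOperator

    start_java_pipeline = BeamRunJavaPipelineOperator(
        task_id="start_java_pipeline",
        jar="/files/pipelines/wordcount-bundled.jar",  # placeholder local path
        job_class="org.apache.beam.examples.WordCount",  # placeholder main class
    )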
+ +Reference +^^^^^^^^^ + +For further information, look at: + +* `Apache Beam Documentation `__ +* `Google Cloud API Documentation `__ +* `Product Documentation `__ +* `Dataflow Monitoring Interface `__ +* `Dataflow Command-line Interface `__ diff --git a/docs/apache-airflow-providers-google/commits.rst b/docs/apache-airflow-providers-google/commits.rst new file mode 100644 index 0000000000000..ef169e1bd11cf --- /dev/null +++ b/docs/apache-airflow-providers-google/commits.rst @@ -0,0 +1,479 @@ + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Package apache-airflow-providers-google +------------------------------------------------------ + +Google services including: + + - `Google Ads `__ + - `Google Cloud (GCP) `__ + - `Google Firebase `__ + - `Google Marketing Platform `__ + - `Google Workspace `__ (formerly Google Suite) + + +This is a detailed commit list of changes for the ``google`` provider package. +For a high-level changelog, see :doc:`package information including changelog `. + + + +2.0.0 +..... + +Latest change: 2021-01-31 + +================================================================================================ =========== ========================================================================================= +Commit Committed Subject +================================================================================================ =========== ========================================================================================= +`2ab3c1130 `_ 2021-01-31 ``Implement provider versioning tools`` +`823741cfe `_ 2021-01-28 ``Improve GCS system test envs (#13946)`` +`6d6588fe2 `_ 2021-01-28 ``Add Google Cloud Workflows Operators (#13366)`` +`810c15ed8 `_ 2021-01-27 ``Fix and improve GCP BigTable hook and system test (#13896)`` +`661661733 `_ 2021-01-27 ``Add env variables to PubSub example dag (#13794)`` +`f473ca713 `_ 2021-01-24 ``Replace 'google_cloud_storage_conn_id' by 'gcp_conn_id' when using 'GCSHook' (#13851)`` +`a9ac2b040 `_ 2021-01-23 ``Switch to f-strings using flynt. (#13732)`` +`9592be88e `_ 2021-01-22 ``Fix Google Spanner example dag (#13842)`` +`af52fdb51 `_ 2021-01-22 ``Improve environment variables in GCP Dataflow system test (#13841)`` +`e7946f1cb `_ 2021-01-22 ``Improve environment variables in GCP Datafusion system test (#13837)`` +`61c1d6ec6 `_ 2021-01-22 ``Improve environment variables in GCP Memorystore system test (#13833)`` +`202f66093 `_ 2021-01-22 ``Improve environment variables in GCP Lifeciences system test (#13834)`` +`70bf307f3 `_ 2021-01-21 ``Add How To Guide for Dataflow (#13461)`` +`3fd5ef355 `_ 2021-01-21 ``Add missing logos for integrations (#13717)`` +`309788e5e `_ 2021-01-18 ``Refactor DataprocOperators to support google-cloud-dataproc 2.0 (#13256)`` +`7ec858c45 `_ 2021-01-17 ``updated Google DV360 Hook to fix SDF issue (#13703)`` +`ef8617ec9 `_ 2021-01-14 ``Support google-cloud-tasks>=2.0.0 (#13347)`` +`189af5404 `_ 2021-01-13 ``Add system tests for Stackdriver operators (#13644)`` +`a6f999b62 `_ 2021-01-11 ``Support google-cloud-automl >=2.1.0 (#13505)`` +`947dbb73b `_ 2021-01-11 ``Support google-cloud-datacatalog>=3.0.0 (#13534)`` +`2fb68342b `_ 2021-01-07 ``Replace deprecated module and operator in example_tasks.py (#13527)`` +`003584bbf `_ 2021-01-05 ``Fix failing backport packages test (#13497)`` +`7d1ea4cb1 `_ 2021-01-05 ``Replace deprecated module and operator in example_tasks.py (#13473)`` +`c7d75ad88 `_ 2021-01-05 ``Revert "Support google-cloud-datacatalog 3.0.0 (#13224)" (#13482)`` +`feb84057d `_ 2021-01-04 ``Support google-cloud-datacatalog 3.0.0 (#13224)`` +`3a3e73998 `_ 2021-01-04 ``Fix insert_all method of BigQueryHook to support tables without schema (#13138)`` +`c33d2c06b `_ 2021-01-02 ``Fix another pylint c-extension-no-member (#13438)`` +`f6518dd6a `_ 2021-01-02 ``Generalize MLEngineStartTrainingJobOperator to custom images (#13318)`` +`9de712708 `_ 2020-12-31 ``Support google-cloud-bigquery-datatransfer>=3.0.0 (#13337)`` +`406181d64 `_ 2020-12-31 ``Add Parquet data type to BaseSQLToGCSOperator (#13359)`` +`295d66f91 `_ 2020-12-30 ``Fix Grammar
in PIP warning (#13380)`` +`13a9747bf `_ 2020-12-28 ``Revert "Support google-cloud-tasks>=2.0.0 (#13334)" (#13341)`` +`04ec45f04 `_ 2020-12-28 ``Add DataprocCreateWorkflowTemplateOperator (#13338)`` +`1f712219f `_ 2020-12-28 ``Support google-cloud-tasks>=2.0.0 (#13334)`` +`f4745c8ce `_ 2020-12-26 ``Fix typo in example (#13321)`` +`e9d65bd45 `_ 2020-12-24 ``Decode Remote Google Logs (#13115)`` +`e7aeacf33 `_ 2020-12-24 ``Add OracleToGCS Transfer (#13246)`` +`323084e97 `_ 2020-12-24 ``Add timeout option to gcs hook methods. (#13156)`` +`0b626c804 `_ 2020-12-22 ``Support google-cloud-redis>=2.0.0 (#13117)`` +`9042a5855 `_ 2020-12-22 ``Add more operators to example DAGs for Cloud Tasks (#13235)`` +`8c00ec89b `_ 2020-12-22 ``Support google-cloud-pubsub>=2.0.0 (#13127)`` +`b26b0df5b `_ 2020-12-22 ``Update compatibility with google-cloud-kms>=2.0 (#13124)`` +`9a1d3820d `_ 2020-12-22 ``Support google-cloud-datacatalog>=1.0.0 (#13097)`` +`f95b1c9c9 `_ 2020-12-21 ``Add regional support to dataproc workflow template operators (#12907)`` +`6cf76d7ac `_ 2020-12-18 ``Fix typo in pip upgrade command :( (#13148)`` +`23f27c1b1 `_ 2020-12-18 ``Add system tests for CloudKMSHook (#13122)`` +`cddbf81b1 `_ 2020-12-17 ``Fix Google BigQueryHook method get_schema() (#13136)`` +`1259c712a `_ 2020-12-17 ``Update compatibility with google-cloud-os-login>=2.0.0 (#13126)`` +`bcf77586e `_ 2020-12-16 ``Fix Data Catalog operators (#13096)`` +`5090fb0c8 `_ 2020-12-15 ``Add script to generate integrations.json (#13073)`` +`b4b9cf559 `_ 2020-12-14 ``Check for missing references to operator guides (#13059)`` +`1c1ef7ee6 `_ 2020-12-14 ``Add project_id to client inside BigQuery hook update_table method (#13018)`` +================================================================================================ =========== ========================================================================================= + +1.0.0 +..... + +Latest change: 2020-12-09 + +================================================================================================ =========== ====================================================================================================================================================================== +Commit Committed Subject +================================================================================================ =========== ====================================================================================================================================================================== +`32971a1a2 `_ 2020-12-09 ``Updates providers versions to 1.0.0 (#12955)`` +`b40dffa08 `_ 2020-12-08 ``Rename remaing modules to match AIP-21 (#12917)`` +`9b39f2478 `_ 2020-12-08 ``Add support for dynamic connection form fields per provider (#12558)`` +`1dcd3e13f `_ 2020-12-05 ``Add support for extra links coming from the providers (#12472)`` +`2037303ee `_ 2020-11-29 ``Adds support for Connection/Hook discovery from providers (#12466)`` +`02d94349b `_ 2020-11-29 ``Don't use time.time() or timezone.utcnow() for duration calculations (#12353)`` +`76bcd08dc `_ 2020-11-28 ``Added '@apply_defaults' decorator. 
(#12620)`` +`e1ebfa68b `_ 2020-11-27 ``Add DataflowJobMessagesSensor and DataflowAutoscalingEventsSensor (#12249)`` +`3fa51f94d `_ 2020-11-24 ``Add check for duplicates in provider.yaml files (#12578)`` +`c34ef853c `_ 2020-11-20 ``Separate out documentation building per provider (#12444)`` +`9e3b2c554 `_ 2020-11-19 ``GCP Secrets Optional Lookup (#12360)`` +`008035450 `_ 2020-11-18 ``Update provider READMEs for 1.0.0b2 batch release (#12449)`` +`7ca0b6f12 `_ 2020-11-18 ``Enable Markdownlint rule MD003/heading-style/header-style (#12427) (#12438)`` +`8d0950646 `_ 2020-11-18 ``Fix download method in GCSToBigQueryOperator (#12442)`` +`2c0920fba `_ 2020-11-17 ``Adds mechanism for provider package discovery. (#12383)`` +`2cda2f2a0 `_ 2020-11-17 ``Add missing pre-commit definition - provider-yamls (#12393)`` +`80a957f14 `_ 2020-11-17 ``Add Dataflow sensors - job metrics (#12039)`` +`ae7cb4a1e `_ 2020-11-17 ``Update wrong commit hash in backport provider changes (#12390)`` +`917e6c442 `_ 2020-11-16 ``Add provide_file_and_upload to GCSHook (#12310)`` +`cfa4ecfeb `_ 2020-11-15 ``Add DataflowJobStatusSensor and support non-blocking execution of jobs (#11726)`` +`6889a333c `_ 2020-11-15 ``Improvements for operators and hooks ref docs (#12366)`` +`7825e8f59 `_ 2020-11-13 ``Docs installation improvements (#12304)`` +`32b59f835 `_ 2020-11-12 ``Fixes the sending of an empty list to BigQuery 'list_rows' (#12307)`` +`250436d96 `_ 2020-11-10 ``Fix spelling in Python files (#12230)`` +`502ba309e `_ 2020-11-10 ``Enable Markdownlint rule - MD022/blanks-around-headings (#12225)`` +`dd2095f4a `_ 2020-11-10 ``Simplify string expressions & Use f-string (#12216)`` +`f37c6e6fc `_ 2020-11-10 ``Add Compute Engine SSH hook (#9879)`` +`85a18e13d `_ 2020-11-09 ``Point at pypi project pages for cross-dependency of provider packages (#12212)`` +`59eb5de78 `_ 2020-11-09 ``Update provider READMEs for up-coming 1.0.0beta1 releases (#12206)`` +`61feb6ec4 `_ 2020-11-09 ``Provider's readmes generated for elasticsearch and google packages (#12194)`` +`b2a28d159 `_ 2020-11-09 ``Moves provider packages scripts to dev (#12082)`` +`fcb6b00ef `_ 2020-11-08 ``Add authentication to AWS with Google credentials (#12079)`` +`2ef3b7ef8 `_ 2020-11-08 ``Fix ERROR - Object of type 'bytes' is not JSON serializable when using store_to_xcom_key parameter (#12172)`` +`0caec9fd3 `_ 2020-11-06 ``Dataflow - add waiting for successful job cancel (#11501)`` +`cf9437d79 `_ 2020-11-06 ``Simplify string expressions (#12123)`` +`91a64db50 `_ 2020-11-04 ``Format all files (without excepions) by black (#12091)`` +`fd3db778e `_ 2020-11-04 ``Add server side cursor support for postgres to GCS operator (#11793)`` +`f1f194026 `_ 2020-11-04 ``Add DataflowStartSQLQuery operator (#8553)`` +`41bf172c1 `_ 2020-11-04 ``Simplify string expressions (#12093)`` +`5f5244b74 `_ 2020-11-04 ``Add template fields renderers to Biguery and Dataproc operators (#12067)`` +`4e8f9cc8d `_ 2020-11-03 ``Enable Black - Python Auto Formmatter (#9550)`` +`8c42cf1b0 `_ 2020-11-03 ``Use PyUpgrade to use Python 3.6 features (#11447)`` +`45ae145c2 `_ 2020-11-03 ``Log BigQuery job id in insert method of BigQueryHook (#12056)`` +`e324b37a6 `_ 2020-11-03 ``Add job name and progress logs to Cloud Storage Transfer Hook (#12014)`` +`6071fdd58 `_ 2020-11-02 ``Improve handling server errors in DataprocSubmitJobOperator (#11947)`` +`2f703df12 `_ 2020-10-30 ``Add SalesforceToGcsOperator (#10760)`` +`e5713e00b `_ 2020-10-29 ``Add drain option when canceling Dataflow pipelines (#11374)`` +`37eaac3c5 `_ 
2020-10-29 ``The PRs which are not approved run subset of tests (#11828)`` +`79cb77199 `_ 2020-10-28 ``Fixing re pattern and changing to use a single character class. (#11857)`` +`5a439e84e `_ 2020-10-26 ``Prepare providers release 0.0.2a1 (#11855)`` +`240c7d4d7 `_ 2020-10-26 ``Google Memcached hooks - improve protobuf messages handling (#11743)`` +`8afdb6ac6 `_ 2020-10-26 ``Fix spellings (#11825)`` +`872b1566a `_ 2020-10-25 ``Generated backport providers readmes/setup for 2020.10.29 (#11826)`` +`6ce855af1 `_ 2020-10-24 ``Fix spelling (#11821)`` +`483068745 `_ 2020-10-24 ``Use Python 3 style super classes (#11806)`` +`727c739af `_ 2020-10-22 ``Improve Cloud Memorystore for Redis example (#11735)`` +`1da8379c9 `_ 2020-10-22 ``Fix static checks after merging #10121 (#11737)`` +`91503308c `_ 2020-10-22 ``Add Google Cloud Memorystore Memcached Operators (#10121)`` +`950c16d0b `_ 2020-10-21 ``Retry requests in case of error in Google ML Engine Hook (#11712)`` +`2bfc53b5e `_ 2020-10-21 ``Fix doc errors in google provider files. (#11713)`` +`53e606210 `_ 2020-10-21 ``Enforce strict rules for yamllint (#11709)`` +`349b0811c `_ 2020-10-20 ``Add D200 pydocstyle check (#11688)`` +`2d854c350 `_ 2020-10-19 ``Add service_account to Google ML Engine operator (#11619)`` +`46a121fb7 `_ 2020-10-18 ``docs: Update Bigquery clustering docstrings (#11232)`` +`49c58147f `_ 2020-10-18 ``Strict type checking for provider Google (#11609)`` +`0823d46a7 `_ 2020-10-16 ``Add type annotations for AWS operators and hooks (#11434)`` +`3c10ca650 `_ 2020-10-16 ``Add DataflowStartFlexTemplateOperator (#8550)`` +`8865d14df `_ 2020-10-16 ``Strict type checking for provider google cloud (#11548)`` +`16e712971 `_ 2020-10-13 ``Added support for provider packages for Airflow 2.0 (#11487)`` +`06141d6d0 `_ 2020-10-12 ``Google cloud operator strict type check (#11450)`` +`d305876be `_ 2020-10-12 ``Remove redundant None provided as default to dict.get() (#11448)`` +`1845cd11b `_ 2020-10-11 ``Strict type check for google ads and cloud hooks (#11390)`` +`bd204bb91 `_ 2020-10-11 ``Optionally set null marker in csv exports in BaseSQLToGCSOperator (#11409)`` +`75071831b `_ 2020-10-10 ``Remove redundant parentheses from Python files (#10967)`` +`8baf657fc `_ 2020-10-09 ``Fix regression in DataflowTemplatedJobStartOperator (#11167)`` +`b0fcf6755 `_ 2020-10-07 ``Add AzureFileShareToGCSOperator (#10991)`` +`47b05a87f `_ 2020-10-07 ``Improve handling of job_id in BigQuery operators (#11287)`` +`0a0e1af80 `_ 2020-10-03 ``Fix Broken Markdown links in Providers README TOC (#11249)`` +`ca4238eb4 `_ 2020-10-02 ``Fixed month in backport packages to October (#11242)`` +`5220e4c38 `_ 2020-10-02 ``Prepare Backport release 2020.09.07 (#11238)`` +`cb52fb0ae `_ 2020-09-27 ``Add example DAG and system test for MySQLToGCSOperator (#10990)`` +`99accec29 `_ 2020-09-25 ``Fix incorrect Usage of Optional[str] & Optional[int] (#11141)`` +`e3f96ce7a `_ 2020-09-24 ``Fix incorrect Usage of Optional[bool] (#11138)`` +`daf8f3108 `_ 2020-09-23 ``Add template fields renderers for better UI rendering (#11061)`` +`f3e87c503 `_ 2020-09-22 ``Add D202 pydocstyle check (#11032)`` +`cb979f9f2 `_ 2020-09-22 ``Get Airflow configs with sensitive data from CloudSecretManagerBackend (#11024)`` +`76545bb3d `_ 2020-09-16 ``Add example dag and system test for S3ToGCSOperator (#10951)`` +`22c631625 `_ 2020-09-16 ``Fix more docs spellings (#10965)`` +`12a652f53 `_ 2020-09-13 ``Fix parameter name collision in AutoMLBatchPredictOperator #10723 (#10869)`` +`41a62735e `_ 2020-09-11 ``Add 
on_kill method to BigQueryInsertJobOperator (#10866)`` +`3e91da56e `_ 2020-09-11 ``fix typo in firebase/example_filestore DAG (#10875)`` +`68cc7273b `_ 2020-09-10 ``Add on_kill method to DataprocSubmitJobOperator (#10847)`` +`f92095721 `_ 2020-09-10 ``Fix and remove some more typos from spelling_wordlist.txt (#10845)`` +`9549274d1 `_ 2020-09-09 ``Upgrade black to 20.8b1 (#10818)`` +`078bfaf60 `_ 2020-09-08 ``Extract missing gcs_to_local example DAG from gcs example (#10767)`` +`10ce31127 `_ 2020-09-08 ``Deprecate using global as the default region in Google Dataproc operators and hooks (#10772)`` +`f14f37971 `_ 2020-09-07 ``[AIRFLOW-10672] Refactor BigQueryToGCSOperator to use new method (#10773)`` +`c8ee45568 `_ 2020-09-07 ``Refactor DataprocCreateCluster operator to use simpler interface (#10403)`` +`ece685b5b `_ 2020-09-05 ``Asynchronous execution of Dataproc jobs with a Sensor (#10673)`` +`6e3d7b63d `_ 2020-09-04 ``Add masterConfig parameter to MLEngineStartTrainingJobOperator (#10578)`` +`804548d58 `_ 2020-09-01 ``Add Dataprep operators (#10304)`` +`11c00bc82 `_ 2020-08-30 ``Fix typos: duplicated "the" (#10647)`` +`2ca615cff `_ 2020-08-29 ``Update Google Cloud branding (#10642)`` +`1b533f617 `_ 2020-08-28 ``Fix broken master - DLP (#10635)`` +`5ae82a56d `_ 2020-08-28 ``Fix Google DLP example and improve ops idempotency (#10608)`` +`3867f7662 `_ 2020-08-28 ``Update Google Cloud branding (#10615)`` +`91ff31ad1 `_ 2020-08-27 ``Documentation for Google Cloud Data Loss Prevention (#8201) (#9651)`` +`fdd9b6f65 `_ 2020-08-25 ``Enable Black on Providers Packages (#10543)`` +`d76026545 `_ 2020-08-25 ``PyDocStyle: No whitespaces allowed surrounding docstring text (#10533)`` +`d1bce91bb `_ 2020-08-25 ``PyDocStyle: Enable D403: Capitalized first word of docstring (#10530)`` +`866701c80 `_ 2020-08-25 ``Fix typo in "Cloud" (#10534)`` +`47265e7b5 `_ 2020-08-24 ``Fix typo in PostgresHook (#10529)`` +`3696c34c2 `_ 2020-08-24 ``Fix typo in the word "release" (#10528)`` +`2f2d8dbfa `_ 2020-08-25 ``Remove all "noinspection" comments native to IntelliJ (#10525)`` +`3734876d9 `_ 2020-08-24 ``Implement impersonation in google operators (#10052)`` +`b0598b535 `_ 2020-08-24 ``Add support for creating multiple replicated clusters in Bigtable hook and operator (#10475)`` +`ee7ca128a `_ 2020-08-22 ``Fix broken Markdown refernces in Providers README (#10483)`` +`515cc72c9 `_ 2020-08-22 ``Fix typo in timed_out (#10459)`` +`7c206a82a `_ 2020-08-22 ``Replace assigment with Augmented assignment (#10468)`` +`88c7d2e52 `_ 2020-08-21 ``Dataflow operators don't not always create a virtualenv (#10373)`` +`083c3c129 `_ 2020-08-18 ``Simplified GCSTaskHandler configuration (#10365)`` +`1ae5bdf23 `_ 2020-08-17 ``Add test for GCSTaskHandler (#9600) (#9861)`` +`e195a980b `_ 2020-08-16 ``Add type annotations for mlengine_operator_utils (#10297)`` +`382c1011b `_ 2020-08-16 ``Add Bigtable Update Instance Hook/Operator (#10340)`` +`bfa5a8d5f `_ 2020-08-15 ``CI: Fix failing docs-build (#10342)`` +`be46d20fb `_ 2020-08-15 ``Improve idempotency of BigQueryInsertJobOperator (#9590)`` +`47387a69e `_ 2020-08-14 ``Catch Permission Denied exception when getting secret from GCP Secret Manager. 
(#10326)`` +`2f0613b0c `_ 2020-08-13 ``Implement Google BigQuery Table Partition Sensor (#10218)`` +`f6734b3b8 `_ 2020-08-12 ``Enable Sphinx spellcheck for doc generation (#10280)`` +`8f8db8959 `_ 2020-08-12 ``DbApiHook: Support kwargs in get_pandas_df (#9730)`` +`ef088314f `_ 2020-08-09 ``Added DataprepGetJobsForJobGroupOperator (#10246)`` +`b43f90abf `_ 2020-08-09 ``Fix various typos in the repo (#10263)`` +`c29533888 `_ 2020-08-08 ``Add labels param to Google MLEngine Operators (#10222)`` +`cdec30125 `_ 2020-08-07 ``Add correct signature to all operators and sensors (#10205)`` +`eff0f0321 `_ 2020-08-06 `` Update guide for Google Cloud Secret Manager Backend (#10172)`` +`24c8e4c2d `_ 2020-08-06 ``Changes to all the constructors to remove the args argument (#10163)`` +`010322692 `_ 2020-08-06 ``Improve handling Dataproc cluster creation with ERROR state (#9593)`` +`1437cb749 `_ 2020-08-04 ``Add correct signatures for operators in google provider package (#10144)`` +`6efa1b9cb `_ 2020-08-03 ``Add additional Cloud Datastore operators (#10032)`` +`27020f8e5 `_ 2020-08-03 ``Add try clause to DataFusionHook.wait_for_pipeline_state (#10031)`` +`4e3799fec `_ 2020-08-02 ``[AIRFLOW-4541] Replace os.mkdirs usage with pathlib.Path(path).mkdir (#10117)`` +`85c56b173 `_ 2020-08-02 ``Add missing params to GCP Pub/Sub creation_subscription (#10106)`` +`b79466c12 `_ 2020-08-02 ``Fix sensor not providing arguments for GCSHook (#10074)`` +`4ee35d027 `_ 2020-08-02 ``Fix hook not passing gcp_conn_id to base class (#10075)`` +`aeea71274 `_ 2020-08-02 ``Remove 'args' parameter from provider operator constructors (#10097)`` +`4c84661ad `_ 2020-07-31 ``Split Display Video 360 example into smaler DAGs (#10077)`` +`59cbff087 `_ 2020-07-29 ``Fix docstrings in BigQueryGetDataOperator (#10042)`` +`81b87d48e `_ 2020-07-27 ``Add unit tests for GcpBodyFieldSanitizer in Google providers (#9996)`` +`7d24b088c `_ 2020-07-25 ``Stop using start_date in default_args in example_dags (2) (#9985)`` +`8b10a4b35 `_ 2020-07-25 ``Stop using start_date in default_args in example_dags (#9982)`` +`ef98edf4d `_ 2020-07-23 ``Add more information about using GoogleAdsHook (#9951)`` +`33f0cd265 `_ 2020-07-22 ``apply_default keeps the function signature for mypy (#9784)`` +`39a0288a4 `_ 2020-07-22 ``Add Google Authentication for experimental API (#9848)`` +`c2db0dfeb `_ 2020-07-22 ``More strict rules in mypy (#9705) (#9906)`` +`c4244e18b `_ 2020-07-22 ``Fix calling 'get_client' in BigQueryHook.table_exists (#9916)`` +`5eacc1642 `_ 2020-07-22 ``Add support for impersonation in GCP hooks (#9915)`` +`1cfdebf5f `_ 2020-07-21 ``Fix insert_job method of BigQueryHook (#9899)`` +`c8c52e69c `_ 2020-07-21 ``Remove type hint causing DeprecationWarning in Firestore operators (#9819)`` +`eb6f1d1cf `_ 2020-07-16 ``Fix typo in datafusion operator (#9859)`` +`b01d95ec2 `_ 2020-07-15 ``Change DAG.clear to take dag_run_state (#9824)`` +`6d65c15d1 `_ 2020-07-15 ``Add guide for AI Platform (previously Machine Learning Engine) Operators (#9798)`` +`770de53eb `_ 2020-07-15 ``BigQueryTableExistenceSensor needs to specify keyword arguments (#9832)`` +`2d8dbacdf `_ 2020-07-15 ``Add CloudVisionDeleteReferenceImageOperator (#9698)`` +`9f017951b `_ 2020-07-15 ``Add Google Deployment Manager Hook (#9159)`` +`ed5004cca `_ 2020-07-14 ``Allow 'replace' flag in gcs_to_gcs operator. 
(#9667)`` +`553bb7af7 `_ 2020-07-13 ``Keep functions signatures in decorators (#9786)`` +`68925904e `_ 2020-07-13 ``Add multiple file upload functionality to GCS hook (#8849)`` +`1de78e8f9 `_ 2020-07-12 ``Add Google Stackdriver link (#9765)`` +`092d33f29 `_ 2020-07-11 ``Fix StackdriverTaskHandler + add system tests (#9761)`` +`b2305660f `_ 2020-07-09 ``Update example DAG for AI Platform operators (#9727)`` +`23f80f34a `_ 2020-07-08 ``Move gcs & wasb task handlers to their respective provider packages (#9714)`` +`44d4ae809 `_ 2020-07-06 ``Upgrade to latest pre-commit checks (#9686)`` +`a79e2d4c4 `_ 2020-07-06 ``Move provider's log task handlers to the provider package (#9604)`` +`cd3d9d934 `_ 2020-07-02 ``Fix using .json template extension in GMP operators (#9566)`` +`4799af30e `_ 2020-06-30 ``Extend BigQuery example with include clause (#9572)`` +`e33f1a12d `_ 2020-06-30 ``Add template_ext to BigQueryInsertJobOperator (#9568)`` +`40add26d4 `_ 2020-06-29 ``Remove almost all references to airflow.contrib (#9559)`` +`c420dbd6e `_ 2020-06-27 ``Bump Pylint to 2.5.3 (#9294)`` +`0051c89cb `_ 2020-06-26 ``nitpick fix (#9527)`` +`87fdbd070 `_ 2020-06-25 ``Use literal syntax instead of function calls to create data structure (#9516)`` +`7256f4caa `_ 2020-06-22 ``Pylint fixes and deprecation of rare used methods in Connection (#9419)`` +`e13a14c87 `_ 2020-06-21 ``Enable & Fix Whitespace related PyDocStyle Checks (#9458)`` +`5b680e27e `_ 2020-06-19 ``Don't use connection to store task handler credentials (#9381)`` +`d0e7db402 `_ 2020-06-19 ``Fixed release number for fresh release (#9408)`` +`416334e2e `_ 2020-06-19 ``Properly propagated warnings in operators (#9348)`` +`12af6a080 `_ 2020-06-19 ``Final cleanup for 2020.6.23rc1 release preparation (#9404)`` +`c7e5bce57 `_ 2020-06-19 ``Prepare backport release candidate for 2020.6.23rc1 (#9370)`` +`4e09c6442 `_ 2020-06-18 ``Adds GCP Secret Manager Hook (#9368)`` +`40bf8f28f `_ 2020-06-18 ``Detect automatically the lack of reference to the guide in the operator descriptions (#9290)`` +`f6bd817a3 `_ 2020-06-16 ``Introduce 'transfers' packages (#9320)`` +`639972d99 `_ 2020-06-16 ``Add support for latest Apache Beam SDK in Dataflow operators (#9323)`` +`1459970b3 `_ 2020-06-15 ``Rename CloudBuildCreateBuildOperator to CloudBuildCreateOperator (#9314)`` +`431ea3291 `_ 2020-06-15 ``Resolve upstream tasks when template field is XComArg (#8805)`` +`aee6ab94e `_ 2020-06-15 ``Wait for pipeline state in Data Fusion operators (#8954)`` +`fb1c8b83d `_ 2020-06-10 ``Add test for BQ operations using location (#9206)`` +`a26afbfa5 `_ 2020-06-10 ``Make generated job_id more informative in BQ insert_job (#9203)`` +`c41192fa1 `_ 2020-06-10 ``Upgrade pendulum to latest major version ~2.0 (#9184)`` +`b1c8c5ed5 `_ 2020-06-09 ``Allows using private endpoints in GKEStartPodOperator (#9169)`` +`5918efc86 `_ 2020-06-05 ``Add 3.8 to the test matrices (#8836)`` +`9bcdadaf7 `_ 2020-06-05 ``Add 'main' param to template_fields in DataprocSubmitPySparkJobOperator (#9154)`` +`f56811dff `_ 2020-06-05 ``[AIRFLOW-6290] Create guide for GKE operators (#8883)`` +`76962867b `_ 2020-06-04 ``Fix sql_to_gcs hook gzip of schema_file (#9140)`` +`17adcea83 `_ 2020-06-02 ``Fix handling of subprocess error handling in s3_file_transform and gcs (#9106)`` +`789852546 `_ 2020-06-01 ``Add BigQueryInsertJobOperator (#8868)`` +`29eb68b90 `_ 2020-05-31 ``Create guide for Dataproc Operators (#9037)`` +`886afaf62 `_ 2020-05-29 ``Add example dag and system test for LocalFilesystemToGCSOperator (#9043)`` 
+`a779c4dfc `_ 2020-05-29 ``add separate example dags and system tests for GCSToGoogleSheetsOperator (#9066)`` +`ada26be23 `_ 2020-05-29 ``Add correct description for dst param in LocalFilesystemToGCSOperator (#9055)`` +`81b2761b8 `_ 2020-05-29 ``add example dag and system test for GoogleSheetsToGCSOperator (#9056)`` +`0b0e4f7a4 `_ 2020-05-26 ``Preparing for RC3 relase of backports (#9026)`` +`00642a46d `_ 2020-05-26 ``Fixed name of 20 remaining wrongly named operators. (#8994)`` +`3994030ea `_ 2020-05-26 ``Refactor BigQuery operators (#8858)`` +`cdb3f2545 `_ 2020-05-26 ``All classes in backport providers are now importable in Airflow 1.10 (#8991)`` +`1d36b0303 `_ 2020-05-23 ``Fix references in docs (#8984)`` +`cf5cf45e1 `_ 2020-05-23 ``Support YAML input for CloudBuildCreateOperator (#8808)`` +`499493c5c `_ 2020-05-19 ``[AIRFLOW-6586] Improvements to gcs sensor (#7197)`` +`375d1ca22 `_ 2020-05-19 ``Release candidate 2 for backport packages 2020.05.20 (#8898)`` +`841d81664 `_ 2020-05-19 ``Allow setting the pooling time in DLPHook (#8824)`` +`12c5e5d8a `_ 2020-05-17 ``Prepare release candidate for backport packages (#8891)`` +`f3521fb0e `_ 2020-05-16 ``Regenerate readme files for backport package release (#8886)`` +`15273f0ea `_ 2020-05-16 ``Check for same task instead of Equality to detect Duplicate Tasks (#8828)`` +`92585ca4c `_ 2020-05-15 ``Added automated release notes generation for backport operators (#8807)`` +`e1e833bb2 `_ 2020-05-13 ``Update GoogleBaseHook to not follow 308 and use 60s timeout (#8816)`` +`8b5491971 `_ 2020-05-12 ``Refactor BigQuery hook methods to use python library (#8631)`` +`6911dfe83 `_ 2020-05-12 ``Fix template fields in Google operators (#8840)`` +`4b06fde0f `_ 2020-05-12 ``Fix Flake8 errors (#8841)`` +`1d12c347c `_ 2020-05-12 ``Refactor BigQuery check operators (#8813)`` +`493b685d7 `_ 2020-05-10 ``Add separate example DAGs and system tests for google cloud speech (#8778)`` +`79ef8bed8 `_ 2020-05-10 ``Added Upload Multiple Entity Read Files to specified big query dataset (#8610)`` +`280f1f0c4 `_ 2020-05-10 ``Correctly restore upstream_task_ids when deserializing Operators (#8775)`` +`58aefb23b `_ 2020-05-08 ``Added SDFtoGCSOperator (#8740)`` +`723c52c94 `_ 2020-05-07 ``Add documentation for SpannerDeployInstanceOperator (#8750)`` +`25ee4211b `_ 2020-05-06 ``Support all RuntimeEnvironment parameters in DataflowTemplatedJobStartOperator (#8531)`` +`8d6f1aa4b `_ 2020-05-05 ``Support num_retries field in env var for GCP connection (#8700)`` +`67caae0f2 `_ 2020-05-04 ``Add system test for gcs_to_bigquery (#8556)`` +`bc45fa675 `_ 2020-05-03 ``Add system test and docs for Facebook Ads operators (#8503)`` +`a28c66f23 `_ 2020-04-30 ``[AIRFLOW-4734] Upsert functionality for PostgresHook.insert_rows() (#8625)`` +`992a24ce4 `_ 2020-04-28 ``Split and improve BigQuery example DAG (#8529)`` +`c1fb28230 `_ 2020-04-28 ``Refactor BigQueryHook dataset operations (#8477)`` +`e8d0f8fea `_ 2020-04-26 ``Improve idempodency in CloudDataTransferServiceCreateJobOperator (#8430)`` +`37fdfa977 `_ 2020-04-26 ``[AIRFLOW-6281] Create guide for GCS to GCS transfer operators (#8442)`` +`14b22e6ff `_ 2020-04-25 ``Add hook and operator for Google Cloud Life Sciences (#8481)`` +`72ddc94d1 `_ 2020-04-23 ``Pass location using parmamter in Dataflow integration (#8382)`` +`912aa4b42 `_ 2020-04-23 ``Added GoogleDisplayVideo360DownloadLineItemsOperator (#8174)`` +`57c8c0583 `_ 2020-04-22 ``Use python client in BQ hook create_empty_table/dataset and table_exists (#8377)`` +`5d3a7eef3 `_ 2020-04-20 
``Allow multiple extra_packages in Dataflow (#8394)`` +`79c99b1b6 `_ 2020-04-18 ``Added location parameter to BigQueryCheckOperator (#8273)`` +`79d3f33c1 `_ 2020-04-17 ``Clean up temporary files in Dataflow operators (#8313)`` +`efcffa323 `_ 2020-04-16 ``Add Dataproc SparkR Example (#8240)`` +`b198a1fa9 `_ 2020-04-15 ``Create guide for BigQuery operators (#8276)`` +`2636cc932 `_ 2020-04-14 ``Raise exception when GCP credential doesn't support account impersonation (#8213)`` +`eee4ebaee `_ 2020-04-14 ``Added Facebook Ads Operator #7887 (#8008)`` +`8cae07ea1 `_ 2020-04-14 ``fixed typo (#8294)`` +`45c898330 `_ 2020-04-13 ``Less aggressive eager upgrade of requirements (#8267)`` +`1fd9ed384 `_ 2020-04-13 ``Add mypy plugin for decorators. (#8145)`` +`327b0a9f7 `_ 2020-04-13 ``Added GoogleDisplayVideo360UploadLineItemsOperator (#8216)`` +`bb5e403a3 `_ 2020-04-10 ``Honor schema type for MySQL to GCS data pre-process (#8090)`` +`87969a350 `_ 2020-04-09 ``[AIRFLOW-6515] Change Log Levels from Info/Warn to Error (#8170)`` +`3fc89f29f `_ 2020-04-06 ``[AIRFLOW-7106] Cloud data fusion integration - Allow to pass args to start pipeline (#7849)`` +`7ef75d239 `_ 2020-04-03 ``[AIRFLOW-7117] Honor self.schema in sql_to_gcs as schema to upload (#8049)`` +`ed2bc0057 `_ 2020-04-02 ``Add Google Ads list accounts operator (#8007)`` +`3808a6206 `_ 2020-04-01 ``Unify Google class/package names (#8033)`` +`8a0240257 `_ 2020-03-31 ``Rename CloudBaseHook to GoogleBaseHook and move it to google.common (#8011)`` +`8e8978007 `_ 2020-03-31 ``Add more refactor steps for providers.google (#8010)`` +`aae3b8fb2 `_ 2020-03-31 ``Individual package READMEs (#8012)`` +`779023968 `_ 2020-03-30 ``[AIRFLOW-7075] Operators for storing information from GCS into GA (#7743)`` +`49abce521 `_ 2020-03-30 ``Improve system tests for Cloud Build (#8003)`` +`0f19a930d `_ 2020-03-29 ``Remove GKEStartPodOperator when backporting (#7908)`` +`0e1c238b2 `_ 2020-03-28 ``Get Airflow Variables from GCP Secrets Manager (#7946)`` +`eb4af4f94 `_ 2020-03-28 ``Make BaseSecretsBackend.build_path generic (#7948)`` +`01f99426f `_ 2020-03-28 ``Add download/upload operators for GCS and Google Sheets (#7866)`` +`892522f8e `_ 2020-03-26 ``Change signature of GSheetsHook methods (#7853)`` +`bfd425157 `_ 2020-03-26 ``Improve idempotency in MLEngineHook.create_model (#7811)`` +`f9c226343 `_ 2020-03-26 ``Fix CloudSecretsManagerBackend invalid connections_prefix (#7861)`` +`e3920f12f `_ 2020-03-26 ``Improve setUp/tearDown in Cloud Firestore system test (#7862)`` +`8ba8a7295 `_ 2020-03-26 ``Improve example DAGs for Cloud Memorystore (#7855)`` +`f7d1a437c `_ 2020-03-26 ``Fix CloudMemorystoreCreateInstanceAndImportOperator operator (#7856)`` +`beef6c230 `_ 2020-03-26 ``Improve authorization in GCP system tests (#7863)`` +`5f165f3e4 `_ 2020-03-26 ``[AIRFLOW-5801] Get GCP credentials from file instead of JSON blob (#7869)`` +`686d7d50b `_ 2020-03-25 ``Standardize SecretBackend class names (#7846)`` +`1982c3fdc `_ 2020-03-24 ``Run Dataflow for ML Engine summary in venv (#7809)`` +`eef87b995 `_ 2020-03-23 ``[AIRFLOW-7105] Unify Secrets Backend method interfaces (#7830)`` +`529db07b2 `_ 2020-03-23 ``Improve Google PubSub hook publish method (#7831)`` +`4bde99f13 `_ 2020-03-23 ``Make airflow/providers pylint compatible (#7802)`` +`a001489b5 `_ 2020-03-23 ``Improve example DAG for ML Engine (#7810)`` +`9e5a8e7f8 `_ 2020-03-23 ``Add call to Super class in 'google' providers (#7823)`` +`b86bf79bf `_ 2020-03-23 ``Fix typo in GCP credentials_provider's docstring (#7818)`` 
+`56c013ce9 `_ 2020-03-23 ``Add missing docstring in BigQueryHook.create_empty_table (#7817)`` +`426a79847 `_ 2020-03-23 ``Imrove support for laatest API in MLEngineStartTrainingJobOperator (#7812)`` +`cdf1809fc `_ 2020-03-23 ``[AIRFLOW-7104] Add Secret backend for GCP Secrets Manager (#7795)`` +`27dac00e1 `_ 2020-03-22 ``[AIRFLOW-7099] Improve system test for cloud transfer service (#7794)`` +`0daf5d729 `_ 2020-03-22 ``Add ability to specify a maximum modified time for objects in GCSToGCSOperator (#7791)`` +`c8088c2bd `_ 2020-03-20 ``[AIRFLOW-7100] Add GoogleAnalyticsGetAdsLinkOperator (#7781)`` +`5106a2931 `_ 2020-03-20 ``[AIRFLOW-6752] Add GoogleAnalyticsRetrieveAdsLinksListOperator (#7748)`` +`759ce2a80 `_ 2020-03-20 ``[AIRFLOW-6978] Add PubSubPullOperator (#7766)`` +`6b9b214e4 `_ 2020-03-20 ``[AIRFLOW-6732] Add GoogleAdsHook and GoogleAdsToGcsOperator (#7692)`` +`b11891696 `_ 2020-03-19 ``[AIRFLOW-7069] Fix cloudsql system tests (#7770)`` +`ae854cae5 `_ 2020-03-19 ``[AIRFLOW-7082] Remove catch_http_exception decorator in GCP hooks (#7756)`` +`7e1e954d2 `_ 2020-03-19 ``[AIRFLOW-7085] Cache credentials, project_id in GCP Base Hook (#7759)`` +`6e21c139b `_ 2020-03-19 ``[AIRFLOW-XXXX] Fix reference to GCP classes in guides (#7762)`` +`ce022a3f7 `_ 2020-03-19 ``[AIRFLOW-XXXX] Add cross-references for operators guide (#7760)`` +`029c84e55 `_ 2020-03-18 ``[AIRFLOW-5421] Add Presto to GCS transfer operator (#7718)`` +`63a3102ed `_ 2020-03-18 ``[AIRFLOW-7064] Add CloudFirestoreExportDatabaseOperator (#7725)`` +`73305c7bd `_ 2020-03-18 ``[AIRFLOW-7081] Remove env variables from GCP guide (#7755)`` +`60fdbf6d9 `_ 2020-03-18 ``[AIRFLOW-5610] Add ability to specify multiple objects to copy in GCSToGCSOperator (#7728)`` +`de7e934ca `_ 2020-03-17 ``[AIRFLOW-7079] Remove redundant code for storing template_fields (#7750)`` +`0de0347b2 `_ 2020-03-17 ``[AIRFLOW-6855]: Escape project_dataset_table in SQL query in gcs to bq … (#7475)`` +`91557c6f8 `_ 2020-03-17 ``[AIRFLOW-7073] GKEStartPodOperator always use connection credentials (#7738)`` +`51161dbd9 `_ 2020-03-16 ``[AIRFLOW-5664] Store timestamps with microseconds precision (#6354)`` +`2bc020c43 `_ 2020-03-14 ``[AIRFLOW-7055] Verbose logging option for google provider (#7711)`` +`c997cab42 `_ 2020-03-13 ``[AIRFLOW-6724] Add Google Analytics 360 Accounts Retrieve Operator (#7630)`` +`137896f32 `_ 2020-03-12 ``[AIRFLOW-7034] Remove feature: Assigning Dag to task using Bitshift Op (#7685)`` +`1f77f943d `_ 2020-03-10 ``[AIRFLOW-6980] Improve system tests and building providers package (#7615)`` +`bf9b6b6d7 `_ 2020-03-09 ``[AIRFLOW-5013] Add GCP Data Catalog Hook and operators (#7664)`` +`e5130dc9f `_ 2020-03-09 ``[AIRFLOW-2911] Add job cancellation capability to Dataflow service (#7659)`` +`faf0df4b9 `_ 2020-03-09 ``[AIRFLOW-XXXX] Fix upsert operator in BQ example DAG (#7666)`` +`42eef3821 `_ 2020-03-07 ``[AIRFLOW-6877] Add cross-provider dependencies as extras (#7506)`` +`b5b9795f0 `_ 2020-03-07 ``[AIRFLOW-6973] Make GCSCreateBucketOperator idempotent (fix) (#7624)`` +`6b65038fb `_ 2020-03-06 ``[AIRFLOW-6990] Improve system tests for Google Marketing Platform (#7631)`` +`755fe5224 `_ 2020-03-05 ``[AIRFLOW-6915] Add AI Platform Console Link for MLEngineStartTrainingJobOperator (#7535)`` +`cb2f33911 `_ 2020-03-04 ``[AIRFLOW-6973] Make GCSCreateBucketOperator idempotent (#7609)`` +`09fea3ce8 `_ 2020-03-04 ``[AIRFLOW-6977] Fix BigQuery DTS example DAG (#7612)`` +`8230ccc48 `_ 2020-03-04 ``[AIRFLOW-6926] Fix Google Tasks operators return types and idempotency 
(#7547)`` +`0d1e3088a `_ 2020-03-04 ``[AIRFLOW-6970] Improve GCP Video Intelligence system tests (#7604)`` +`ab6bb0012 `_ 2020-03-03 ``[AIRFLOW-6971] Fix return type in CloudSpeechToTextRecognizeSpeechOperator (#7607)`` +`3db4ade3d `_ 2020-02-29 ``[AIRFLOW-6924] Fix Google DLP operators return types (#7546)`` +`008b4bab1 `_ 2020-02-27 ``[AIRFLOW-6730] Use total_seconds instead of seconds (#7363)`` +`bb552b2d9 `_ 2020-02-25 ``[AIRFLOW-6908] Lazy load AirflowException (#7528)`` +`d1a34246a `_ 2020-02-25 ``[AIRFLOW-6593] Add GCP Stackdriver Alerting Hooks and Operators (#7322)`` +`3320e432a `_ 2020-02-24 ``[AIRFLOW-6817] Lazy-load 'airflow.DAG' to keep user-facing API untouched (#7517)`` +`dcf874352 `_ 2020-02-24 ``[AIRFLOW-6894] Prevent db query in example_dags (#7516)`` +`4d03e33c1 `_ 2020-02-22 ``[AIRFLOW-6817] remove imports from 'airflow/__init__.py', replaced implicit imports with explicit imports, added entry to 'UPDATING.MD' - squashed/rebased (#7456)`` +`35b961637 `_ 2020-02-21 ``[AIRFLOW-4973] Add Cloud Data Fusion Pipeline integration (#7486)`` +`aff3a361b `_ 2020-02-20 ``[AIRFLOW-6558] Campaign Manager operators for conversions (#7420)`` +`9cbd7de6d `_ 2020-02-18 ``[AIRFLOW-6792] Remove _operator/_hook/_sensor in providers package and add tests (#7412)`` +`5b199cb86 `_ 2020-02-17 ``[AIRFLOW-XXXX] Typo in example_bigquery DAG (#7429)`` +`2c9345a8e `_ 2020-02-17 ``[AIRFLOW-6759] Added MLEngine operator/hook to cancel MLEngine jobs (#7400)`` +`946bdc23c `_ 2020-02-16 ``[AIRFLOW-6405] Add GCP BigQuery Table Upsert Operator (#7126)`` +`2381c820c `_ 2020-02-13 ``[AIRFLOW-6505] Let emoji encoded properly for json.dumps() (#7399)`` +`04c1fefbf `_ 2020-02-03 ``[AIRFLOW-6676] added GCSDeleteBucketOperator (#7307)`` +`a0252748f `_ 2020-02-03 ``[AIRFLOW-6717] Remove non-existent field from templated_fields (#7340)`` +`97a429f9d `_ 2020-02-02 ``[AIRFLOW-6714] Remove magic comments about UTF-8 (#7338)`` +`9d8d07557 `_ 2020-02-03 ``[AIRFLOW-6715] Fix Google Cloud DLP Example DAG (#7337)`` +`cf141506a `_ 2020-02-02 ``[AIRFLOW-6708] Set unique logger names (#7330)`` +`373c6aa4a `_ 2020-01-30 ``[AIRFLOW-6682] Move GCP classes to providers package (#7295)`` +`83c037873 `_ 2020-01-30 ``[AIRFLOW-6674] Move example_dags in accordance with AIP-21 (#7287)`` +`057f3ae3a `_ 2020-01-29 ``[AIRFLOW-6670][depends on AIRFLOW-6669] Move contrib operators to providers package (#7286)`` +`ceea293c1 `_ 2020-01-28 ``[AIRFLOW-6656] Fix AIP-21 moving (#7272)`` +`c42a375e7 `_ 2020-01-27 ``[AIRFLOW-6644][AIP-21] Move service classes to providers package (#7265)`` +`059eda05f `_ 2020-01-21 ``[AIRFLOW-6610] Move software classes to providers package (#7231)`` +`f4d3e5e54 `_ 2020-01-13 ``[AIRFLOW-6102] [AIP-21] Rename Dataproc operators (#7151)`` +`e7bf8ecb4 `_ 2020-01-13 ``[AIRFLOW-6119] [AIP-21] Rename GCS operators, hooks and sensors (#7125)`` +`5b6772cb8 `_ 2020-01-09 ``[AIRFLOW-6125] [AIP-21] Rename S3 operator and SFTP operator (#7112)`` +`4f8592ae8 `_ 2020-01-08 ``[AIRFLOW-6118] [AIP-21] Rename Pubsub operators and hook (#7046)`` +`20299473f `_ 2020-01-03 ``[AIRFLOW-6115] [AIP-21] Rename GCP vision operators (#7020)`` +`18e8cea4e `_ 2020-01-03 ``[AIRFLOW-6428] Fix import path for airflow.utils.dates.days_ago in Example DAGs (#7007)`` +`95087af14 `_ 2019-12-31 ``[AIRFLOW-6110] [AIP-21] Rename natural_language service (#6968)`` +`69629a5a9 `_ 2019-12-09 ``[AIRFLOW-5807] Move SFTP from contrib to providers. 
(#6464)``
+`25e9047a4 `_ 2019-12-09 ``[AIRFLOW-6193] Do not use asserts in Airflow main code (#6749)``
+`ed0a14f32 `_ 2019-12-09 ``[AIRFLOW-6120] Rename GoogleCloudBaseHook (#6734)``
+`2f2f89c14 `_ 2019-12-01 ``[AIRFLOW-6139] Consistent spaces in pylint enable/disable (#6701)``
+`03c870a61 `_ 2019-11-26 ``[AIRFLOW-6010] Remove cyclic imports and pylint hacks (#6601)``
+`5c4cfea8c `_ 2019-11-15 ``[AIRFLOW-5718] Add SFTPToGoogleCloudStorageOperator (#6393)``
+`44a8c37a9 `_ 2019-11-13 ``[AIRFLOW-XXX] Fix the docstring for Dataproc get_job method (#6581)``
+`d633d3ac4 `_ 2019-11-13 ``[AIRFLOW-5691] Rewrite Dataproc operators to use python library (#6371)``
+`d985c02d9 `_ 2019-11-05 ``[AIRFLOW-XXX] Add How-To-Guide to GCP PubSub (#6497)``
+`a296cdabd `_ 2019-11-04 ``[AIRFLOW-5743] Move Google PubSub to providers package (#6476)``
+`470b2a779 `_ 2019-10-30 ``[AIRFLOW-5741] Move Cloud Natural Language to providers (#6421)``
+`f2caa451f `_ 2019-10-27 ``[AIRFLOW-5742] Move Google Cloud Vision to providers package (#6424)``
+`16d7accb2 `_ 2019-10-22 ``[AIRFLOW-4971] Add Google Display & Video 360 integration (#6170)``
+`4e661f535 `_ 2019-10-22 ``[AIRFLOW-5379] Add Google Search Ads 360 operators (#6228)``
+`19e32b4e2 `_ 2019-10-18 ``[AIRFLOW-5656] Rename provider to providers module (#6333)``
+================================================================================================ =========== ======================================================================================================================================================================
diff --git a/docs/apache-airflow-providers-google/index.rst b/docs/apache-airflow-providers-google/index.rst
index 140d4733e1376..5fd65ba2245cb 100644
--- a/docs/apache-airflow-providers-google/index.rst
+++ b/docs/apache-airflow-providers-google/index.rst
@@ -44,3 +44,212 @@ Content
     Example DAGs
     PyPI Repository
+
+.. THE REMAINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME!
+
+
+.. toctree::
+    :maxdepth: 1
+    :caption: Commits
+
+    Detailed list of commits
+
+
+Package apache-airflow-providers-google
+------------------------------------------------------
+
+Google services including:
+
+  - `Google Ads `__
+  - `Google Cloud (GCP) `__
+  - `Google Firebase `__
+  - `Google Marketing Platform `__
+  - `Google Workspace `__ (formerly Google Suite)
+
+
+Release: 2.0.0
+
+Provider package
+----------------
+
+This is a provider package for the ``google`` provider. All classes for this provider package
+are in the ``airflow.providers.google`` Python package.
+
+Installation
+------------
+
+.. note::
+
+    In November 2020, a new version of pip (20.3) was released with a new, 2020 resolver. This resolver
+    does not yet work with Apache Airflow and might lead to errors in installation - depending on your choice
+    of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4
+    ``pip install --upgrade pip==20.2.4`` or, in case you use pip 20.3, you need to add the option
+    ``--use-deprecated legacy-resolver`` to your pip install command.
+ + +You can install this package on top of an existing airflow 2.* installation via +``pip install apache-airflow-providers-google`` + +PIP requirements +---------------- + +====================================== =================== +PIP package Version required +====================================== =================== +``PyOpenSSL`` +``google-ads`` ``>=4.0.0,<8.0.0`` +``google-api-core`` ``>=1.25.1,<2.0.0`` +``google-api-python-client`` ``>=1.6.0,<2.0.0`` +``google-auth-httplib2`` ``>=0.0.1`` +``google-auth`` ``>=1.0.0,<2.0.0`` +``google-cloud-automl`` ``>=2.1.0,<3.0.0`` +``google-cloud-bigquery-datatransfer`` ``>=3.0.0,<4.0.0`` +``google-cloud-bigtable`` ``>=1.0.0,<2.0.0`` +``google-cloud-container`` ``>=0.1.1,<2.0.0`` +``google-cloud-datacatalog`` ``>=3.0.0,<4.0.0`` +``google-cloud-dataproc`` ``>=2.2.0,<3.0.0`` +``google-cloud-dlp`` ``>=0.11.0,<2.0.0`` +``google-cloud-kms`` ``>=2.0.0,<3.0.0`` +``google-cloud-language`` ``>=1.1.1,<2.0.0`` +``google-cloud-logging`` ``>=1.14.0,<2.0.0`` +``google-cloud-memcache`` ``>=0.2.0`` +``google-cloud-monitoring`` ``>=0.34.0,<2.0.0`` +``google-cloud-os-login`` ``>=2.0.0,<3.0.0`` +``google-cloud-pubsub`` ``>=2.0.0,<3.0.0`` +``google-cloud-redis`` ``>=2.0.0,<3.0.0`` +``google-cloud-secret-manager`` ``>=0.2.0,<2.0.0`` +``google-cloud-spanner`` ``>=1.10.0,<2.0.0`` +``google-cloud-speech`` ``>=0.36.3,<2.0.0`` +``google-cloud-storage`` ``>=1.30,<2.0.0`` +``google-cloud-tasks`` ``>=2.0.0,<3.0.0`` +``google-cloud-texttospeech`` ``>=0.4.0,<2.0.0`` +``google-cloud-translate`` ``>=1.5.0,<2.0.0`` +``google-cloud-videointelligence`` ``>=1.7.0,<2.0.0`` +``google-cloud-vision`` ``>=0.35.2,<2.0.0`` +``google-cloud-workflows`` ``>=0.1.0,<2.0.0`` +``grpcio-gcp`` ``>=0.2.2`` +``json-merge-patch`` ``~=0.2`` +``pandas-gbq`` +====================================== =================== + +Cross provider package dependencies +----------------------------------- + +Those are dependencies that might be needed in order to use all the features of the package. +You need to install the specified provider packages in order to use them. + +You can install such cross-provider dependencies when installing from PyPI. For example: + +.. code-block:: bash + + pip install apache-airflow-providers-google[amazon] + + +======================================================================================================================== ==================== +Dependent package Extra +======================================================================================================================== ==================== +`apache-airflow-providers-amazon `_ ``amazon`` +`apache-airflow-providers-apache-cassandra `_ ``apache.cassandra`` +`apache-airflow-providers-cncf-kubernetes `_ ``cncf.kubernetes`` +`apache-airflow-providers-facebook `_ ``facebook`` +`apache-airflow-providers-microsoft-azure `_ ``microsoft.azure`` +`apache-airflow-providers-microsoft-mssql `_ ``microsoft.mssql`` +`apache-airflow-providers-mysql `_ ``mysql`` +`apache-airflow-providers-oracle `_ ``oracle`` +`apache-airflow-providers-postgres `_ ``postgres`` +`apache-airflow-providers-presto `_ ``presto`` +`apache-airflow-providers-salesforce `_ ``salesforce`` +`apache-airflow-providers-sftp `_ ``sftp`` +`apache-airflow-providers-ssh `_ ``ssh`` +======================================================================================================================== ==================== + + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. 
See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Changelog +--------- + +2.0.0 +..... + +Updated ``google-cloud-*`` libraries +```````````````````````````````````` + +This release of the provider package contains third-party library updates, which may require updating your +DAG files or custom hooks and operators, if you were using objects from those libraries. +Updating of these libraries is necessary to be able to use new features made available by new versions of +the libraries and to obtain bug fixes that are only available for new versions of the library. + +Details are covered in the UPDATING.md files for each library, but there are some details +that you should pay attention to. + + ++-----------------------------------------------------------------------------------------------------+----------------------+---------------------+---------------------------------------------------------------------------------------------------------------------------------------+ +| Library name | Previous constraints | Current constraints | Upgrade Documentation | ++=====================================================================================================+======================+=====================+=======================================================================================================================================+ +| `google-cloud-bigquery-datatransfer `_ | ``>=0.4.0,<2.0.0`` | ``>=3.0.0,<4.0.0`` | `Upgrading google-cloud-bigquery-datatransfer `_ | ++-----------------------------------------------------------------------------------------------------+----------------------+---------------------+---------------------------------------------------------------------------------------------------------------------------------------+ +| `google-cloud-datacatalog `_ | ``>=0.5.0,<0.8`` | ``>=1.0.0,<2.0.0`` | `Upgrading google-cloud-datacatalog `_ | ++-----------------------------------------------------------------------------------------------------+----------------------+---------------------+---------------------------------------------------------------------------------------------------------------------------------------+ +| `google-cloud-os-login `_ | ``>=1.0.0,<2.0.0`` | ``>=2.0.0,<3.0.0`` | `Upgrading google-cloud-os-login `_ | ++-----------------------------------------------------------------------------------------------------+----------------------+---------------------+---------------------------------------------------------------------------------------------------------------------------------------+ +| `google-cloud-pubsub `_ | ``>=1.0.0,<2.0.0`` | ``>=2.0.0,<3.0.0`` | `Upgrading google-cloud-pubsub `_ | 
++-----------------------------------------------------------------------------------------------------+----------------------+---------------------+---------------------------------------------------------------------------------------------------------------------------------------+
+| `google-cloud-kms `_ | ``>=1.2.1,<2.0.0`` | ``>=2.0.0,<3.0.0`` | `Upgrading google-cloud-kms `_ |
++-----------------------------------------------------------------------------------------------------+----------------------+---------------------+---------------------------------------------------------------------------------------------------------------------------------------+
+| `google-cloud-tasks `_ | ``>=1.2.1,<2.0.0`` | ``>=2.0.0,<3.0.0`` | `Upgrading google-cloud-tasks `_ |
++-----------------------------------------------------------------------------------------------------+----------------------+---------------------+---------------------------------------------------------------------------------------------------------------------------------------+
+
+The field names use the snake_case convention
+`````````````````````````````````````````````
+
+If your DAG uses an object from the above-mentioned libraries passed by XCom, it is necessary to update the
+naming convention of the fields that are read. Previously, the fields used the camelCase convention;
+now the snake_case convention is used.
+
+**Before:**
+
+.. code-block:: python
+
+    set_acl_permission = GCSBucketCreateAclEntryOperator(
+        task_id="gcs-set-acl-permission",
+        bucket=BUCKET_NAME,
+        entity="user-{{ task_instance.xcom_pull('get-instance')['persistenceIamIdentity']"
+        ".split(':', 2)[1] }}",
+        role="OWNER",
+    )
+
+
+**After:**
+
+.. code-block:: python
+
+    set_acl_permission = GCSBucketCreateAclEntryOperator(
+        task_id="gcs-set-acl-permission",
+        bucket=BUCKET_NAME,
+        entity="user-{{ task_instance.xcom_pull('get-instance')['persistence_iam_identity']"
+        ".split(':', 2)[1] }}",
+        role="OWNER",
+    )
+
+
+
+1.0.0
+.....
+
+Initial version of the provider.
diff --git a/docs/apache-airflow-providers-google/operators/cloud/kubernetes_engine.rst b/docs/apache-airflow-providers-google/operators/cloud/kubernetes_engine.rst
index 58606798902cc..10cbc3e3c95df 100644
--- a/docs/apache-airflow-providers-google/operators/cloud/kubernetes_engine.rst
+++ b/docs/apache-airflow-providers-google/operators/cloud/kubernetes_engine.rst
@@ -36,7 +36,7 @@ Prerequisite Tasks
 Manage GKE cluster
 ^^^^^^^^^^^^^^^^^^
-A cluster is the foundation of GKE - all workloads run on on top of the cluster. It is made up on a cluster master
+A cluster is the foundation of GKE - all workloads run on top of the cluster. It is made up of a cluster master
 and worker nodes. The lifecycle of the master is managed by GKE when creating or deleting a cluster.
 The worker nodes are represented as Compute Engine VM instances that GKE creates on your behalf when creating a cluster.
diff --git a/docs/apache-airflow-providers-google/operators/cloud/workflows.rst b/docs/apache-airflow-providers-google/operators/cloud/workflows.rst
new file mode 100644
index 0000000000000..551a7ca40f938
--- /dev/null
+++ b/docs/apache-airflow-providers-google/operators/cloud/workflows.rst
@@ -0,0 +1,185 @@
+ .. Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements. See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership. The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License. You may obtain a copy of the License at
+
+ .. http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied. See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+Google Cloud Workflows Operators
+================================
+
+You can use Workflows to create serverless workflows that link series of serverless tasks together
+in an order you define. Combine the power of Google Cloud's APIs, serverless products like Cloud
+Functions and Cloud Run, and calls to external APIs to create flexible serverless applications.
+
+For more information about the service visit
+`Workflows production documentation `__.
+
+.. contents::
+   :depth: 1
+   :local:
+
+Prerequisite Tasks
+------------------
+
+.. include:: /operators/_partials/prerequisite_tasks.rst
+
+
+.. _howto/operator:WorkflowsCreateWorkflowOperator:
+
+Create workflow
+===============
+
+To create a workflow use
+:class:`~airflow.providers.google.cloud.operators.workflows.WorkflowsCreateWorkflowOperator`.
+
+.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_workflows.py
+    :language: python
+    :dedent: 4
+    :start-after: [START how_to_create_workflow]
+    :end-before: [END how_to_create_workflow]
+
+The workflow should be defined in a similar way to this example:
+
+.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_workflows.py
+    :language: python
+    :dedent: 0
+    :start-after: [START how_to_define_workflow]
+    :end-before: [END how_to_define_workflow]
+
+For more information about authoring workflows check the official
+production documentation ``__.
+
+
+.. _howto/operator:WorkflowsUpdateWorkflowOperator:
+
+Update workflow
+===============
+
+To update a workflow use
+:class:`~airflow.providers.google.cloud.operators.workflows.WorkflowsUpdateWorkflowOperator`.
+
+.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_workflows.py
+    :language: python
+    :dedent: 4
+    :start-after: [START how_to_update_workflow]
+    :end-before: [END how_to_update_workflow]
+
+.. _howto/operator:WorkflowsGetWorkflowOperator:
+
+Get workflow
+============
+
+To get a workflow use
+:class:`~airflow.providers.google.cloud.operators.workflows.WorkflowsGetWorkflowOperator`.
+
+.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_workflows.py
+    :language: python
+    :dedent: 4
+    :start-after: [START how_to_get_workflow]
+    :end-before: [END how_to_get_workflow]
+
+.. _howto/operator:WorkflowsListWorkflowsOperator:
+
+List workflows
+==============
+
+To list workflows use
+:class:`~airflow.providers.google.cloud.operators.workflows.WorkflowsListWorkflowsOperator`.
+
+.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_workflows.py
+    :language: python
+    :dedent: 4
+    :start-after: [START how_to_list_workflows]
+    :end-before: [END how_to_list_workflows]
+
+.. _howto/operator:WorkflowsDeleteWorkflowOperator:
+
+Delete workflow
+===============
+
+To delete a workflow use
+:class:`~airflow.providers.google.cloud.operators.workflows.WorkflowsDeleteWorkflowOperator`.
+
+.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_workflows.py
+    :language: python
+    :dedent: 4
+    :start-after: [START how_to_delete_workflow]
+    :end-before: [END how_to_delete_workflow]
+
+.. _howto/operator:WorkflowsCreateExecutionOperator:
+
+Create execution
+================
+
+To create an execution use
+:class:`~airflow.providers.google.cloud.operators.workflows.WorkflowsCreateExecutionOperator`.
+This operator is not idempotent due to an API limitation.
+
+.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_workflows.py
+    :language: python
+    :dedent: 4
+    :start-after: [START how_to_create_execution]
+    :end-before: [END how_to_create_execution]
+
+The create operator does not wait for the execution to complete. To wait for the execution result use
+:class:`~airflow.providers.google.cloud.sensors.workflows.WorkflowExecutionSensor`.
+
+.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_workflows.py
+    :language: python
+    :dedent: 4
+    :start-after: [START how_to_wait_for_execution]
+    :end-before: [END how_to_wait_for_execution]
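+Putting these pieces together, a minimal sketch of a DAG that creates a workflow, starts an
+execution and waits for its result might look like this. It is only an illustration (it is not
+the bundled example DAG), and the project, location, workflow id and workflow source below are
+hypothetical placeholders:
+
+.. code-block:: python
+
+    from datetime import datetime
+
+    from airflow import DAG
+    from airflow.providers.google.cloud.operators.workflows import (
+        WorkflowsCreateExecutionOperator,
+        WorkflowsCreateWorkflowOperator,
+    )
+    from airflow.providers.google.cloud.sensors.workflows import WorkflowExecutionSensor
+
+    PROJECT_ID = "my-project"  # hypothetical
+    LOCATION = "us-central1"  # hypothetical
+    WORKFLOW_ID = "demo-workflow"  # hypothetical
+    # A workflow is passed as a dict; "source_contents" holds the Workflows YAML source.
+    WORKFLOW = {"source_contents": '- returnResult:\n    return: "ok"'}
+
+    with DAG(
+        "example_workflows_sketch",
+        start_date=datetime(2021, 1, 1),
+        schedule_interval=None,
+    ) as dag:
+        create_workflow = WorkflowsCreateWorkflowOperator(
+            task_id="create_workflow",
+            workflow=WORKFLOW,
+            workflow_id=WORKFLOW_ID,
+            location=LOCATION,
+            project_id=PROJECT_ID,
+        )
+        create_execution = WorkflowsCreateExecutionOperator(
+            task_id="create_execution",
+            execution={},  # an empty body starts the workflow without arguments
+            workflow_id=WORKFLOW_ID,
+            location=LOCATION,
+            project_id=PROJECT_ID,
+        )
+        wait_for_execution = WorkflowExecutionSensor(
+            task_id="wait_for_execution",
+            # the create operator pushes the execution id to XCom
+            execution_id=create_execution.output["execution_id"],
+            workflow_id=WORKFLOW_ID,
+            location=LOCATION,
+            project_id=PROJECT_ID,
+        )
+        create_workflow >> create_execution >> wait_for_execution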
+.. _howto/operator:WorkflowsGetExecutionOperator:
+
+Get execution
+=============
+
+To get an execution use
+:class:`~airflow.providers.google.cloud.operators.workflows.WorkflowsGetExecutionOperator`.
+
+.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_workflows.py
+    :language: python
+    :dedent: 4
+    :start-after: [START how_to_get_execution]
+    :end-before: [END how_to_get_execution]
+
+.. _howto/operator:WorkflowsListExecutionsOperator:
+
+List executions
+===============
+
+To list executions use
+:class:`~airflow.providers.google.cloud.operators.workflows.WorkflowsListExecutionsOperator`.
+By default this operator will return only executions for the last 60 minutes.
+
+.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_workflows.py
+    :language: python
+    :dedent: 4
+    :start-after: [START how_to_list_executions]
+    :end-before: [END how_to_list_executions]
+
+.. _howto/operator:WorkflowsCancelExecutionOperator:
+
+Cancel execution
+================
+
+To cancel an execution use
+:class:`~airflow.providers.google.cloud.operators.workflows.WorkflowsCancelExecutionOperator`.
+
+.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_workflows.py
+    :language: python
+    :dedent: 4
+    :start-after: [START how_to_cancel_execution]
+    :end-before: [END how_to_cancel_execution]
diff --git a/docs/apache-airflow-providers-google/operators/transfer/trino_to_gcs.rst b/docs/apache-airflow-providers-google/operators/transfer/trino_to_gcs.rst
new file mode 100644
index 0000000000000..29dc5405e7484
--- /dev/null
+++ b/docs/apache-airflow-providers-google/operators/transfer/trino_to_gcs.rst
@@ -0,0 +1,142 @@
+ .. Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements. See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership. The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License. You may obtain a copy of the License at
+
+ .. http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied. See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+
+Trino to Google Cloud Storage Transfer Operator
+===============================================
+
+`Trino `__ is an open source, fast, distributed SQL query engine for running interactive
+analytic queries against data sources of all sizes ranging from gigabytes to petabytes. Trino allows
+querying data where it lives, including Hive, Cassandra, relational databases or even proprietary data stores.
+A single Trino query can combine data from multiple sources, allowing for analytics across your entire
+organization.
+
+`Google Cloud Storage `__ allows world-wide storage and retrieval of
+any amount of data at any time. You can use it to store backup and
+`archive data `__ as well
+as a `data source for BigQuery `__.
+
+
+Data transfer
+-------------
+
+Transferring files between Trino and Google Cloud Storage is performed with the
+:class:`~airflow.providers.google.cloud.transfers.trino_to_gcs.TrinoToGCSOperator` operator.
+
+This operator has 3 required parameters:
+
+* ``sql`` - The SQL to execute.
+* ``bucket`` - The bucket to upload to.
+* ``filename`` - The filename to use as the object name when uploading to Google Cloud Storage.
+  A ``{}`` should be specified in the filename to allow the operator to inject file
+  numbers in cases where the file is split due to size.
+
+All parameters are described in the reference documentation - :class:`~airflow.providers.google.cloud.transfers.trino_to_gcs.TrinoToGCSOperator`.
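+To make these parameters concrete, here is a minimal sketch of an operator call. The bucket,
+object names and query are hypothetical, and the optional ``schema_filename`` and
+``approx_max_file_size_bytes`` parameters described in the following sections are included
+for illustration only:
+
+.. code-block:: python
+
+    from airflow.providers.google.cloud.transfers.trino_to_gcs import TrinoToGCSOperator
+
+    trino_to_gcs = TrinoToGCSOperator(
+        task_id="trino_to_gcs",
+        sql="SELECT * FROM memory.default.customer",  # hypothetical table
+        bucket="my-gcs-bucket",  # hypothetical bucket
+        # "{}" is replaced with a counter when the result is split into several files
+        filename="customer/{}.json",
+        # optional: dump the BigQuery schema of the table to this object
+        schema_filename="customer-schema.json",
+        # optional: split output files at roughly 10 MB
+        approx_max_file_size_bytes=10 * 1024 * 1024,
+    )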
+The bundled example DAG shows a basic call with just the required parameters:
+
+.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_trino_to_gcs.py
+    :language: python
+    :dedent: 4
+    :start-after: [START howto_operator_trino_to_gcs_basic]
+    :end-before: [END howto_operator_trino_to_gcs_basic]
+
+Choice of data format
+^^^^^^^^^^^^^^^^^^^^^
+
+The operator supports two output formats:
+
+* ``json`` - JSON Lines (default)
+* ``csv``
+
+You can choose between these options via the ``export_format`` parameter.
+
+If you want a CSV file to be created, your operator call might look like this:
+
+.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_trino_to_gcs.py
+    :language: python
+    :dedent: 4
+    :start-after: [START howto_operator_trino_to_gcs_csv]
+    :end-before: [END howto_operator_trino_to_gcs_csv]
+
+Generating BigQuery schema
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+If you set the ``schema_filename`` parameter, a ``.json`` file containing the BigQuery schema fields for the table
+will be dumped from the database and uploaded to the bucket.
+
+If you want to create a schema file, then an example operator call might look like this:
+
+.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_trino_to_gcs.py
+    :language: python
+    :dedent: 4
+    :start-after: [START howto_operator_trino_to_gcs_multiple_types]
+    :end-before: [END howto_operator_trino_to_gcs_multiple_types]
+
+For more information about the BigQuery schema, please look at
+`Specifying schema `__ in the BigQuery documentation.
+
+Division of the result into multiple files
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+This operator supports the ability to split a large result into multiple files. The ``approx_max_file_size_bytes``
+parameter allows developers to specify the file size of the splits. By default, the file has no more
+than 1 900 000 000 bytes (1900 MB).
+
+Check `Quotas & limits in Google Cloud Storage `__ to see the
+maximum allowed file size for a single object.
+
+If you want to create 10 MB files, your code might look like this:
+
+.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_trino_to_gcs.py
+    :language: python
+    :dedent: 4
+    :start-after: [START howto_operator_read_data_from_gcs_many_chunks]
+    :end-before: [END howto_operator_read_data_from_gcs_many_chunks]
+
+Querying data using BigQuery
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The data available in Google Cloud Storage can be used by BigQuery. You can load data to BigQuery or
+refer directly to GCS data in your queries. For information about loading data to BigQuery, please look at
+`Introduction to loading data from Cloud Storage `__
+in the BigQuery documentation. For information about querying GCS data, please look at
+`Querying Cloud Storage data `__ in
+the BigQuery documentation.
+
+Airflow also has numerous operators that allow you to make use of BigQuery.
+For example, if you want to create an external table that allows you to run queries that
+read data directly from GCS, then you can use :class:`~airflow.providers.google.cloud.operators.bigquery.BigQueryCreateExternalTableOperator`.
+Using this operator looks like this:
+
+.. exampleinclude:: /../../airflow/providers/google/cloud/example_dags/example_trino_to_gcs.py
+    :language: python
+    :dedent: 4
+    :start-after: [START howto_operator_create_external_table_multiple_types]
+    :end-before: [END howto_operator_create_external_table_multiple_types]
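+A standalone sketch of such a call may also help; the bucket, object and table names below are
+hypothetical:
+
+.. code-block:: python
+
+    from airflow.providers.google.cloud.operators.bigquery import (
+        BigQueryCreateExternalTableOperator,
+    )
+
+    create_external_table = BigQueryCreateExternalTableOperator(
+        task_id="create_external_table",
+        bucket="my-gcs-bucket",  # hypothetical bucket
+        source_objects=["customer/*.json"],  # the files produced by TrinoToGCSOperator
+        source_format="NEWLINE_DELIMITED_JSON",
+        # reuse the schema file dumped via the schema_filename parameter
+        schema_object="customer-schema.json",
+        destination_project_dataset_table="my_dataset.customer",  # hypothetical table
+    )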
+ + +Package apache-airflow-providers-neo4j +------------------------------------------------------ + +`Neo4j `__ + + +This is detailed commit list of changes for versions provider package: ``neo4j``. +For high-level changelog, see :doc:`package information including changelog `. + + + +1.0.0 +..... + +Latest change: 2021-02-01 + +================================================================================================ =========== ================================================ +Commit Committed Subject +================================================================================================ =========== ================================================ +`ac2f72c98 `_ 2021-02-01 ``Implement provider versioning tools (#13767)`` +`1d2977f6a `_ 2021-01-14 ``Add Neo4j hook and operator (#13324)`` +================================================================================================ =========== ================================================ diff --git a/docs/apache-airflow-providers-neo4j/connections/neo4j.rst b/docs/apache-airflow-providers-neo4j/connections/neo4j.rst new file mode 100644 index 0000000000000..33fd6b50b9fef --- /dev/null +++ b/docs/apache-airflow-providers-neo4j/connections/neo4j.rst @@ -0,0 +1,63 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + + +Neo4j Connection +================ +The Neo4j connection type provides connection to a Neo4j database. + +Configuring the Connection +-------------------------- +Host (required) + The host to connect to. + +Schema (optional) + Specify the schema name to be used in the database. + +Login (required) + Specify the user name to connect. + +Password (required) + Specify the password to connect. + +Extra (optional) + Specify the extra parameters (as json dictionary) that can be used in Neo4j + connection. + + The following extras are supported: + + - Default - uses bolt scheme(bolt://) + - neo4j_scheme - neo4j:// + - certs_self_signed - neo4j+ssc:// + - certs_trusted_ca - neo4j+s:// + + * ``encrypted``: Sets encrypted=True/False for GraphDatabase.driver, Set to ``True`` for Neo4j Aura. + * ``neo4j_scheme``: Specifies the scheme to ``neo4j://``, default is ``bolt://`` + * ``certs_self_signed``: Sets the URI scheme to support self-signed certificates(``neo4j+ssc://``) + * ``certs_trusted_ca``: Sets the URI scheme to support only trusted CA(``neo4j+s://``) + + Example "extras" field: + + .. code-block:: json + + { + "encrypted": true, + "neo4j_scheme": true, + "certs_self_signed": true, + "certs_trusted_ca": false + } diff --git a/docs/apache-airflow-providers-neo4j/index.rst b/docs/apache-airflow-providers-neo4j/index.rst new file mode 100644 index 0000000000000..dd995fb8c65b8 --- /dev/null +++ b/docs/apache-airflow-providers-neo4j/index.rst @@ -0,0 +1,124 @@ + + .. 
diff --git a/docs/apache-airflow-providers-neo4j/index.rst b/docs/apache-airflow-providers-neo4j/index.rst
new file mode 100644
index 0000000000000..dd995fb8c65b8
--- /dev/null
+++ b/docs/apache-airflow-providers-neo4j/index.rst
@@ -0,0 +1,124 @@
+
+ .. Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements. See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership. The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License. You may obtain a copy of the License at
+
+ .. http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied. See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+``apache-airflow-providers-neo4j``
+==================================
+
+Content
+-------
+
+.. toctree::
+    :maxdepth: 1
+    :caption: Guides
+
+    Connection types
+    Operators
+
+.. toctree::
+    :maxdepth: 1
+    :caption: References
+
+    Python API <_api/airflow/providers/neo4j/index>
+
+.. toctree::
+    :maxdepth: 1
+    :caption: Resources
+
+    Example DAGs
+    PyPI Repository
+
+.. THE REMAINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME!
+
+
+.. toctree::
+    :maxdepth: 1
+    :caption: Commits
+
+    Detailed list of commits
+
+
+Package apache-airflow-providers-neo4j
+------------------------------------------------------
+
+`Neo4j `__
+
+
+Release: 1.0.0
+
+Provider package
+----------------
+
+This is a provider package for the ``neo4j`` provider. All classes for this provider package
+are in the ``airflow.providers.neo4j`` Python package.
+
+Installation
+------------
+
+.. note::
+
+    In November 2020, a new version of pip (20.3) was released with a new, 2020 resolver. This resolver
+    does not yet work with Apache Airflow and might lead to errors in installation - depending on your choice
+    of extras. In order to install Airflow you need to either downgrade pip to version 20.2.4
+    ``pip install --upgrade pip==20.2.4`` or, in case you use pip 20.3, you need to add the option
+    ``--use-deprecated legacy-resolver`` to your pip install command.
+
+
+You can install this package on top of an existing Airflow 2.* installation via
+``pip install apache-airflow-providers-neo4j``
+
+PIP requirements
+----------------
+
+============= ==================
+PIP package   Version required
+============= ==================
+``neo4j``     ``>=4.2.1``
+============= ==================
+
+
+
+ .. Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements. See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership. The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License. You may obtain a copy of the License at
+
+ .. http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied. See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+Changelog
+---------
+
+
+1.0.0
+.....
+
+Initial version of the provider.
diff --git a/docs/apache-airflow-providers-neo4j/operators/neo4j.rst b/docs/apache-airflow-providers-neo4j/operators/neo4j.rst
new file mode 100644
index 0000000000000..411aa0c6698ce
--- /dev/null
+++ b/docs/apache-airflow-providers-neo4j/operators/neo4j.rst
@@ -0,0 +1,50 @@
+ .. Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements. See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership. The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License. You may obtain a copy of the License at
+
+ .. http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied. See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+
+
+.. _howto/operator:Neo4jOperator:
+
+Neo4jOperator
+=============
+
+Use the :class:`~airflow.providers.neo4j.operators.neo4j.Neo4jOperator` to execute
+Cypher queries in a `Neo4j `__ database.
+
+
+Using the Operator
+^^^^^^^^^^^^^^^^^^
+
+Use the ``neo4j_conn_id`` argument to connect to your Neo4j instance where
+the connection metadata is structured as follows:
+
+.. list-table:: Neo4j Airflow Connection Metadata
+   :widths: 25 25
+   :header-rows: 1
+
+   * - Parameter
+     - Input
+   * - Host: string
+     - Neo4j hostname
+   * - Schema: string
+     - Database name
+   * - Login: string
+     - Neo4j user
+   * - Password: string
+     - Neo4j user password
+   * - Port: int
+     - Neo4j port
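+A minimal sketch of the operator in a DAG follows; the query and connection id are
+illustrative placeholders. Note that, although Neo4j speaks Cypher, the query is passed
+through the ``sql`` parameter:
+
+.. code-block:: python
+
+    from datetime import datetime
+
+    from airflow import DAG
+    from airflow.providers.neo4j.operators.neo4j import Neo4jOperator
+
+    with DAG(
+        "neo4j_sketch",
+        start_date=datetime(2021, 1, 1),
+        schedule_interval=None,
+    ) as dag:
+        run_query = Neo4jOperator(
+            task_id="run_neo4j_query",
+            neo4j_conn_id="neo4j_default",
+            # a Cypher query; the parameter is named "sql" in this release
+            sql="MATCH (p:Person) RETURN p.name LIMIT 10",
+        )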
diff --git a/docs/apache-airflow-providers-ssh/connections/ssh.rst b/docs/apache-airflow-providers-ssh/connections/ssh.rst
index 54e902e36a582..f320381904fd0 100644
--- a/docs/apache-airflow-providers-ssh/connections/ssh.rst
+++ b/docs/apache-airflow-providers-ssh/connections/ssh.rst
@@ -47,9 +47,10 @@ Extra (optional)
     * ``private_key_passphrase`` - Content of the private key passphrase used to decrypt the private key.
     * ``timeout`` - An optional timeout (in seconds) for the TCP connect. Default is ``10``.
     * ``compress`` - ``true`` to ask the remote client/server to compress traffic; ``false`` to refuse compression. Default is ``true``.
-    * ``no_host_key_check`` - Set to ``false`` to restrict connecting to hosts with no entries in ``~/.ssh/known_hosts`` (Hosts file). This provides maximum protection against trojan horse attacks, but can be troublesome when the ``/etc/ssh/ssh_known_hosts`` file is poorly maintained or connections to new hosts are frequently made. This option forces the user to manually add all new hosts. Default is ``true``, ssh will automatically add new host keys to the user known hosts files.
+    * ``no_host_key_check`` - Set to ``false`` to restrict connecting to hosts that have no entry in ``~/.ssh/known_hosts`` (Hosts file) and whose key is not given in the ``host_key`` extra. This provides maximum protection against trojan horse attacks, but can be troublesome when the ``/etc/ssh/ssh_known_hosts`` file is poorly maintained or connections to new hosts are frequently made. This option forces the user to manually add all new hosts. Default is ``true``; SSH will automatically add new host keys to the user's known hosts file.
     * ``allow_host_key_change`` - Set to ``true`` if you want to allow connecting to hosts whose host key has changed or when you get the 'REMOTE HOST IDENTIFICATION HAS CHANGED' error. This won't protect against Man-In-The-Middle attacks. Another possible solution is to remove the host entry from the ``~/.ssh/known_hosts`` file. Default is ``false``.
     * ``look_for_keys`` - Set to ``false`` if you want to disable searching for discoverable private key files in ``~/.ssh/``
+    * ``host_key`` - The base64 encoded ssh-rsa public key of the host, as you would find it in the ``known_hosts`` file. Specifying this, along with ``no_host_key_check=False``, allows you to only make the connection if the public key of the endpoint matches this value.
 
     Example "extras" field:
 
@@ -59,9 +60,10 @@ Extra (optional)
        "key_file": "/home/airflow/.ssh/id_rsa",
        "timeout": "10",
        "compress": "false",
+       "look_for_keys": "false",
        "no_host_key_check": "false",
        "allow_host_key_change": "false",
-       "look_for_keys": "false"
+       "host_key": "AAAHD...YDWwq=="
     }
 
 When specifying the connection as URI (in :envvar:`AIRFLOW_CONN_{CONN_ID}` variable) you should specify it
diff --git a/docs/apache-airflow-providers-tableau/index.rst b/docs/apache-airflow-providers-tableau/index.rst
new file mode 100644
index 0000000000000..ce74925c8e37b
--- /dev/null
+++ b/docs/apache-airflow-providers-tableau/index.rst
@@ -0,0 +1,38 @@
+
+ .. Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements. See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership. The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License. You may obtain a copy of the License at
+
+ .. http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied. See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+``apache-airflow-providers-tableau``
+====================================
+
+Content
+-------
+
+.. toctree::
+    :maxdepth: 1
+    :caption: References
+
+    Python API <_api/airflow/providers/tableau/index>
+
+.. toctree::
+    :maxdepth: 1
+    :caption: Resources
+
+    Example DAGs
+    PyPI Repository
+
+.. THE REMAINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME!
diff --git a/docs/apache-airflow-providers-trino/commits.rst b/docs/apache-airflow-providers-trino/commits.rst
new file mode 100644
index 0000000000000..5f0341d81e984
--- /dev/null
+++ b/docs/apache-airflow-providers-trino/commits.rst
@@ -0,0 +1,26 @@
+
+ .. Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements. See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership. The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License. You may obtain a copy of the License at
+
+ .. http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied. See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+Package apache-airflow-providers-trino
+------------------------------------------------------
+
+`Trino `__
+
+
+This is a detailed commit list of changes for the ``trino`` provider package.
+For the high-level changelog, see :doc:`package information including changelog `.
diff --git a/docs/apache-airflow-providers-trino/index.rst b/docs/apache-airflow-providers-trino/index.rst
new file mode 100644
index 0000000000000..e74c7d62e7d23
--- /dev/null
+++ b/docs/apache-airflow-providers-trino/index.rst
@@ -0,0 +1,43 @@
+
+ .. Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements. See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership. The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License. You may obtain a copy of the License at
+
+ .. http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied. See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+``apache-airflow-providers-trino``
+==================================
+
+Content
+-------
+
+.. toctree::
+    :maxdepth: 1
+    :caption: References
+
+    Python API <_api/airflow/providers/trino/index>
+
+.. toctree::
+    :maxdepth: 1
+    :caption: Resources
+
+    PyPI Repository
+
+.. THE REMAINDER OF THE FILE IS AUTOMATICALLY GENERATED. IT WILL BE OVERWRITTEN AT RELEASE TIME!
+
+.. toctree::
+    :maxdepth: 1
+    :caption: Commits
+
+    Detailed list of commits
diff --git a/docs/apache-airflow-providers/howto/create-update-providers.rst b/docs/apache-airflow-providers/howto/create-update-providers.rst
new file mode 100644
index 0000000000000..47ebb77c1475b
--- /dev/null
+++ b/docs/apache-airflow-providers/howto/create-update-providers.rst
@@ -0,0 +1,301 @@
+
+ .. Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements. See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership. The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License. You may obtain a copy of the License at
+
+ .. http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied. See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+Community Providers
+===================
+
+.. contents:: :local:
+
+How to create a new community provider
+--------------------------------------
+
+This document gathers the necessary steps to create a new community provider and also guidelines for updating
+the existing ones. You should be aware that providers may have distinctions that may not be covered in
+this guide. The sequence described was designed to follow the most linear flow possible when developing a
+new provider.
+
+Another recommendation that will help you is to look for a provider that works similarly to yours. That way it will
+help you to set up tests and other dependencies.
+
+First, you need to set up your local development environment. See `Contribution Quick Start `_
+if you have not set up your local environment yet. We recommend using ``breeze`` to develop locally. This way you
+will easily be able to have an environment similar to the one used by the GitHub CI workflow.
+
+  .. code-block:: bash
+
+    ./breeze
+
+Using the code above you will set up Docker containers. These containers mount your local code to internal volumes.
+In this way, the changes made in your IDE are already applied to the code inside the container and tests can
+be carried out quickly.
+
+In this how-to guide, our example provider name will be ````.
+When you see this placeholder you must replace it with your provider name.
+
+
+Initial Code and Unit Tests
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Most likely you have developed a version of the provider using some local customization and now you need to
+transfer this code to the Airflow project. The initial code structure that the provider may need is described
+below. Understand that not all providers will need all the components described in this structure.
+If you still have doubts about building your provider, we recommend that you read the initial provider guide and
+open an issue on GitHub so the community can help you.
+
+  .. code-block:: bash
+
+    airflow/
+    ├── providers//
+    │   ├── __init__.py
+    │   ├── example_dags/
+    │   │   ├── __init__.py
+    │   │   └── example_.py
+    │   ├── hooks/
+    │   │   ├── __init__.py
+    │   │   └── .py
+    │   ├── operators/
+    │   │   ├── __init__.py
+    │   │   └── .py
+    │   ├── sensors/
+    │   │   ├── __init__.py
+    │   │   └── .py
+    │   └── transfers/
+    │       ├── __init__.py
+    │       └── .py
+    └── tests/providers//
+        ├── __init__.py
+        ├── hooks/
+        │   ├── __init__.py
+        │   └── test_.py
+        ├── operators/
+        │   ├── __init__.py
+        │   ├── test_.py
+        │   └── test__system.py
+        ├── sensors/
+        │   ├── __init__.py
+        │   └── test_.py
+        └── transfers/
+            ├── __init__.py
+            └── test_.py
+
+Considering that you have already transferred your provider's code to the above structure, it will now be necessary
+to create unit tests for each component you created. In the example below, an environment has already been set up
+using breeze, and unit tests are run for the hook:
+
+  .. code-block:: bash
+
+    root@fafd8d630e46:/opt/airflow# python -m pytest tests/providers//hooks/.py
+
+Update Airflow validation tests
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+There are some tests that Airflow performs to ensure consistency related to the providers.
+
+  .. code-block:: bash
+
+    airflow/scripts/in_container/
+    └── run_install_and_test_provider_packages.sh
+    tests/core/
+    └── test_providers_manager.py
+
+Change the expected number of providers, hooks and connections, if needed, in the ``run_install_and_test_provider_packages.sh`` file.
+
+Add your provider information to the following variables in ``test_providers_manager.py``, as sketched after this list:
+
+- add your provider to the ``ALL_PROVIDERS`` list;
+- add your provider to ``CONNECTIONS_LIST`` if your provider creates a new connection type.
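+For example, a sketch of those two variables in ``tests/core/test_providers_manager.py`` might
+look like this; the exact contents of the lists change over time, and ``new-provider`` is a
+hypothetical package name:
+
+  .. code-block:: python
+
+    ALL_PROVIDERS = [
+        # ... existing packages, kept in alphabetical order ...
+        "apache-airflow-providers-neo4j",
+        "apache-airflow-providers-new-provider",  # your provider
+        # ...
+    ]
+
+    CONNECTIONS_LIST = [
+        # ... existing connection types ...
+        "neo4j",
+        "new-provider",  # only if your provider adds a new connection type
+        # ...
+    ]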
+
+
+Integration tests
+^^^^^^^^^^^^^^^^^
+
+See `Airflow Integration Tests `_
+
+
+Documentation
+^^^^^^^^^^^^^
+
+An important part of building a new provider is the documentation.
+Some documentation steps occur automatically via ``pre-commit``; see `Installing pre-commit guide `_
+
+  .. code-block:: bash
+
+    airflow/
+    ├── INSTALL
+    ├── CONTRIBUTING.rst
+    ├── setup.py
+    ├── docs/
+    │   ├── spelling_wordlist.txt
+    │   ├── apache-airflow/
+    │   │   └── extra-packages-ref.rst
+    │   ├── integration-logos//
+    │   │   └── .png
+    │   └── apache-airflow-providers-/
+    │       ├── index.rst
+    │       ├── commits.rst
+    │       ├── connections.rst
+    │       └── operators/
+    │           └── .rst
+    └── providers/
+        ├── dependencies.json
+        └── /
+            ├── provider.yaml
+            └── CHANGELOG.rst
+
+
+Files automatically updated by pre-commit:
+
+- ``airflow/providers/dependencies.json``
+- ``INSTALL``
+
+Files automatically created when the provider is released:
+
+- ``docs/apache-airflow-providers-/commits.rst``
+- ``/airflow/providers//CHANGELOG``
+
+There is a chance that your provider's name is not a common English word.
+In this case it is necessary to add it to the file ``docs/spelling_wordlist.txt``. This file begins with capitalized
+words; lowercase words follow in the second block.
+
+  .. code-block:: bash
+
+    Namespace
+    Neo4j
+    Nextdoor
+    (new line)
+    Nones
+    NotFound
+    Nullable
+    ...
+    neo4j
+    neq
+    networkUri
+    (new line)
+    nginx
+    nobr
+    nodash
+
+Add your provider dependencies to the **PROVIDERS_REQUIREMENTS** variable in ``setup.py``. If your provider doesn't have
+any dependencies, add an empty list.
+
+  .. code-block:: python
+
+    PROVIDERS_REQUIREMENTS: Dict[str, List[str]] = {
+        ...
+        'microsoft.winrm': winrm,
+        'mongo': mongo,
+        'mysql': mysql,
+        'neo4j': neo4j,
+        '': [],
+        'odbc': odbc,
+        ...
+    }
+
+In ``CONTRIBUTING.rst`` add:
+
+- your provider name in the list in the **Extras** section
+- your provider dependencies in the **Provider Packages** section table, only if your provider has external dependencies.
+
+In the ``docs/apache-airflow-providers-/connections.rst``:
+
+- add information on how to configure a connection for your provider.
+
+In the ``docs/apache-airflow-providers-/operators/.rst``:
+
+- add information on how to use the Operator. It's important to add examples and additional information if your Operator has extra parameters.
+
+  .. code-block:: RST
+
+    .. _howto/operator:NewProviderOperator:
+
+    NewProviderOperator
+    ===================
+
+    Use the :class:`~airflow.providers..operators.NewProviderOperator` to do something
+    amazing with Airflow!
+
+    Using the Operator
+    ^^^^^^^^^^^^^^^^^^
+
+    The NewProviderOperator requires a ``connection_id`` and this other awesome parameter.
+    You can see an example below:
+
+    .. exampleinclude:: /../../airflow/providers//example_dags/example_.py
+        :language: python
+        :start-after: [START howto_operator_]
+        :end-before: [END howto_operator_]
+
+
+In the ``docs/apache-airflow-providers-new_provider/index.rst``:
+
+- add all information about the purpose of your provider. It is recommended to check with another provider to help you complete this document as best as possible.
+
+In the ``airflow/providers//provider.yaml`` add your provider's information:
+
+  .. code-block:: yaml
+
+    package-name: apache-airflow-providers-
+    name: 
+    description: |
+        ` `__
+    versions:
+      - 1.0.0
+
+    integrations:
+      - integration-name: 
+        external-doc-url: https://www.example.io/
+        logo: /integration-logos//.png
+        how-to-guide:
+          - /docs/apache-airflow-providers-/operators/.rst
+        tags: [service]
+
+    operators:
+      - integration-name: 
+        python-modules:
+          - airflow.providers..operators.
+
+    hooks:
+      - integration-name: 
+        python-modules:
+          - airflow.providers..hooks.
+
+    sensors:
+      - integration-name: 
+        python-modules:
+          - airflow.providers..sensors.
+
+After changing and creating these files you can build the documentation locally. The two commands below
+accomplish this: the first builds your provider's documentation, and the second ensures that the main Airflow
+documentation that involves some steps with the providers still works as well.
+
+  .. code-block:: bash
+
+    ./breeze build-docs -- --package-filter apache-airflow-providers-<NEW_PROVIDER>
+    ./breeze build-docs -- --package-filter apache-airflow
+
+How-to Update a community provider
+----------------------------------
+
+See `Provider packages versioning `_
diff --git a/docs/apache-airflow-providers/index.rst b/docs/apache-airflow-providers/index.rst
index 17519219a4775..43c168714cb29 100644
--- a/docs/apache-airflow-providers/index.rst
+++ b/docs/apache-airflow-providers/index.rst
@@ -51,8 +51,13 @@ provider packages are automatically documented in the release notes of every pro
 .. note::
     We also provide ``apache-airflow-backport-providers`` packages that can be installed for Airflow 1.10.
-    Those are the same providers as for 2.0 but automatically back-ported to work for Airflow 1.10. Those
-    backport providers are going to be updated and released for 3 months after Apache Airflow 2.0 release.
+    Those are the same providers as for 2.0 but automatically back-ported to work for Airflow 1.10. The
+    last release of backport providers was done on March 17, 2021.
+
+Creating and maintaining community providers
+""""""""""""""""""""""""""""""""""""""""""""
+
+See :doc:`howto/create-update-providers` for more information.
 
 Provider packages functionality
@@ -169,15 +174,10 @@ Using Backport Providers in Airflow 1.10
 I would like to upgrade the provider package. If I don't need to upgrade the Airflow version anymore,
 how do I know that this provider version is compatible with my Airflow version?**
 
-Backport Provider Packages (those are needed in 1.10.* Airflow series) are going to be released for
-3 months after the release. We will stop releasing new updates to the backport providers afterwards.
-You will be able to continue using the provider packages that you already use and unless you need to
-get some new release of the provider that is only released for 2.0, there is no need to upgrade
-Airflow. This might happen if for example the provider is migrated to use newer version of client
-libraries or when new features/operators/hooks are added to it. Those changes will only be
-backported to 1.10.* compatible backport providers up to 3 months after releasing Airflow 2.0.
-Also we expect more providers, changes and fixes added to the existing providers to come after the
-3 months pass. Eventually you will have to upgrade to Airflow 2.0 if you would like to make use of those.
+Backport Providers are compatible with Airflow 1.10, but they stopped being released on
+March 17, 2021. Since then, no new changes to the Airflow 2.0 providers will be released
+as backport packages. It's high time to upgrade to Airflow 2.0.
+
 When it comes to compatibility of providers with different Airflow 2 versions, each provider
 package will keep its own dependencies, and while we expect those providers to be generally
 backwards-compatible, particular versions of particular providers might introduce dependencies on
@@ -247,13 +247,13 @@ Example ``myproviderpackage/somemodule.py``:
 **How do provider packages work under the hood?**
 
-When running airflow with your provider package, there will be (at least) three components to your airflow installation:
+When running Airflow with your provider package, there will be (at least) three components to your airflow installation:
 
 * The installation itself (for example, a ``venv`` where you installed airflow with ``pip install apache-airflow``)
   together with the related files (e.g. ``dags`` folder)
 * The ``apache-airflow`` package
 * Your own ``myproviderpackage`` package that is independent of ``apache-airflow`` or your airflow installation, which
-  can be a local Python package (that you install via ``pip pip install -e /path/to/my-package``), a normal pip package
+  can be a local Python package (that you install via ``pip install -e /path/to/my-package``), a normal pip package
   (``pip install myproviderpackage``), or any other type of Python package
 
 In the ``myproviderpackage`` package you need to add the entry point and provide the appropriate metadata as described above.
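+
+For example, a minimal sketch of such an entry point (the package, module and function names here are only
+placeholders) could look as follows:
+
+.. code-block:: python
+
+    # myproviderpackage/__init__.py
+    def get_provider_info():
+        # Minimal metadata; the same keys as in a community provider.yaml apply.
+        return {
+            "package-name": "myproviderpackage",
+            "name": "My Provider Package",
+            "description": "An example third-party provider.",
+            "hook-class-names": ["myproviderpackage.somemodule.MyHook"],
+            "versions": ["1.0.0"],
+        }
+
+    # In setup.py / setup.cfg this function is then registered as an entry point, e.g.:
+    # entry_points={"apache_airflow_provider": ["provider_info=myproviderpackage:get_provider_info"]}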
 
@@ -301,6 +301,12 @@ The Community only accepts providers that are generic enough, are well documente
 and with capabilities of being tested by people in the community. So we might not always be in the
 position to accept such contributions.
 
+
+Once you think that your provider matches the expectations above, you can read
+:doc:`howto/create-update-providers` to check all prerequisites for a new
+community provider and discuss it at the `Devlist `_.
+
+
 However, in case you have your own, specific provider, which you can maintain on your own or by your
 team, you are free to publish the providers in whatever form you find appropriate.
 The custom and community-managed providers have exactly the same capabilities.
@@ -328,3 +334,4 @@ Content
     Packages
     Operators and hooks
+    Howto create and update community providers
diff --git a/docs/apache-airflow/backport-providers.rst b/docs/apache-airflow/backport-providers.rst
index 016b30f38a4e1..493d7f61dfb0e 100644
--- a/docs/apache-airflow/backport-providers.rst
+++ b/docs/apache-airflow/backport-providers.rst
@@ -19,6 +19,13 @@
 Backport Providers
 ------------------
 
+.. warning::
+
+   We stopped releasing the backport providers on March 17, 2021. Since then, no new changes to
+   the Airflow 2.0 providers will be released as backport packages.
+   It's high time to upgrade to Airflow 2.0.
+
+
 Context: Airflow 2.0 operators, hooks, and secrets
 ''''''''''''''''''''''''''''''''''''''''''''''''''
 
@@ -34,7 +41,7 @@ with the constraint that those packages can only be used in Python 3.6+ environm
 Installing Airflow 2.0 operators in Airflow 1.10
 ''''''''''''''''''''''''''''''''''''''''''''''''
 
-We released backport packages that can be installed for older Airflow versions. These backport packages will be
+We released Backport provider packages that can be installed for older Airflow versions. These packages will be
 released more frequently compared to the main Airflow 1.10.* releases. You will not have to upgrade your
 Airflow version to use those packages.
You can find those packages on @@ -58,7 +65,11 @@ In some rare cases the new operators will not be fully backwards compatible, you about those cases in `UPDATING.md `_ where we explained all such cases. -Switching early to the Airflow 2.0 operators while still running Airflow 1.10.x will make your migration much easier. +Switching early to the Airflow 2.0 operators while still running Airflow 1.10.x will make your +migration much easier. + +Note that as of 17 March 2021 the backport providers are not released any more, so you might find additional +differences accumulating over time in the newer versions of the providers. Installing backport packages ''''''''''''''''''''''''''''' @@ -87,7 +98,7 @@ Backport providers only work when they are installed in the same namespace as th This is majority of cases when you simply run pip install - it installs all packages in the same folder (usually in ``/usr/local/lib/pythonX.Y/site-packages``). But when you install the ``apache-airflow`` and ``apache-airflow-backport-package-*`` using different methods (for example using ``pip install -e .`` or -``pip install --user`` they might be installed in different namespaces. +``pip install --user``) they might be installed in different namespaces. If that's the case, the provider packages will not be importable (the error in such case is ``ModuleNotFoundError: No module named 'airflow.providers'``). diff --git a/docs/apache-airflow/concepts.rst b/docs/apache-airflow/concepts.rst index 346f6c008dd36..3de060b6445a3 100644 --- a/docs/apache-airflow/concepts.rst +++ b/docs/apache-airflow/concepts.rst @@ -99,7 +99,7 @@ logical workflow. Scope ----- -Airflow will load any ``DAG`` object it can import from a DAGfile. Critically, +Airflow will load any ``DAG`` object it can import from a DAG file. Critically, that means the DAG must appear in ``globals()``. Consider the following two DAGs. Only ``dag_1`` will be loaded; the other one only appears in a local scope. @@ -134,7 +134,7 @@ any of its operators. This makes it easy to apply a common parameter to many ope dag = DAG('my_dag', default_args=default_args) op = DummyOperator(task_id='dummy', dag=dag) - print(op.owner) # Airflow + print(op.owner) # airflow .. _concepts:context_manager: @@ -160,9 +160,9 @@ TaskFlow API .. versionadded:: 2.0.0 Airflow 2.0 adds a new style of authoring dags called the TaskFlow API which removes a lot of the boilerplate -around creating PythonOperators, managing dependencies between task and accessing XCom values. (During +around creating PythonOperators, managing dependencies between task and accessing XCom values (During development this feature was called "Functional DAGs", so if you see or hear any references to that, it's the -same thing) +same thing). Outputs and inputs are sent between tasks using :ref:`XCom values `. In addition, you can wrap functions as tasks using the :ref:`task decorator `. Airflow will also automatically @@ -221,7 +221,7 @@ Example DAG with decorator: :end-before: [END dag_decorator_usage] .. note:: Note that Airflow will only load DAGs that appear in ``globals()`` as noted in :ref:`scope section `. - This means you need to make sure to have a variable for your returned DAG is in the module scope. + This means you need to make sure to have a variable for your returned DAG in the module scope. Otherwise Airflow won't detect your decorated DAG. .. 
_concepts:executor_config: @@ -229,7 +229,7 @@ Example DAG with decorator: ``executor_config`` =================== -The ``executor_config`` is an argument placed into operators that allow airflow users to override tasks +The ``executor_config`` is an argument placed into operators that allow Airflow users to override tasks before launch. Currently this is primarily used by the :class:`KubernetesExecutor`, but will soon be available for other overrides. @@ -252,7 +252,7 @@ execution_date The ``execution_date`` is the *logical* date and time which the DAG Run, and its task instances, are running for. This allows task instances to process data for the desired *logical* date & time. -While a task_instance or DAG run might have an *actual* start date of now, +While a task instance or DAG run might have an *actual* start date of now, their *logical* date might be 3 months ago because we are busy reloading something. In the prior example the ``execution_date`` was 2016-01-01 for the first DAG Run and 2016-01-02 for the second. @@ -366,7 +366,8 @@ using ``@task`` decorator. .. code-block:: python - from airflow.operators.python import task, get_current_context + from airflow.decorators import task + from airflow.operators.python import get_current_context @task def my_task(): @@ -454,7 +455,7 @@ This is a subtle but very important point: in general, if two operators need to share information, like a filename or small amount of data, you should consider combining them into a single operator. If it absolutely can't be avoided, Airflow does have a feature for operator cross-communication called XCom that is -described in the section :ref:`XComs ` +described in the section :ref:`XComs `. Airflow provides many built-in operators for many common tasks, including: @@ -464,7 +465,7 @@ Airflow provides many built-in operators for many common tasks, including: There are also other, commonly used operators that are installed together with airflow automatically, by pre-installing some :doc:`apache-airflow-providers:index` packages (they are always available no -matter which extras you chose when installing Apache Airflow: +matter which extras you chose when installing Apache Airflow): - :class:`~airflow.providers.http.operators.http.SimpleHttpOperator` - sends an HTTP request - :class:`~airflow.providers.sqlite.operators.sqlite.SqliteOperator` - SQLite DB operator @@ -484,7 +485,7 @@ Some examples of popular operators are: - :class:`~airflow.providers.docker.operators.docker.DockerOperator` - :class:`~airflow.providers.apache.hive.operators.hive.HiveOperator` - :class:`~airflow.providers.amazon.aws.operators.s3_file_transform.S3FileTransformOperator` -- :class:`~airflow.providers.mysql.transfers.presto_to_mysql.PrestoToMySqlOperator`, +- :class:`~airflow.providers.mysql.transfers.presto_to_mysql.PrestoToMySqlOperator` - :class:`~airflow.providers.slack.operators.slack.SlackAPIOperator` But there are many, many more - you can see the list of those by following the providers documentation @@ -530,7 +531,7 @@ There are currently 3 different modes for how a sensor operates: How to use: -For ``poke|schedule`` mode, you can configure them at the task level by supplying the ``mode`` parameter, +For ``poke|reschedule`` mode, you can configure them at the task level by supplying the ``mode`` parameter, i.e. ``S3KeySensor(task_id='check-bucket', mode='reschedule', ...)``. 
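+
+For example (a sketch, assuming the Amazon provider is installed and that ``my-bucket``/``my-key`` stand in
+for your own bucket and key), a task-level ``reschedule`` sensor could look like this:
+
+.. code-block:: python
+
+    from airflow import DAG
+    from airflow.providers.amazon.aws.sensors.s3_key import S3KeySensor
+    from airflow.utils.dates import days_ago
+
+    with DAG("example_reschedule_sensor", schedule_interval=None, start_date=days_ago(1)) as dag:
+        # In reschedule mode the worker slot is released between pokes, so a long
+        # wait does not keep a worker busy for the whole waiting period.
+        wait_for_key = S3KeySensor(
+            task_id="check-bucket",
+            bucket_key="s3://my-bucket/my-key",  # example bucket/key, adjust to your setup
+            mode="reschedule",
+            poke_interval=300,  # poke every 5 minutes
+        )
+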
For ``smart sensor``, you need to configure it in ``airflow.cfg``, for example: @@ -545,7 +546,7 @@ For ``smart sensor``, you need to configure it in ``airflow.cfg``, for example: shards = 5 sensors_enabled = NamedHivePartitionSensor, MetastorePartitionSensor -For more information on how to configure ``smart-sensor`` and its architecture, see: +For more information on how to configure ``smart sensor`` and its architecture, see: :doc:`Smart Sensor Architecture and Configuration` DAG Assignment @@ -655,11 +656,11 @@ Relationship Builders *Moved in Airflow 2.0* -In Airflow 2.0 those two methods moved from ``airflow.utils.helpers`` to ``airflow.models.baseoperator``. - ``chain`` and ``cross_downstream`` function provide easier ways to set relationships between operators in specific situation. +In Airflow 2.0 those two methods moved from ``airflow.utils.helpers`` to ``airflow.models.baseoperator``. + When setting a relationship between two lists, if we want all operators in one list to be upstream to all operators in the other, we cannot use a single bitshift composition. Instead we have to split one of the lists: @@ -736,7 +737,7 @@ be conceptualized like this: - Operator: A class that acts as a template for carrying out some work. - Task: Defines work by implementing an operator, written in Python. - Task Instance: An instance of a task - that has been assigned to a DAG and has a - state associated with a specific DAG run (i.e for a specific execution_date). + state associated with a specific DAG run (i.e. for a specific execution_date). - execution_date: The logical date and time for a DAG Run and its Task Instances. By combining ``DAGs`` and ``Operators`` to create ``TaskInstances``, you can @@ -1321,8 +1322,8 @@ In case of DAG and task policies users may raise :class:`~airflow.exceptions.Air to prevent a DAG from being imported or prevent a task from being executed if the task is not compliant with users' check. -Please note, cluster policy will have precedence over task attributes defined in DAG meaning -if ``task.sla`` is defined in dag and also mutated via cluster policy then later will have precedence. +Please note, cluster policy will have precedence over task attributes defined in DAG meaning that +if ``task.sla`` is defined in dag and also mutated via cluster policy then the latter will have precedence. In next sections we show examples of each type of cluster policy. @@ -1393,8 +1394,8 @@ Documentation & Notes ===================== It's possible to add documentation or notes to your DAGs & task objects that -become visible in the web interface ("Graph View" & "Tree View" for DAGs, "Task Details" for -tasks). There are a set of special task attributes that get rendered as rich +become visible in the web interface ("Graph View" & "Tree View" for DAGs, "Task Instance Details" +for tasks). There are a set of special task attributes that get rendered as rich content if defined: ========== ================ @@ -1429,7 +1430,7 @@ to the related tasks in Airflow. """ This content will get rendered as markdown respectively in the "Graph View" and -"Task Details" pages. +"Task Instance Details" pages. .. _jinja-templating: @@ -1634,7 +1635,7 @@ A ``.airflowignore`` file specifies the directories or files in ``DAG_FOLDER`` or ``PLUGINS_FOLDER`` that Airflow should intentionally ignore. 
Each line in ``.airflowignore`` specifies a regular expression pattern, and directories or files whose names (not DAG id) match any of the patterns -would be ignored (under the hood,``Pattern.search()`` is used to match the pattern). +would be ignored (under the hood, ``Pattern.search()`` is used to match the pattern). Overall it works like a ``.gitignore`` file. Use the ``#`` character to indicate a comment; all characters on a line following a ``#`` will be ignored. diff --git a/docs/apache-airflow/dag-run.rst b/docs/apache-airflow/dag-run.rst index 5fc2426320de2..07529904617b6 100644 --- a/docs/apache-airflow/dag-run.rst +++ b/docs/apache-airflow/dag-run.rst @@ -22,7 +22,7 @@ A DAG Run is an object representing an instantiation of the DAG in time. Each DAG may or may not have a schedule, which informs how DAG Runs are created. ``schedule_interval`` is defined as a DAG argument, which can be passed a `cron expression `_ as -a ``str``, a ``datetime.timedelta`` object, or one of of the following cron "presets". +a ``str``, a ``datetime.timedelta`` object, or one of the following cron "presets". .. tip:: You can use an online editor for CRON expressions such as `Crontab guru `_ @@ -80,7 +80,7 @@ An Airflow DAG with a ``start_date``, possibly an ``end_date``, and a ``sched series of intervals which the scheduler turns into individual DAG Runs and executes. The scheduler, by default, will kick off a DAG Run for any interval that has not been run since the last execution date (or has been cleared). This concept is called Catchup. -If your DAG is written to handle its catchup (i.e., not limited to the interval, but instead to ``Now`` for instance.), +If your DAG is not written to handle its catchup (i.e., not limited to the interval, but instead to ``Now`` for instance.), then you will want to turn catchup off. This can be done by setting ``catchup = False`` in DAG or ``catchup_by_default = False`` in the configuration file. When turned off, the scheduler creates a DAG run only for the latest interval. @@ -208,10 +208,10 @@ Example of a parameterized DAG: .. code-block:: python from airflow import DAG - from airflow.operators.bash_operator import BashOperator + from airflow.operators.bash import BashOperator from airflow.utils.dates import days_ago - dag = DAG("example_parametrized_dag", schedule_interval=None, start_date=days_ago(2)) + dag = DAG("example_parameterized_dag", schedule_interval=None, start_date=days_ago(2)) parameterized_task = BashOperator( task_id='parameterized_task', @@ -227,7 +227,7 @@ Using CLI .. code-block:: bash - airflow dags trigger --conf '{"conf1": "value1"}' example_parametrized_dag + airflow dags trigger --conf '{"conf1": "value1"}' example_parameterized_dag Using UI ^^^^^^^^^^ diff --git a/docs/apache-airflow/dag-serialization.rst b/docs/apache-airflow/dag-serialization.rst index c883f8b79da7d..cf5b7a08f49af 100644 --- a/docs/apache-airflow/dag-serialization.rst +++ b/docs/apache-airflow/dag-serialization.rst @@ -41,7 +41,7 @@ as :class:`~airflow.models.serialized_dag.SerializedDagModel` model. The Webserver now instead of having to parse the DAG file again, reads the serialized DAGs in JSON, de-serializes them and create the DagBag and uses it to show in the UI. 
And the Scheduler does not need the actual DAG for making Scheduling decisions, -instead of using the DAG files, we use Serialized DAGs that contain all the information needing to +instead of using the DAG files, we use Serialized DAGs that contain all the information needed to schedule the DAGs from Airflow 2.0.0 (this was done as part of :ref:`Scheduler HA `). One of the key features that is implemented as the part of DAG Serialization is that diff --git a/docs/apache-airflow/executor/kubernetes.rst b/docs/apache-airflow/executor/kubernetes.rst index 9b774cf08dc95..61d13f410ef15 100644 --- a/docs/apache-airflow/executor/kubernetes.rst +++ b/docs/apache-airflow/executor/kubernetes.rst @@ -113,23 +113,25 @@ create a V1pod with a single container, and overwrite the fields as follows: :start-after: [START task_with_volume] :end-before: [END task_with_volume] -Note that volume mounts environment variables, ports, and devices will all be extended instead of overwritten. +Note that volume mounts, environment variables, ports, and devices will all be extended instead of overwritten. To add a sidecar container to the launched pod, create a V1pod with an empty first container with the name ``base`` and a second container containing your desired sidecar. .. exampleinclude:: /../../airflow/example_dags/example_kubernetes_executor_config.py :language: python + :dedent: 8 :start-after: [START task_with_sidecar] :end-before: [END task_with_sidecar] You can also create custom ``pod_template_file`` on a per-task basis so that you can recycle the same base values between multiple tasks. -This will replace the default ``pod_template_file`` named in the airflow.cfg and then override that template using the ``pod_override_spec``. +This will replace the default ``pod_template_file`` named in the airflow.cfg and then override that template using the ``pod_override``. Here is an example of a task with both features: .. exampleinclude:: /../../airflow/example_dags/example_kubernetes_executor_config.py :language: python + :dedent: 8 :start-after: [START task_with_template] :end-before: [END task_with_template] diff --git a/docs/apache-airflow/extra-packages-ref.rst b/docs/apache-airflow/extra-packages-ref.rst index 32007edce3112..b902868f5c164 100644 --- a/docs/apache-airflow/extra-packages-ref.rst +++ b/docs/apache-airflow/extra-packages-ref.rst @@ -20,17 +20,7 @@ Reference for package extras Here's the list of all the :ref:`extra dependencies `. -Most of the the extras result in installing additional provider packages. - -The entries with ``*`` in the ``Providers`` column indicate that one or more provider packages will be -installed automatically when those extras are used. - -For ``provider`` extras - they usually install single provider package. - -For ``bundle`` extras - they usually install a group of extras (for example ``all`` or ``devel_all`` -or ``all_dbs``), and thus it will include two or more provider extras. - -The entries with ``*`` in the ``Preinstalled`` column indicate that those extras (with providers) are always +The entries with ``*`` in the ``Preinstalled`` column indicate that those extras (providers) are always pre-installed when Airflow is installed. .. note:: @@ -40,226 +30,279 @@ pre-installed when Airflow is installed. providers directly via Airflow sources. This variable is automatically set in ``Breeze`` development environment. Setting this variable is not needed in editable mode (``pip install -e``). 
- -**Apache Software extras:** - -Those are extras that add dependencies needed for integration with other Apache projects. - -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+ -| extra | install command | enables | Providers | -+=====================+=====================================================+======================================================================+===========+ -| apache.atlas | ``pip install 'apache-airflow[apache.atlas]'`` | Apache Atlas to use Data Lineage feature | | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+ -| apache.beam | ``pip install 'apache-airflow[apache.beam]'`` | Apache Beam operators & hooks | | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+ -| apache.cassandra | ``pip install 'apache-airflow[apache.cassandra]'`` | Cassandra related operators & hooks | * | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+ -| apache.druid | ``pip install 'apache-airflow[apache.druid]'`` | Druid related operators & hooks | * | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+ -| apache.hdfs | ``pip install 'apache-airflow[apache.hdfs]'`` | HDFS hooks and operators | * | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+ -| apache.hive | ``pip install 'apache-airflow[apache.hive]'`` | All Hive related operators | * | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+ -| apache.kylin | ``pip install 'apache-airflow[apache.kylin]'`` | All Kylin related operators & hooks | * | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+ -| apache.livy | ``pip install 'apache-airflow[apache.livy]'`` | All Livy related operators, hooks & sensors | * | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+ -| apache.pig | ``pip install 'apache-airflow[apache.pig]'`` | All Pig related operators & hooks | * | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+ -| apache.pinot | ``pip install 'apache-airflow[apache.pinot]'`` | All Pinot related hooks | * | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+ -| apache.spark | ``pip install 'apache-airflow[apache.spark]'`` | All Spark related operators & hooks | * | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+ -| apache.sqoop | ``pip install 'apache-airflow[apache.sqoop]'`` | All Sqoop related operators & hooks | * | 
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+ -| apache.webhdfs | ``pip install 'apache-airflow[apache.webhdfs]'`` | HDFS hooks and operators | | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+ - - -**Services extras:** +Core Airflow extras +------------------- + +Those are core airflow extras that extend capabilities of core Airflow. They usually do not install provider +packages (with the exception of ``celery`` and ``cncf.kubernetes`` extras), they just install necessary +python dependencies for the provided package. + ++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+ +| extra | install command | enables | ++=====================+=====================================================+============================================================================+ +| async | ``pip install 'apache-airflow[async]'`` | Async worker classes for Gunicorn | ++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+ +| celery | ``pip install 'apache-airflow[celery]'`` | CeleryExecutor (also installs the celery provider package!) | ++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+ +| cgroups | ``pip install 'apache-airflow[cgroups]'`` | Needed To use CgroupTaskRunner | ++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+ +| cncf.kubernetes | ``pip install 'apache-airflow[cncf.kubernetes]'`` | Kubernetes Executor (also installs the kubernetes provider package) | ++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+ +| dask | ``pip install 'apache-airflow[dask]'`` | DaskExecutor | ++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+ +| github_enterprise | ``pip install 'apache-airflow[github_enterprise]'`` | GitHub Enterprise auth backend | ++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+ +| google_auth | ``pip install 'apache-airflow[google_auth]'`` | Google auth backend | ++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+ +| kerberos | ``pip install 'apache-airflow[kerberos]'`` | Kerberos integration for Kerberized services (Hadoop, Presto, Trino) | ++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+ +| ldap | ``pip install 'apache-airflow[ldap]'`` | LDAP authentication for users | ++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+ +| password | ``pip install 'apache-airflow[password]'`` | Password authentication for users | 
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
+| rabbitmq            | ``pip install 'apache-airflow[rabbitmq]'``          | RabbitMQ support as a Celery backend                                       |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
+| sentry              | ``pip install 'apache-airflow[sentry]'``            | Sentry service for application logging and monitoring                      |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
+| statsd              | ``pip install 'apache-airflow[statsd]'``            | Needed by StatsD metrics                                                   |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
+| virtualenv          | ``pip install 'apache-airflow[virtualenv]'``        | Running python tasks in local virtualenv                                   |
++---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+
+
+
+Providers extras
+----------------
+
+These provider extras are simply convenience extras that install the corresponding provider packages, so that
+you can install a provider together with its necessary dependencies in a single command, which allows ``pip``
+to resolve any conflicting dependencies. This is extremely useful for a first-time installation where you want
+to repeatably install versions of dependencies which are 'valid' for both Airflow and the installed providers.
+
+For example, the command below will install:
+
+  * apache-airflow
+  * apache-airflow-providers-amazon
+  * apache-airflow-providers-google
+  * apache-airflow-providers-apache-spark
+
+with a consistent set of dependencies based on the constraint files provided by the Airflow Community at the
+time version 2.0.1 was released.
+
+.. code-block:: bash
+
+    pip install apache-airflow[google,amazon,apache.spark]==2.0.1 \
+      --constraint "https://raw.githubusercontent.com/apache/airflow/constraints-2.0.1/constraints-3.6.txt"
+
+Note that this will install providers in the versions that were released at the time of the Airflow 2.0.1
+release. You can later upgrade those providers manually if you want to use their latest versions.
+
+
+Apache Software extras
+======================
+
+Those are extras that add dependencies needed for integration with other Apache projects (note that ``apache.atlas`` and
+``apache.webhdfs`` do not have their own providers - they only install additional libraries that can be used in
+custom bash/python providers).
+ ++---------------------+-----------------------------------------------------+------------------------------------------------+ +| extra | install command | enables | ++=====================+=====================================================+================================================+ +| apache.atlas | ``pip install 'apache-airflow[apache.atlas]'`` | Apache Atlas | ++---------------------+-----------------------------------------------------+------------------------------------------------+ +| apache.beam | ``pip install 'apache-airflow[apache.beam]'`` | Apache Beam operators & hooks | ++---------------------+-----------------------------------------------------+------------------------------------------------+ +| apache.cassandra | ``pip install 'apache-airflow[apache.cassandra]'`` | Cassandra related operators & hooks | ++---------------------+-----------------------------------------------------+------------------------------------------------+ +| apache.druid | ``pip install 'apache-airflow[apache.druid]'`` | Druid related operators & hooks | ++---------------------+-----------------------------------------------------+------------------------------------------------+ +| apache.hdfs | ``pip install 'apache-airflow[apache.hdfs]'`` | HDFS hooks and operators | ++---------------------+-----------------------------------------------------+------------------------------------------------+ +| apache.hive | ``pip install 'apache-airflow[apache.hive]'`` | All Hive related operators | ++---------------------+-----------------------------------------------------+------------------------------------------------+ +| apache.kylin | ``pip install 'apache-airflow[apache.kylin]'`` | All Kylin related operators & hooks | ++---------------------+-----------------------------------------------------+------------------------------------------------+ +| apache.livy | ``pip install 'apache-airflow[apache.livy]'`` | All Livy related operators, hooks & sensors | ++---------------------+-----------------------------------------------------+------------------------------------------------+ +| apache.pig | ``pip install 'apache-airflow[apache.pig]'`` | All Pig related operators & hooks | ++---------------------+-----------------------------------------------------+------------------------------------------------+ +| apache.pinot | ``pip install 'apache-airflow[apache.pinot]'`` | All Pinot related hooks | ++---------------------+-----------------------------------------------------+------------------------------------------------+ +| apache.spark | ``pip install 'apache-airflow[apache.spark]'`` | All Spark related operators & hooks | ++---------------------+-----------------------------------------------------+------------------------------------------------+ +| apache.sqoop | ``pip install 'apache-airflow[apache.sqoop]'`` | All Sqoop related operators & hooks | ++---------------------+-----------------------------------------------------+------------------------------------------------+ +| apache.webhdfs | ``pip install 'apache-airflow[apache.webhdfs]'`` | HDFS hooks and operators | ++---------------------+-----------------------------------------------------+------------------------------------------------+ + + +External Services extras +======================== Those are extras that add dependencies needed for integration with external services - either cloud based or on-premises. 
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+ -| extra | install command | enables | Providers | -+=====================+=====================================================+============================================================================+===========+ -| amazon | ``pip install 'apache-airflow[amazon]'`` | Amazon Web Services | * | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+ -| azure | ``pip install 'apache-airflow[microsoft.azure]'`` | Microsoft Azure | * | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+ -| cloudant | ``pip install 'apache-airflow[cloudant]'`` | Cloudant hook | * | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+ -| databricks | ``pip install 'apache-airflow[databricks]'`` | Databricks hooks and operators | * | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+ -| datadog | ``pip install 'apache-airflow[datadog]'`` | Datadog hooks and sensors | * | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+ -| dask | ``pip install 'apache-airflow[dask]'`` | DaskExecutor | | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+ -| dingding | ``pip install 'apache-airflow[dingding]'`` | Dingding hooks and sensors | * | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+ -| discord | ``pip install 'apache-airflow[discord]'`` | Discord hooks and sensors | * | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+ -| facebook | ``pip install 'apache-airflow[facebook]'`` | Facebook Social | * | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+ -| google | ``pip install 'apache-airflow[google]'`` | Google Cloud | * | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+ -| github_enterprise | ``pip install 'apache-airflow[github_enterprise]'`` | GitHub Enterprise auth backend | | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+ -| google_auth | ``pip install 'apache-airflow[google_auth]'`` | Google auth backend | | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+ -| hashicorp | ``pip install 'apache-airflow[hashicorp]'`` | Hashicorp Services (Vault) | * | 
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+ -| jira | ``pip install 'apache-airflow[jira]'`` | Jira hooks and operators | * | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+ -| opsgenie | ``pip install 'apache-airflow[opsgenie]'`` | OpsGenie hooks and operators | * | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+ -| pagerduty | ``pip install 'apache-airflow[pagerduty]'`` | Pagerduty hook | * | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+ -| plexus | ``pip install 'apache-airflow[plexus]'`` | Plexus service of CoreScientific.com AI platform | * | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+ -| qubole | ``pip install 'apache-airflow[qubole]'`` | Enable QDS (Qubole Data Service) support | * | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+ -| salesforce | ``pip install 'apache-airflow[salesforce]'`` | Salesforce hook | * | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+ -| sendgrid | ``pip install 'apache-airflow[sendgrid]'`` | Send email using sendgrid | * | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+ -| segment | ``pip install 'apache-airflow[segment]'`` | Segment hooks and sensors | * | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+ -| sentry | ``pip install 'apache-airflow[sentry]'`` | Sentry service for application logging and monitoring | | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+ -| slack | ``pip install 'apache-airflow[slack]'`` | Slack hooks and operators | * | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+ -| snowflake | ``pip install 'apache-airflow[snowflake]'`` | Snowflake hooks and operators | * | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+ -| telegram | ``pip install 'apache-airflow[telegram]'`` | Telegram hooks and operators | * | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+ -| vertica | ``pip install 'apache-airflow[vertica]'`` | Vertica hook support as an Airflow backend | * | 
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+ -| yandex | ``pip install 'apache-airflow[yandex]'`` | Yandex.cloud hooks and operators | * | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+ -| zendesk | ``pip install 'apache-airflow[zendesk]'`` | Zendesk hooks | * | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------------+-----------+ - - -**Software extras:** ++---------------------+-----------------------------------------------------+-----------------------------------------------------+ +| extra | install command | enables | ++=====================+=====================================================+=====================================================+ +| airbyte | ``pip install 'apache-airflow[airbyte]'`` | Airbyte hooks and operators | ++---------------------+-----------------------------------------------------+-----------------------------------------------------+ +| amazon | ``pip install 'apache-airflow[amazon]'`` | Amazon Web Services | ++---------------------+-----------------------------------------------------+-----------------------------------------------------+ +| azure | ``pip install 'apache-airflow[microsoft.azure]'`` | Microsoft Azure | ++---------------------+-----------------------------------------------------+-----------------------------------------------------+ +| cloudant | ``pip install 'apache-airflow[cloudant]'`` | Cloudant hook | ++---------------------+-----------------------------------------------------+-----------------------------------------------------+ +| databricks | ``pip install 'apache-airflow[databricks]'`` | Databricks hooks and operators | ++---------------------+-----------------------------------------------------+-----------------------------------------------------+ +| datadog | ``pip install 'apache-airflow[datadog]'`` | Datadog hooks and sensors | ++---------------------+-----------------------------------------------------+-----------------------------------------------------+ +| dingding | ``pip install 'apache-airflow[dingding]'`` | Dingding hooks and sensors | ++---------------------+-----------------------------------------------------+-----------------------------------------------------+ +| discord | ``pip install 'apache-airflow[discord]'`` | Discord hooks and sensors | ++---------------------+-----------------------------------------------------+-----------------------------------------------------+ +| facebook | ``pip install 'apache-airflow[facebook]'`` | Facebook Social | ++---------------------+-----------------------------------------------------+-----------------------------------------------------+ +| google | ``pip install 'apache-airflow[google]'`` | Google Cloud | ++---------------------+-----------------------------------------------------+-----------------------------------------------------+ +| hashicorp | ``pip install 'apache-airflow[hashicorp]'`` | Hashicorp Services (Vault) | ++---------------------+-----------------------------------------------------+-----------------------------------------------------+ +| jira | ``pip install 'apache-airflow[jira]'`` | Jira hooks and operators | 
++---------------------+-----------------------------------------------------+-----------------------------------------------------+ +| opsgenie | ``pip install 'apache-airflow[opsgenie]'`` | OpsGenie hooks and operators | ++---------------------+-----------------------------------------------------+-----------------------------------------------------+ +| pagerduty | ``pip install 'apache-airflow[pagerduty]'`` | Pagerduty hook | ++---------------------+-----------------------------------------------------+-----------------------------------------------------+ +| plexus | ``pip install 'apache-airflow[plexus]'`` | Plexus service of CoreScientific.com AI platform | ++---------------------+-----------------------------------------------------+-----------------------------------------------------+ +| qubole | ``pip install 'apache-airflow[qubole]'`` | Enable QDS (Qubole Data Service) support | ++---------------------+-----------------------------------------------------+-----------------------------------------------------+ +| salesforce | ``pip install 'apache-airflow[salesforce]'`` | Salesforce hook | ++---------------------+-----------------------------------------------------+-----------------------------------------------------+ +| sendgrid | ``pip install 'apache-airflow[sendgrid]'`` | Send email using sendgrid | ++---------------------+-----------------------------------------------------+-----------------------------------------------------+ +| segment | ``pip install 'apache-airflow[segment]'`` | Segment hooks and sensors | ++---------------------+-----------------------------------------------------+-----------------------------------------------------+ +| slack | ``pip install 'apache-airflow[slack]'`` | Slack hooks and operators | ++---------------------+-----------------------------------------------------+-----------------------------------------------------+ +| snowflake | ``pip install 'apache-airflow[snowflake]'`` | Snowflake hooks and operators | ++---------------------+-----------------------------------------------------+-----------------------------------------------------+ +| tableau | ``pip install 'apache-airflow[tableau]'`` | Tableau hooks and operators | ++---------------------+-----------------------------------------------------+-----------------------------------------------------+ +| telegram | ``pip install 'apache-airflow[telegram]'`` | Telegram hooks and operators | ++---------------------+-----------------------------------------------------+-----------------------------------------------------+ +| vertica | ``pip install 'apache-airflow[vertica]'`` | Vertica hook support as an Airflow backend | ++---------------------+-----------------------------------------------------+-----------------------------------------------------+ +| yandex | ``pip install 'apache-airflow[yandex]'`` | Yandex.cloud hooks and operators | ++---------------------+-----------------------------------------------------+-----------------------------------------------------+ +| zendesk | ``pip install 'apache-airflow[zendesk]'`` | Zendesk hooks | ++---------------------+-----------------------------------------------------+-----------------------------------------------------+ + + +Locally installed software extras +================================= Those are extras that add dependencies needed for integration with other software packages installed usually as part of the deployment of Airflow. 
-+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+ -| extra | install command | enables | Providers | -+=====================+=====================================================+====================================================================================+===========+ -| async | ``pip install 'apache-airflow[async]'`` | Async worker classes for Gunicorn | | -+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+ -| celery | ``pip install 'apache-airflow[celery]'`` | CeleryExecutor | * | -+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+ -| cncf.kubernetes | ``pip install 'apache-airflow[cncf.kubernetes]'`` | Kubernetes Executor and operator | * | -+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+ -| docker | ``pip install 'apache-airflow[docker]'`` | Docker hooks and operators | * | -+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+ -| elasticsearch | ``pip install 'apache-airflow[elasticsearch]'`` | Elasticsearch hooks and Log Handler | * | -+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+ -| exasol | ``pip install 'apache-airflow[exasol]'`` | Exasol hooks and operators | * | -+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+ -| jenkins | ``pip install 'apache-airflow[jenkins]'`` | Jenkins hooks and operators | * | -+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+ -| ldap | ``pip install 'apache-airflow[ldap]'`` | LDAP authentication for users | | -+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+ -| mongo | ``pip install 'apache-airflow[mongo]'`` | Mongo hooks and operators | * | -+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+ -| microsoft.mssql | ``pip install 'apache-airflow[microsoft.mssql]'`` | Microsoft SQL Server operators and hook. 
| * | -+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+ -| mysql | ``pip install 'apache-airflow[mysql]'`` | MySQL operators and hook | * | -+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+ -| odbc | ``pip install 'apache-airflow[odbc]'`` | ODBC data sources including MS SQL Server | * | -+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+ -| openfaas | ``pip install 'apache-airflow[openfaas]'`` | OpenFaaS hooks | * | -+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+ -| oracle | ``pip install 'apache-airflow[oracle]'`` | Oracle hooks and operators | * | -+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+ -| postgres | ``pip install 'apache-airflow[postgres]'`` | PostgreSQL operators and hook | * | -+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+ -| password | ``pip install 'apache-airflow[password]'`` | Password authentication for users | | -+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+ -| presto | ``pip install 'apache-airflow[presto]'`` | All Presto related operators & hooks | * | -+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+ -| rabbitmq | ``pip install 'apache-airflow[rabbitmq]'`` | RabbitMQ support as a Celery backend | | -+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+ -| redis | ``pip install 'apache-airflow[redis]'`` | Redis hooks and sensors | * | -+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+ -| samba | ``pip install 'apache-airflow[samba]'`` | Samba hooks and operators | * | -+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+ -| singularity | ``pip install 'apache-airflow[singularity]'`` | Singularity container operator | * | -+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+ -| statsd | ``pip install 'apache-airflow[statsd]'`` | Needed by StatsD metrics | | -+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+ -| tableau | ``pip install 'apache-airflow[tableau]'`` | Tableau visualization integration | | 
-+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+ -| virtualenv | ``pip install 'apache-airflow[virtualenv]'`` | Running python tasks in local virtualenv | | -+---------------------+-----------------------------------------------------+------------------------------------------------------------------------------------+-----------+ - - -**Other extras:** ++---------------------+-----------------------------------------------------+-------------------------------------------+ +| extra | install command | enables | ++=====================+=====================================================+===========================================+ +| docker | ``pip install 'apache-airflow[docker]'`` | Docker hooks and operators | ++---------------------+-----------------------------------------------------+-------------------------------------------+ +| elasticsearch | ``pip install 'apache-airflow[elasticsearch]'`` | Elasticsearch hooks and Log Handler | ++---------------------+-----------------------------------------------------+-------------------------------------------+ +| exasol | ``pip install 'apache-airflow[exasol]'`` | Exasol hooks and operators | ++---------------------+-----------------------------------------------------+-------------------------------------------+ +| jenkins | ``pip install 'apache-airflow[jenkins]'`` | Jenkins hooks and operators | ++---------------------+-----------------------------------------------------+-------------------------------------------+ +| mongo | ``pip install 'apache-airflow[mongo]'`` | Mongo hooks and operators | ++---------------------+-----------------------------------------------------+-------------------------------------------+ +| microsoft.mssql | ``pip install 'apache-airflow[microsoft.mssql]'`` | Microsoft SQL Server operators and hook. 
| ++---------------------+-----------------------------------------------------+-------------------------------------------+ +| mysql | ``pip install 'apache-airflow[mysql]'`` | MySQL operators and hook | ++---------------------+-----------------------------------------------------+-------------------------------------------+ +| neo4j | ``pip install 'apache-airflow[neo4j]'`` | Neo4j operators and hook | ++---------------------+-----------------------------------------------------+-------------------------------------------+ +| odbc | ``pip install 'apache-airflow[odbc]'`` | ODBC data sources including MS SQL Server | ++---------------------+-----------------------------------------------------+-------------------------------------------+ +| openfaas | ``pip install 'apache-airflow[openfaas]'`` | OpenFaaS hooks | ++---------------------+-----------------------------------------------------+-------------------------------------------+ +| oracle | ``pip install 'apache-airflow[oracle]'`` | Oracle hooks and operators | ++---------------------+-----------------------------------------------------+-------------------------------------------+ +| postgres | ``pip install 'apache-airflow[postgres]'`` | PostgreSQL operators and hook | ++---------------------+-----------------------------------------------------+-------------------------------------------+ +| presto | ``pip install 'apache-airflow[presto]'`` | All Presto related operators & hooks | ++---------------------+-----------------------------------------------------+-------------------------------------------+ +| redis | ``pip install 'apache-airflow[redis]'`` | Redis hooks and sensors | ++---------------------+-----------------------------------------------------+-------------------------------------------+ +| samba | ``pip install 'apache-airflow[samba]'`` | Samba hooks and operators | ++---------------------+-----------------------------------------------------+-------------------------------------------+ +| singularity | ``pip install 'apache-airflow[singularity]'`` | Singularity container operator | ++---------------------+-----------------------------------------------------+-------------------------------------------+ +| trino | ``pip install 'apache-airflow[trino]'`` | All Trino related operators & hooks | ++---------------------+-----------------------------------------------------+-------------------------------------------+ + + +Other extras +============ Those are extras that provide support for integration with external systems via some - usually - standard protocols. 
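+
+For instance, several of these extras can be combined in a single installation (a minimal sketch only:
+the extras and the version pin below are example values picked from this reference, not a recommendation).
+In practice you would typically also pass the ``--constraint`` file described in the installation docs:
+
+.. code-block:: bash
+
+    # Install Airflow 2.0.1 together with the ssh, sftp and http protocol extras.
+    # Single quotes keep shells such as zsh from interpreting the square brackets.
+    pip install 'apache-airflow[ssh,sftp,http]==2.0.1'
+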
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+--------------+ -| extra | install command | enables | Providers | Preinstalled | -+=====================+=====================================================+======================================================================+===========+==============+ -| cgroups | ``pip install 'apache-airflow[cgroups]'`` | Needed To use CgroupTaskRunner | | | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+--------------+ -| ftp | ``pip install 'apache-airflow[ftp]'`` | FTP hooks and operators | * | * | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+--------------+ -| grpc | ``pip install 'apache-airflow[grpc]'`` | Grpc hooks and operators | * | | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+--------------+ -| http | ``pip install 'apache-airflow[http]'`` | HTTP hooks, operators and sensors | * | * | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+--------------+ -| imap | ``pip install 'apache-airflow[imap]'`` | IMAP hooks and sensors | * | * | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+--------------+ -| jdbc | ``pip install 'apache-airflow[jdbc]'`` | JDBC hooks and operators | * | | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+--------------+ -| kerberos | ``pip install 'apache-airflow[kerberos]'`` | Kerberos integration for Kerberized services (Hadoop, Presto) | | | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+--------------+ -| papermill | ``pip install 'apache-airflow[papermill]'`` | Papermill hooks and operators | * | | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+--------------+ -| sftp | ``pip install 'apache-airflow[sftp]'`` | SFTP hooks, operators and sensors | * | | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+--------------+ -| sqlite | ``pip install 'apache-airflow[sqlite]'`` | SQLite hooks and operators | * | * | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+--------------+ -| ssh | ``pip install 'apache-airflow[ssh]'`` | SSH hooks and operators | * | | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+--------------+ -| microsoft.winrm | ``pip install 'apache-airflow[microsoft.winrm]'`` | WinRM hooks and operators | * | | 
-+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+--------------+ - -**Bundle extras:** - -Those are extras that install one ore more extras. - -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+ -| extra | install command | enables | Providers | -+=====================+=====================================================+======================================================================+===========+ -| all | ``pip install 'apache-airflow[all]'`` | All Airflow user facing features (no devel and doc requirements) | * | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+ -| all_dbs | ``pip install 'apache-airflow[all_dbs]'`` | All databases integrations | * | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+ -| devel | ``pip install 'apache-airflow[devel]'`` | Minimum dev tools requirements (without providers) | | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+ -| devel_hadoop | ``pip install 'apache-airflow[devel_hadoop]'`` | Same as ``devel`` + dependencies for developing the Hadoop stack | * | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+ -| devel_all | ``pip install 'apache-airflow[devel_all]'`` | Everything needed for development (``devel_hadoop`` + providers) | * | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+ -| devel_ci | ``pip install 'apache-airflow[devel_ci]'`` | All dependencies required for CI build. 
| * | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+ - -**Doc extras:** ++---------------------+-----------------------------------------------------+--------------------------------------+--------------+ +| extra | install command | enables | Preinstalled | ++=====================+=====================================================+======================================+==============+ +| ftp | ``pip install 'apache-airflow[ftp]'`` | FTP hooks and operators | * | ++---------------------+-----------------------------------------------------+--------------------------------------+--------------+ +| grpc | ``pip install 'apache-airflow[grpc]'`` | Grpc hooks and operators | | ++---------------------+-----------------------------------------------------+--------------------------------------+--------------+ +| http | ``pip install 'apache-airflow[http]'`` | HTTP hooks, operators and sensors | * | ++---------------------+-----------------------------------------------------+--------------------------------------+--------------+ +| imap | ``pip install 'apache-airflow[imap]'`` | IMAP hooks and sensors | * | ++---------------------+-----------------------------------------------------+--------------------------------------+--------------+ +| jdbc | ``pip install 'apache-airflow[jdbc]'`` | JDBC hooks and operators | | ++---------------------+-----------------------------------------------------+--------------------------------------+--------------+ +| papermill | ``pip install 'apache-airflow[papermill]'`` | Papermill hooks and operators | | ++---------------------+-----------------------------------------------------+--------------------------------------+--------------+ +| sftp | ``pip install 'apache-airflow[sftp]'`` | SFTP hooks, operators and sensors | | ++---------------------+-----------------------------------------------------+--------------------------------------+--------------+ +| sqlite | ``pip install 'apache-airflow[sqlite]'`` | SQLite hooks and operators | * | ++---------------------+-----------------------------------------------------+--------------------------------------+--------------+ +| ssh | ``pip install 'apache-airflow[ssh]'`` | SSH hooks and operators | | ++---------------------+-----------------------------------------------------+--------------------------------------+--------------+ +| microsoft.winrm | ``pip install 'apache-airflow[microsoft.winrm]'`` | WinRM hooks and operators | | ++---------------------+-----------------------------------------------------+--------------------------------------+--------------+ + +Bundle extras +------------- + +Those are extras that install one or more extras as a bundle. 
+ ++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+ +| extra | install command | enables | ++=====================+=====================================================+======================================================================+ +| all | ``pip install 'apache-airflow[all]'`` | All Airflow user facing features (no devel and doc requirements) | ++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+ +| all_dbs | ``pip install 'apache-airflow[all_dbs]'`` | All databases integrations | ++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+ +| devel | ``pip install 'apache-airflow[devel]'`` | Minimum dev tools requirements (without providers) | ++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+ +| devel_hadoop | ``pip install 'apache-airflow[devel_hadoop]'`` | Same as ``devel`` + dependencies for developing the Hadoop stack | ++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+ +| devel_all | ``pip install 'apache-airflow[devel_all]'`` | Everything needed for development (``devel_hadoop`` + providers) | ++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+ +| devel_ci | ``pip install 'apache-airflow[devel_ci]'`` | All dependencies required for CI build. | ++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+ + +Doc extras +---------- This is the extra that is needed to generate documentation for Airflow. This is used at development time only. -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+ -| doc | ``pip install 'apache-airflow[doc]'`` | Packages needed to build docs (included in ``devel``) | | -+---------------------+-----------------------------------------------------+----------------------------------------------------------------------+-----------+ ++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+ +| extra | install command | enables | ++=====================+=====================================================+======================================================================+ +| doc | ``pip install 'apache-airflow[doc]'`` | Packages needed to build docs (included in ``devel``) | ++---------------------+-----------------------------------------------------+----------------------------------------------------------------------+ -**Deprecated 1.10 extras:** +Deprecated 1.10 extras +---------------------- Those are the extras that have been deprecated in 2.0 and will be removed in Airflow 3.0.0. They were all replaced by new extras, which have names consistent with the names of provider packages. 
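+
+As a before/after sketch (the ``gcp`` to ``google`` rename is one such replacement; the version pin is
+just an example value), a deprecated extra and its provider-consistent successor are installed like this:
+
+.. code-block:: bash
+
+    # Deprecated 1.10-style extra: still accepted in 2.0, removed in Airflow 3.0.0
+    pip install 'apache-airflow[gcp]==2.0.1'
+
+    # Replacement extra, named consistently with the apache-airflow-providers-google package
+    pip install 'apache-airflow[google]==2.0.1'
+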
diff --git a/docs/apache-airflow/howto/operator/bash.rst b/docs/apache-airflow/howto/operator/bash.rst index c8a923f963225..3d2195f916bde 100644 --- a/docs/apache-airflow/howto/operator/bash.rst +++ b/docs/apache-airflow/howto/operator/bash.rst @@ -27,6 +27,7 @@ commands in a `Bash `__ shell. .. exampleinclude:: /../../airflow/example_dags/example_bash_operator.py :language: python + :dedent: 4 :start-after: [START howto_operator_bash] :end-before: [END howto_operator_bash] @@ -38,6 +39,7 @@ You can use :ref:`Jinja templates ` to parameterize the .. exampleinclude:: /../../airflow/example_dags/example_bash_operator.py :language: python + :dedent: 4 :start-after: [START howto_operator_bash_template] :end-before: [END howto_operator_bash_template] diff --git a/docs/apache-airflow/howto/operator/external_task_sensor.rst b/docs/apache-airflow/howto/operator/external_task_sensor.rst index eec8074cb4b4d..420bd1319cb06 100644 --- a/docs/apache-airflow/howto/operator/external_task_sensor.rst +++ b/docs/apache-airflow/howto/operator/external_task_sensor.rst @@ -46,6 +46,7 @@ via ``allowed_states`` and ``failed_states`` parameters. .. exampleinclude:: /../../airflow/example_dags/example_external_task_marker_dag.py :language: python + :dedent: 4 :start-after: [START howto_operator_external_task_sensor] :end-before: [END howto_operator_external_task_sensor] @@ -60,5 +61,6 @@ user clears ``parent_task``. .. exampleinclude:: /../../airflow/example_dags/example_external_task_marker_dag.py :language: python + :dedent: 4 :start-after: [START howto_operator_external_task_marker] :end-before: [END howto_operator_external_task_marker] diff --git a/docs/apache-airflow/howto/operator/python.rst b/docs/apache-airflow/howto/operator/python.rst index 7f4d2b8163d20..4a59df61aa196 100644 --- a/docs/apache-airflow/howto/operator/python.rst +++ b/docs/apache-airflow/howto/operator/python.rst @@ -27,6 +27,7 @@ Python callables. .. exampleinclude:: /../../airflow/example_dags/example_python_operator.py :language: python + :dedent: 4 :start-after: [START howto_operator_python] :end-before: [END howto_operator_python] @@ -38,6 +39,7 @@ to the Python callable. .. exampleinclude:: /../../airflow/example_dags/example_python_operator.py :language: python + :dedent: 4 :start-after: [START howto_operator_python_kwargs] :end-before: [END howto_operator_python_kwargs] @@ -63,6 +65,7 @@ Python callables inside a new Python virtual environment. .. 
exampleinclude:: /../../airflow/example_dags/example_python_operator.py :language: python + :dedent: 4 :start-after: [START howto_operator_python_venv] :end-before: [END howto_operator_python_venv] diff --git a/docs/apache-airflow/howto/run-behind-proxy.rst b/docs/apache-airflow/howto/run-behind-proxy.rst index eea0eb7945b4c..2901ed01fe53d 100644 --- a/docs/apache-airflow/howto/run-behind-proxy.rst +++ b/docs/apache-airflow/howto/run-behind-proxy.rst @@ -47,7 +47,7 @@ Your reverse proxy (ex: nginx) should be configured as follows: location /myorg/airflow/ { proxy_pass http://localhost:8080; - proxy_set_header Host $host; + proxy_set_header Host $http_host; proxy_redirect off; proxy_http_version 1.1; proxy_set_header Upgrade $http_upgrade; @@ -64,7 +64,7 @@ Your reverse proxy (ex: nginx) should be configured as follows: location /myorg/flower/ { rewrite ^/myorg/flower/(.*)$ /$1 break; # remove prefix from http header proxy_pass http://localhost:5555; - proxy_set_header Host $host; + proxy_set_header Host $http_host; proxy_redirect off; proxy_http_version 1.1; proxy_set_header Upgrade $http_upgrade; diff --git a/docs/apache-airflow/howto/set-config.rst b/docs/apache-airflow/howto/set-config.rst index f1aac87dc307f..d36a96f741e2d 100644 --- a/docs/apache-airflow/howto/set-config.rst +++ b/docs/apache-airflow/howto/set-config.rst @@ -85,6 +85,9 @@ For example: export AIRFLOW__CORE__SQL_ALCHEMY_CONN_SECRET=sql_alchemy_conn +.. note:: + The config options must follow the config prefix naming convention defined within the secrets backend. This means that ``sql_alchemy_conn`` is not defined with a connection prefix, but with the config prefix. For example, it should be named ``airflow/config/sql_alchemy_conn``. + The idea behind this is to not store passwords on boxes in plain text files. The universal order of precedence for all configuration options is as follows: diff --git a/docs/apache-airflow/howto/set-up-database.rst b/docs/apache-airflow/howto/set-up-database.rst index b13fdc4aff045..58e812309d07d 100644 --- a/docs/apache-airflow/howto/set-up-database.rst +++ b/docs/apache-airflow/howto/set-up-database.rst @@ -28,7 +28,7 @@ Choosing database backend ------------------------- If you want to take a real test drive of Airflow, you should consider setting up a database backend to **MySQL** and **PostgreSQL**. -By default, Airflow uses **SQLite**, which is not intended for development purposes only. +By default, Airflow uses **SQLite**, which is intended for development purposes only. Airflow supports the following database engine versions, so make sure you know which version you have. Old versions may not support all SQL statements. @@ -59,6 +59,51 @@ the example below. The exact format description is described in the SQLAlchemy documentation, see `Database Urls `__. We will also show you some examples below. +Setting up a SQLite Database +---------------------------- + +The SQLite database can be used to run Airflow for development purposes as it does not require any database server +(the database is stored in a local file). There are a few limitations of using the SQLite database (for example +it only works with the Sequential Executor) and it should NEVER be used for production. + +There is a minimum version of sqlite3 required to run Airflow 2.0+: version 3.15.0. Some of the +older systems have an earlier version of sqlite installed by default, and for those systems you need to manually +upgrade SQLite to a version newer than 3.15.0. 
Note that this is not a ``python library`` version, it's the +SQLite system-level application that needs to be upgraded. There are different ways in which SQLite might be +installed; you can find some information about that at the `official website of SQLite +`_ and in the documentation specific to the distribution of your Operating +System. + +**Troubleshooting** + +Sometimes, even if you upgrade SQLite to a higher version and your local Python reports a higher version, +the Python interpreter used by Airflow might still use the older version available in the +``LD_LIBRARY_PATH`` set for the Python interpreter that is used to start Airflow. + +You can verify which version is used by the interpreter by running this check: + +.. code-block:: bash + + root@b8a8e73caa2c:/opt/airflow# python + Python 3.6.12 (default, Nov 25 2020, 03:59:00) + [GCC 8.3.0] on linux + Type "help", "copyright", "credits" or "license" for more information. + >>> import sqlite3 + >>> sqlite3.sqlite_version + '3.27.2' + >>> + +But be aware that setting environment variables for your Airflow deployment might change which SQLite +library is found first, so you might want to make sure that the "high-enough" version of SQLite is the only +version installed in your system. + +An example URI for the SQLite database: + +.. code-block:: text + + sqlite:////home/airflow/airflow.db + + Setting up a MySQL Database --------------------------- @@ -67,10 +112,15 @@ In the example below, a database ``airflow_db`` and user with username ``airflo .. code-block:: sql - CREATE DATABASE airflow_db CHARACTER SET utf8 COLLATE utf8_unicode_ci; + CREATE DATABASE airflow_db CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci; CREATE USER 'airflow_user' IDENTIFIED BY 'airflow_pass'; GRANT ALL PRIVILEGES ON airflow_db.* TO 'airflow_user'; + +.. note:: + + The database must use a UTF-8 character set + We rely on more strict ANSI SQL settings for MySQL in order to have sane defaults. Make sure to have specified ``explicit_defaults_for_timestamp=1`` option under ``[mysqld]`` section in your ``my.cnf`` file. You can also activate these options with the ``--explicit-defaults-for-timestamp`` switch passed to ``mysqld`` executable @@ -91,6 +141,8 @@ without any cert options provided. However if you want to use other drivers visit the `MySQL Dialect `__ in SQLAlchemy documentation for more information regarding download and setup of the SqlAlchemy connection. +In addition, you should also pay particular attention to MySQL's encoding. Although the ``utf8mb4`` character set is more and more popular for MySQL (in fact, ``utf8mb4`` became the default character set in MySQL 8.0), using the ``utf8mb4`` encoding requires an additional setting in Airflow 2+ (see more details in `#7570 `__). If you use ``utf8mb4`` as the character set, you should also set ``sql_engine_collation_for_ids=utf8mb3_general_ci``. + Setting up a PostgreSQL Database -------------------------------- @@ -100,9 +152,13 @@ In the example below, a database ``airflow_db`` and user with username ``airflo .. code-block:: sql CREATE DATABASE airflow_db; - CREATE USER airflow_user WITH PASSWORD 'airflow_user'; + CREATE USER airflow_user WITH PASSWORD 'airflow_pass'; GRANT ALL PRIVILEGES ON DATABASE airflow_db TO airflow_user; +.. note:: + + The database must use a UTF-8 character set + You may need to update your Postgres ``pg_hba.conf`` to add the ``airflow`` user to the database access control list; and to reload the database configuration to load your change. 
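+
+With the database and user created as above, the matching metadata database connection can be sketched as
+an environment variable (the ``postgresql+psycopg2`` driver prefix, host and port here are assumptions;
+adjust them to your deployment):
+
+.. code-block:: bash
+
+    # Point Airflow's metadata database at the PostgreSQL database created above
+    export AIRFLOW__CORE__SQL_ALCHEMY_CONN="postgresql+psycopg2://airflow_user:airflow_pass@localhost:5432/airflow_db"
+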
See diff --git a/docs/apache-airflow/img/graph.png b/docs/apache-airflow/img/graph.png index d88395962ff4e..23279f856c778 100644 Binary files a/docs/apache-airflow/img/graph.png and b/docs/apache-airflow/img/graph.png differ diff --git a/docs/apache-airflow/installation.rst b/docs/apache-airflow/installation.rst index 04f6042155bf4..a348334be83c0 100644 --- a/docs/apache-airflow/installation.rst +++ b/docs/apache-airflow/installation.rst @@ -27,7 +27,7 @@ installation with other tools as well. .. note:: - Airflow is also distributed as a Docker image (OCI Image). For more information, see: :ref:`docker_image` + Airflow is also distributed as a Docker image (OCI Image). Consider using it to guarantee that software will always run the same no matter where it is deployed. For more information, see: :doc:`docker-stack:index`. Prerequisites ''''''''''''' @@ -42,7 +42,7 @@ Airflow is tested with: * MySQL: 5.7, 8 * SQLite: 3.15.0+ -* Kubernetes: 1.16.9, 1.17.5, 1.18.6 +* Kubernetes: 1.18.15, 1.19.7, 1.20.2 **Note:** MySQL 5.x versions are unable to or have limitations with running multiple schedulers -- please see: :doc:`/scheduler`. MariaDB is not tested/recommended. @@ -63,7 +63,7 @@ issues from ``pip`` 20.3.0 release have been fixed in 20.3.3). In order to insta either downgrade pip to version 20.2.4 ``pip install --upgrade pip==20.2.4`` or, in case you use Pip 20.3, you need to add option ``--use-deprecated legacy-resolver`` to your pip install command. -While they are some successes with using other tools like `poetry `_ or +While there are some successes with using other tools like `poetry `_ or `pip-tools `_, they do not share the same workflow as ``pip`` - especially when it comes to constraint vs. requirements management. Installing via ``Poetry`` or ``pip-tools`` is not currently supported. If you wish to install airflow @@ -81,8 +81,8 @@ environment. For instance, if you don't need connectivity with Postgres, you won't have to go through the trouble of installing the ``postgres-devel`` yum package, or whatever equivalent applies on the distribution you are using. -Most of the extra dependencies are linked to a corresponding providers package. For example "amazon" extra -has a corresponding ``apache-airflow-providers-amazon`` providers package to be installed. When you install +Most of the extra dependencies are linked to a corresponding provider package. For example "amazon" extra +has a corresponding ``apache-airflow-providers-amazon`` provider package to be installed. When you install Airflow with such extras, the necessary provider packages are installed automatically (latest versions from PyPI for those packages). However you can freely upgrade and install provider packages independently from the main Airflow installation. @@ -96,7 +96,7 @@ Provider packages Unlike Apache Airflow 1.10, the Airflow 2.0 is delivered in multiple, separate, but connected packages. The core of Airflow scheduling system is delivered as ``apache-airflow`` package and there are around -60 providers packages which can be installed separately as so called ``Airflow Provider packages``. +60 provider packages which can be installed separately as so-called ``Airflow Provider packages``. The default Airflow installation doesn't have many integrations and you have to install them yourself. You can even develop and install your own providers for Airflow. 
For more information, @@ -164,9 +164,9 @@ In order to have repeatable installation, starting from **Airflow 1.10.10** and ``constraints-master``, ``constraints-2-0`` and ``constraints-1-10`` orphan branches and then we create a tag for each released version e.g. ``constraints-2.0.1``. This way, we keep a tested and working set of dependencies. -Those "known-to-be-working" constraints are per major/minor python version. You can use them as constraint +Those "known-to-be-working" constraints are per major/minor Python version. You can use them as constraint files when installing Airflow from PyPI. Note that you have to specify correct Airflow version -and python versions in the URL. +and Python versions in the URL. You can create the URL to the file substituting the variables in the template below. @@ -179,35 +179,90 @@ where: - ``AIRFLOW_VERSION`` - Airflow version (e.g. ``2.0.1``) or ``master``, ``2-0``, ``1-10`` for latest development version - ``PYTHON_VERSION`` Python version e.g. ``3.8``, ``3.7`` +There is also a no-providers constraint file, which contains just the constraints required to install Airflow core. This allows +you to install and upgrade Airflow separately and independently from providers. + +You can create the URL to the file substituting the variables in the template below. + +.. code-block:: + + https://raw.githubusercontent.com/apache/airflow/constraints-${AIRFLOW_VERSION}/constraints-no-providers-${PYTHON_VERSION}.txt + Installation script ''''''''''''''''''' -In order to simplify the installation, we have prepared a script that will select `the constraints file `__ compatible with your Python version +In order to simplify the installation, we have prepared examples that will select +`the constraints file `__ compatible with your Python version. -**Plain installation:** +**Installing Airflow with extras and providers** -If you don't need to install any extra extra, you can use the command set below: +If you need to install :ref:`extra dependencies of airflow `, +you can use the script below to make the installation a one-liner (the example below installs +the postgres and google providers, as well as the ``async`` extra). .. code-block:: bash AIRFLOW_VERSION=2.0.1 PYTHON_VERSION="$(python --version | cut -d " " -f 2 | cut -d "." -f 1-2)" - # For example: 3.6 CONSTRAINT_URL="https://raw.githubusercontent.com/apache/airflow/constraints-${AIRFLOW_VERSION}/constraints-${PYTHON_VERSION}.txt" - # For example: https://raw.githubusercontent.com/apache/airflow/constraints-2.0.1/constraints-3.6.txt - pip install "apache-airflow==${AIRFLOW_VERSION}" --constraint "${CONSTRAINT_URL}" + pip install "apache-airflow[async,postgres,google]==${AIRFLOW_VERSION}" --constraint "${CONSTRAINT_URL}" -**Installing with extras** +Note that it will install the versions of providers that were available at the moment this version of Airflow +was prepared. You need to follow the next steps if you want to upgrade provider packages that were +released afterwards. + + +**Upgrading Airflow with providers** + +You can also upgrade Airflow together with extras (the providers available at the time of the release of the Airflow +version being installed). -If you need to install :ref:`extra dependencies of airflow `, -you can use the script below (the example below installs postgres and google extras. .. code-block:: bash AIRFLOW_VERSION=2.0.1 PYTHON_VERSION="$(python --version | cut -d " " -f 2 | cut -d "."
-f 1-2)" CONSTRAINT_URL="https://raw.githubusercontent.com/apache/airflow/constraints-${AIRFLOW_VERSION}/constraints-${PYTHON_VERSION}.txt" - pip install "apache-airflow[postgres,google]==${AIRFLOW_VERSION}" --constraint "${CONSTRAINT_URL}" + pip install --upgrade "apache-airflow[postgres,google]==${AIRFLOW_VERSION}" --constraint "${CONSTRAINT_URL}" + +**Installation and upgrading of Airflow providers separately** + +You can manually install all the providers you need. You can continue using the "providers" constraint files +but the 'versioned' airflow constraints installs only the versions of providers that were available in PyPI at +the time of preparing of the airflow version. However, usually you can use "master" version of the providers +to install latest version of providers. Usually the providers work with most versions of Airflow, if there +will be any incompatibilities, it will be captured as package dependencies. + +.. code-block:: bash + + PYTHON_VERSION="$(python --version | cut -d " " -f 2 | cut -d "." -f 1-2)" + # For example: 3.6 + CONSTRAINT_URL="https://raw.githubusercontent.com/apache/airflow/constraints-master/constraints-${PYTHON_VERSION}.txt" + pip install "apache-airflow-providers-google" --constraint "${CONSTRAINT_URL}" + +You can also upgrade the providers to latest versions (you need to use master version of constraints for that): + +.. code-block:: bash + + PYTHON_VERSION="$(python --version | cut -d " " -f 2 | cut -d "." -f 1-2)" + # For example: 3.6 + CONSTRAINT_URL="https://raw.githubusercontent.com/apache/airflow/constraints-master/constraints-${PYTHON_VERSION}.txt" + pip install "apache-airflow-providers-google" --upgrade --constraint "${CONSTRAINT_URL}" + + +**Installation and upgrade of Airflow core:** + +If you don't want to install any extra providers, initially you can use the command set below. + +.. code-block:: bash + + AIRFLOW_VERSION=2.0.1 + PYTHON_VERSION="$(python --version | cut -d " " -f 2 | cut -d "." -f 1-2)" + # For example: 3.6 + CONSTRAINT_URL="https://raw.githubusercontent.com/apache/airflow/constraints-${AIRFLOW_VERSION}/constraints-no-providers-${PYTHON_VERSION}.txt" + # For example: https://raw.githubusercontent.com/apache/airflow/constraints-no-providers-2.0.1/constraints-3.6.txt + pip install "apache-airflow==${AIRFLOW_VERSION}" --constraint "${CONSTRAINT_URL}" + Python versions support ''''''''''''''''''''''' diff --git a/docs/apache-airflow/lineage.rst b/docs/apache-airflow/lineage.rst index a29f042234e9c..362d3e607d796 100644 --- a/docs/apache-airflow/lineage.rst +++ b/docs/apache-airflow/lineage.rst @@ -95,3 +95,24 @@ has outlets defined (e.g. by using ``add_outlets(..)`` or has out of the box sup f_in > run_this | (run_this_last > outlets) .. _precedence: https://docs.python.org/3/reference/expressions.html + + +Lineage Backend +--------------- + +It's possible to push the lineage metrics to a custom backend by providing an instance of a LinageBackend in the config: + +.. code-block:: ini + + [lineage] + backend = my.lineage.CustomBackend + +The backend should inherit from ``airflow.lineage.LineageBackend``. + +.. 
code-block:: python + + from airflow.lineage.backend import LineageBackend + + class ExampleBackend(LineageBackend): + def send_lineage(self, operator, inlets=None, outlets=None, context=None): + # Send the info to some external service + ... diff --git a/docs/apache-airflow/macros-ref.rst b/docs/apache-airflow/macros-ref.rst index 6ef12c7cbabee..832dad74109a3 100644 --- a/docs/apache-airflow/macros-ref.rst +++ b/docs/apache-airflow/macros-ref.rst @@ -62,7 +62,7 @@ Variable Description ``{{ ti }}`` same as ``{{ task_instance }}`` ``{{ params }}`` a reference to the user-defined params dictionary which can be overridden by the dictionary passed through ``trigger_dag -c`` if you enabled - ``dag_run_conf_overrides_params`` in ``airflow.cfg`` + ``dag_run_conf_overrides_params`` in ``airflow.cfg`` ``{{ var.value.my_var }}`` global defined variables represented as a dictionary ``{{ var.json.my_var.path }}`` global defined variables represented as a dictionary with deserialized JSON object, append the path to the diff --git a/docs/apache-airflow/plugins.rst b/docs/apache-airflow/plugins.rst index 80708b9154a05..687270a1e3b65 100644 --- a/docs/apache-airflow/plugins.rst +++ b/docs/apache-airflow/plugins.rst @@ -115,7 +115,7 @@ looks like: flask_blueprints = [] # A list of dictionaries containing FlaskAppBuilder BaseView object and some metadata. See example below appbuilder_views = [] - # A list of dictionaries containing FlaskAppBuilder BaseView object and some metadata. See example below + # A list of dictionaries containing kwargs for FlaskAppBuilder add_link. See example below appbuilder_menu_items = [] # A callback to perform actions when airflow starts and the plugin is loaded. # NOTE: Ensure your plugin has *args, and **kwargs in the method definition @@ -210,11 +210,16 @@ definitions in Airflow. "view": v_appbuilder_nomenu_view } - # Creating a flask appbuilder Menu Item - appbuilder_mitem = {"name": "Google", - "category": "Search", - "category_icon": "fa-th", - "href": "https://www.google.com"} + # Creating flask appbuilder Menu Items + appbuilder_mitem = { + "name": "Google", + "href": "https://www.google.com", + "category": "Search", + } + appbuilder_mitem_toplevel = { + "name": "Apache", + "href": "https://www.apache.org/", + } # A global operator extra link that redirect you to # task logs stored in S3 @@ -247,7 +252,7 @@ definitions in Airflow. macros = [plugin_macro] flask_blueprints = [bp] appbuilder_views = [v_appbuilder_package, v_appbuilder_nomenu_package] - appbuilder_menu_items = [appbuilder_mitem] + appbuilder_menu_items = [appbuilder_mitem, appbuilder_mitem_toplevel] global_operator_extra_links = [GoogleLink(),] operator_extra_links = [S3LogLink(), ] diff --git a/docs/apache-airflow/production-deployment.rst b/docs/apache-airflow/production-deployment.rst index e737fc5e39a48..ecc6077d81ea5 100644 --- a/docs/apache-airflow/production-deployment.rst +++ b/docs/apache-airflow/production-deployment.rst @@ -56,9 +56,9 @@ Once that is done, you can run - Multi-Node Cluster ================== -Airflow uses :class:`~airflow.executors.sequential_executor.SequentialExecutor` by default. However, by it +Airflow uses :class:`~airflow.executors.sequential_executor.SequentialExecutor` by default. However, by its nature, the user is limited to executing at most one task at a time. ``Sequential Executor`` also pauses -the scheduler when it runs a task, hence not recommended in a production setup. 
You should use the +the scheduler when it runs a task, hence it is not recommended in a production setup. You should use the :class:`~airflow.executors.local_executor.LocalExecutor` for a single machine. For a multi-node setup, you should use the :doc:`Kubernetes executor <../executor/kubernetes>` or the :doc:`Celery executor <../executor/celery>`. @@ -111,854 +111,14 @@ Airflow users occasionally report instances of the scheduler hanging without a t * `Scheduler gets stuck without a trace `_ * `Scheduler stopping frequently `_ -Strategies for mitigation: - -* When running on kubernetes, use a ``livenessProbe`` on the scheduler deployment to fail if the scheduler - has not heartbeat in a while. - `Example: `_. +To mitigate these issues, make sure you have a :doc:`health check ` set up that will detect when your scheduler has not heartbeat in a while. .. _docker_image: Production Container Images =========================== -Production-ready reference Image --------------------------------- - -For the ease of deployment in production, the community releases a production-ready reference container -image. - -The docker image provided (as convenience binary package) in the -`Apache Airflow DockerHub `_ is a bare image -that has a few external dependencies and extras installed.. - -The Apache Airflow image provided as convenience package is optimized for size, so -it provides just a bare minimal set of the extras and dependencies installed and in most cases -you want to either extend or customize the image. You can see all possible extras in -:doc:`extra-packages-ref`. The set of extras used in Airflow Production image are available in the -`Dockerfile `_. - -The production images are build in DockerHub from released version and release candidates. There -are also images published from branches but they are used mainly for development and testing purpose. -See `Airflow Git Branching `_ -for details. - - -Customizing or extending the Production Image ---------------------------------------------- - -Before you dive-deeply in the way how the Airflow Image is build, named and why we are doing it the -way we do, you might want to know very quickly how you can extend or customize the existing image -for Apache Airflow. This chapter gives you a short answer to those questions. - -Airflow Summit 2020's `Production Docker Image `_ talk provides more -details about the context, architecture and customization/extension methods for the Production Image. - -Extending the image -................... - -Extending the image is easiest if you just need to add some dependencies that do not require -compiling. The compilation framework of Linux (so called ``build-essential``) is pretty big, and -for the production images, size is really important factor to optimize for, so our Production Image -does not contain ``build-essential``. If you need compiler like gcc or g++ or make/cmake etc. - those -are not found in the image and it is recommended that you follow the "customize" route instead. - -How to extend the image - it is something you are most likely familiar with - simply -build a new image using Dockerfile's ``FROM`` directive and add whatever you need. Then you can add your -Debian dependencies with ``apt`` or PyPI dependencies with ``pip install`` or any other stuff you need. - -You should be aware, about a few things: - -* The production image of airflow uses "airflow" user, so if you want to add some of the tools - as ``root`` user, you need to switch to it with ``USER`` directive of the Dockerfile. 
Also you - should remember about following the - `best practises of Dockerfiles `_ - to make sure your image is lean and small. - -.. code-block:: dockerfile - - FROM apache/airflow:2.0.0 - USER root - RUN apt-get update \ - && apt-get install -y --no-install-recommends \ - my-awesome-apt-dependency-to-add \ - && apt-get autoremove -yqq --purge \ - && apt-get clean \ - && rm -rf /var/lib/apt/lists/* - USER airflow - - -* PyPI dependencies in Apache Airflow are installed in the user library, of the "airflow" user, so - you need to install them with the ``--user`` flag and WITHOUT switching to airflow user. Note also - that using --no-cache-dir is a good idea that can help to make your image smaller. - -.. code-block:: dockerfile - - FROM apache/airflow:2.0.0 - RUN pip install --no-cache-dir --user my-awesome-pip-dependency-to-add - - -* If your apt, or PyPI dependencies require some of the build-essentials, then your best choice is - to follow the "Customize the image" route. However it requires to checkout sources of Apache Airflow, - so you might still want to choose to add build essentials to your image, even if your image will - be significantly bigger. - -.. code-block:: dockerfile - - FROM apache/airflow:2.0.0 - USER root - RUN apt-get update \ - && apt-get install -y --no-install-recommends \ - build-essential my-awesome-apt-dependency-to-add \ - && apt-get autoremove -yqq --purge \ - && apt-get clean \ - && rm -rf /var/lib/apt/lists/* - USER airflow - RUN pip install --no-cache-dir --user my-awesome-pip-dependency-to-add - - -* You can also embed your dags in the image by simply adding them with COPY directive of Airflow. - The DAGs in production image are in /opt/airflow/dags folder. - -Customizing the image -..................... - -Customizing the image is an alternative way of adding your own dependencies to the image - better -suited to prepare optimized production images. - -The advantage of this method is that it produces optimized image even if you need some compile-time -dependencies that are not needed in the final image. You need to use Airflow Sources to build such images -from the `official distribution folder of Apache Airflow `_ for the -released versions, or checked out from the GitHub project if you happen to do it from git sources. - -The easiest way to build the image image is to use ``breeze`` script, but you can also build such customized -image by running appropriately crafted docker build in which you specify all the ``build-args`` -that you need to add to customize it. You can read about all the args and ways you can build the image -in the `<#production-image-build-arguments>`_ chapter below. - -Here just a few examples are presented which should give you general understanding of what you can customize. - -This builds the production image in version 3.7 with additional airflow extras from 2.0.0 PyPI package and -additional apt dev and runtime dependencies. - -.. code-block:: bash - - docker build . 
\ - --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \ - --build-arg PYTHON_MAJOR_MINOR_VERSION=3.7 \ - --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow" \ - --build-arg AIRFLOW_VERSION="2.0.0" \ - --build-arg AIRFLOW_INSTALL_VERSION="==2.0.0" \ - --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-2-0" \ - --build-arg AIRFLOW_SOURCES_FROM="empty" \ - --build-arg AIRFLOW_SOURCES_TO="/empty" \ - --build-arg ADDITIONAL_AIRFLOW_EXTRAS="jdbc" \ - --build-arg ADDITIONAL_PYTHON_DEPS="pandas" \ - --build-arg ADDITIONAL_DEV_APT_DEPS="gcc g++" \ - --build-arg ADDITIONAL_RUNTIME_APT_DEPS="default-jre-headless" \ - --tag my-image - - -the same image can be built using ``breeze`` (it supports auto-completion of the options): - -.. code-block:: bash - - ./breeze build-image \ - --production-image --python 3.7 --install-airflow-version=2.0.0 \ - --additional-extras=jdbc --additional-python-deps="pandas" \ - --additional-dev-apt-deps="gcc g++" --additional-runtime-apt-deps="default-jre-headless" - - -You can customize more aspects of the image - such as additional commands executed before apt dependencies -are installed, or adding extra sources to install your dependencies from. You can see all the arguments -described below but here is an example of rather complex command to customize the image -based on example in `this comment `_: - -.. code-block:: bash - - docker build . -f Dockerfile \ - --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \ - --build-arg PYTHON_MAJOR_MINOR_VERSION=3.7 \ - --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow" \ - --build-arg AIRFLOW_VERSION="2.0.0" \ - --build-arg AIRFLOW_INSTALL_VERSION="==2.0.0" \ - --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-2-0" \ - --build-arg AIRFLOW_SOURCES_FROM="empty" \ - --build-arg AIRFLOW_SOURCES_TO="/empty" \ - --build-arg ADDITIONAL_AIRFLOW_EXTRAS="slack" \ - --build-arg ADDITIONAL_PYTHON_DEPS="apache-airflow-backport-providers-odbc \ - apache-airflow-backport-providers-odbc \ - azure-storage-blob \ - sshtunnel \ - google-api-python-client \ - oauth2client \ - beautifulsoup4 \ - dateparser \ - rocketchat_API \ - typeform" \ - --build-arg ADDITIONAL_DEV_APT_DEPS="msodbcsql17 unixodbc-dev g++" \ - --build-arg ADDITIONAL_DEV_APT_COMMAND="curl https://packages.microsoft.com/keys/microsoft.asc | \ - apt-key add --no-tty - && \ - curl https://packages.microsoft.com/config/debian/10/prod.list > /etc/apt/sources.list.d/mssql-release.list" \ - --build-arg ADDITIONAL_DEV_ENV_VARS="ACCEPT_EULA=Y" \ - --build-arg ADDITIONAL_RUNTIME_APT_COMMAND="curl https://packages.microsoft.com/keys/microsoft.asc | \ - apt-key add --no-tty - && \ - curl https://packages.microsoft.com/config/debian/10/prod.list > /etc/apt/sources.list.d/mssql-release.list" \ - --build-arg ADDITIONAL_RUNTIME_APT_DEPS="msodbcsql17 unixodbc git procps vim" \ - --build-arg ADDITIONAL_RUNTIME_ENV_VARS="ACCEPT_EULA=Y" \ - --tag my-image - -Customizing images in high security restricted environments -........................................................... - -You can also make sure your image is only build using local constraint file and locally downloaded -wheel files. This is often useful in Enterprise environments where the binary files are verified and -vetted by the security teams. - -This builds below builds the production image in version 3.7 with packages and constraints used from the local -``docker-context-files`` rather than installed from PyPI or GitHub. 
It also disables MySQL client -installation as it is using external installation method. - -Note that as a prerequisite - you need to have downloaded wheel files. In the example below we -first download such constraint file locally and then use ``pip download`` to get the .whl files needed -but in most likely scenario, those wheel files should be copied from an internal repository of such .whl -files. Note that ``AIRFLOW_INSTALL_VERSION`` is only there for reference, the apache airflow .whl file -in the right version is part of the .whl files downloaded. - -Note that 'pip download' will only works on Linux host as some of the packages need to be compiled from -sources and you cannot install them providing ``--platform`` switch. They also need to be downloaded using -the same python version as the target image. - -The ``pip download`` might happen in a separate environment. The files can be committed to a separate -binary repository and vetted/verified by the security team and used subsequently to build images -of Airflow when needed on an air-gaped system. - -Preparing the constraint files and wheel files: - -.. code-block:: bash - - rm docker-context-files/*.whl docker-context-files/*.txt - - curl -Lo "docker-context-files/constraints-2-0.txt" \ - https://raw.githubusercontent.com/apache/airflow/constraints-2-0/constraints-3.7.txt - - pip download --dest docker-context-files \ - --constraint docker-context-files/constraints-2-0.txt \ - apache-airflow[async,aws,azure,celery,dask,elasticsearch,gcp,kubernetes,mysql,postgres,redis,slack,ssh,statsd,virtualenv]==2.0.0 - -Since apache-airflow .whl packages are treated differently by the docker image, you need to rename the -downloaded apache-airflow* files, for example: - -.. code-block:: bash - - pushd docker-context-files - for file in apache?airflow* - do - mv ${file} _${file} - done - popd - -Building the image: - -.. code-block:: bash - - ./breeze build-image \ - --production-image --python 3.7 --install-airflow-version=2.0.0 \ - --disable-mysql-client-installation --disable-pip-cache --install-from-local-files-when-building \ - --constraints-location="/docker-context-files/constraints-2-0.txt" - -or - -.. code-block:: bash - - docker build . \ - --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \ - --build-arg PYTHON_MAJOR_MINOR_VERSION=3.7 \ - --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow" \ - --build-arg AIRFLOW_VERSION="2.0.0" \ - --build-arg AIRFLOW_INSTALL_VERSION="==2.0.0" \ - --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-2-0" \ - --build-arg AIRFLOW_SOURCES_FROM="empty" \ - --build-arg AIRFLOW_SOURCES_TO="/empty" \ - --build-arg INSTALL_MYSQL_CLIENT="false" \ - --build-arg AIRFLOW_PRE_CACHED_PIP_PACKAGES="false" \ - --build-arg INSTALL_FROM_DOCKER_CONTEXT_FILES="true" \ - --build-arg AIRFLOW_CONSTRAINTS_LOCATION="/docker-context-files/constraints-2-0.txt" - - -Customizing & extending the image together -.......................................... - -You can combine both - customizing & extending the image. You can build the image first using -``customize`` method (either with docker command or with ``breeze`` and then you can ``extend`` -the resulting image using ``FROM`` any dependencies you want. - -Customizing PYPI installation -............................. - -You can customize PYPI sources used during image build by adding a docker-context-files/.pypirc file -This .pypirc will never be committed to the repository and will not be present in the final production image. 
-It is added and used only in the build segment of the image so it is never copied to the final image. - -External sources for dependencies ---------------------------------- - -In corporate environments, there is often the need to build your Container images using -other than default sources of dependencies. The docker file uses standard sources (such as -Debian apt repositories or PyPI repository. However, in corporate environments, the dependencies -are often only possible to be installed from internal, vetted repositories that are reviewed and -approved by the internal security teams. In those cases, you might need to use those different -sources. - -This is rather easy if you extend the image - you simply write your extension commands -using the right sources - either by adding/replacing the sources in apt configuration or -specifying the source repository in pip install command. - -It's a bit more involved in the case of customizing the image. We do not have yet (but we are working -on it) a capability of changing the sources via build args. However, since the builds use -Dockerfile that is a source file, you can rather easily simply modify the file manually and -specify different sources to be used by either of the commands. - - -Comparing extending and customizing the image ---------------------------------------------- - -Here is the comparison of the two types of building images. - -+----------------------------------------------------+---------------------+-----------------------+ -| | Extending the image | Customizing the image | -+====================================================+=====================+=======================+ -| Produces optimized image | No | Yes | -+----------------------------------------------------+---------------------+-----------------------+ -| Use Airflow Dockerfile sources to build the image | No | Yes | -+----------------------------------------------------+---------------------+-----------------------+ -| Requires Airflow sources | No | Yes | -+----------------------------------------------------+---------------------+-----------------------+ -| You can build it with Breeze | No | Yes | -+----------------------------------------------------+---------------------+-----------------------+ -| Allows to use non-default sources for dependencies | Yes | No [1] | -+----------------------------------------------------+---------------------+-----------------------+ - -[1] When you combine customizing and extending the image, you can use external sources -in the "extend" part. There are plans to add functionality to add external sources -option to image customization. You can also modify Dockerfile manually if you want to -use non-default sources for dependencies. - -Using the production image --------------------------- - -The PROD image entrypoint works as follows: - -* In case the user is not "airflow" (with undefined user id) and the group id of the user is set to 0 (root), - then the user is dynamically added to /etc/passwd at entry using USER_NAME variable to define the user name. - This is in order to accommodate the - `OpenShift Guidelines `_ - -* The ``AIRFLOW_HOME`` is set by default to ``/opt/airflow/`` - this means that DAGs - are in default in the ``/opt/airflow/dags`` folder and logs are in the ``/opt/airflow/logs`` - -* The working directory is ``/opt/airflow`` by default. 
- -* If ``AIRFLOW__CORE__SQL_ALCHEMY_CONN`` variable is passed to the container and it is either mysql or postgres - SQL alchemy connection, then the connection is checked and the script waits until the database is reachable. - If ``AIRFLOW__CORE__SQL_ALCHEMY_CONN_CMD`` variable is passed to the container, it is evaluated as a - command to execute and result of this evaluation is used as ``AIRFLOW__CORE__SQL_ALCHEMY_CONN``. The - ``_CMD`` variable takes precedence over the ``AIRFLOW__CORE__SQL_ALCHEMY_CONN`` variable. - -* If no ``AIRFLOW__CORE__SQL_ALCHEMY_CONN`` variable is set then SQLite database is created in - ${AIRFLOW_HOME}/airflow.db and db reset is executed. - -* If first argument equals to "bash" - you are dropped to a bash shell or you can executes bash command - if you specify extra arguments. For example: - -.. code-block:: bash - - docker run -it apache/airflow:master-python3.6 bash -c "ls -la" - total 16 - drwxr-xr-x 4 airflow root 4096 Jun 5 18:12 . - drwxr-xr-x 1 root root 4096 Jun 5 18:12 .. - drwxr-xr-x 2 airflow root 4096 Jun 5 18:12 dags - drwxr-xr-x 2 airflow root 4096 Jun 5 18:12 logs - -* If first argument is equal to "python" - you are dropped in python shell or python commands are executed if - you pass extra parameters. For example: - -.. code-block:: bash - - > docker run -it apache/airflow:master-python3.6 python -c "print('test')" - test - -* If first argument equals to "airflow" - the rest of the arguments is treated as an airflow command - to execute. Example: - -.. code-block:: bash - - docker run -it apache/airflow:master-python3.6 airflow webserver - -* If there are any other arguments - they are simply passed to the "airflow" command - -.. code-block:: bash - - > docker run -it apache/airflow:master-python3.6 version - 2.0.0.dev0 - -* If ``AIRFLOW__CELERY__BROKER_URL`` variable is passed and airflow command with - scheduler, worker of flower command is used, then the script checks the broker connection - and waits until the Celery broker database is reachable. - If ``AIRFLOW__CELERY__BROKER_URL_CMD`` variable is passed to the container, it is evaluated as a - command to execute and result of this evaluation is used as ``AIRFLOW__CELERY__BROKER_URL``. The - ``_CMD`` variable takes precedence over the ``AIRFLOW__CELERY__BROKER_URL`` variable. - -Production image build arguments --------------------------------- - -The following build arguments (``--build-arg`` in docker build command) can be used for production images: - -+------------------------------------------+------------------------------------------+------------------------------------------+ -| Build argument | Default value | Description | -+==========================================+==========================================+==========================================+ -| ``PYTHON_BASE_IMAGE`` | ``python:3.6-slim-buster`` | Base python image. | -+------------------------------------------+------------------------------------------+------------------------------------------+ -| ``PYTHON_MAJOR_MINOR_VERSION`` | ``3.6`` | major/minor version of Python (should | -| | | match base image). | -+------------------------------------------+------------------------------------------+------------------------------------------+ -| ``AIRFLOW_VERSION`` | ``2.0.0.dev0`` | version of Airflow. 
-+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``AIRFLOW_REPO``                         | ``apache/airflow``                       | the repository from which PIP            |
-|                                          |                                          | dependencies are pre-installed.          |
-+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``AIRFLOW_BRANCH``                       | ``master``                               | the branch from which PIP dependencies   |
-|                                          |                                          | are pre-installed initially.             |
-+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``AIRFLOW_CONSTRAINTS_LOCATION``         |                                          | If not empty, it will override the       |
-|                                          |                                          | source of the constraints with the       |
-|                                          |                                          | specified URL or file. Note that the     |
-|                                          |                                          | file has to be in the docker context, so |
-|                                          |                                          | it's best to place such a file in        |
-|                                          |                                          | one of the folders included in           |
-|                                          |                                          | .dockerignore.                           |
-+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``AIRFLOW_CONSTRAINTS_REFERENCE``        | ``constraints-master``                   | Reference (branch or tag) from GitHub    |
-|                                          |                                          | where the constraints file is taken      |
-|                                          |                                          | from. It can be ``constraints-master``   |
-|                                          |                                          | but also ``constraints-1-10`` for        |
-|                                          |                                          | 1.10.* installations. When building a    |
-|                                          |                                          | specific version you want to point it    |
-|                                          |                                          | to the specific tag, for example         |
-|                                          |                                          | ``constraints-1.10.14``.                 |
-+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``INSTALL_PROVIDERS_FROM_SOURCES``       | ``false``                                | If set to ``true`` and the image is      |
-|                                          |                                          | built from sources, all provider         |
-|                                          |                                          | packages are installed from sources      |
-|                                          |                                          | rather than from packages. It has no     |
-|                                          |                                          | effect when installing from PyPI or      |
-|                                          |                                          | the GitHub repo.                         |
-+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``AIRFLOW_EXTRAS``                       | (see Dockerfile)                         | Default extras with which airflow is     |
-|                                          |                                          | installed.                               |
-+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``INSTALL_FROM_PYPI``                    | ``true``                                 | If set to true, Airflow is installed     |
-|                                          |                                          | from PyPI. If you want to install        |
-|                                          |                                          | Airflow from a self-built package,       |
-|                                          |                                          | you can set it to false, put the package |
-|                                          |                                          | in ``docker-context-files`` and set      |
-|                                          |                                          | ``INSTALL_FROM_DOCKER_CONTEXT_FILES`` to |
-|                                          |                                          | ``true``. For this you have to also keep |
-|                                          |                                          | the ``AIRFLOW_PRE_CACHED_PIP_PACKAGES``  |
-|                                          |                                          | flag set to ``false``.                   |
-+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``AIRFLOW_PRE_CACHED_PIP_PACKAGES``      | ``false``                                | Allows pre-caching of airflow PIP        |
-|                                          |                                          | packages from the Apache Airflow GitHub  |
-|                                          |                                          | repository. This optimizes iterations    |
-|                                          |                                          | for image builds and speeds up CI        |
-|                                          |                                          | builds.                                  |
-+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``INSTALL_FROM_DOCKER_CONTEXT_FILES``    | ``false``                                | If set to true, Airflow, providers and   |
-|                                          |                                          | all dependencies are installed           |
-|                                          |                                          | from locally built/downloaded            |
-|                                          |                                          | .whl and .tar.gz files placed in         |
-|                                          |                                          | ``docker-context-files``.
In certain                |
-|                                          |                                          | corporate environments, this is required |
-|                                          |                                          | to install airflow from such pre-vetted  |
-|                                          |                                          | packages rather than from PyPI. For this |
-|                                          |                                          | to work, also set ``INSTALL_FROM_PYPI``  |
-|                                          |                                          | to ``false``. Note that packages         |
-|                                          |                                          | starting with the ``apache?airflow``     |
-|                                          |                                          | glob are treated differently than other  |
-|                                          |                                          | packages. All ``apache?airflow``         |
-|                                          |                                          | packages are installed with dependencies |
-|                                          |                                          | limited by airflow constraints. All      |
-|                                          |                                          | other packages are installed without     |
-|                                          |                                          | dependencies, 'as-is'. If you wish to    |
-|                                          |                                          | install airflow via 'pip download' with  |
-|                                          |                                          | all dependencies downloaded, you have to |
-|                                          |                                          | rename the apache airflow and provider   |
-|                                          |                                          | packages so that they do not start with  |
-|                                          |                                          | the ``apache?airflow`` glob.             |
-+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``UPGRADE_TO_NEWER_DEPENDENCIES``        | ``false``                                | If set to true, the dependencies are     |
-|                                          |                                          | upgraded to newer versions matching      |
-|                                          |                                          | setup.py before installation.            |
-+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``CONTINUE_ON_PIP_CHECK_FAILURE``        | ``false``                                | By default the image build fails if pip  |
-|                                          |                                          | check fails for it. This is good for     |
-|                                          |                                          | interactive building but on CI the       |
-|                                          |                                          | image should be built regardless - we    |
-|                                          |                                          | have a separate step to verify the       |
-|                                          |                                          | image.                                   |
-+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``ADDITIONAL_AIRFLOW_EXTRAS``            |                                          | Optional additional extras with which    |
-|                                          |                                          | airflow is installed.                    |
-+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``ADDITIONAL_PYTHON_DEPS``               |                                          | Optional python packages to extend       |
-|                                          |                                          | the image with some extra dependencies.  |
-+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``DEV_APT_COMMAND``                      | (see Dockerfile)                         | Dev apt command executed before dev deps |
-|                                          |                                          | are installed in the Build image.        |
-+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``ADDITIONAL_DEV_APT_COMMAND``           |                                          | Additional Dev apt command executed      |
-|                                          |                                          | before dev deps are installed            |
-|                                          |                                          | in the Build image. Should start with    |
-|                                          |                                          | ``&&``.                                  |
-+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``DEV_APT_DEPS``                         | (see Dockerfile)                         | Dev APT dependencies installed           |
-|                                          |                                          | in the Build image.                      |
-+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``ADDITIONAL_DEV_APT_DEPS``              |                                          | Additional apt dev dependencies          |
-|                                          |                                          | installed in the Build image.            |
-+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``ADDITIONAL_DEV_APT_ENV``               |                                          | Additional env variables defined         |
-|                                          |                                          | when installing dev deps.                |
-+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``RUNTIME_APT_COMMAND``                  | (see Dockerfile)                         | Runtime apt command executed before deps |
-|                                          |                                          | are installed in the Main image.         |
-+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``ADDITIONAL_RUNTIME_APT_COMMAND``       |                                          | Additional Runtime apt command executed  |
-|                                          |                                          | before runtime deps are installed        |
-|                                          |                                          | in the Main image. Should start with     |
-|                                          |                                          | ``&&``.                                  |
-+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``RUNTIME_APT_DEPS``                     | (see Dockerfile)                         | Runtime APT dependencies installed       |
-|                                          |                                          | in the Main image.                       |
-+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``ADDITIONAL_RUNTIME_APT_DEPS``          |                                          | Additional apt runtime dependencies      |
-|                                          |                                          | installed in the Main image.             |
-+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``ADDITIONAL_RUNTIME_APT_ENV``           |                                          | Additional env variables defined         |
-|                                          |                                          | when installing runtime deps.            |
-+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``AIRFLOW_HOME``                         | ``/opt/airflow``                         | Airflow’s HOME (that’s where logs and    |
-|                                          |                                          | SQLite databases are stored).            |
-+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``AIRFLOW_UID``                          | ``50000``                                | Airflow user UID.                        |
-+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``AIRFLOW_GID``                          | ``50000``                                | Airflow group GID. Note that most files  |
-|                                          |                                          | created on behalf of the airflow user    |
-|                                          |                                          | belong to the ``root`` group (0) to keep |
-|                                          |                                          | OpenShift Guidelines compatibility.      |
-+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``AIRFLOW_USER_HOME_DIR``                | ``/home/airflow``                        | Home directory of the Airflow user.      |
-+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``CASS_DRIVER_BUILD_CONCURRENCY``        | ``8``                                    | Number of processors to use for          |
-|                                          |                                          | the cassandra PIP install (speeds up     |
-|                                          |                                          | installation in case the cassandra       |
-|                                          |                                          | extra is used).                          |
-+------------------------------------------+------------------------------------------+------------------------------------------+
-| ``INSTALL_MYSQL_CLIENT``                 | ``true``                                 | Whether the MySQL client should be       |
-|                                          |                                          | installed. The mysql extra is removed    |
-|                                          |                                          | from extras if the client is not         |
-|                                          |                                          | installed.                               |
-+------------------------------------------+------------------------------------------+------------------------------------------+
-
-There are build arguments that determine the installation mechanism of Apache Airflow for the
-production image. There are three types of build:
-
-* From local sources (the default, for example when you use ``docker build .``)
-* From a released PyPI airflow package (used to build the official Docker image)
-* From any version in the GitHub repository (this is used mostly for system testing).
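Before looking at those three types in detail, here is a minimal sketch of the "pre-vetted packages" scenario
described for ``INSTALL_FROM_DOCKER_CONTEXT_FILES`` in the table above. The ``.whl`` file name and its path are
hypothetical - use whatever packages you have downloaded or built:

.. code-block:: bash

    # Sketch only: place pre-vetted, locally downloaded/built packages in docker-context-files
    # (the .whl name below is hypothetical), then disable installation from PyPI and the
    # pre-caching of PIP packages, as described in the table above.
    mkdir -p ./docker-context-files
    cp /path/to/vetted-packages/apache_airflow-2.0.0-py3-none-any.whl ./docker-context-files/

    docker build . \
      --build-arg INSTALL_FROM_PYPI="false" \
      --build-arg INSTALL_FROM_DOCKER_CONTEXT_FILES="true" \
      --build-arg AIRFLOW_PRE_CACHED_PIP_PACKAGES="false"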
-
-+-----------------------------------+------------------------+-----------------------------------------------------------------------------------+
-| Build argument                    | Default                | What to specify                                                                   |
-+===================================+========================+===================================================================================+
-| ``AIRFLOW_INSTALLATION_METHOD``   | ``apache-airflow``     | Should point to the installation method of Apache Airflow. It can be             |
-|                                   |                        | ``apache-airflow`` for installation from packages, a URL for installation from a |
-|                                   |                        | GitHub repository tag or branch, or "." to install from sources.                 |
-|                                   |                        | Note that installing from local sources requires appropriate values of the       |
-|                                   |                        | ``AIRFLOW_SOURCES_FROM`` and ``AIRFLOW_SOURCES_TO`` variables as described below. |
-|                                   |                        | Only used when ``INSTALL_FROM_PYPI`` is set to ``true``.                         |
-+-----------------------------------+------------------------+-----------------------------------------------------------------------------------+
-| ``AIRFLOW_INSTALL_VERSION``       |                        | Optional - might be used for package installation of a different Airflow         |
-|                                   |                        | version, for example "==2.0.0". For consistency, you should also set             |
-|                                   |                        | ``AIRFLOW_VERSION`` to the same value; AIRFLOW_VERSION is embedded as a label    |
-|                                   |                        | in the created image.                                                            |
-+-----------------------------------+------------------------+-----------------------------------------------------------------------------------+
-| ``AIRFLOW_CONSTRAINTS_REFERENCE`` | ``constraints-master`` | Reference (branch or tag) from GitHub where the constraints file is taken from.  |
-|                                   |                        | It can be ``constraints-master`` but can also be ``constraints-1-10`` for        |
-|                                   |                        | 1.10.* installations. When building a specific version                           |
-|                                   |                        | you want to point it to the specific tag, for example ``constraints-2.0.0``      |
-+-----------------------------------+------------------------+-----------------------------------------------------------------------------------+
-| ``AIRFLOW_WWW``                   | ``www``                | In case of Airflow 2.0 it should be "www", in case of the Airflow 1.10           |
-|                                   |                        | series it should be "www_rbac".                                                  |
-+-----------------------------------+------------------------+-----------------------------------------------------------------------------------+
-| ``AIRFLOW_SOURCES_FROM``          | ``empty``              | Sources of Airflow. Set it to "." when you install airflow from                  |
-|                                   |                        | local sources.                                                                   |
-+-----------------------------------+------------------------+-----------------------------------------------------------------------------------+
-| ``AIRFLOW_SOURCES_TO``            | ``/empty``             | Target for Airflow sources. Set it to "/opt/airflow" when                        |
-|                                   |                        | you want to install airflow from local sources.                                  |
-+-----------------------------------+------------------------+-----------------------------------------------------------------------------------+
-
-This builds the production image with Python 3.6 and default extras from the local sources (currently
-the master version of 2.0):
-
-.. code-block:: bash
-
-    docker build .
-
-This builds the production image with Python 3.7 and default extras from the 2.0.0 tag, with
-constraints taken from the constraints-2-0 branch in GitHub.
-
-.. code-block:: bash
-
-    docker build . \
-      --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
-      --build-arg PYTHON_MAJOR_MINOR_VERSION=3.7 \
-      --build-arg AIRFLOW_INSTALLATION_METHOD="https://github.com/apache/airflow/archive/2.0.0.tar.gz#egg=apache-airflow" \
-      --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-2-0" \
-      --build-arg AIRFLOW_BRANCH="v1-10-test" \
-      --build-arg AIRFLOW_SOURCES_FROM="empty" \
-      --build-arg AIRFLOW_SOURCES_TO="/empty"
-
-This builds the production image with Python 3.7 and default extras from the 2.0.0 PyPI package, with
-constraints taken from the 2.0.0 tag in GitHub and pre-installed pip dependencies from the top
-of the v1-10-test branch.
-
-.. code-block:: bash
-
-    docker build . \
-      --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
-      --build-arg PYTHON_MAJOR_MINOR_VERSION=3.7 \
-      --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow" \
-      --build-arg AIRFLOW_VERSION="2.0.0" \
-      --build-arg AIRFLOW_INSTALL_VERSION="==2.0.0" \
-      --build-arg AIRFLOW_BRANCH="v1-10-test" \
-      --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-2.0.0" \
-      --build-arg AIRFLOW_SOURCES_FROM="empty" \
-      --build-arg AIRFLOW_SOURCES_TO="/empty"
-
-This builds the production image with Python 3.7, additional airflow extras and additional python
-dependencies from the 2.0.0 PyPI package, and pre-installed pip dependencies from the 2.0.0 tagged
-constraints.
-
-.. code-block:: bash
-
-    docker build . \
-      --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
-      --build-arg PYTHON_MAJOR_MINOR_VERSION=3.7 \
-      --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow" \
-      --build-arg AIRFLOW_VERSION="2.0.0" \
-      --build-arg AIRFLOW_INSTALL_VERSION="==2.0.0" \
-      --build-arg AIRFLOW_BRANCH="v1-10-test" \
-      --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-2.0.0" \
-      --build-arg AIRFLOW_SOURCES_FROM="empty" \
-      --build-arg AIRFLOW_SOURCES_TO="/empty" \
-      --build-arg ADDITIONAL_AIRFLOW_EXTRAS="mssql,hdfs" \
-      --build-arg ADDITIONAL_PYTHON_DEPS="sshtunnel oauth2client"
-
-This builds the production image with Python 3.7, additional airflow extras from the 2.0.0 PyPI
-package, and additional apt dev and runtime dependencies.
-
-.. code-block:: bash
-
-    docker build . \
-      --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \
-      --build-arg PYTHON_MAJOR_MINOR_VERSION=3.7 \
-      --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow" \
-      --build-arg AIRFLOW_VERSION="2.0.0" \
-      --build-arg AIRFLOW_INSTALL_VERSION="==2.0.0" \
-      --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-2-0" \
-      --build-arg AIRFLOW_SOURCES_FROM="empty" \
-      --build-arg AIRFLOW_SOURCES_TO="/empty" \
-      --build-arg ADDITIONAL_AIRFLOW_EXTRAS="jdbc" \
-      --build-arg ADDITIONAL_DEV_APT_DEPS="gcc g++" \
-      --build-arg ADDITIONAL_RUNTIME_APT_DEPS="default-jre-headless"
-
-
-Actions executed at image start
--------------------------------
-
-If you are using the default entrypoint of the production image,
-there are a few actions that are automatically performed when the container starts.
-In some cases, you can pass environment variables to the image to trigger some of that behaviour.
-
-The variables that control the "execution" behaviour start with ``_AIRFLOW`` to distinguish them
-from the variables used to build the image, which start with ``AIRFLOW``.
-
-Creating system user
-....................
-
-The Airflow image is OpenShift compatible, which means that you can start it with a random user ID
-and the group id 0. Airflow will automatically create such a user and make its home directory point to ``/home/airflow``.
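For example, a quick way to see this in action could look like the following sketch (the UID ``4000`` is
arbitrary - any random UID with group 0 should behave the same way):

.. code-block:: bash

    # Run the image OpenShift-style: an arbitrary UID with group 0 (root).
    # The entrypoint adds such a user to /etc/passwd at startup, so the user
    # has a valid passwd entry and HOME points to /home/airflow.
    docker run -it --user 4000:0 apache/airflow:master-python3.6 \
        bash -c 'id && echo "$HOME"'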
-You can read more about it in the "Support arbitrary user ids" chapter in the
-`Openshift best practices `_.
-
-Waits for Airflow DB connection
-...............................
-
-In case a Postgres or MySQL DB is used, the entrypoint will wait until the airflow DB connection becomes
-available. This always happens when you use the default entrypoint.
-
-The script detects the backend type depending on the URL schema and assigns default port numbers if not specified
-in the URL. Then it loops until a connection to the host/port specified can be established.
-It tries ``CONNECTION_CHECK_MAX_COUNT`` times and sleeps ``CONNECTION_CHECK_SLEEP_TIME`` between checks.
-
-Supported schemes:
-
-* ``postgres://`` - default port 5432
-* ``mysql://`` - default port 3306
-* ``sqlite://``
-
-In case of the SQLite backend, there is no connection to establish and waiting is skipped.
-
-Upgrading Airflow DB
-....................
-
-If you set the ``_AIRFLOW_DB_UPGRADE`` variable to a non-empty value, the entrypoint will run
-the ``airflow db upgrade`` command right after verifying the connection. You can also use this
-when you are running airflow with the internal SQLite database (the default) to upgrade the db and create
-an admin user at the entrypoint, so that you can start the webserver immediately. Note - using SQLite is
-intended only for testing purposes; never use SQLite in production as it has severe limitations when it
-comes to concurrency.
-
-
-Creating admin user
-...................
-
-The entrypoint can also create a webserver user automatically when you enter it. You need to set
-``_AIRFLOW_WWW_USER_CREATE`` to a non-empty value in order to do that. This is not intended for
-production; it is only useful if you would like to run a quick test with the production image.
-You need to pass at least a password to create such a user, via ``_AIRFLOW_WWW_USER_PASSWORD`` or
-``_AIRFLOW_WWW_USER_PASSWORD_CMD``. Similarly to the other ``*_CMD`` variables, the content of
-the ``*_CMD`` variable will be evaluated as a shell command and its output will be set as the password.
-
-User creation will fail if none of the ``PASSWORD`` variables are set - there is no default
-password, for security reasons.
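As a sketch of how the ``*_CMD`` evaluation can be used in practice, the password can be read from a
mounted file instead of being passed literally (the secret path below is hypothetical):

.. code-block:: bash

    # The value of _AIRFLOW_WWW_USER_PASSWORD_CMD is evaluated as a shell command
    # and its output becomes the admin password (the secret path is hypothetical).
    docker run -it \
      -v /path/to/secrets:/run/secrets:ro \
      --env "_AIRFLOW_WWW_USER_CREATE=true" \
      --env "_AIRFLOW_WWW_USER_PASSWORD_CMD=cat /run/secrets/airflow-admin-password" \
      apache/airflow:master-python3.8 webserver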
-
-+-----------+--------------------------+----------------------------------------------------------------------+
-| Parameter | Default                  | Environment variable                                                 |
-+===========+==========================+======================================================================+
-| username  | admin                    | ``_AIRFLOW_WWW_USER_USERNAME``                                       |
-+-----------+--------------------------+----------------------------------------------------------------------+
-| password  |                          | ``_AIRFLOW_WWW_USER_PASSWORD_CMD`` or ``_AIRFLOW_WWW_USER_PASSWORD`` |
-+-----------+--------------------------+----------------------------------------------------------------------+
-| firstname | Airflow                  | ``_AIRFLOW_WWW_USER_FIRSTNAME``                                      |
-+-----------+--------------------------+----------------------------------------------------------------------+
-| lastname  | Admin                    | ``_AIRFLOW_WWW_USER_LASTNAME``                                       |
-+-----------+--------------------------+----------------------------------------------------------------------+
-| email     | airflowadmin@example.com | ``_AIRFLOW_WWW_USER_EMAIL``                                          |
-+-----------+--------------------------+----------------------------------------------------------------------+
-| role      | Admin                    | ``_AIRFLOW_WWW_USER_ROLE``                                           |
-+-----------+--------------------------+----------------------------------------------------------------------+
-
-If the password is specified, the entrypoint will attempt to create the user, but it will not fail
-if the attempt does not succeed (this accounts for the case where the user has already been created).
-
-You can, for example, start the webserver in the production image, initializing the internal SQLite
-database and creating an ``admin/admin`` Admin user, with the following command:
-
-.. code-block:: bash
-
-    docker run -it -p 8080:8080 \
-      --env "_AIRFLOW_DB_UPGRADE=true" \
-      --env "_AIRFLOW_WWW_USER_CREATE=true" \
-      --env "_AIRFLOW_WWW_USER_PASSWORD=admin" \
-      apache/airflow:master-python3.8 webserver
-
-
-.. code-block:: bash
-
-    docker run -it -p 8080:8080 \
-      --env "_AIRFLOW_DB_UPGRADE=true" \
-      --env "_AIRFLOW_WWW_USER_CREATE=true" \
-      --env "_AIRFLOW_WWW_USER_PASSWORD_CMD=echo admin" \
-      apache/airflow:master-python3.8 webserver
-
-The commands above initialize the SQLite database, create an admin user with the password ``admin``
-and the Admin role. They also forward local port ``8080`` to the webserver port and finally start the webserver.
-
-
-Waits for celery broker connection
-..................................
-
-In case a Postgres or MySQL DB is used, and one of the ``scheduler``, ``celery``, ``worker``, or ``flower``
-commands is used, the entrypoint will wait until the celery broker DB connection is available.
-
-The script detects the backend type depending on the URL schema and assigns default port numbers if not specified
-in the URL. Then it loops until a connection to the host/port specified can be established.
-It tries ``CONNECTION_CHECK_MAX_COUNT`` times and sleeps ``CONNECTION_CHECK_SLEEP_TIME`` between checks.
-
-Supported schemes:
-
-* ``amqp(s)://`` (rabbitmq) - default port 5672
-* ``redis://`` - default port 6379
-* ``postgres://`` - default port 5432
-* ``mysql://`` - default port 3306
-* ``sqlite://``
-
-In case of the SQLite backend, there is no connection to establish and waiting is skipped.
-
-
-Recipes
--------
-
-Users sometimes share interesting ways of using the Docker images. We encourage users to contribute these
-recipes to the documentation, in case they prove useful to other members of the community, by
-submitting a pull request.
The sections below capture this knowledge.
-
-Google Cloud SDK installation
-.............................
-
-Some operators, such as :class:`airflow.providers.google.cloud.operators.kubernetes_engine.GKEStartPodOperator`,
-:class:`airflow.providers.google.cloud.operators.dataflow.DataflowStartSqlJobOperator`, require
-the installation of the `Google Cloud SDK `__ (which includes ``gcloud``).
-You can also run these commands with BashOperator.
-
-Create a new Dockerfile like the one shown below.
-
-.. exampleinclude:: /docker-images-recipes/gcloud.Dockerfile
-    :language: dockerfile
-
-Then build a new image.
-
-.. code-block:: bash
-
-    docker build . \
-      --build-arg BASE_AIRFLOW_IMAGE="apache/airflow:2.0.0" \
-      -t my-airflow-image
-
-
-Apache Hadoop Stack installation
-................................
-
-Airflow is often used to run tasks on a Hadoop cluster. It requires the Java Runtime Environment (JRE) to run.
-Below are the steps to install tools that are frequently used in the Hadoop world:
-
-- Java Runtime Environment (JRE)
-- Apache Hadoop
-- Apache Hive
-- `Cloud Storage connector for Apache Hadoop `__
-
-
-Create a new Dockerfile like the one shown below.
-
-.. exampleinclude:: /docker-images-recipes/hadoop.Dockerfile
-    :language: dockerfile
-
-Then build a new image.
-
-.. code-block:: bash
-
-    docker build . \
-      --build-arg BASE_AIRFLOW_IMAGE="apache/airflow:2.0.0" \
-      -t my-airflow-image
-
-More details about the images
------------------------------
-
-You can read more details about the images - the context, their parameters and internal structure - in the
-`IMAGES.rst `_ document.
+We provide :doc:`a Docker Image (OCI) for Apache Airflow ` for use in a containerized environment. Consider using it to guarantee that software will always run the same no matter where it’s deployed.
 .. _production-deployment:kerberos:
diff --git a/docs/apache-airflow/project.rst b/docs/apache-airflow/project.rst
index 6b1cd7a9cee76..0a97e0ff9882e 100644
--- a/docs/apache-airflow/project.rst
+++ b/docs/apache-airflow/project.rst
@@ -36,40 +36,46 @@ in January 2019.
Committers ---------- -- @aijamalnk (Aizhamal Nurmamat kyzy) -- @alexvanboxel (Alex Van Boxel) -- @aoen (Dan Davydov) -- @artwr (Arthur Wiedmer) -- @ashb (Ash Berlin-Taylor) -- @basph (Bas Harenslak) -- @bolkedebruin (Bolke de Bruin) -- @criccomini (Chris Riccomini) -- @dimberman (Daniel Imberman) -- @feluelle (Felix Uellendall) -- @feng-tao (Tao Feng) -- @fokko (Fokko Driesprong) -- @hiteshs (Hitesh Shah) -- @houqp (Qingping Hou) -- @jghoman (Jakob Homan) -- @jmcarp (Joshua Carp) -- @joygao (Joy Gao) -- @kaxil (Kaxil Naik) -- @KevinYang21 (Kevin Yang) -- @leahecole (Leah Cole) -- @mik-laj (Kamil Breguła) -- @milton0825 (Chao-Han Tsai) -- @mistercrunch (Maxime "Max" Beauchemin) -- @msumit (Sumit Maheshwari) -- @potiuk (Jarek Potiuk) -- @r39132 (Siddharth "Sid" Anand) -- @ryanahamilton (Ryan Hamilton) -- @ryw (Ry Walker) -- @saguziel (Alex Guziel) -- @sekikn (Kengo Seki) -- @turbaszek (Tomasz Urbaszek) -- @vikramkoka (Vikram Koka) -- @XD-DENG (Xiaodong Deng) -- @zhongjiajie (Jiajie Zhong) +- Aizhamal Nurmamat kyzy (@aijamalnk) +- Alex Guziel (@saguziel) +- Alex Van Boxel (@alexvanboxel) +- Arthur Wiedmer (@artwr) +- Ash Berlin-Taylor (@ashb) +- Bas Harenslak (@basph) +- Bolke de Bruin (@bolkedebruin) +- Chao-Han Tsai (@milton0825) +- Chris Riccomini (@criccomini) +- Dan Davydov (@aoen) +- Daniel Imberman (@dimberman) +- Daniel Standish (@dstandish) +- Elad Kalif (@eladkal) +- Ephraim Anierobi (@ephraimbuddy) +- Felix Uellendall (@feluelle) +- Fokko Driesprong (@fokko) +- Hitesh Shah (@hiteshs) +- Jakob Homan (@jghoman) +- James Timmins (@jhtimmins) +- Jarek Potiuk (@potiuk) +- Jiajie Zhong (@zhongjiajie) +- Joshua Carp (@jmcarp) +- Joy Gao (@joygao) +- Kamil Breguła (@mik-laj) +- Kaxil Naik (@kaxil) +- Kengo Seki (@sekikn) +- Kevin Yang (@KevinYang21) +- Leah Cole (@leahecole) +- Maxime "Max" Beauchemin (@mistercrunch) +- Qian Yu (@yuqian90) +- Qingping Hou (@houqp) +- Ry Walker (@ryw) +- Ryan Hamilton (@ryanahamilton) +- Siddharth "Sid" Anand (@r39132) +- Sumit Maheshwari (@msumit) +- Tao Feng (@feng-tao) +- Tomasz Urbaszek (@turbaszek) +- Vikram Koka (@vikramkoka) +- Xiaodong Deng (@XD-DENG) +- Xinbin Huang (@xinbinhuang) For the full list of contributors, take a look at `Airflow's GitHub Contributor page: diff --git a/docs/apache-airflow/start/airflow.sh b/docs/apache-airflow/start/airflow.sh index 2324ba681ee0a..4aa1d71cacf11 100755 --- a/docs/apache-airflow/start/airflow.sh +++ b/docs/apache-airflow/start/airflow.sh @@ -24,5 +24,5 @@ PROJECT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" set -euo pipefail -export COMPOSE_FILE=${PROJECT_DIR}/docker-compose.yaml -exec docker-compose run airflow-worker "${@}" +export COMPOSE_FILE="${PROJECT_DIR}/docker-compose.yaml" +exec docker-compose run --rm -e CONNECTION_CHECK_MAX_COUNT=0 airflow-worker "${@}" diff --git a/docs/apache-airflow/start/docker-compose.yaml b/docs/apache-airflow/start/docker-compose.yaml index e1699385c0dd9..470a475077115 100644 --- a/docs/apache-airflow/start/docker-compose.yaml +++ b/docs/apache-airflow/start/docker-compose.yaml @@ -49,6 +49,7 @@ x-airflow-common: AIRFLOW__CORE__FERNET_KEY: '' AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: 'true' AIRFLOW__CORE__LOAD_EXAMPLES: 'true' + AIRFLOW__API__AUTH_BACKEND: 'airflow.api.auth.backend.basic_auth' volumes: - ./dags:/opt/airflow/dags - ./logs:/opt/airflow/logs diff --git a/docs/apache-airflow/start/docker.rst b/docs/apache-airflow/start/docker.rst index 290cc330fe5f7..0e2becf923e46 100644 --- a/docs/apache-airflow/start/docker.rst +++ 
b/docs/apache-airflow/start/docker.rst @@ -25,7 +25,7 @@ Before you begin Follow these steps to install the necessary tools. -1. Install `Docker Community Edition (CE) `__ on your workstation. +1. Install `Docker Community Edition (CE) `__ on your workstation. Depending on the OS, you may need to configure your Docker instance to use 4.00 GB of memory for all containers to run properly. Please refer to the Resources section if using `Docker for Windows `__ or `Docker for Mac `__ for more information. 2. Install `Docker Compose `__ v1.27.0 and newer on your workstation. Older versions of ``docker-compose`` do not support all features required by ``docker-compose.yaml`` file, so double check that it meets the minimum version requirements. @@ -72,7 +72,7 @@ On **Linux**, the mounted volumes in container use the native Linux filesystem u mkdir ./dags ./logs ./plugins echo -e "AIRFLOW_UID=$(id -u)\nAIRFLOW_GID=0" > .env -On **all operating system**, you need to run database migrations and create the first user account. To do it, run. +On **all operating systems**, you need to run database migrations and create the first user account. To do it, run. .. code-block:: bash @@ -195,7 +195,7 @@ To stop and delete containers, delete volumes with database data and download im Notes ===== -By default, the Docker Compose file uses the latest Airflow image (`apache/airflow `__). If you need, you can :ref:`customize and extend it `. +By default, the Docker Compose file uses the latest Airflow image (`apache/airflow `__). If you need, you can :doc:`customize and extend it `. What's Next? ============ diff --git a/docs/apache-airflow/start/local.rst b/docs/apache-airflow/start/local.rst index 7b0bb33d60ea2..64aaa7a5710ce 100644 --- a/docs/apache-airflow/start/local.rst +++ b/docs/apache-airflow/start/local.rst @@ -86,7 +86,7 @@ the ``Admin->Configuration`` menu. The PID file for the webserver will be stored in ``$AIRFLOW_HOME/airflow-webserver.pid`` or in ``/run/airflow/webserver.pid`` if started by systemd. -Out of the box, Airflow uses a sqlite database, which you should outgrow +Out of the box, Airflow uses a SQLite database, which you should outgrow fairly quickly since no parallelization is possible using this database backend. It works in conjunction with the :class:`~airflow.executors.sequential_executor.SequentialExecutor` which will diff --git a/docs/apache-airflow/tutorial.rst b/docs/apache-airflow/tutorial.rst index 932401425fb50..3a6b7ce9341ab 100644 --- a/docs/apache-airflow/tutorial.rst +++ b/docs/apache-airflow/tutorial.rst @@ -109,6 +109,7 @@ instantiated from an operator is called a task. The first argument .. exampleinclude:: /../../airflow/example_dags/tutorial.py :language: python + :dedent: 4 :start-after: [START basic_task] :end-before: [END basic_task] @@ -144,6 +145,7 @@ stamp"). .. exampleinclude:: /../../airflow/example_dags/tutorial.py :language: python + :dedent: 4 :start-after: [START jinja_template] :end-before: [END jinja_template] @@ -186,6 +188,7 @@ json, yaml. .. exampleinclude:: /../../airflow/example_dags/tutorial.py :language: python + :dedent: 4 :start-after: [START documentation] :end-before: [END documentation] diff --git a/docs/apache-airflow/tutorial_taskflow_api.rst b/docs/apache-airflow/tutorial_taskflow_api.rst index cea1438031705..ea52be1871470 100644 --- a/docs/apache-airflow/tutorial_taskflow_api.rst +++ b/docs/apache-airflow/tutorial_taskflow_api.rst @@ -69,6 +69,7 @@ as shown below. The function name acts as a unique identifier for the task. .. 
exampleinclude:: /../../airflow/example_dags/tutorial_taskflow_api_etl.py
     :language: python
+    :dedent: 4
     :start-after: [START extract]
     :end-before: [END extract]
@@ -83,6 +84,7 @@ we can move to the main part of the DAG.
 .. exampleinclude:: /../../airflow/example_dags/tutorial_taskflow_api_etl.py
     :language: python
+    :dedent: 4
     :start-after: [START main_flow]
     :end-before: [END main_flow]
@@ -119,6 +121,7 @@ in the middle of the data pipeline. In Airflow 1.x, this task is defined as show
 .. exampleinclude:: /../../airflow/example_dags/tutorial_etl_dag.py
     :language: python
+    :dedent: 4
     :start-after: [START transform_function]
     :end-before: [END transform_function]
@@ -130,6 +133,7 @@ Contrasting that with Taskflow API in Airflow 2.0 as shown below.
 .. exampleinclude:: /../../airflow/example_dags/tutorial_taskflow_api_etl.py
     :language: python
+    :dedent: 4
     :start-after: [START transform]
     :end-before: [END transform]
@@ -143,6 +147,7 @@ dependencies specified as shown below.
 .. exampleinclude:: /../../airflow/example_dags/tutorial_etl_dag.py
     :language: python
+    :dedent: 4
     :start-after: [START main_flow]
     :end-before: [END main_flow]
@@ -151,6 +156,7 @@ the dependencies as shown below.
 .. exampleinclude:: /../../airflow/example_dags/tutorial_taskflow_api_etl.py
     :language: python
+    :dedent: 4
     :start-after: [START main_flow]
     :end-before: [END main_flow]
@@ -182,7 +188,7 @@ Building this dependency is shown in the code below:
 .. code-block:: python
     @task()
-    def extract_from_file():
+    def extract_from_file():
     """
     #### Extract from file task
     A simple Extract task to get data ready for the rest of the data
diff --git a/docs/apache-airflow/upgrade-check.rst b/docs/apache-airflow/upgrade-check.rst
index 2a183778da3c5..18bbec0094536 100644
--- a/docs/apache-airflow/upgrade-check.rst
+++ b/docs/apache-airflow/upgrade-check.rst
@@ -33,7 +33,7 @@ a period of time and specifically only for upgrading from Airflow 1.10 releases
 While there has been a lot of work put into making this upgrade as easy as possible, there are some
 changes which are incompatible between Airflow 1.10 and Airflow 2.0. In order to make this as simple
 as possible to navigate, we recommend that people first upgrade to the latest release in the 1.10 series (at the
-time of writing: 1.10.14) and then to download this package and run the script as detailed below.
+time of writing: 1.10.15) and then to download this package and run the script as detailed below.
 .. note::
@@ -173,7 +173,7 @@ statement for the Python Operator to be as follows will make this DAG work in Ai
    from airflow.operators.python import PythonOperator
-However, at the time of writing, this is incompatible in Airflow 1.10.14. So, this change
+However, at the time of writing, this is incompatible in Airflow 1.10.15. So, this change
 can only be made while moving to Airflow 2.0.
diff --git a/docs/apache-airflow/upgrading-to-2.rst b/docs/apache-airflow/upgrading-to-2.rst
index 876d2cd9b495a..9903410e0de00 100644
--- a/docs/apache-airflow/upgrading-to-2.rst
+++ b/docs/apache-airflow/upgrading-to-2.rst
@@ -37,26 +37,26 @@ For a list of breaking changes between Python 2 and Python 3, please refer to th
 from the CouchBaseDB team.
-Step 2: Upgrade to Airflow 1.10.14 (a.k.a our "bridge" release)
+Step 2: Upgrade to Airflow 1.10.15 (a.k.a our "bridge" release)
 '''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''
-To minimize friction for users upgrading from Airflow 1.10 to Airflow 2.0 and beyond, Airflow 1.10.14 "a bridge release" has
-been created.
This is intended to be the final 1.10 feature release. Airflow 1.10.14 includes support for various features +To minimize friction for users upgrading from Airflow 1.10 to Airflow 2.0 and beyond, Airflow 1.10.15 "a bridge release" has +been created. This is intended to be the final 1.10 feature release. Airflow 1.10.15 includes support for various features that have been backported from Airflow 2.0 to make it easy for users to test their Airflow environment before upgrading to Airflow 2.0. We strongly recommend that all users upgrading to Airflow 2.0, first -upgrade to Airflow 1.10.14 and test their Airflow deployment and only then upgrade to Airflow 2.0. +upgrade to Airflow 1.10.15 and test their Airflow deployment and only then upgrade to Airflow 2.0. The Airflow 1.10.x release tree will be supported for six months from Airflow 2.0 release date. -Features in 1.10.14 include: +Features in 1.10.15 include: -1. Most breaking DAG and architecture changes of Airflow 2.0 have been backported to Airflow 1.10.14. This backward-compatibility does not mean -that 1.10.14 will process these DAGs the same way as Airflow 2.0. Instead, this means that most Airflow 2.0 -compatible DAGs will work in Airflow 1.10.14. This backport will give users time to modify their DAGs over time +1. Most breaking DAG and architecture changes of Airflow 2.0 have been backported to Airflow 1.10.15. This backward-compatibility does not mean +that 1.10.15 will process these DAGs the same way as Airflow 2.0. Instead, this means that most Airflow 2.0 +compatible DAGs will work in Airflow 1.10.15. This backport will give users time to modify their DAGs over time without any service disruption. -2. We have also backported the updated Airflow 2.0 CLI commands to Airflow 1.10.14, so that users can modify their scripts +2. We have also backported the updated Airflow 2.0 CLI commands to Airflow 1.10.15, so that users can modify their scripts to be compatible with Airflow 2.0 before the upgrade. 3. For users of the KubernetesExecutor, we have backported the ``pod_template_file`` capability for the KubernetesExecutor @@ -73,7 +73,7 @@ section of your ``airflow.cfg`` Step 3: Install and run the Upgrade check scripts ''''''''''''''''''''''''''''''''''''''''''''''''' -After upgrading to Airflow 1.10.14, we recommend that you install the "upgrade check" scripts. These scripts will read through your ``airflow.cfg`` and all of your DAGs and will give a detailed report of all changes required before upgrading. We are testing this script diligently, and our goal is that any Airflow setup that can pass these tests will be able to upgrade to 2.0 without any issues. +After upgrading to Airflow 1.10.15, we recommend that you install the "upgrade check" scripts. These scripts will read through your ``airflow.cfg`` and all of your DAGs and will give a detailed report of all changes required before upgrading. We are testing this script diligently, and our goal is that any Airflow setup that can pass these tests will be able to upgrade to 2.0 without any issues. .. 
code-block:: bash @@ -91,7 +91,7 @@ More details about this process are here :ref:`Upgrade Check Scripts") - print() - print("This usually takes from 20 seconds to 2 minutes.") - print() - print("You can also use other extra flags to iterate faster:") - print(" --docs-only - Only build documentation") - print(" --spellcheck-only - Only perform spellchecking") - print() - print("For more info:") - print(" ./breeze build-docs --help") - print(TEXT_RESET) + console.print("You can quickly build documentation locally with just one command.") + console.print(" [blue]./breeze build-docs[/]") + console.print() + console.print("[yellow]Still too slow?[/]") + console.print() + console.print("You can only build one documentation package:") + console.print(" [blue]./breeze build-docs -- --package-filter [/]") + console.print() + console.print("This usually takes from [yellow]20 seconds[/] to [yellow]2 minutes[/].") + console.print() + console.print("You can also use other extra flags to iterate faster:") + console.print(" [blue]--docs-only - Only build documentation[/]") + console.print(" [blue]--spellcheck-only - Only perform spellchecking[/]") + console.print() + console.print("For more info:") + console.print(" [blue]./breeze build-docs --help[/]") + console.print() def _get_parser(): @@ -115,6 +118,20 @@ def _get_parser(): action='store_true', help='Builds documentation for official release i.e. all links point to stable version', ) + parser.add_argument( + "-j", + "--jobs", + dest='jobs', + type=int, + default=0, + help=( + """\ + Number of parallel processes that will be spawned to build the docs. + + If passed 0, the value will be determined based on the number of CPUs. + """ + ), + ) parser.add_argument( "-v", "--verbose", @@ -129,31 +146,244 @@ def _get_parser(): return parser +class BuildSpecification(NamedTuple): + """Specification of single build.""" + + package_name: str + for_production: bool + verbose: bool + + +class BuildDocsResult(NamedTuple): + """Result of building documentation.""" + + package_name: str + log_file_name: str + errors: List[DocBuildError] + + +class SpellCheckResult(NamedTuple): + """Result of spellcheck.""" + + package_name: str + log_file_name: str + errors: List[SpellingError] + + +def perform_docs_build_for_single_package(build_specification: BuildSpecification) -> BuildDocsResult: + """Performs single package docs build.""" + builder = AirflowDocsBuilder( + package_name=build_specification.package_name, for_production=build_specification.for_production + ) + console.print(f"[blue]{build_specification.package_name:60}:[/] Building documentation") + result = BuildDocsResult( + package_name=build_specification.package_name, + errors=builder.build_sphinx_docs( + verbose=build_specification.verbose, + ), + log_file_name=builder.log_build_filename, + ) + return result + + +def perform_spell_check_for_single_package(build_specification: BuildSpecification) -> SpellCheckResult: + """Performs single package spell check.""" + builder = AirflowDocsBuilder( + package_name=build_specification.package_name, for_production=build_specification.for_production + ) + console.print(f"[blue]{build_specification.package_name:60}:[/] Checking spelling started") + result = SpellCheckResult( + package_name=build_specification.package_name, + errors=builder.check_spelling( + verbose=build_specification.verbose, + ), + log_file_name=builder.log_spelling_filename, + ) + console.print(f"[blue]{build_specification.package_name:60}:[/] Checking spelling completed") + return result + + def 
build_docs_for_packages(
-    current_packages: List[str], docs_only: bool, spellcheck_only: bool, for_production: bool, verbose: bool
+    current_packages: List[str],
+    docs_only: bool,
+    spellcheck_only: bool,
+    for_production: bool,
+    jobs: int,
+    verbose: bool,
 ) -> Tuple[Dict[str, List[DocBuildError]], Dict[str, List[SpellingError]]]:
-    """Builds documentation for single package and returns errors"""
+    """Builds documentation for all packages and combines errors."""
     all_build_errors: Dict[str, List[DocBuildError]] = defaultdict(list)
     all_spelling_errors: Dict[str, List[SpellingError]] = defaultdict(list)
-    for package_no, package_name in enumerate(current_packages, start=1):
-        print("#" * 20, f"[{package_no}/{len(current_packages)}] {package_name}", "#" * 20)
-        builder = AirflowDocsBuilder(package_name=package_name, for_production=for_production)
-        builder.clean_files()
-        if not docs_only:
-            with with_group(f"Check spelling: {package_name}"):
-                spelling_errors = builder.check_spelling(verbose=verbose)
-            if spelling_errors:
-                all_spelling_errors[package_name].extend(spelling_errors)
-
-        if not spellcheck_only:
-            with with_group(f"Building docs: {package_name}"):
-                docs_errors = builder.build_sphinx_docs(verbose=verbose)
-            if docs_errors:
-                all_build_errors[package_name].extend(docs_errors)
-
+    with with_group("Cleaning documentation files"):
+        for package_name in current_packages:
+            console.print(f"[blue]{package_name:60}:[/] Cleaning files")
+            builder = AirflowDocsBuilder(package_name=package_name, for_production=for_production)
+            builder.clean_files()
+    if jobs > 1:
+        run_in_parallel(
+            all_build_errors,
+            all_spelling_errors,
+            current_packages,
+            docs_only,
+            for_production,
+            jobs,
+            spellcheck_only,
+            verbose,
+        )
+    else:
+        run_sequentially(
+            all_build_errors,
+            all_spelling_errors,
+            current_packages,
+            docs_only,
+            for_production,
+            spellcheck_only,
+            verbose,
+        )
     return all_build_errors, all_spelling_errors
+
+
+def run_sequentially(
+    all_build_errors,
+    all_spelling_errors,
+    current_packages,
+    docs_only,
+    for_production,
+    spellcheck_only,
+    verbose,
+):
+    """Run both - spellcheck and docs build sequentially without multiprocessing"""
+    if not spellcheck_only:
+        for package_name in current_packages:
+            build_result = perform_docs_build_for_single_package(
+                build_specification=BuildSpecification(
+                    package_name=package_name,
+                    for_production=for_production,
+                    verbose=verbose,
+                )
+            )
+            if build_result.errors:
+                all_build_errors[package_name].extend(build_result.errors)
+            print_build_output(build_result)
+    if not docs_only:
+        for package_name in current_packages:
+            spellcheck_result = perform_spell_check_for_single_package(
+                build_specification=BuildSpecification(
+                    package_name=package_name,
+                    for_production=for_production,
+                    verbose=verbose,
+                )
+            )
+            if spellcheck_result.errors:
+                all_spelling_errors[package_name].extend(spellcheck_result.errors)
+            print_spelling_output(spellcheck_result)
+
+
+def run_in_parallel(
+    all_build_errors,
+    all_spelling_errors,
+    current_packages,
+    docs_only,
+    for_production,
+    jobs,
+    spellcheck_only,
+    verbose,
+):
+    """Run both - spellcheck and docs build in parallel using multiprocessing"""
+    pool = multiprocessing.Pool(processes=jobs)
+    if not spellcheck_only:
+        run_docs_build_in_parallel(
+            all_build_errors=all_build_errors,
+            for_production=for_production,
+            current_packages=current_packages,
+            verbose=verbose,
+            pool=pool,
+        )
+    if not docs_only:
+        run_spell_check_in_parallel(
+            all_spelling_errors=all_spelling_errors,
+
for_production=for_production, + current_packages=current_packages, + verbose=verbose, + pool=pool, + ) + + +def print_build_output(result: BuildDocsResult): + """Prints output of docs build job.""" + with with_group(f"{TEXT_RED}Output for documentation build {result.package_name}{TEXT_RESET}"): + console.print() + console.print(f"[blue]{result.package_name:60}: " + "#" * 80) + with open(result.log_file_name) as output: + for line in output.read().splitlines(): + console.print(f"{result.package_name:60} {line}") + console.print(f"[blue]{result.package_name:60}: " + "#" * 80) + + +def run_docs_build_in_parallel( + all_build_errors: Dict[str, List[DocBuildError]], + for_production: bool, + current_packages: List[str], + verbose: bool, + pool, +): + """Runs documentation building in parallel.""" + doc_build_specifications: List[BuildSpecification] = [] + with with_group("Scheduling documentation to build"): + for package_name in current_packages: + console.print(f"[blue]{package_name:60}:[/] Scheduling documentation to build") + doc_build_specifications.append( + BuildSpecification( + package_name=package_name, + for_production=for_production, + verbose=verbose, + ) + ) + with with_group("Running docs building"): + console.print() + result_list = pool.map(perform_docs_build_for_single_package, doc_build_specifications) + for result in result_list: + if result.errors: + all_build_errors[result.package_name].extend(result.errors) + print_build_output(result) + + +def print_spelling_output(result: SpellCheckResult): + """Prints output of spell check job.""" + with with_group(f"{TEXT_RED}Output for spelling check: {result.package_name}{TEXT_RESET}"): + console.print() + console.print(f"[blue]{result.package_name:60}: " + "#" * 80) + with open(result.log_file_name) as output: + for line in output.read().splitlines(): + console.print(f"{result.package_name:60} {line}") + console.print(f"[blue]{result.package_name:60}: " + "#" * 80) + console.print() + + +def run_spell_check_in_parallel( + all_spelling_errors: Dict[str, List[SpellingError]], + for_production: bool, + current_packages: List[str], + verbose: bool, + pool, +): + """Runs spell check in parallel.""" + spell_check_specifications: List[BuildSpecification] = [] + with with_group("Scheduling spell checking of documentation"): + for package_name in current_packages: + console.print(f"[blue]{package_name:60}:[/] Scheduling spellchecking") + spell_check_specifications.append( + BuildSpecification(package_name=package_name, for_production=for_production, verbose=verbose) + ) + with with_group("Running spell checking of documentation"): + console.print() + result_list = pool.map(perform_spell_check_for_single_package, spell_check_specifications) + for result in result_list: + if result.errors: + all_spelling_errors[result.package_name].extend(result.errors) + print_spelling_output(result) + + def display_packages_summary( build_errors: Dict[str, List[DocBuildError]], spelling_errors: Dict[str, List[SpellingError]] ): @@ -161,15 +391,15 @@ def display_packages_summary( packages_names = {*build_errors.keys(), *spelling_errors.keys()} tabular_data = [ { - "Package name": package_name, + "Package name": f"[blue]{package_name}[/]", "Count of doc build errors": len(build_errors.get(package_name, [])), "Count of spelling errors": len(spelling_errors.get(package_name, [])), } for package_name in sorted(packages_names, key=lambda k: k or '') ] - print("#" * 20, "Packages errors summary", "#" * 20) - print(tabulate(tabular_data=tabular_data, 
headers="keys")) - print("#" * 50) + console.print("#" * 20, " Packages errors summary ", "#" * 20) + console.print(tabulate(tabular_data=tabular_data, headers="keys")) + console.print("#" * 50) def print_build_errors_and_exit( @@ -180,15 +410,17 @@ def print_build_errors_and_exit( if build_errors or spelling_errors: if build_errors: display_errors_summary(build_errors) - print() + console.print() if spelling_errors: display_spelling_error_summary(spelling_errors) - print() - print("The documentation has errors.") + console.print() + console.print("The documentation has errors.") display_packages_summary(build_errors, spelling_errors) - print() - print(CHANNEL_INVITATION) + console.print() + console.print(CHANNEL_INVITATION) sys.exit(1) + else: + console.print("[green]Documentation build is successful[/]") def main(): @@ -201,22 +433,26 @@ def main(): package_filters = args.package_filter for_production = args.for_production - if not package_filters: - _promote_new_flags() - with with_group("Available packages"): - for pkg in available_packages: - print(f" - {pkg}") + for pkg in sorted(available_packages): + console.print(f" - {pkg}") if package_filters: - print("Current package filters: ", package_filters) + console.print("Current package filters: ", package_filters) current_packages = process_package_filters(available_packages, package_filters) - with with_group(f"Documentation will be built for {len(current_packages)} package(s)"): - for pkg_no, pkg in enumerate(current_packages, start=1): - print(f"{pkg_no}. {pkg}") with with_group("Fetching inventories"): - fetch_inventories() + # Inventories that could not be retrieved should be retrieved first. This may mean this is a + # new package. + priority_packages = fetch_inventories() + current_packages = sorted(current_packages, key=lambda d: -1 if d in priority_packages else 1) + + jobs = args.jobs if args.jobs != 0 else os.cpu_count() + with with_group( + f"Documentation will be built for {len(current_packages)} package(s) with {jobs} parallel jobs" + ): + for pkg_no, pkg in enumerate(current_packages, start=1): + console.print(f"{pkg_no}. 
{pkg}") all_build_errors: Dict[Optional[str], List[DocBuildError]] = {} all_spelling_errors: Dict[Optional[str], List[SpellingError]] = {} @@ -225,6 +461,7 @@ def main(): docs_only=docs_only, spellcheck_only=spellcheck_only, for_production=for_production, + jobs=jobs, verbose=args.verbose, ) if package_build_errors: @@ -248,6 +485,7 @@ def main(): docs_only=docs_only, spellcheck_only=spellcheck_only, for_production=for_production, + jobs=jobs, verbose=args.verbose, ) if package_build_errors: @@ -265,10 +503,14 @@ def main(): if not package_filters: _promote_new_flags() + if os.path.exists(PROVIDER_INIT_FILE): + os.remove(PROVIDER_INIT_FILE) + print_build_errors_and_exit( all_build_errors, all_spelling_errors, ) -main() +if __name__ == "__main__": + main() diff --git a/docs/conf.py b/docs/conf.py index a60bbe3987fe3..11708f986bc05 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -39,6 +39,11 @@ import yaml +try: + from yaml import CSafeLoader as SafeLoader +except ImportError: + from yaml import SafeLoader # type: ignore[misc] + import airflow from airflow.configuration import AirflowConfigParser, default_config_yaml from docs.exts.docs_build.third_party_inventories import ( # pylint: disable=no-name-in-module,wrong-import-order @@ -71,10 +76,10 @@ except StopIteration: raise Exception(f"Could not find provider.yaml file for package: {PACKAGE_NAME}") PACKAGE_DIR = CURRENT_PROVIDER['package-dir'] - PACKAGE_VERSION = 'master' + PACKAGE_VERSION = 'devel' else: PACKAGE_DIR = None - PACKAGE_VERSION = 'master' + PACKAGE_VERSION = 'devel' # Adds to environment variables for easy access from other plugins like airflow_intersphinx. os.environ['AIRFLOW_PACKAGE_NAME'] = PACKAGE_NAME if PACKAGE_DIR: @@ -140,6 +145,9 @@ 'providers_packages_ref', ] ) +elif PACKAGE_NAME in ("helm-chart", "docker-stack"): + # No extra extensions + pass else: extensions.append('autoapi.extension') # List of patterns, relative to source directory, that match files and @@ -297,8 +305,8 @@ def _get_rst_filepath_from_path(filepath: str): 'conf_py_path': f'/docs/{PACKAGE_NAME}/', 'github_user': 'apache', 'github_repo': 'airflow', - 'github_version': 'master', - 'display_github': 'master', + 'github_version': 'devel', + 'display_github': 'devel', 'suffix': '.rst', } @@ -334,7 +342,7 @@ def _load_config(): return {} with open(file_path) as config_file: - return yaml.safe_load(config_file) + return yaml.load(config_file, SafeLoader) config = _load_config() if config: @@ -391,7 +399,7 @@ def _load_config(): 'qds_sdk', 'redis', 'simple_salesforce', - 'slackclient', + 'slack_sdk', 'smbclient', 'snowflake', 'sshtunnel', @@ -498,7 +506,7 @@ def _load_config(): # Relative path to output the AutoAPI files into. This can also be used to place the generated documentation # anywhere in your documentation hierarchy. -autoapi_root = f'{PACKAGE_NAME}/_api' +autoapi_root = '_api' # Whether to insert the generated documentation into the TOC tree. 
If this is False, the default AutoAPI # index page is not generated and you will need to include the generated documentation in a @@ -533,4 +541,4 @@ def _load_config(): ] # Options for script updater - redoc_script_url = "https://cdn.jsdelivr.net/npm/redoc@2.0.0-rc.30/bundles/redoc.standalone.js" + redoc_script_url = "https://cdn.jsdelivr.net/npm/redoc@2.0.0-rc.48/bundles/redoc.standalone.js" diff --git a/docs/docker-stack/build-arg-ref.rst b/docs/docker-stack/build-arg-ref.rst new file mode 100644 index 0000000000000..f459cb7837b95 --- /dev/null +++ b/docs/docker-stack/build-arg-ref.rst @@ -0,0 +1,241 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +Image build arguments reference +------------------------------- + +The following build arguments (``--build-arg`` in docker build command) can be used for production images. +Those arguments are used when you want to customize the image. You can see some examples of it in +:ref:`Building from PyPI packages`. + +Basic arguments +............... + +Those are the most common arguments that you use when you want to build a custom image. + ++------------------------------------------+------------------------------------------+------------------------------------------+ +| Build argument | Default value | Description | ++==========================================+==========================================+==========================================+ +| ``PYTHON_BASE_IMAGE`` | ``python:3.6-slim-buster`` | Base python image. | ++------------------------------------------+------------------------------------------+------------------------------------------+ +| ``AIRFLOW_VERSION`` | ``2.0.1`` | version of Airflow. | ++------------------------------------------+------------------------------------------+------------------------------------------+ +| ``AIRFLOW_EXTRAS`` | (see Dockerfile) | Default extras with which airflow is | +| | | installed. | ++------------------------------------------+------------------------------------------+------------------------------------------+ +| ``ADDITIONAL_AIRFLOW_EXTRAS`` | | Optional additional extras with which | +| | | airflow is installed. | ++------------------------------------------+------------------------------------------+------------------------------------------+ +| ``AIRFLOW_HOME`` | ``/opt/airflow`` | Airflow’s HOME (that’s where logs and | +| | | SQLite databases are stored). | ++------------------------------------------+------------------------------------------+------------------------------------------+ +| ``AIRFLOW_USER_HOME_DIR`` | ``/home/airflow`` | Home directory of the Airflow user. 
| ++------------------------------------------+------------------------------------------+------------------------------------------+ +| ``AIRFLOW_PIP_VERSION`` | ``20.2.4`` | PIP version used. | ++------------------------------------------+------------------------------------------+------------------------------------------+ +| ``PIP_PROGRESS_BAR`` | ``on`` | Progress bar for PIP installation | ++------------------------------------------+------------------------------------------+------------------------------------------+ +| ``AIRFLOW_UID`` | ``50000`` | Airflow user UID. | ++------------------------------------------+------------------------------------------+------------------------------------------+ +| ``AIRFLOW_GID`` | ``50000`` | Airflow group GID. Note that writable | +| | | files/dirs, created on behalf of airflow | +| | | user are set to the ``root`` group (0) | +| | | to allow arbitrary UID to run the image. | ++------------------------------------------+------------------------------------------+------------------------------------------+ +| ``AIRFLOW_CONSTRAINTS_REFERENCE`` | | Reference (branch or tag) from GitHub | +| | | where constraints file is taken from | +| | | It can be ``constraints-master`` but | +| | | can be ``constraints-1-10`` for 1.10.* | +| | | versions of ``constraints-2-0`` for | +| | | 2.0.* installation. In case of building | +| | | specific version you want to point it | +| | | to specific tag, for example | +| | | ``constraints-2.0.1``. | +| | | Auto-detected if empty. | ++------------------------------------------+------------------------------------------+------------------------------------------+ + +Image optimization options +.......................... + +The main advantage of Customization method of building Airflow image, is that it allows to build highly optimized image because +the final image (RUNTIME) might not contain all the dependencies that are needed to build and install all other dependencies +(DEV). Those arguments allow to control what is installed in the DEV image and what is installed in RUNTIME one, thus +allowing to produce much more optimized images. See :ref:`Building optimized images`. +for examples of using those arguments. + ++------------------------------------------+------------------------------------------+------------------------------------------+ +| Build argument | Default value | Description | ++==========================================+==========================================+==========================================+ +| ``CONTINUE_ON_PIP_CHECK_FAILURE`` | ``false`` | By default the image build fails if pip | +| | | check fails for it. This is good for | +| | | interactive building but on CI the | +| | | image should be built regardless - we | +| | | have a separate step to verify image. | ++------------------------------------------+------------------------------------------+------------------------------------------+ +| ``UPGRADE_TO_NEWER_DEPENDENCIES`` | ``false`` | If set to true, the dependencies are | +| | | upgraded to newer versions matching | +| | | setup.py before installation. | ++------------------------------------------+------------------------------------------+------------------------------------------+ +| ``ADDITIONAL_PYTHON_DEPS`` | | Optional python packages to extend | +| | | the image with some extra dependencies. 
| ++------------------------------------------+------------------------------------------+------------------------------------------+ +| ``DEV_APT_COMMAND`` | (see Dockerfile) | Dev apt command executed before dev deps | +| | | are installed in the Build image. | ++------------------------------------------+------------------------------------------+------------------------------------------+ +| ``ADDITIONAL_DEV_APT_COMMAND`` | | Additional Dev apt command executed | +| | | before dev dep are installed | +| | | in the Build image. Should start with | +| | | ``&&``. | ++------------------------------------------+------------------------------------------+------------------------------------------+ +| ``DEV_APT_DEPS`` | (see Dockerfile) | Dev APT dependencies installed | +| | | in the Build image. | ++------------------------------------------+------------------------------------------+------------------------------------------+ +| ``ADDITIONAL_DEV_APT_DEPS`` | | Additional apt dev dependencies | +| | | installed in the Build image. | ++------------------------------------------+------------------------------------------+------------------------------------------+ +| ``ADDITIONAL_DEV_APT_ENV`` | | Additional env variables defined | +| | | when installing dev deps. | ++------------------------------------------+------------------------------------------+------------------------------------------+ +| ``RUNTIME_APT_COMMAND`` | (see Dockerfile) | Runtime apt command executed before deps | +| | | are installed in the Main image. | ++------------------------------------------+------------------------------------------+------------------------------------------+ +| ``ADDITIONAL_RUNTIME_APT_COMMAND`` | | Additional Runtime apt command executed | +| | | before runtime dep are installed | +| | | in the Main image. Should start with | +| | | ``&&``. | ++------------------------------------------+------------------------------------------+------------------------------------------+ +| ``RUNTIME_APT_DEPS`` | (see Dockerfile) | Runtime APT dependencies installed | +| | | in the Main image. | ++------------------------------------------+------------------------------------------+------------------------------------------+ +| ``ADDITIONAL_RUNTIME_APT_DEPS`` | | Additional apt runtime dependencies | +| | | installed in the Main image. | ++------------------------------------------+------------------------------------------+------------------------------------------+ +| ``ADDITIONAL_RUNTIME_APT_ENV`` | | Additional env variables defined | +| | | when installing runtime deps. | ++------------------------------------------+------------------------------------------+------------------------------------------+ +| ``CASS_DRIVER_BUILD_CONCURRENCY`` | ``8`` | Number of processors to use for | +| | | cassandra PIP install (speeds up | +| | | installing in case cassandra extra is | +| | | used). | ++------------------------------------------+------------------------------------------+------------------------------------------+ +| ``INSTALL_MYSQL_CLIENT`` | ``true`` | Whether MySQL client should be installed | +| | | The mysql extra is removed from extras | +| | | if the client is not installed. | ++------------------------------------------+------------------------------------------+------------------------------------------+ + +Installing Airflow using different methods +.......................................... 
+ +Installing Airflow using different methods +.......................................... + +Those parameters are useful only if you want to install Airflow using different installation methods than the default +(installing from PyPI packages). + +This is usually only useful if you have your own fork of Airflow and want to build the images locally from +those sources - either locally or directly from GitHub sources. This way you do not need to release your +Airflow and Providers via PyPI - they can be installed directly from sources or from the GitHub repository. +Another option of installation is to build Airflow from previously prepared binary Python packages, which might +be useful if you need to build Airflow in environments that require high levels of security. + +You can see some examples of those in: + * :ref:`Building from GitHub`, + * :ref:`Using custom installation sources`, + * :ref:`Build images in security restricted environments` + ++------------------------------------------+------------------------------------------+------------------------------------------+ +| Build argument | Default value | Description | ++==========================================+==========================================+==========================================+ +| ``AIRFLOW_INSTALLATION_METHOD`` | ``apache-airflow`` | Installation method of Apache Airflow. | +| | | ``apache-airflow`` for installation from | +| | | PyPI. It can be a GitHub repository URL, | +| | | including branch or tag, to install from | +| | | that repository, or "." to install from | +| | | local sources. Installing from sources | +| | | requires appropriate values of the | +| | | ``AIRFLOW_SOURCES_FROM`` and | +| | | ``AIRFLOW_SOURCES_TO`` variables (see | +| | | below) | +------------------------------------------+------------------------------------------+------------------------------------------+ +| ``AIRFLOW_SOURCES_FROM`` | ``empty`` | Sources of Airflow. Set it to "." when | +| | | you install Airflow from local sources. | +------------------------------------------+------------------------------------------+------------------------------------------+ +| ``AIRFLOW_SOURCES_TO`` | ``/empty`` | Target for Airflow sources. Set to | +| | | "/opt/airflow" when you install Airflow | +| | | from local sources. | +------------------------------------------+------------------------------------------+------------------------------------------+ +| ``AIRFLOW_VERSION_SPECIFICATION`` | | Optional - might be used to limit the | +| | | Airflow version installed - for | +| | | example ``<2.0.2`` for automated builds. | +------------------------------------------+------------------------------------------+------------------------------------------+ +| ``INSTALL_PROVIDERS_FROM_SOURCES`` | ``false`` | If set to ``true`` and the image is built | +| | | from sources, all provider packages are | +| | | installed from sources rather than from | +| | | packages. It has no effect when | +| | | installing from PyPI or a GitHub repo. | +------------------------------------------+------------------------------------------+------------------------------------------+ +| ``AIRFLOW_CONSTRAINTS_LOCATION`` | | If not empty, it will override the | +| | | source of the constraints with the | +| | | specified URL or file. Note that the | +| | | file has to be in the docker context, so | +| | | it's best to place such a file in | +| | | one of the folders included in | +| | | the ``.dockerignore`` file.
| ++------------------------------------------+------------------------------------------+------------------------------------------+ +| ``INSTALL_FROM_DOCKER_CONTEXT_FILES`` | ``false`` | If set to true, Airflow, providers and | +| | | all dependencies are installed from | +| | | from locally built/downloaded | +| | | .whl and .tar.gz files placed in the | +| | | ``docker-context-files``. In certain | +| | | corporate environments, this is required | +| | | to install airflow from such pre-vetted | +| | | packages rather than from PyPI. For this | +| | | to work, also set ``INSTALL_FROM_PYPI``. | +| | | Note that packages starting with | +| | | ``apache?airflow`` glob are treated | +| | | differently than other packages. All | +| | | ``apache?airflow`` packages are | +| | | installed with dependencies limited by | +| | | airflow constraints. All other packages | +| | | are installed without dependencies | +| | | 'as-is'. If you wish to install airflow | +| | | via 'pip download' with all dependencies | +| | | downloaded, you have to rename the | +| | | apache airflow and provider packages to | +| | | not start with ``apache?airflow`` glob. | ++------------------------------------------+------------------------------------------+------------------------------------------+ + +Pre-caching PIP dependencies +............................ + +When image is build from PIP, by default pre-caching of PIP dependencies is used. This is in order to speed-up incremental +builds during development. When pre-cached PIP dependencies are used and ``setup.py`` or ``setup.cfg`` changes, the +PIP dependencies are already pre-installed, thus resulting in much faster image rebuild. This is purely an optimization +of time needed to build the images and should be disabled if you want to install Airflow from +docker context files. + ++------------------------------------------+------------------------------------------+------------------------------------------+ +| Build argument | Default value | Description | ++==========================================+==========================================+==========================================+ +| ``AIRFLOW_BRANCH`` | ``master`` | the branch from which PIP dependencies | +| | | are pre-installed initially. | ++------------------------------------------+------------------------------------------+------------------------------------------+ +| ``AIRFLOW_REPO`` | ``apache/airflow`` | the repository from which PIP | +| | | dependencies are pre-installed. | ++------------------------------------------+------------------------------------------+------------------------------------------+ +| ``AIRFLOW_PRE_CACHED_PIP_PACKAGES`` | ``false`` | Allows to pre-cache airflow PIP packages | +| | | from the GitHub of Apache Airflow | +| | | This allows to optimize iterations for | +| | | Image builds and speeds up CI builds. | ++------------------------------------------+------------------------------------------+------------------------------------------+ diff --git a/docs/docker-stack/build.rst b/docs/docker-stack/build.rst new file mode 100644 index 0000000000000..5fa0a594f813a --- /dev/null +++ b/docs/docker-stack/build.rst @@ -0,0 +1,540 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. 
diff --git a/docs/docker-stack/build.rst b/docs/docker-stack/build.rst
new file mode 100644
index 0000000000000..5fa0a594f813a
--- /dev/null
+++ b/docs/docker-stack/build.rst
@@ -0,0 +1,540 @@
+ .. Licensed to the Apache Software Foundation (ASF) under one
+    or more contributor license agreements. See the NOTICE file
+    distributed with this work for additional information
+    regarding copyright ownership. The ASF licenses this file
+    to you under the Apache License, Version 2.0 (the
+    "License"); you may not use this file except in compliance
+    with the License. You may obtain a copy of the License at
+
+ .. http://www.apache.org/licenses/LICENSE-2.0
+
+ .. Unless required by applicable law or agreed to in writing,
+    software distributed under the License is distributed on an
+    "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+    KIND, either express or implied. See the License for the
+    specific language governing permissions and limitations
+    under the License.
+
+Building the image
+==================
+
+Before you dive deeply into how the Airflow image is built and named, and why we are doing it the
+way we do, you might want to know very quickly how you can extend or customize the existing image
+for Apache Airflow. This chapter gives you a short answer to those questions.
+
+Extending vs. customizing the image
+-----------------------------------
+
+Here is a comparison of the two types of building images, to guide you in choosing
+how you want to build your image.
+
++----------------------------------------------------+-----------+-------------+
+|                                                    | Extending | Customizing |
++====================================================+===========+=============+
+| Can be built without airflow sources               | Yes       | No          |
++----------------------------------------------------+-----------+-------------+
+| Uses familiar 'FROM' pattern of image building     | Yes       | No          |
++----------------------------------------------------+-----------+-------------+
+| Requires only basic knowledge about images         | Yes       | No          |
++----------------------------------------------------+-----------+-------------+
+| Builds quickly                                     | Yes       | No          |
++----------------------------------------------------+-----------+-------------+
+| Produces image heavily optimized for size          | No        | Yes         |
++----------------------------------------------------+-----------+-------------+
+| Can build from custom airflow sources (forks)      | No        | Yes         |
++----------------------------------------------------+-----------+-------------+
+| Can build on air-gapped system                     | No        | Yes         |
++----------------------------------------------------+-----------+-------------+
+
+TL;DR: If you need to build a custom image, it is easier to start with "Extending". However, if your
+dependencies require a compilation step or you need to build the image from security-vetted
+packages, switching to "Customizing" the image produces much more optimized images. In the example further
+below, where we compare equivalent "Extending" and "Customizing" builds, similar images built by
+extending vs. customizing came out at 1.1GB vs 874MB respectively - a roughly 20% reduction in
+size for the customized image.
+
+.. note::
+
+   You can also combine both - customizing & extending the image in one. You can build your
+   optimized base image first using the ``customization`` method (for example by your admin team), with all
+   the heavy compilation-required dependencies, publish it in your registry, and let others
+   ``extend`` your image using ``FROM`` and add their own lightweight dependencies. This reflects well
+   the split where typically "casual" users will extend the image and "power users" will customize it
+   (see the sketch at the end of this section).
+
+Airflow Summit 2020's `Production Docker Image `_ talk provides more
+details about the context, architecture and customization/extension methods for the Production Image.
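+
+The following is a minimal sketch of that combined flow, assuming a hypothetical private registry
+name and example dependencies - the admin team customizes and publishes a base image from the Airflow
+sources, and other users extend it with a plain ``FROM``:
+
+.. code-block:: bash
+
+    # Customize: build a size-optimized base image (run inside the Airflow sources)
+    # and publish it. Registry, image name and dependencies are examples only.
+    docker build . \
+        --build-arg AIRFLOW_VERSION="2.0.1" \
+        --build-arg ADDITIONAL_PYTHON_DEPS="mpi4py" \
+        --build-arg ADDITIONAL_DEV_APT_DEPS="libopenmpi-dev" \
+        --build-arg ADDITIONAL_RUNTIME_APT_DEPS="openmpi-common" \
+        --tag "registry.example.com/airflow-base:2.0.1"
+    docker push "registry.example.com/airflow-base:2.0.1"
+
+    # Extend: other users add lightweight dependencies on top in their own Dockerfile:
+    #   FROM registry.example.com/airflow-base:2.0.1
+    #   RUN pip install --no-cache-dir lxml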
+
+Extending the image
+-------------------
+
+Extending the image is easiest if you just need to add some dependencies that do not require
+compiling. The compilation framework of Linux (the so-called ``build-essential``) is pretty big, and
+for the production images, size is a really important factor to optimize for, so our Production Image
+does not contain ``build-essential``. If you need a compiler like gcc or g++ or make/cmake etc. - those
+are not found in the image and it is recommended that you follow the "customize" route instead.
+
+How to extend the image - it is something you are most likely familiar with - simply
+build a new image using the Dockerfile's ``FROM`` directive and add whatever you need. Then you can add your
+Debian dependencies with ``apt`` or PyPI dependencies with ``pip install`` or anything else you need.
+
+You should be aware of a few things:
+
+* The production image of airflow uses the "airflow" user, so if you want to add some of the tools
+  as the ``root`` user, you need to switch to it with the ``USER`` directive of the Dockerfile and switch back to
+  the ``airflow`` user when you are done. Also you should remember to follow the
+  `best practices of Dockerfiles `_
+  to make sure your image is lean and small.
+
+* The PyPI dependencies in Apache Airflow are installed in the user library of the "airflow" user, so
+  PIP packages are installed to the ``~/.local`` folder, as if the ``--user`` flag was specified when running PIP.
+  Note also that using ``--no-cache-dir`` is a good idea that can help to make your image smaller.
+
+.. note::
+   Only as of the ``2.0.1`` image is the ``--user`` flag turned on by default, by setting the ``PIP_USER`` environment
+   variable to ``true``. This can be disabled by un-setting the variable or by setting it to ``false``. In the
+   2.0.0 image you had to add the ``--user`` flag yourself, as in ``pip install --user``.
+
+* If your apt or PyPI dependencies require some of the ``build-essential`` or other packages that need
+  to compile your python dependencies, then your best choice is to follow the "Customize the image" route,
+  because you can build a highly-optimized (for size) image this way. However, it requires checking out the
+  sources of Apache Airflow, so you might still want to choose to add ``build-essential`` to your image,
+  even if your image will be significantly bigger.
+
+* You can also embed your dags in the image by simply adding them with the COPY directive of the Dockerfile.
+  The DAGs in the production image are in the ``/opt/airflow/dags`` folder.
+
+* You can build your image without any need for Airflow sources. It is enough that you place the
+  ``Dockerfile`` and any files that are referred to (such as DAG files) in a separate directory and run
+  the command ``docker build . --tag my-image:my-tag`` (where ``my-image`` is the name you want to give it
+  and ``my-tag`` is the tag you want to tag the image with).
+
+* If your way of extending the image requires creating writable directories, you MUST remember to add an
+  ``umask 0002`` step in your RUN command. This is necessary in order to accommodate our approach for
+  running the image with an arbitrary user. Such a user will always run with ``GID=0`` -
+  the entrypoint will prevent non-root GIDs. You can read more about it in the
+  :ref:`arbitrary docker user ` documentation for the entrypoint. The
+  ``umask 0002`` is set as default when you enter the image, so any directories you create by default
+  at runtime will have ``GID=0`` and will be group-writable.
+
+.. note::
+   Only as of ``2.0.2`` is the default group of the ``airflow`` user ``root``. Previously it was ``airflow``,
+   so if you are building your images based on an earlier image, you need to manually change the default
+   group for the airflow user:
+
+.. code-block:: docker
+
+    RUN usermod -g 0 airflow
+
+Examples of image extending
+---------------------------
+
+An ``apt`` package example
+..........................
+
+The following example adds ``vim`` to the airflow image.
+
+.. exampleinclude:: docker-examples/extending/add-apt-packages/Dockerfile
+    :language: Dockerfile
+    :start-after: [START Dockerfile]
+    :end-before: [END Dockerfile]
+
+A ``PyPI`` package example
+..........................
+
+The following example adds the ``lxml`` python package from PyPI to the image.
+
+.. exampleinclude:: docker-examples/extending/add-pypi-packages/Dockerfile
+    :language: Dockerfile
+    :start-after: [START Dockerfile]
+    :end-before: [END Dockerfile]
+
+A ``umask`` requiring example
+.............................
+
+The following example adds a new directory that is supposed to be writable for any arbitrary user
+running the container.
+
+.. exampleinclude:: docker-examples/extending/writable-directory/Dockerfile
+    :language: Dockerfile
+    :start-after: [START Dockerfile]
+    :end-before: [END Dockerfile]
+
+
+A ``build-essential`` requiring package example
+...............................................
+
+The following example adds the ``mpi4py`` package, which requires both ``build-essential`` and an MPI compiler.
+
+.. exampleinclude:: docker-examples/extending/add-build-essential-extend/Dockerfile
+    :language: Dockerfile
+    :start-after: [START Dockerfile]
+    :end-before: [END Dockerfile]
+
+The size of this image is ~ 1.1 GB when built. As you will see further below, you can achieve a 20% reduction in
+the size of the image if you use "Customizing" rather than "Extending" the image.
+
+DAG embedding example
+.....................
+
+The following example adds ``test_dag.py`` to your image in the ``/opt/airflow/dags`` folder.
+
+.. exampleinclude:: docker-examples/extending/embedding-dags/Dockerfile
+    :language: Dockerfile
+    :start-after: [START Dockerfile]
+    :end-before: [END Dockerfile]
+
+
+.. exampleinclude:: docker-examples/extending/embedding-dags/test_dag.py
+    :language: Python
+    :start-after: [START dag]
+    :end-before: [END dag]
+
+Customizing the image
+---------------------
+
+Customizing the image is an optimized way of adding your own dependencies to the image - better
+suited to preparing highly optimized (for size) production images, especially when you have dependencies
+that need to be compiled before installing (such as ``mpi4py``).
+
+It also allows more sophisticated usages needed by "power users" - for example using a forked version
+of Airflow, or building the images from security-vetted sources.
+
+The big advantage of this method is that it produces an optimized image even if you need some compile-time
+dependencies that are not needed in the final image.
+
+The disadvantage is that you need to use Airflow sources to build such images: from the
+`official distribution repository of Apache Airflow `_ for the
+released versions, from the checked out sources (using release tags or main branches) in the
+`Airflow GitHub Project `_, or from your own fork
+if you happen to maintain your own fork of Airflow.
+
+Another disadvantage is that the pattern of building Docker images with ``--build-arg`` is less familiar
+to developers of such images. However, it is quite well-known to "power users".
+That's why the customizing flow is better suited for those users who have more familiarity and more
+custom requirements.
+
+The image also usually takes much longer to build than the equivalent "Extended" image because instead of
+extending the layers that are already coming from the base image, it rebuilds the layers needed
+to add the extra dependencies at the early stages of image building.
+
+When customizing the image you can choose a number of options for how you install Airflow:
+
+  * From the PyPI releases (default)
+  * From custom installation sources - adding to or replacing the original apt or PyPI repositories
+  * From local sources. This is used mostly during development.
+  * From a tag, branch, or specific commit from a GitHub Airflow repository (or fork). This is particularly
+    useful when you build an image for a custom version of Airflow that you keep in your fork and you do not
+    want to release the custom Airflow version to PyPI.
+  * From locally stored binary packages for Airflow, Airflow Providers and other dependencies. This is
+    particularly useful if you want to build Airflow in a highly-secure environment where all such packages
+    must be vetted by your security team and stored in your private artifact registry. This also
+    allows building the Airflow image in an air-gapped environment.
+  * Side note: building ``Airflow`` in an ``air-gapped`` environment sounds pretty funny, doesn't it?
+
+You can also add a range of customizations while building the image:
+
+  * the base python image you use for Airflow
+  * the version of Airflow to install
+  * the extras to install for Airflow (or even removing some default extras)
+  * additional apt/python dependencies to use while building Airflow (DEV dependencies)
+  * additional apt/python dependencies to install for the runtime version of Airflow (RUNTIME dependencies)
+  * additional commands and variables to set if needed during building or preparing the Airflow runtime
+  * the constraints file to use when installing Airflow
+
+Additional explanation is needed for the last point. Airflow uses constraints to make sure
+that it can be predictably installed, even if some new versions of Airflow dependencies are
+released (or even dependencies of our dependencies!). The docker image and accompanying scripts
+usually determine automatically the right versions of constraints to be used, based on the Airflow
+version installed and the Python version. For example, the 2.0.1 version of Airflow installed from PyPI
+uses constraints from the ``constraints-2.0.1`` tag. However, in some cases - when installing airflow from
+GitHub for example - you have to manually specify the version of constraints used; otherwise
+it will default to the latest version of the constraints, which might not be compatible with the
+version of Airflow you use.
+
+You can also download any version of the Airflow constraints, adapt it with your own set of
+constraints, manually set your own versions of dependencies, and use the version
+of constraints that you manually prepared.
+
+You can read more about constraints in the documentation of the
+`Installation `_
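+
+If you prepared your own constraints file, a minimal sketch of pointing the build at it might look
+as follows (the file name and image tag are just examples; the file has to be inside the Docker context,
+for example in ``docker-context-files``):
+
+.. code-block:: bash
+
+    docker build . \
+        --build-arg AIRFLOW_VERSION="2.0.1" \
+        --build-arg AIRFLOW_CONSTRAINTS_LOCATION="/docker-context-files/my-constraints-3.6.txt" \
+        --tag "my-airflow:custom-constraints"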
+
+Examples of image customizing
+-----------------------------
+
+.. _image-build-pypi:
+
+
+Building from PyPI packages
+...........................
+
+This is the basic way of building the custom images from sources.
+
+The following example builds the production image for Python ``3.6`` with the latest PyPI-released Airflow,
+with the default set of Airflow extras and dependencies. The ``2.0.1`` constraints are used automatically.
+
+.. exampleinclude:: docker-examples/customizing/stable-airflow.sh
+    :language: bash
+    :start-after: [START build]
+    :end-before: [END build]
+
+The following example builds the production image for Python ``3.7`` with the default extras from the ``2.0.1`` PyPI
+package. The ``2.0.1`` constraints are used automatically.
+
+.. exampleinclude:: docker-examples/customizing/pypi-selected-version.sh
+    :language: bash
+    :start-after: [START build]
+    :end-before: [END build]
+
+The following example builds the production image for Python ``3.8`` with additional airflow extras
+(``mssql,hdfs``) from the ``2.0.1`` PyPI package, and an additional dependency (``oauth2client``).
+
+.. exampleinclude:: docker-examples/customizing/pypi-extras-and-deps.sh
+    :language: bash
+    :start-after: [START build]
+    :end-before: [END build]
+
+
+The following example adds the ``mpi4py`` package, which requires both ``build-essential`` and an MPI compiler.
+
+.. exampleinclude:: docker-examples/customizing/add-build-essential-custom.sh
+    :language: bash
+    :start-after: [START build]
+    :end-before: [END build]
+
+The above image is equivalent to the "extended" image from the previous chapter, but its size is only
+874 MB. Compared to the 1.1 GB of the "extended" image, this is about 230 MB less, so you can achieve a ~20%
+improvement in the size of the image by using "customization" vs. extension. The savings can increase if you
+have more complex dependencies to build.
+
+
+.. _image-build-optimized:
+
+Building optimized images
+.........................
+
+The following example builds the production image for Python ``3.6`` with additional airflow extras from the ``2.0.1``
+PyPI package, but it includes additional apt dev and runtime dependencies.
+
+The dev dependencies are those that require ``build-essential`` and usually involve recompiling
+some python dependencies, so those packages might require some additional DEV dependencies to be
+present during recompilation. Those packages are not needed at runtime, so we only install them for the
+"build" time. They are not installed in the final image, thus producing much smaller images.
+In this case pandas requires recompilation, so it also needs gcc and g++ as dev APT dependencies.
+The ``jre-headless`` does not require recompiling, so it can be installed as a runtime APT dependency.
+
+.. exampleinclude:: docker-examples/customizing/pypi-dev-runtime-deps.sh
+    :language: bash
+    :start-after: [START build]
+    :end-before: [END build]
+
+.. _image-build-github:
+
+
+Building from GitHub
+....................
+
+This method is usually used for development purposes. But in case you have your own fork, you can point
+it to your forked version of the source code without having to release it to PyPI. It is enough to have
+a branch or tag in your repository and use the tag or branch in the URL that you point the installation to.
+
+In case of GitHub builds you need to pass the constraints reference manually if you want to use
+specific constraints; otherwise the default ``constraints-master`` is used.
+
+The following example builds the production image for Python ``3.7`` with the default extras from the latest master
+version; constraints are taken from the latest version of the ``constraints-master`` branch in GitHub.
+
+.. exampleinclude:: docker-examples/customizing/github-master.sh
+    :language: bash
+    :start-after: [START build]
+    :end-before: [END build]
+
+The following example builds the production image with the default extras from the
+latest ``v2-0-test`` version; constraints are taken from the latest version of
+the ``constraints-2-0`` branch in GitHub. Note that this command might fail occasionally, as only
+the "released version" constraints (when building a released version) and the "master" constraints
+(when building master) are guaranteed to work.
+
+.. exampleinclude:: docker-examples/customizing/github-v2-0-test.sh
+    :language: bash
+    :start-after: [START build]
+    :end-before: [END build]
+
+You can also specify another repository to build from. If you also want to use a different constraints
+repository source, you must specify it as an additional ``CONSTRAINTS_GITHUB_REPOSITORY`` build arg.
+
+The following example builds the production image using the ``potiuk/airflow`` fork of Airflow; constraints
+are also downloaded from that repository.
+
+.. exampleinclude:: docker-examples/customizing/github-different-repository.sh
+    :language: bash
+    :start-after: [START build]
+    :end-before: [END build]
+
+.. _image-build-custom:
+
+Using custom installation sources
+.................................
+
+You can customize more aspects of the image - such as additional commands executed before apt dependencies
+are installed, or adding extra sources to install your dependencies from. You can see all the arguments
+described below, but here is an example of a rather complex command to customize the image,
+based on the example in `this comment `_.
+
+In case you need to use your custom PyPI package indexes, you can also customize the PyPI sources used during
+the image build by adding a ``docker-context-files``/``.pypirc`` file when building the image.
+This ``.pypirc`` will not be committed to the repository (it is added to ``.gitignore``) and it will not be
+present in the final production image. It is added and used only in the build segment of the image.
+Therefore this ``.pypirc`` file can safely contain the list of package indexes you want to use, and the
+usernames and passwords used for authentication. More details about the ``.pypirc`` file can be found in the
+`pypirc specification `_.
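+
+A minimal sketch of creating such a file (the index URL and credentials below are purely hypothetical
+placeholders - replace them with your own repository details):
+
+.. code-block:: bash
+
+    # Write a .pypirc with a hypothetical private index into docker-context-files
+    cat > docker-context-files/.pypirc <<'EOF'
+    [distutils]
+    index-servers = private-pypi
+
+    [private-pypi]
+    repository = https://pypi.example.com/
+    username = build-user
+    password = build-password
+    EOF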
+
+Such customizations are independent of the way airflow is installed.
+
+.. note::
+   Similar results could be achieved by modifying the Dockerfile manually (see below) and injecting the
+   commands needed, but by specifying the customizations via build-args, you avoid the need to
+   synchronize the changes from future Airflow Dockerfiles. Those customizations should work with
+   future versions of Airflow's official ``Dockerfile`` with at most minimal modifications of parameter
+   names (if any), so using the build command for your customizations makes your custom image more
+   future-proof.
+
+The following - rather complex - example shows the capabilities of:
+
+  * Adding airflow extras (slack, odbc)
+  * Adding PyPI dependencies (``azure-storage-blob, oauth2client, beautifulsoup4, dateparser, rocketchat_API, typeform``)
+  * Adding custom environment variables while installing ``apt`` dependencies - both DEV and RUNTIME
+    (``ACCEPT_EULA=Y``)
+  * Adding a custom curl command for adding keys and configuring additional apt sources needed to install
+    ``apt`` dependencies (both DEV and RUNTIME)
+  * Adding custom ``apt`` dependencies, both DEV (``msodbcsql17 unixodbc-dev g++``) and RUNTIME
+    (``msodbcsql17 unixodbc git procps vim``)
+
+.. exampleinclude:: docker-examples/customizing/custom-sources.sh
+    :language: bash
+    :start-after: [START build]
+    :end-before: [END build]
+
+.. _image-build-secure-environments:
+
+Build images in security restricted environments
+................................................
+
+You can also make sure your image is built using only a local constraints file and locally downloaded
+wheel files. This is often useful in Enterprise environments where the binary files are verified and
+vetted by the security teams. It is also the most complex way of building the image. You should be an
+expert at building and using Dockerfiles in order to use it, and you should only follow this route if
+you have specific security needs.
+
+The build below produces the production image with packages and constraints used from the local
+``docker-context-files`` rather than installed from PyPI or GitHub. It also disables MySQL client
+installation, as it uses an external installation method.
+
+Note that as a prerequisite you need to have downloaded the wheel files. In the example below we
+first download such a constraints file locally and then use ``pip download`` to get the ``.whl`` files needed,
+but in the most likely scenario those wheel files should be copied from an internal repository of such ``.whl``
+files. Note that ``AIRFLOW_VERSION_SPECIFICATION`` is only there for reference; the apache airflow ``.whl`` file
+in the right version is part of the ``.whl`` files downloaded.
+
+Note that ``pip download`` only works on a Linux host, as some of the packages need to be compiled from
+sources and you cannot install them by providing the ``--platform`` switch. They also need to be downloaded using
+the same python version as the target image.
+
+The ``pip download`` might happen in a separate environment. The files can be committed to a separate
+binary repository and vetted/verified by the security team and used subsequently to build images
+of Airflow when needed on an air-gapped system.
+
+Example of preparing the constraint files and wheel files. Note that the ``mysql`` dependency is removed,
+as ``mysqlclient`` is installed from Oracle's ``apt`` repository; if you want to add it, you need to provide
+this library from your own repository if you want to build the Airflow image in an "air-gapped" system.
+
+.. exampleinclude:: docker-examples/restricted/restricted_environments.sh
+    :language: bash
+    :start-after: [START download]
+    :end-before: [END download]
+
+After this step is finished, your ``docker-context-files`` folder will contain all the packages that
+are needed to install Airflow.
+
+Those downloaded packages and the constraints file can be pre-vetted by your security team before you attempt
+to install the image. You can also store those downloaded binary packages in your private artifact registry,
+which allows for a flow where you download the packages on one machine, submit only new packages for
+security vetting, and use the new packages only after they have been vetted.
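+
+Transferring the vetted packages onto the air-gapped machine might then be as simple as the following
+sketch (the source directory below is a hypothetical location in your artifact store):
+
+.. code-block:: bash
+
+    # Hypothetical location of the vetted, approved packages
+    VETTED_DIR="/mnt/vetted-artifacts/airflow-2.0.1"
+
+    mkdir -p docker-context-files
+    cp "${VETTED_DIR}"/*.whl docker-context-files/
+    cp "${VETTED_DIR}"/constraints-3.6.txt docker-context-files/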
+
+On a separate (air-gapped) system, all the PyPI packages can be copied to ``docker-context-files``,
+where you can build the image using the packages downloaded by passing those build args:
+
+  * ``INSTALL_FROM_DOCKER_CONTEXT_FILES="true"``  - to use packages present in ``docker-context-files``
+  * ``AIRFLOW_PRE_CACHED_PIP_PACKAGES="false"`` - to not pre-cache packages from PyPI when building the image
+  * ``AIRFLOW_CONSTRAINTS_LOCATION=/docker-context-files/YOUR_CONSTRAINT_FILE.txt`` - to use the downloaded constraints file
+  * (Optional) ``INSTALL_MYSQL_CLIENT="false"`` if you do not want to install the ``MySQL``
+    client from the Oracle repositories. In this case also make sure that your
+
+Note that the solution we have for installing python packages from local packages only solves the problem
+of an "air-gapped" python installation. The Docker image also downloads ``apt`` dependencies and ``node-modules``.
+Those types of dependencies are, however, more likely to be available in your "air-gapped" system via transparent
+proxies, so the build should automatically reach out to your private registries. In the future the
+solution might be applied to both of those installation steps as well.
+
+You can also use the techniques described in the previous chapter to make ``docker build`` use your private,
+security-vetted apt sources or private PyPI repositories (via ``.pypirc``).
+
+If you fulfill all the criteria, you can build the image on an air-gapped system by running a command similar
+to the one below:
+
+.. exampleinclude:: docker-examples/restricted/restricted_environments.sh
+    :language: bash
+    :start-after: [START build]
+    :end-before: [END build]
+
+Modifying the Dockerfile
+........................
+
+The build arg approach is a convenience method if you do not want to manually modify the ``Dockerfile``.
+Our approach is flexible enough to be able to accommodate most requirements and
+customizations out-of-the-box. When you use it, you do not need to worry about adapting the image every
+time a new version of Airflow is released. However, sometimes it is not enough if you have very
+specific needs and want to build a very custom image. In such a case you can simply modify the
+``Dockerfile`` manually as you see fit and store it in your forked repository. However, you will have to
+make sure to rebase your changes whenever a new version of Airflow is released, because we might modify
+the approach of our Dockerfile builds in the future and you might need to resolve conflicts
+and rebase your changes.
+
+There are a few things to remember when you modify the ``Dockerfile``:
+
+* We are using the widely recommended pattern of ``.dockerignore``, where everything is ignored by default
+  and only the required folders are added through exclusion (!). This keeps the Docker context small,
+  because there are many binary artifacts generated in the sources of Airflow; if they were added to
+  the context, the time to build the image would increase significantly. If you want any new
+  folders to be available in the image, you must add them here with a leading ``!``.
+
+  .. code-block:: text
+
+      # Ignore everything
+      **
+
+      # Allow only these directories
+      !airflow
+      ...
+
+
+* The ``docker-context-files`` folder is automatically added to the context of the image, so if you want
+  to add individual files, binaries, requirement files etc., you can add them there.
The + ``docker-context-files`` is copied to the ``/docker-context-files`` folder of the build segment of the + image, so it is not present in the final image - which makes the final image smaller in case you want + to use those files only in the ``build`` segment. You must copy any files from the directory manually, + using COPY command if you want to get the files in your final image (in the main image segment). + + +More details +------------ + +Build Args reference +.................... + +The detailed ``--build-arg`` reference can be found in :doc:`build-arg-ref`. + + +The architecture of the images +.............................. + +You can read more details about the images - the context, their parameters and internal structure in the +`IMAGES.rst `_ document. diff --git a/docs/docker-stack/docker-examples/customizing/add-build-essential-custom.sh b/docs/docker-stack/docker-examples/customizing/add-build-essential-custom.sh new file mode 100755 index 0000000000000..716447055666e --- /dev/null +++ b/docs/docker-stack/docker-examples/customizing/add-build-essential-custom.sh @@ -0,0 +1,33 @@ +#!/usr/bin/env bash +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# This is an example docker build script. It is not intended for PRODUCTION use +set -euo pipefail +AIRFLOW_SOURCES="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../../" && pwd)" +cd "${AIRFLOW_SOURCES}" + +# [START build] +docker build . \ + --build-arg PYTHON_BASE_IMAGE="python:3.6-slim-buster" \ + --build-arg AIRFLOW_VERSION="2.0.1" \ + --build-arg ADDITIONAL_PYTHON_DEPS="mpi4py" \ + --build-arg ADDITIONAL_DEV_APT_DEPS="libopenmpi-dev" \ + --build-arg ADDITIONAL_RUNTIME_APT_DEPS="openmpi-common" \ + --tag "$(basename "$0")" +# [END build] +docker rmi --force "$(basename "$0")" diff --git a/docs/docker-stack/docker-examples/customizing/custom-sources.sh b/docs/docker-stack/docker-examples/customizing/custom-sources.sh new file mode 100755 index 0000000000000..242fc2e41bcb7 --- /dev/null +++ b/docs/docker-stack/docker-examples/customizing/custom-sources.sh @@ -0,0 +1,48 @@ +#!/usr/bin/env bash +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. + +# This is an example docker build script. It is not intended for PRODUCTION use +set -euo pipefail +AIRFLOW_SOURCES="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../../" && pwd)" +cd "${AIRFLOW_SOURCES}" + +# [START build] +docker build . -f Dockerfile \ + --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \ + --build-arg AIRFLOW_VERSION="2.0.1" \ + --build-arg ADDITIONAL_AIRFLOW_EXTRAS="slack,odbc" \ + --build-arg ADDITIONAL_PYTHON_DEPS=" \ + azure-storage-blob \ + oauth2client \ + beautifulsoup4 \ + dateparser \ + rocketchat_API \ + typeform" \ + --build-arg ADDITIONAL_DEV_APT_COMMAND="curl https://packages.microsoft.com/keys/microsoft.asc | \ + apt-key add --no-tty - && \ + curl https://packages.microsoft.com/config/debian/10/prod.list > /etc/apt/sources.list.d/mssql-release.list" \ + --build-arg ADDITIONAL_DEV_APT_ENV="ACCEPT_EULA=Y" \ + --build-arg ADDITIONAL_DEV_APT_DEPS="msodbcsql17 unixodbc-dev g++" \ + --build-arg ADDITIONAL_RUNTIME_APT_COMMAND="curl https://packages.microsoft.com/keys/microsoft.asc | \ + apt-key add --no-tty - && \ + curl https://packages.microsoft.com/config/debian/10/prod.list > /etc/apt/sources.list.d/mssql-release.list" \ + --build-arg ADDITIONAL_RUNTIME_APT_ENV="ACCEPT_EULA=Y" \ + --build-arg ADDITIONAL_RUNTIME_APT_DEPS="msodbcsql17 unixodbc git procps vim" \ + --tag "$(basename "$0")" +# [END build] +docker rmi --force "$(basename "$0")" diff --git a/docs/docker-stack/docker-examples/customizing/github-different-repository.sh b/docs/docker-stack/docker-examples/customizing/github-different-repository.sh new file mode 100755 index 0000000000000..b980b5b7c0e88 --- /dev/null +++ b/docs/docker-stack/docker-examples/customizing/github-different-repository.sh @@ -0,0 +1,31 @@ +#!/usr/bin/env bash +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# This is an example docker build script. It is not intended for PRODUCTION use +set -euo pipefail +AIRFLOW_SOURCES="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../../" && pwd)" +cd "${AIRFLOW_SOURCES}" +# [START build] +docker build . 
\ + --build-arg PYTHON_BASE_IMAGE="python:3.8-slim-buster" \ + --build-arg AIRFLOW_INSTALLATION_METHOD="https://github.com/potiuk/airflow/archive/master.tar.gz#egg=apache-airflow" \ + --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-master" \ + --build-arg CONSTRAINTS_GITHUB_REPOSITORY="potiuk/airflow" \ + --tag "$(basename "$0")" +# [END build] +docker rmi --force "$(basename "$0")" diff --git a/docs/docker-stack/docker-examples/customizing/github-master.sh b/docs/docker-stack/docker-examples/customizing/github-master.sh new file mode 100755 index 0000000000000..4237e91e6ff56 --- /dev/null +++ b/docs/docker-stack/docker-examples/customizing/github-master.sh @@ -0,0 +1,31 @@ +#!/usr/bin/env bash +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# This is an example docker build script. It is not intended for PRODUCTION use +set -euo pipefail +AIRFLOW_SOURCES="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../../" && pwd)" +cd "${AIRFLOW_SOURCES}" + +# [START build] +docker build . \ + --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \ + --build-arg AIRFLOW_INSTALLATION_METHOD="https://github.com/apache/airflow/archive/master.tar.gz#egg=apache-airflow" \ + --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-master" \ + --tag "$(basename "$0")" +# [END build] +docker rmi --force "$(basename "$0")" diff --git a/docs/docker-stack/docker-examples/customizing/github-v2-0-test.sh b/docs/docker-stack/docker-examples/customizing/github-v2-0-test.sh new file mode 100755 index 0000000000000..b893618a66834 --- /dev/null +++ b/docs/docker-stack/docker-examples/customizing/github-v2-0-test.sh @@ -0,0 +1,31 @@ +#!/usr/bin/env bash +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# This is an example docker build script. It is not intended for PRODUCTION use +set -euo pipefail +AIRFLOW_SOURCES="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../../" && pwd)" +cd "${AIRFLOW_SOURCES}" + +# [START build] +docker build . 
\ + --build-arg PYTHON_BASE_IMAGE="python:3.8-slim-buster" \ + --build-arg AIRFLOW_INSTALLATION_METHOD="https://github.com/apache/airflow/archive/v2-0-test.tar.gz#egg=apache-airflow" \ + --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="constraints-2-0" \ + --tag "$(basename "$0")" +# [END build] +docker rmi --force "$(basename "$0")" diff --git a/docs/docker-stack/docker-examples/customizing/pypi-dev-runtime-deps.sh b/docs/docker-stack/docker-examples/customizing/pypi-dev-runtime-deps.sh new file mode 100755 index 0000000000000..43a80927a0125 --- /dev/null +++ b/docs/docker-stack/docker-examples/customizing/pypi-dev-runtime-deps.sh @@ -0,0 +1,34 @@ +#!/usr/bin/env bash +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# This is an example docker build script. It is not intended for PRODUCTION use +set -euo pipefail +AIRFLOW_SOURCES="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../../" && pwd)" +cd "${AIRFLOW_SOURCES}" + +# [START build] +docker build . \ + --build-arg PYTHON_BASE_IMAGE="python:3.6-slim-buster" \ + --build-arg AIRFLOW_VERSION="2.0.1" \ + --build-arg ADDITIONAL_AIRFLOW_EXTRAS="jdbc" \ + --build-arg ADDITIONAL_PYTHON_DEPS="pandas" \ + --build-arg ADDITIONAL_DEV_APT_DEPS="gcc g++" \ + --build-arg ADDITIONAL_RUNTIME_APT_DEPS="default-jre-headless" \ + --tag "$(basename "$0")" +# [END build] +docker rmi --force "$(basename "$0")" diff --git a/docs/docker-stack/docker-examples/customizing/pypi-extras-and-deps.sh b/docs/docker-stack/docker-examples/customizing/pypi-extras-and-deps.sh new file mode 100755 index 0000000000000..7d150bc588488 --- /dev/null +++ b/docs/docker-stack/docker-examples/customizing/pypi-extras-and-deps.sh @@ -0,0 +1,32 @@ +#!/usr/bin/env bash +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# This is an example docker build script. It is not intended for PRODUCTION use +set -euo pipefail +AIRFLOW_SOURCES="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../../" && pwd)" +cd "${AIRFLOW_SOURCES}" + +# [START build] +docker build . 
\ + --build-arg PYTHON_BASE_IMAGE="python:3.8-slim-buster" \ + --build-arg AIRFLOW_VERSION="2.0.1" \ + --build-arg ADDITIONAL_AIRFLOW_EXTRAS="mssql,hdfs" \ + --build-arg ADDITIONAL_PYTHON_DEPS="oauth2client" \ + --tag "$(basename "$0")" +# [END build] +docker rmi --force "$(basename "$0")" diff --git a/docs/docker-stack/docker-examples/customizing/pypi-selected-version.sh b/docs/docker-stack/docker-examples/customizing/pypi-selected-version.sh new file mode 100755 index 0000000000000..98e06a1c523c4 --- /dev/null +++ b/docs/docker-stack/docker-examples/customizing/pypi-selected-version.sh @@ -0,0 +1,30 @@ +#!/usr/bin/env bash +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# This is an example docker build script. It is not intended for PRODUCTION use +set -euo pipefail +AIRFLOW_SOURCES="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../../" && pwd)" +cd "${AIRFLOW_SOURCES}" + +# [START build] +docker build . \ + --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \ + --build-arg AIRFLOW_VERSION="2.0.1" \ + --tag "$(basename "$0")" +# [END build] +docker rmi --force "$(basename "$0")" diff --git a/scripts/ci/images/ci_wait_for_all_prod_images.sh b/docs/docker-stack/docker-examples/customizing/stable-airflow.sh similarity index 72% rename from scripts/ci/images/ci_wait_for_all_prod_images.sh rename to docs/docker-stack/docker-examples/customizing/stable-airflow.sh index 25bfd7c42cf99..d3471acc181fe 100755 --- a/scripts/ci/images/ci_wait_for_all_prod_images.sh +++ b/docs/docker-stack/docker-examples/customizing/stable-airflow.sh @@ -15,12 +15,14 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -echo -echo "Waiting for all PROD images to appear: ${CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING}" -echo -for PYTHON_MAJOR_MINOR_VERSION in ${CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING} -do - export PYTHON_MAJOR_MINOR_VERSION - "$( dirname "${BASH_SOURCE[0]}" )/ci_wait_for_prod_image.sh" -done +# This is an example docker build script. It is not intended for PRODUCTION use +set -euo pipefail +AIRFLOW_SOURCES="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../../" && pwd)" +cd "${AIRFLOW_SOURCES}" + +# [START build] +docker build . \ + --tag "$(basename "$0")" +# [END build] +docker rmi --force "$(basename "$0")" diff --git a/docs/docker-stack/docker-examples/extending/add-apt-packages/Dockerfile b/docs/docker-stack/docker-examples/extending/add-apt-packages/Dockerfile new file mode 100644 index 0000000000000..8fb128ebae986 --- /dev/null +++ b/docs/docker-stack/docker-examples/extending/add-apt-packages/Dockerfile @@ -0,0 +1,27 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. 
See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This is an example Dockerfile. It is not intended for PRODUCTION use +# [START Dockerfile] +FROM apache/airflow:2.0.1 +USER root +RUN apt-get update \ + && apt-get install -y --no-install-recommends \ + vim \ + && apt-get autoremove -yqq --purge \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* +USER airflow +# [END Dockerfile] diff --git a/docs/docker-stack/docker-examples/extending/add-build-essential-extend/Dockerfile b/docs/docker-stack/docker-examples/extending/add-build-essential-extend/Dockerfile new file mode 100644 index 0000000000000..f0dc0d1e53825 --- /dev/null +++ b/docs/docker-stack/docker-examples/extending/add-build-essential-extend/Dockerfile @@ -0,0 +1,28 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This is an example Dockerfile. It is not intended for PRODUCTION use +# [START Dockerfile] +FROM apache/airflow:2.0.1 +USER root +RUN apt-get update \ + && apt-get install -y --no-install-recommends \ + build-essential libopenmpi-dev \ + && apt-get autoremove -yqq --purge \ + && apt-get clean \ + && rm -rf /var/lib/apt/lists/* +USER airflow +RUN pip install --no-cache-dir mpi4py +# [END Dockerfile] diff --git a/docs/docker-stack/docker-examples/extending/add-pypi-packages/Dockerfile b/docs/docker-stack/docker-examples/extending/add-pypi-packages/Dockerfile new file mode 100644 index 0000000000000..401e493bd1cc9 --- /dev/null +++ b/docs/docker-stack/docker-examples/extending/add-pypi-packages/Dockerfile @@ -0,0 +1,20 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This is an example Dockerfile. It is not intended for PRODUCTION use +# [START Dockerfile] +FROM apache/airflow:2.0.1 +RUN pip install --no-cache-dir lxml +# [END Dockerfile] diff --git a/docs/docker-stack/docker-examples/extending/embedding-dags/Dockerfile b/docs/docker-stack/docker-examples/extending/embedding-dags/Dockerfile new file mode 100644 index 0000000000000..9213729d72984 --- /dev/null +++ b/docs/docker-stack/docker-examples/extending/embedding-dags/Dockerfile @@ -0,0 +1,22 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This is an example Dockerfile. It is not intended for PRODUCTION use +# [START Dockerfile] +FROM apache/airflow:2.0.1 + +COPY --chown=airflow:root test_dag.py /opt/airflow/dags + +# [END Dockerfile] diff --git a/docs/docker-stack/docker-examples/extending/embedding-dags/test_dag.py b/docs/docker-stack/docker-examples/extending/embedding-dags/test_dag.py new file mode 100644 index 0000000000000..467c8c3e6539e --- /dev/null +++ b/docs/docker-stack/docker-examples/extending/embedding-dags/test_dag.py @@ -0,0 +1,39 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# [START dag] +"""This dag only runs some simple tasks to test Airflow's task execution.""" +from datetime import datetime, timedelta + +from airflow.models.dag import DAG +from airflow.operators.dummy import DummyOperator +from airflow.utils.dates import days_ago + +now = datetime.now() +now_to_the_hour = (now - timedelta(0, 0, 0, 0, 0, 3)).replace(minute=0, second=0, microsecond=0) +START_DATE = now_to_the_hour +DAG_NAME = 'test_dag_v1' + +default_args = {'owner': 'airflow', 'depends_on_past': True, 'start_date': days_ago(2)} +dag = DAG(DAG_NAME, schedule_interval='*/10 * * * *', default_args=default_args) + +run_this_1 = DummyOperator(task_id='run_this_1', dag=dag) +run_this_2 = DummyOperator(task_id='run_this_2', dag=dag) +run_this_2.set_upstream(run_this_1) +run_this_3 = DummyOperator(task_id='run_this_3', dag=dag) +run_this_3.set_upstream(run_this_2) +# [END dag] diff --git a/docs/docker-stack/docker-examples/extending/writable-directory/Dockerfile b/docs/docker-stack/docker-examples/extending/writable-directory/Dockerfile new file mode 100644 index 0000000000000..76c6535867c45 --- /dev/null +++ b/docs/docker-stack/docker-examples/extending/writable-directory/Dockerfile @@ -0,0 +1,21 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This is an example Dockerfile. It is not intended for PRODUCTION use +# [START Dockerfile] +FROM apache/airflow:2.0.1 +RUN umask 0002; \ + mkdir -p ~/writeable-directory +# [END Dockerfile] diff --git a/docs/docker-stack/docker-examples/restricted/restricted_environments.sh b/docs/docker-stack/docker-examples/restricted/restricted_environments.sh new file mode 100755 index 0000000000000..e7a36994c6cef --- /dev/null +++ b/docs/docker-stack/docker-examples/restricted/restricted_environments.sh @@ -0,0 +1,44 @@ +#!/usr/bin/env bash +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# This is an example docker build script. 
It is not intended for PRODUCTION use +set -euo pipefail +AIRFLOW_SOURCES="$(cd "$(dirname "${BASH_SOURCE[0]}")/../../../../" && pwd)" +cd "${AIRFLOW_SOURCES}" + +# [START download] +rm docker-context-files/*.whl docker-context-files/*.tar.gz docker-context-files/*.txt || true + +curl -Lo "docker-context-files/constraints-3.7.txt" \ + https://raw.githubusercontent.com/apache/airflow/constraints-2.0.1/constraints-3.7.txt + +pip download --dest docker-context-files \ + --constraint docker-context-files/constraints-3.7.txt \ + "apache-airflow[async,aws,azure,celery,dask,elasticsearch,gcp,kubernetes,postgres,redis,slack,ssh,statsd,virtualenv]==2.0.1" +# [END download] + +# [START build] +docker build . \ + --build-arg PYTHON_BASE_IMAGE="python:3.7-slim-buster" \ + --build-arg AIRFLOW_INSTALLATION_METHOD="apache-airflow" \ + --build-arg AIRFLOW_VERSION="2.0.1" \ + --build-arg INSTALL_MYSQL_CLIENT="false" \ + --build-arg AIRFLOW_PRE_CACHED_PIP_PACKAGES="false" \ + --build-arg INSTALL_FROM_DOCKER_CONTEXT_FILES="true" \ + --build-arg AIRFLOW_CONSTRAINTS_LOCATION="/docker-context-files/constraints-3.7.txt" +# [END build] diff --git a/docs/apache-airflow/docker-images-recipes/gcloud.Dockerfile b/docs/docker-stack/docker-images-recipes/gcloud.Dockerfile similarity index 100% rename from docs/apache-airflow/docker-images-recipes/gcloud.Dockerfile rename to docs/docker-stack/docker-images-recipes/gcloud.Dockerfile diff --git a/docs/apache-airflow/docker-images-recipes/hadoop.Dockerfile b/docs/docker-stack/docker-images-recipes/hadoop.Dockerfile similarity index 100% rename from docs/apache-airflow/docker-images-recipes/hadoop.Dockerfile rename to docs/docker-stack/docker-images-recipes/hadoop.Dockerfile diff --git a/docs/docker-stack/entrypoint.rst b/docs/docker-stack/entrypoint.rst new file mode 100644 index 0000000000000..cc898727381c5 --- /dev/null +++ b/docs/docker-stack/entrypoint.rst @@ -0,0 +1,237 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +Entrypoint +========== + +If you are using the default entrypoint of the production image, +there are a few actions that are automatically performed when the container starts. +In some cases, you can pass environment variables to the image to trigger some of that behaviour. + +The variables that control the "execution" behaviour start with ``_AIRFLOW`` to distinguish them +from the variables used to build the image, which start with ``AIRFLOW``. + +The image entrypoint works as follows: + +* If the user is not "airflow" (i.e. it has an arbitrary user id unknown to the image) and the group id of the user is set to ``0`` (root), + then the user is dynamically added to ``/etc/passwd`` at entry, using the ``USER_NAME`` variable to define the user name.
+ This is in order to accommodate the + `OpenShift Guidelines `_ + +* ``AIRFLOW_HOME`` is set by default to ``/opt/airflow/`` - this means that DAGs + are by default in the ``/opt/airflow/dags`` folder and logs are in the ``/opt/airflow/logs`` folder. + +* The working directory is ``/opt/airflow`` by default. + +* If the ``AIRFLOW__CORE__SQL_ALCHEMY_CONN`` variable is passed to the container and it is either a MySQL or Postgres + SQLAlchemy connection, then the connection is checked and the script waits until the database is reachable. + If the ``AIRFLOW__CORE__SQL_ALCHEMY_CONN_CMD`` variable is passed to the container, it is evaluated as a + command to execute and the result of this evaluation is used as ``AIRFLOW__CORE__SQL_ALCHEMY_CONN``. The + ``_CMD`` variable takes precedence over the ``AIRFLOW__CORE__SQL_ALCHEMY_CONN`` variable. + +* If no ``AIRFLOW__CORE__SQL_ALCHEMY_CONN`` variable is set, then a SQLite database is created in + ``${AIRFLOW_HOME}/airflow.db`` and a db reset is executed. + +* If the first argument equals "bash" - you are dropped into a bash shell, or a bash command is executed + if you specify extra arguments. For example: + + .. code-block:: bash + + docker run -it apache/airflow:master-python3.6 bash -c "ls -la" + total 16 + drwxr-xr-x 4 airflow root 4096 Jun 5 18:12 . + drwxr-xr-x 1 root root 4096 Jun 5 18:12 .. + drwxr-xr-x 2 airflow root 4096 Jun 5 18:12 dags + drwxr-xr-x 2 airflow root 4096 Jun 5 18:12 logs + +* If the first argument is equal to ``python`` - you are dropped into a Python shell, or a Python command is executed if + you pass extra parameters. For example: + + .. code-block:: bash + + > docker run -it apache/airflow:master-python3.6 python -c "print('test')" + test + +* If the first argument equals "airflow" - the rest of the arguments are treated as an airflow command + to execute. Example: + + .. code-block:: bash + + docker run -it apache/airflow:master-python3.6 airflow webserver + +* If there are any other arguments - they are simply passed to the "airflow" command. + + .. code-block:: bash + + > docker run -it apache/airflow:master-python3.6 version + 2.1.0.dev0 + +* If the ``AIRFLOW__CELERY__BROKER_URL`` variable is passed and an airflow command with the + scheduler, worker or flower command is used, then the script checks the broker connection + and waits until the Celery broker is reachable. + If the ``AIRFLOW__CELERY__BROKER_URL_CMD`` variable is passed to the container, it is evaluated as a + command to execute and the result of this evaluation is used as ``AIRFLOW__CELERY__BROKER_URL``. The + ``_CMD`` variable takes precedence over the ``AIRFLOW__CELERY__BROKER_URL`` variable. + +.. _arbitrary-docker-user: + +Allowing arbitrary user to run the container +-------------------------------------------- + +The Airflow image is OpenShift compatible, which means that you can start it with a random user ID and the +group id ``0`` (``root``). If you want to run the image with a user different than Airflow, you MUST set the +GID of the user to ``0``. In case you try to use a different group, the entrypoint exits with an error. + +In order to accommodate a number of external libraries and projects, Airflow will automatically create +such an arbitrary user in ``/etc/passwd`` and make its home directory point to ``/home/airflow``. +Many third-party libraries and packages require the home directory of the user to be present, because they +need to write some cache information there, so such dynamic creation of a user is necessary.
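+
+For example, a quick way to observe this behaviour (the user id ``5000`` below is arbitrary, chosen purely
+for illustration):
+
+.. code-block:: bash
+
+    # any user id works, as long as the group id is 0 (root)
+    docker run -it --rm --user 5000:0 apache/airflow:2.0.1 bash -c 'whoami; echo "HOME=${HOME}"'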
+ +Such an arbitrary user has to be able to write to certain directories that need write access, and since +it is not advised to allow write access to "other" for security reasons, the OpenShift +guidelines introduced the concept of making all such folders have the ``0`` (``root``) group id (GID). +All the directories that need write access in the Airflow production image have GID set to 0 (and +they are writable for the group). We are following that concept, and all the directories that need +write access follow it. + +GID=0 is set as the default for the ``airflow`` user, so any directories it creates have GID set to 0 +by default. The entrypoint sets ``umask`` to ``0002`` - this means that any directories created by +the user also have "group write" access for group ``0`` - they will be writable by other users with the +``root`` group. Also, whenever any "arbitrary" user creates a folder (for example in a mounted volume), that +folder will have "group write" access and ``GID=0``, so that execution with another arbitrary user +will still continue to work, even if such a directory is later mounted by another arbitrary user. + +The ``umask`` setting, however, only works at container runtime - it is not used during the building of +the image. If you would like to extend the image and add your own packages, you should remember to add +``umask 0002`` in front of your commands - this way the directories created by any installation +that need group access will also be writable for the group. This can be done, for example, this way: + + .. code-block:: docker + + RUN umask 0002; \ + do_something; \ + do_otherthing; + + +You can read more about it in the "Support arbitrary user ids" chapter in the +`OpenShift best practices `_. + + +Waits for Airflow DB connection +------------------------------- + +In case a Postgres or MySQL DB is used, the entrypoint will wait until the airflow DB connection becomes +available. This always happens when you use the default entrypoint. + +The script detects the backend type depending on the URL scheme and assigns default port numbers if not specified +in the URL. Then it loops until a connection to the host/port specified can be established. +It tries ``CONNECTION_CHECK_MAX_COUNT`` times and sleeps ``CONNECTION_CHECK_SLEEP_TIME`` between checks. +To disable the check, set ``CONNECTION_CHECK_MAX_COUNT=0``. + +Supported schemes: + +* ``postgres://`` - default port 5432 +* ``mysql://`` - default port 3306 +* ``sqlite://`` + +In case of a SQLite backend, there is no connection to establish and waiting is skipped. + +Upgrading Airflow DB +-------------------- + +If you set the ``_AIRFLOW_DB_UPGRADE`` variable to a non-empty value, the entrypoint will run +the ``airflow db upgrade`` command right after verifying the connection. You can also use this +when you are running airflow with the internal SQLite database (the default) to upgrade the db and create +an admin user at entrypoint, so that you can start the webserver immediately. Note - using SQLite is +intended only for testing purposes; never use SQLite in production, as it has severe limitations when it +comes to concurrency. + +Creating admin user +------------------- + +The entrypoint can also create a webserver user automatically when the container starts. You need to set +``_AIRFLOW_WWW_USER_CREATE`` to a non-empty value in order to do that. This is not intended for +production; it is only useful if you would like to run a quick test with the production image.
+You need to pass at least a password to create such a user, via ``_AIRFLOW_WWW_USER_PASSWORD`` or +``_AIRFLOW_WWW_USER_PASSWORD_CMD``. As with the other ``*_CMD`` variables, the content of +the ``*_CMD`` variable will be evaluated as a shell command and its output will be set as the password. + +User creation will fail if none of the ``PASSWORD`` variables are set - there is no default +password, for security reasons. + ++-----------+--------------------------+----------------------------------------------------------------------+ +| Parameter | Default | Environment variable | ++===========+==========================+======================================================================+ +| username | admin | ``_AIRFLOW_WWW_USER_USERNAME`` | ++-----------+--------------------------+----------------------------------------------------------------------+ +| password | | ``_AIRFLOW_WWW_USER_PASSWORD_CMD`` or ``_AIRFLOW_WWW_USER_PASSWORD`` | ++-----------+--------------------------+----------------------------------------------------------------------+ +| firstname | Airflow | ``_AIRFLOW_WWW_USER_FIRSTNAME`` | ++-----------+--------------------------+----------------------------------------------------------------------+ +| lastname | Admin | ``_AIRFLOW_WWW_USER_LASTNAME`` | ++-----------+--------------------------+----------------------------------------------------------------------+ +| email | airflowadmin@example.com | ``_AIRFLOW_WWW_USER_EMAIL`` | ++-----------+--------------------------+----------------------------------------------------------------------+ +| role | Admin | ``_AIRFLOW_WWW_USER_ROLE`` | ++-----------+--------------------------+----------------------------------------------------------------------+ + +If the password is specified, the entrypoint will attempt to create the user, but it will +not fail if the attempt fails (this accounts for the case where the user has already been created). + +You can, for example, start the webserver in the production image, initializing the internal SQLite +database and creating an ``admin/admin`` Admin user, with the following command: + +.. code-block:: bash + + docker run -it -p 8080:8080 \ + --env "_AIRFLOW_DB_UPGRADE=true" \ + --env "_AIRFLOW_WWW_USER_CREATE=true" \ + --env "_AIRFLOW_WWW_USER_PASSWORD=admin" \ + apache/airflow:master-python3.8 webserver + +The same can be achieved with the ``_CMD`` variant: + +.. code-block:: bash + + docker run -it -p 8080:8080 \ + --env "_AIRFLOW_DB_UPGRADE=true" \ + --env "_AIRFLOW_WWW_USER_CREATE=true" \ + --env "_AIRFLOW_WWW_USER_PASSWORD_CMD=echo admin" \ + apache/airflow:master-python3.8 webserver + +The commands above initialize the SQLite database, create an ``admin`` user with the password ``admin`` +and the Admin role. They also forward local port ``8080`` to the webserver port and finally start the webserver. + +Waits for celery broker connection +---------------------------------- + +In case a Postgres or MySQL DB is used, and one of the ``scheduler``, ``celery``, ``worker``, or ``flower`` +commands is used, the entrypoint will wait until the celery broker connection is available. + +The script detects the backend type depending on the URL scheme and assigns default port numbers if not specified +in the URL. Then it loops until a connection to the host/port specified can be established. +It tries ``CONNECTION_CHECK_MAX_COUNT`` times and sleeps ``CONNECTION_CHECK_SLEEP_TIME`` between checks. +To disable the check, set ``CONNECTION_CHECK_MAX_COUNT=0``.
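+
+For example, a minimal sketch of skipping the wait entirely (``CONNECTION_CHECK_MAX_COUNT=0`` disables both
+the DB and the broker checks; the ``webserver`` command here is just an illustration):
+
+.. code-block:: bash
+
+    # start immediately, without waiting for any connection checks
+    docker run -it --rm \
+        --env "CONNECTION_CHECK_MAX_COUNT=0" \
+        apache/airflow:master-python3.8 webserver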
+ +Supported schemes: + +* ``amqp(s)://`` (rabbitmq) - default port 5672 +* ``redis://`` - default port 6379 +* ``postgres://`` - default port 5432 +* ``mysql://`` - default port 3306 +* ``sqlite://`` + +In case of a SQLite backend, there is no connection to establish and waiting is skipped. diff --git a/docs/docker-stack/img/docker-logo.png b/docs/docker-stack/img/docker-logo.png new file mode 100644 index 0000000000000..d83e54a7e9ce5 Binary files /dev/null and b/docs/docker-stack/img/docker-logo.png differ diff --git a/docs/docker-stack/index.rst b/docs/docker-stack/index.rst new file mode 100644 index 0000000000000..29a7daf1b4d69 --- /dev/null +++ b/docs/docker-stack/index.rst @@ -0,0 +1,54 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +.. image:: /img/docker-logo.png + :width: 100 + +Docker Image for Apache Airflow +=============================== + +.. toctree:: + :hidden: + + Home + build + entrypoint + recipes + +.. toctree:: + :hidden: + :caption: References + + build-arg-ref + +For ease of deployment in production, the community releases a production-ready reference container +image. + +The docker image provided (as a convenience binary package) in the +`apache/airflow DockerHub `_ is a bare image +that has a few external dependencies and extras installed. + +The Apache Airflow image provided as a convenience package is optimized for size, so +it provides just a minimal set of extras and dependencies, and in most cases +you will want to either extend or customize the image. You can see all possible extras in +:doc:`extra-packages-ref`. The set of extras used in the Airflow production image is available in the +`Dockerfile `_. + +The production images are built in DockerHub from released versions and release candidates. There +are also images published from branches, but they are used mainly for development and testing purposes. +See `Airflow Git Branching `_ +for details. diff --git a/docs/docker-stack/recipes.rst b/docs/docker-stack/recipes.rst new file mode 100644 index 0000000000000..8b89a3ef1bae8 --- /dev/null +++ b/docs/docker-stack/recipes.rst @@ -0,0 +1,70 @@ + .. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + .. http://www.apache.org/licenses/LICENSE-2.0 + + .. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. 
See the License for the + specific language governing permissions and limitations + under the License. + +Recipes +======= + +Users sometimes share interesting ways of using the Docker images. We encourage users to contribute these +recipes to the documentation by submitting a pull request, in case they prove useful to other +members of the community. The sections below capture this knowledge. + +Google Cloud SDK installation +----------------------------- + +Some operators, such as :class:`~airflow.providers.google.cloud.operators.kubernetes_engine.GKEStartPodOperator`, +:class:`~airflow.providers.google.cloud.operators.dataflow.DataflowStartSqlJobOperator`, require +the installation of the `Google Cloud SDK `__ (which includes ``gcloud``). +You can also run these commands with BashOperator. + +Create a new Dockerfile like the one shown below. + +.. exampleinclude:: /docker-images-recipes/gcloud.Dockerfile + :language: dockerfile + +Then build a new image. + +.. code-block:: bash + + docker build . \ + --build-arg BASE_AIRFLOW_IMAGE="apache/airflow:2.0.1" \ + -t my-airflow-image + + +Apache Hadoop Stack installation +-------------------------------- + +Airflow is often used to run tasks on a Hadoop cluster. This requires the Java Runtime Environment (JRE) to run. +Below are the steps to install tools that are frequently used in the Hadoop world: + +- Java Runtime Environment (JRE) +- Apache Hadoop +- Apache Hive +- `Cloud Storage connector for Apache Hadoop `__ + + +Create a new Dockerfile like the one shown below. + +.. exampleinclude:: /docker-images-recipes/hadoop.Dockerfile + :language: dockerfile + +Then build a new image. + +.. code-block:: bash + + docker build . \ + --build-arg BASE_AIRFLOW_IMAGE="apache/airflow:2.0.1" \ + -t my-airflow-image diff --git a/docs/exts/airflow_intersphinx.py b/docs/exts/airflow_intersphinx.py index ee83b8f9b1a75..750579fe664ac 100644 --- a/docs/exts/airflow_intersphinx.py +++ b/docs/exts/airflow_intersphinx.py @@ -67,14 +67,15 @@ def _generate_provider_intersphinx_mapping(): f'/docs/apache-airflow/{current_version}/', (doc_inventory if os.path.exists(doc_inventory) else cache_inventory,), ) + for pkg_name in ['apache-airflow-providers', 'docker-stack']: + if os.environ.get('AIRFLOW_PACKAGE_NAME') == pkg_name: + continue + doc_inventory = f'{DOCS_DIR}/_build/docs/{pkg_name}/objects.inv' + cache_inventory = f'{DOCS_DIR}/_inventory_cache/{pkg_name}/objects.inv' - if os.environ.get('AIRFLOW_PACKAGE_NAME') != 'apache-airflow-providers': - doc_inventory = f'{DOCS_DIR}/_build/docs/apache-airflow-providers/objects.inv' - cache_inventory = f'{DOCS_DIR}/_inventory_cache/apache-airflow-providers/objects.inv' - - airflow_mapping['apache-airflow-providers'] = ( + airflow_mapping[pkg_name] = ( # base URI - '/docs/apache-airflow-providers/', + f'/docs/{pkg_name}/', (doc_inventory if os.path.exists(doc_inventory) else cache_inventory,), ) diff --git a/docs/exts/docs_build/code_utils.py b/docs/exts/docs_build/code_utils.py index e77d0b6e57095..adab5c256f9ff 100644 --- a/docs/exts/docs_build/code_utils.py +++ b/docs/exts/docs_build/code_utils.py @@ -17,6 +17,24 @@ import os from contextlib import suppress +from docs.exts.provider_yaml_utils import load_package_data + +ROOT_PROJECT_DIR = os.path.abspath( + os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir, os.pardir) +) +PROVIDER_INIT_FILE = os.path.join(ROOT_PROJECT_DIR, "airflow", "providers", "__init__.py") +DOCS_DIR = os.path.join(ROOT_PROJECT_DIR, "docs") +AIRFLOW_DIR = os.path.join(ROOT_PROJECT_DIR, "airflow")
+ +ALL_PROVIDER_YAMLS = load_package_data() +AIRFLOW_SITE_DIR = os.environ.get('AIRFLOW_SITE_DIRECTORY') +PROCESS_TIMEOUT = 8 * 60  # 480 seconds + +TEXT_RED = '\033[31m' +TEXT_RESET = '\033[0m' + +CONSOLE_WIDTH = 180 + + def prepare_code_snippet(file_path: str, line_no: int, context_lines_count: int = 5) -> str: """ diff --git a/docs/exts/docs_build/dev_index_template.html.jinja2 b/docs/exts/docs_build/dev_index_template.html.jinja2 index 0de5879307ade..b680255f0f3a2 100644 --- a/docs/exts/docs_build/dev_index_template.html.jinja2 +++ b/docs/exts/docs_build/dev_index_template.html.jinja2 @@ -67,6 +67,17 @@
+    Docker - logo
+    Docker image
+    It provides an efficient, lightweight, self-contained environment and guarantees that software will
+    always run the same no matter where it's deployed.
diff --git a/docs/exts/docs_build/docs_builder.py b/docs/exts/docs_build/docs_builder.py index 6874f783828ec..669d76ddec311 100644 --- a/docs/exts/docs_build/docs_builder.py +++ b/docs/exts/docs_build/docs_builder.py @@ -20,24 +20,27 @@ import shutil from glob import glob from subprocess import run -from tempfile import NamedTemporaryFile, TemporaryDirectory from typing import List -# pylint: disable=no-name-in-module -from docs.exts.docs_build.code_utils import pretty_format_path +from rich.console import Console + +from docs.exts.docs_build.code_utils import ( + AIRFLOW_SITE_DIR, + ALL_PROVIDER_YAMLS, + CONSOLE_WIDTH, + DOCS_DIR, + PROCESS_TIMEOUT, + ROOT_PROJECT_DIR, + pretty_format_path, +) from docs.exts.docs_build.errors import DocBuildError, parse_sphinx_warnings + +# pylint: disable=no-name-in-module from docs.exts.docs_build.spelling_checks import SpellingError, parse_spelling_warnings -from docs.exts.provider_yaml_utils import load_package_data # pylint: enable=no-name-in-module -ROOT_PROJECT_DIR = os.path.abspath( - os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir, os.pardir, os.pardir) -) -DOCS_DIR = os.path.join(ROOT_PROJECT_DIR, "docs") -ALL_PROVIDER_YAMLS = load_package_data() -AIRFLOW_SITE_DIR = os.environ.get('AIRFLOW_SITE_DIRECTORY') -PROCESS_TIMEOUT = 4 * 60 +console = Console(force_terminal=True, color_system="standard", width=CONSOLE_WIDTH) class AirflowDocsBuilder: @@ -51,12 +54,16 @@ def __init__(self, package_name: str, for_production: bool): def _doctree_dir(self) -> str: return f"{DOCS_DIR}/_doctrees/docs/{self.package_name}" + @property + def _inventory_cache_dir(self) -> str: + return f"{DOCS_DIR}/_inventory_cache" + @property def is_versioned(self): """Is current documentation package versioned?""" - # Disable versioning. This documentation does not apply to any issued product and we can update + # Disable versioning. This documentation does not apply to any released product and we can update # it as needed, i.e. with each new package of providers.
- return self.package_name != 'apache-airflow-providers' + return self.package_name not in ('apache-airflow-providers', 'docker-stack') @property def _build_dir(self) -> str: @@ -66,6 +73,26 @@ def _build_dir(self) -> str: else: return f"{DOCS_DIR}/_build/docs/{self.package_name}" + @property + def log_spelling_filename(self) -> str: + """Log from spelling job.""" + return os.path.join(self._build_dir, f"output-spelling-{self.package_name}.log") + + @property + def log_spelling_output_dir(self) -> str: + """Results from spelling job.""" + return os.path.join(self._build_dir, f"output-spelling-results-{self.package_name}") + + @property + def log_build_filename(self) -> str: + """Log from build job.""" + return os.path.join(self._build_dir, f"output-build-{self.package_name}.log") + + @property + def log_build_warning_filename(self) -> str: + """Warnings from build job.""" + return os.path.join(self._build_dir, f"warning-build-{self.package_name}.log") + @property def _current_version(self): if not self.is_versioned: @@ -99,31 +126,44 @@ def clean_files(self) -> None: os.makedirs(api_dir, exist_ok=True) os.makedirs(self._build_dir, exist_ok=True) - def check_spelling(self, verbose): - """Checks spelling.""" + def check_spelling(self, verbose: bool) -> List[SpellingError]: + """ + Checks spelling + + :param verbose: whether to show output while running + :return: list of errors + """ spelling_errors = [] - with TemporaryDirectory() as tmp_dir, NamedTemporaryFile() as output: - build_cmd = [ - "sphinx-build", - "-W", # turn warnings into errors - "--color", # do emit colored output - "-T", # show full traceback on exception - "-b", # builder to use - "spelling", - "-c", - DOCS_DIR, - "-d", # path for the cached environment and doctree files - self._doctree_dir, - self._src_dir, # path to documentation source files - tmp_dir, - ] - print("Executing cmd: ", " ".join([shlex.quote(c) for c in build_cmd])) - if not verbose: - print("The output is hidden until an error occurs.") - env = os.environ.copy() - env['AIRFLOW_PACKAGE_NAME'] = self.package_name - if self.for_production: - env['AIRFLOW_FOR_PRODUCTION'] = 'true' + os.makedirs(self._build_dir, exist_ok=True) + shutil.rmtree(self.log_spelling_output_dir, ignore_errors=True) + os.makedirs(self.log_spelling_output_dir, exist_ok=True) + + build_cmd = [ + os.path.join(ROOT_PROJECT_DIR, "docs", "exts", "docs_build", "run_patched_sphinx.py"), + "-W", # turn warnings into errors + "--color", # do emit colored output + "-T", # show full traceback on exception + "-b", # builder to use + "spelling", + "-c", + DOCS_DIR, + "-d", # path for the cached environment and doctree files + self._doctree_dir, + self._src_dir, # path to documentation source files + self.log_spelling_output_dir, + ] + + env = os.environ.copy() + env['AIRFLOW_PACKAGE_NAME'] = self.package_name + if self.for_production: + env['AIRFLOW_FOR_PRODUCTION'] = 'true' + if verbose: + console.print( + f"[blue]{self.package_name:60}:[/] Executing cmd: ", + " ".join([shlex.quote(c) for c in build_cmd]), + ) + console.print(f"[blue]{self.package_name:60}:[/] The output is hidden until an error occurs.") + with open(self.log_spelling_filename, "wt") as output: completed_proc = run( # pylint: disable=subprocess-run-check build_cmd, cwd=self._src_dir, @@ -132,58 +172,77 @@ def check_spelling(self, verbose): stderr=output if not verbose else None, timeout=PROCESS_TIMEOUT, ) - if completed_proc.returncode != 0: - output.seek(0) - print(output.read().decode()) - - spelling_errors.append( - 
SpellingError( - file_path=None, - line_no=None, - spelling=None, - suggestion=None, - context_line=None, - message=( - f"Sphinx spellcheck returned non-zero exit status: {completed_proc.returncode}." - ), - ) + if completed_proc.returncode != 0: + spelling_errors.append( + SpellingError( + file_path=None, + line_no=None, + spelling=None, + suggestion=None, + context_line=None, + message=( + f"Sphinx spellcheck returned non-zero exit status: {completed_proc.returncode}." + ), ) - warning_text = "" - for filepath in glob(f"{tmp_dir}/**/*.spelling", recursive=True): - with open(filepath) as speeling_file: - warning_text += speeling_file.read() + ) + warning_text = "" + for filepath in glob(f"{self.log_spelling_output_dir}/**/*.spelling", recursive=True): + with open(filepath) as spelling_file: + warning_text += spelling_file.read() - spelling_errors.extend(parse_spelling_warnings(warning_text, self._src_dir)) + spelling_errors.extend(parse_spelling_warnings(warning_text, self._src_dir)) + console.print(f"[blue]{self.package_name:60}:[/] [red]Finished spell-checking with errors[/]") + else: + if spelling_errors: + console.print( + f"[blue]{self.package_name:60}:[/] [yellow]Finished spell-checking with warnings[/]" + ) + else: + console.print( + f"[blue]{self.package_name:60}:[/] [green]Finished spell-checking successfully[/]" + ) return spelling_errors - def build_sphinx_docs(self, verbose) -> List[DocBuildError]: - """Build Sphinx documentation""" + def build_sphinx_docs(self, verbose: bool) -> List[DocBuildError]: + """ + Build Sphinx documentation. + + :param verbose: whether to show output while running + :return: list of errors + """ build_errors = [] - with NamedTemporaryFile() as tmp_file, NamedTemporaryFile() as output: - build_cmd = [ - "sphinx-build", - "-T", # show full traceback on exception - "--color", # do emit colored output - "-b", # builder to use - "html", - "-d", # path for the cached environment and doctree files - self._doctree_dir, - "-c", - DOCS_DIR, - "-w", # write warnings (and errors) to given file - tmp_file.name, - self._src_dir, # path to documentation source files - self._build_dir, # path to output directory - ] - print("Executing cmd: ", " ".join([shlex.quote(c) for c in build_cmd])) - if not verbose: - print("The output is hidden until an error occurs.") - - env = os.environ.copy() - env['AIRFLOW_PACKAGE_NAME'] = self.package_name - if self.for_production: - env['AIRFLOW_FOR_PRODUCTION'] = 'true' + os.makedirs(self._build_dir, exist_ok=True) + build_cmd = [ + os.path.join(ROOT_PROJECT_DIR, "docs", "exts", "docs_build", "run_patched_sphinx.py"), + "-T", # show full traceback on exception + "--color", # do emit colored output + "-b", # builder to use + "html", + "-d", # path for the cached environment and doctree files + self._doctree_dir, + "-c", + DOCS_DIR, + "-w", # write warnings (and errors) to given file + self.log_build_warning_filename, + self._src_dir, + self._build_dir, # path to output directory + ] + env = os.environ.copy() + env['AIRFLOW_PACKAGE_NAME'] = self.package_name + if self.for_production: + env['AIRFLOW_FOR_PRODUCTION'] = 'true' + if verbose: + console.print( + f"[blue]{self.package_name:60}:[/] Executing cmd: ", + " ".join([shlex.quote(c) for c in build_cmd]), + ) + else: + console.print( + f"[blue]{self.package_name:60}:[/] Running sphinx. " + f"The output is hidden until an error occurs." 
+ ) + with open(self.log_build_filename, "wt") as output: completed_proc = run( # pylint: disable=subprocess-run-check build_cmd, cwd=self._src_dir, @@ -192,35 +251,48 @@ def build_sphinx_docs(self, verbose) -> List[DocBuildError]: stderr=output if not verbose else None, timeout=PROCESS_TIMEOUT, ) - if completed_proc.returncode != 0: - output.seek(0) - print(output.read().decode()) - build_errors.append( - DocBuildError( - file_path=None, - line_no=None, - message=f"Sphinx returned non-zero exit status: {completed_proc.returncode}.", - ) + if completed_proc.returncode != 0: + build_errors.append( + DocBuildError( + file_path=None, + line_no=None, + message=f"Sphinx returned non-zero exit status: {completed_proc.returncode}.", ) - tmp_file.seek(0) - warning_text = tmp_file.read().decode() + ) + if os.path.isfile(self.log_build_warning_filename): + with open(self.log_build_warning_filename) as warning_file: + warning_text = warning_file.read() # Remove 7-bit C1 ANSI escape sequences warning_text = re.sub(r"\x1B[@-_][0-?]*[ -/]*[@-~]", "", warning_text) build_errors.extend(parse_sphinx_warnings(warning_text, self._src_dir)) + if build_errors: + console.print(f"[blue]{self.package_name:60}:[/] [red]Finished docs building with errors[/]") + else: + console.print(f"[blue]{self.package_name:60}:[/] [green]Finished docs building successfully[/]") return build_errors def publish(self): """Copy documentation packages files to airflow-site repository.""" - print(f"Publishing docs for {self.package_name}") + console.print(f"Publishing docs for {self.package_name}") output_dir = os.path.join(AIRFLOW_SITE_DIR, self._publish_dir) pretty_source = pretty_format_path(self._build_dir, os.getcwd()) pretty_target = pretty_format_path(output_dir, AIRFLOW_SITE_DIR) - print(f"Copy directory: {pretty_source} => {pretty_target}") + console.print(f"Copy directory: {pretty_source} => {pretty_target}") + if os.path.exists(output_dir): + if self.is_versioned: + console.print( + f"Skipping previously existing {output_dir}! " + f"Delete it manually if you want to regenerate it!" 
+ ) + console.print() + return + else: + shutil.rmtree(output_dir) shutil.copytree(self._build_dir, output_dir) if self.is_versioned: with open(os.path.join(output_dir, "..", "stable.txt"), "w") as stable_file: stable_file.write(self._current_version) - print() + console.print() def get_available_providers_packages(): @@ -231,4 +303,9 @@ def get_available_packages(): """Get list of all available packages to build.""" provider_package_names = get_available_providers_packages() - return ["apache-airflow", *provider_package_names, "apache-airflow-providers"] + return [ + "apache-airflow", + *provider_package_names, + "apache-airflow-providers", + "docker-stack", + ] diff --git a/docs/exts/docs_build/errors.py b/docs/exts/docs_build/errors.py index 21106ce53a1ac..3fe9f36d810b3 100644 --- a/docs/exts/docs_build/errors.py +++ b/docs/exts/docs_build/errors.py @@ -18,11 +18,16 @@ from functools import total_ordering from typing import Dict, List, NamedTuple, Optional +from rich.console import Console + from airflow.utils.code_utils import prepare_code_snippet +from docs.exts.docs_build.code_utils import CONSOLE_WIDTH CURRENT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__))) DOCS_DIR = os.path.abspath(os.path.join(CURRENT_DIR, os.pardir, os.pardir)) +console = Console(force_terminal=True, color_system="standard", width=CONSOLE_WIDTH) + @total_ordering class DocBuildError(NamedTuple): @@ -52,25 +57,29 @@ def __lt__(self, right): def display_errors_summary(build_errors: Dict[str, List[DocBuildError]]) -> None: """Displays summary of errors""" - print("#" * 20, "Docs build errors summary", "#" * 20) - + console.print() + console.print("[red]" + "#" * 30 + " Start docs build errors summary " + "#" * 30 + "[/]") + console.print() for package_name, errors in build_errors.items(): if package_name: - print("=" * 20, package_name, "=" * 20) + console.print("=" * 30 + f" [blue]{package_name}[/] " + "=" * 30) else: - print("=" * 20, "General", "=" * 20) + console.print("=" * 30, " [blue]General[/] ", "=" * 30) for warning_no, error in enumerate(sorted(errors), 1): - print("-" * 20, f"Error {warning_no:3}", "-" * 20) - print(error.message) - print() - if error.file_path and error.file_path != "<unknown>" and error.line_no: - print(f"File path: {os.path.relpath(error.file_path, start=DOCS_DIR)} ({error.line_no})") - print() - print(prepare_code_snippet(error.file_path, error.line_no)) + console.print("-" * 30, f"[red]Error {warning_no:3}[/]", "-" * 20) + console.print(error.message) + console.print() + if error.file_path and not error.file_path.endswith("<unknown>") and error.line_no: + console.print( + f"File path: {os.path.relpath(error.file_path, start=DOCS_DIR)} ({error.line_no})" + ) + console.print() + console.print(prepare_code_snippet(error.file_path, error.line_no)) elif error.file_path: - print(f"File path: {error.file_path}") - - print("#" * 50) + console.print(f"File path: {error.file_path}") + console.print() + console.print("[red]" + "#" * 30 + " End docs build errors summary " + "#" * 30 + "[/]") + console.print() def parse_sphinx_warnings(warning_text: str, docs_dir: str) -> List[DocBuildError]: diff --git a/docs/exts/docs_build/fetch_inventories.py b/docs/exts/docs_build/fetch_inventories.py index e9da26442bd54..592e8184abf41 100644 --- a/docs/exts/docs_build/fetch_inventories.py +++ b/docs/exts/docs_build/fetch_inventories.py @@ -20,10 +20,13 @@ import datetime import os import shutil +from itertools import repeat +from typing import Iterator, List, Tuple
import requests from requests.adapters import DEFAULT_POOLSIZE +from airflow.utils.helpers import partition from docs.exts.docs_build.docs_builder import ( # pylint: disable=no-name-in-module get_available_providers_packages, ) @@ -42,17 +45,22 @@ S3_DOC_URL_NON_VERSIONED = S3_DOC_URL + "/docs/{package_name}/objects.inv" -def _fetch_file(session: requests.Session, url: str, path: str): +def _fetch_file(session: requests.Session, package_name: str, url: str, path: str) -> Tuple[str, bool]: + """ + Download a file and return status information as a tuple with the package + name and success status (bool value). + """ response = session.get(url, allow_redirects=True, stream=True) if not response.ok: print(f"Failed to fetch inventory: {url}") - return + return package_name, False os.makedirs(os.path.dirname(path), exist_ok=True) with open(path, 'wb') as f: response.raw.decode_content = True shutil.copyfileobj(response.raw, f) print(f"Fetched inventory: {url}") + return package_name, True def _is_outdated(path: str): @@ -65,42 +73,61 @@ def fetch_inventories(): """Fetch all inventories for Airflow documentation packages and store in cache.""" os.makedirs(os.path.dirname(CACHE_DIR), exist_ok=True) - to_download = [] + to_download: List[Tuple[str, str, str]] = [] for pkg_name in get_available_providers_packages(): to_download.append( ( + pkg_name, S3_DOC_URL_VERSIONED.format(package_name=pkg_name), f'{CACHE_DIR}/{pkg_name}/objects.inv', ) ) to_download.append( ( + "apache-airflow", S3_DOC_URL_VERSIONED.format(package_name='apache-airflow'), f'{CACHE_DIR}/apache-airflow/objects.inv', ) ) - to_download.append( - ( - S3_DOC_URL_NON_VERSIONED.format(package_name='apache-airflow-providers'), - f'{CACHE_DIR}/apache-airflow-providers/objects.inv', + for pkg_name in ['apache-airflow-providers', 'docker-stack']: + to_download.append( + ( + pkg_name, + S3_DOC_URL_NON_VERSIONED.format(package_name=pkg_name), + f'{CACHE_DIR}/{pkg_name}/objects.inv', + ) ) - ) to_download.extend( ( + pkg_name, f"{doc_url}/objects.inv", f'{CACHE_DIR}/{pkg_name}/objects.inv', ) for pkg_name, doc_url in THIRD_PARTY_INDEXES.items() ) - to_download = [(url, path) for url, path in to_download if _is_outdated(path)] + to_download = [(pkg_name, url, path) for pkg_name, url, path in to_download if _is_outdated(path)] if not to_download: print("Nothing to do") - return + return [] print(f"To download {len(to_download)} inventory(ies)") with requests.Session() as session, concurrent.futures.ThreadPoolExecutor(DEFAULT_POOLSIZE) as pool: - for url, path in to_download: - pool.submit(_fetch_file, session=session, url=url, path=path) + download_results: Iterator[Tuple[str, bool]] = pool.map( + _fetch_file, + repeat(session, len(to_download)), + (pkg_name for pkg_name, _, _ in to_download), + (url for _, url, _ in to_download), + (path for _, _, path in to_download), + ) + failed, success = partition(lambda d: d[1], download_results) + failed, success = list(failed), list(success) + print(f"Result: {len(success)} success, {len(failed)} failed") + if failed: + print("Failed packages:") + for pkg_no, (pkg_name, _) in enumerate(failed, start=1): + print(f"{pkg_no}. 
{pkg_name}") + + return [pkg_name for pkg_name, status in failed] diff --git a/docs/exts/docs_build/github_action_utils.py b/docs/exts/docs_build/github_action_utils.py index 4b21b03a71a82..f0fc4834a87eb 100644 --- a/docs/exts/docs_build/github_action_utils.py +++ b/docs/exts/docs_build/github_action_utils.py @@ -33,6 +33,7 @@ def with_group(title): yield return print(f"::group::{title}") + print() yield print("\033[0m") print("::endgroup::") diff --git a/docs/exts/docs_build/lint_checks.py b/docs/exts/docs_build/lint_checks.py index 54d9705d1865b..88aa1bbfc162d 100644 --- a/docs/exts/docs_build/lint_checks.py +++ b/docs/exts/docs_build/lint_checks.py @@ -24,7 +24,11 @@ import yaml -# pylint: disable=wrong-import-order +try: + from yaml import CSafeLoader as SafeLoader +except ImportError: + from yaml import SafeLoader # type: ignore[misc] + import airflow from docs.exts.docs_build.docs_builder import ALL_PROVIDER_YAMLS # pylint: disable=no-name-in-module from docs.exts.docs_build.errors import DocBuildError # pylint: disable=no-name-in-module @@ -330,7 +334,7 @@ def check_docker_image_tag_in_quick_start_guide() -> List[DocBuildError]: # master tag is little outdated. expected_image = f'apache/airflow:{expected_tag}' with open(compose_file_path) as yaml_file: - content = yaml.safe_load(yaml_file) + content = yaml.load(yaml_file, SafeLoader) current_image_expression = content['x-airflow-common']['image'] if expected_image not in current_image_expression: build_errors.append( diff --git a/docs/exts/docs_build/run_patched_sphinx.py b/docs/exts/docs_build/run_patched_sphinx.py new file mode 100755 index 0000000000000..887b982e5c0d8 --- /dev/null +++ b/docs/exts/docs_build/run_patched_sphinx.py @@ -0,0 +1,105 @@ +#!/usr/bin/env python +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +import os +import sys + +import autoapi +from autoapi.extension import ( + LOGGER, + ExtensionError, + bold, + darkgreen, + default_backend_mapping, + default_file_mapping, + default_ignore_patterns, +) +from sphinx.cmd.build import main + + +def run_autoapi(app): + """Load AutoAPI data from the filesystem.""" + if not app.config.autoapi_dirs: + raise ExtensionError("You must configure an autoapi_dirs setting") + + # Make sure the paths are full + normalized_dirs = [] + autoapi_dirs = app.config.autoapi_dirs + if isinstance(autoapi_dirs, str): + autoapi_dirs = [autoapi_dirs] + for path in autoapi_dirs: + if os.path.isabs(path): + normalized_dirs.append(path) + else: + normalized_dirs.append(os.path.normpath(os.path.join(app.confdir, path))) + + for _dir in normalized_dirs: + if not os.path.exists(_dir): + raise ExtensionError( + "AutoAPI Directory `{dir}` not found. " + "Please check your `autoapi_dirs` setting.".format(dir=_dir) + ) + + # Change from app.confdir to app.srcdir. 
+ # Before: + # - normalized_root = os.path.normpath( + # - os.path.join(app.confdir, app.config.autoapi_root) + # -) + normalized_root = os.path.normpath(os.path.join(app.srcdir, app.config.autoapi_root)) + url_root = os.path.join("/", app.config.autoapi_root) + sphinx_mapper = default_backend_mapping[app.config.autoapi_type] + sphinx_mapper_obj = sphinx_mapper(app, template_dir=app.config.autoapi_template_dir, url_root=url_root) + app.env.autoapi_mapper = sphinx_mapper_obj + + if app.config.autoapi_file_patterns: + file_patterns = app.config.autoapi_file_patterns + else: + file_patterns = default_file_mapping.get(app.config.autoapi_type, []) + + if app.config.autoapi_ignore: + ignore_patterns = app.config.autoapi_ignore + else: + ignore_patterns = default_ignore_patterns.get(app.config.autoapi_type, []) + + if ".rst" in app.config.source_suffix: + out_suffix = ".rst" + elif ".txt" in app.config.source_suffix: + out_suffix = ".txt" + else: + # Fallback to first suffix listed + out_suffix = app.config.source_suffix[0] + + # Actual meat of the run. + LOGGER.info(bold("[AutoAPI] ") + darkgreen("Loading Data")) + sphinx_mapper_obj.load(patterns=file_patterns, dirs=normalized_dirs, ignore=ignore_patterns) + + LOGGER.info(bold("[AutoAPI] ") + darkgreen("Mapping Data")) + sphinx_mapper_obj.map(options=app.config.autoapi_options) + + if app.config.autoapi_generate_api_docs: + LOGGER.info(bold("[AutoAPI] ") + darkgreen("Rendering Data")) + sphinx_mapper_obj.output_rst(root=normalized_root, source_suffix=out_suffix) + + +# HACK: sphinx-autoapi incorrectly used the confdir attribute instead of srcdir when specifying the +# directory to contain the generated files. +# Unfortunately we have a problem updating to a newer version of this library and we have to use +# sphinx-autoapi v1.0.0, so I am monkeypatching this library to fix this one problem. +autoapi.extension.run_autoapi = run_autoapi + +sys.exit(main(sys.argv[1:])) diff --git a/docs/exts/docs_build/spelling_checks.py b/docs/exts/docs_build/spelling_checks.py index c2b7ca93bad15..41a54c8c00c4b 100644 --- a/docs/exts/docs_build/spelling_checks.py +++ b/docs/exts/docs_build/spelling_checks.py @@ -20,11 +20,16 @@ from functools import total_ordering from typing import Dict, List, NamedTuple, Optional +from rich.console import Console + from airflow.utils.code_utils import prepare_code_snippet +from docs.exts.docs_build.code_utils import CONSOLE_WIDTH CURRENT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__))) DOCS_DIR = os.path.abspath(os.path.join(CURRENT_DIR, os.pardir, os.pardir)) +console = Console(force_terminal=True, color_system="standard", width=CONSOLE_WIDTH) + @total_ordering class SpellingError(NamedTuple): @@ -75,7 +80,7 @@ def __lt__(self, other): return left < right -def parse_spelling_warnings(warning_text: str, docs_dir) -> List[SpellingError]: +def parse_spelling_warnings(warning_text: str, docs_dir: str) -> List[SpellingError]: """ Parses warnings from Sphinx. 
@@ -130,43 +135,47 @@ def parse_spelling_warnings(warning_text: str, docs_dir) -> List[SpellingError]: def display_spelling_error_summary(spelling_errors: Dict[str, List[SpellingError]]) -> None: """Displays summary of Spelling errors""" - print("#" * 20, "Spelling errors summary", "#" * 20) + console.print() + console.print("[red]" + "#" * 30 + " Start spelling errors summary " + "#" * 30 + "[/]") + console.print() for package_name, errors in sorted(spelling_errors.items()): if package_name: - print("=" * 20, package_name, "=" * 20) + console.print("=" * 30, f" [blue]{package_name}[/] ", "=" * 30) else: - print("=" * 20, "General", "=" * 20) + console.print("=" * 30, " [blue]General[/] ", "=" * 30) for warning_no, error in enumerate(sorted(errors), 1): - print("-" * 20, f"Error {warning_no:3}", "-" * 20) + console.print("-" * 30, f"Error {warning_no:3}", "-" * 30) _display_error(error) - print("=" * 50) - print() + console.print("=" * 100) + console.print() msg = """ If the spelling is correct, add the spelling to docs/spelling_wordlist.txt or use the spelling directive. Check https://sphinxcontrib-spelling.readthedocs.io/en/latest/customize.html#private-dictionaries for more details. """ - print(msg) - print() - print("#" * 50) + console.print(msg) + console.print() + console.print() + console.print("[red]" + "#" * 30 + " End spelling errors summary " + "#" * 30 + "[/]") + console.print() def _display_error(error: SpellingError): - print(error.message) - print() + console.print(error.message) + console.print() if error.file_path: - print(f"File path: {os.path.relpath(error.file_path, start=DOCS_DIR)}") + console.print(f"File path: {os.path.relpath(error.file_path, start=DOCS_DIR)}") if error.spelling: - print(f"Incorrect Spelling: '{error.spelling}'") + console.print(f"Incorrect Spelling: '{error.spelling}'") if error.suggestion: - print(f"Suggested Spelling: '{error.suggestion}'") + console.print(f"Suggested Spelling: '{error.suggestion}'") if error.context_line: - print(f"Line with Error: '{error.context_line}'") - if error.line_no: - print(f"Line Number: {error.line_no}") - print(prepare_code_snippet(error.file_path, error.line_no)) + console.print(f"Line with Error: '{error.context_line}'") + if error.file_path and not error.file_path.endswith("<unknown>") and error.line_no: + console.print(f"Line Number: {error.line_no}") + console.print(prepare_code_snippet(error.file_path, error.line_no)) diff --git a/docs/exts/docs_build/third_party_inventories.py b/docs/exts/docs_build/third_party_inventories.py index 27b461f7881e8..307fd391f26b3 100644 --- a/docs/exts/docs_build/third_party_inventories.py +++ b/docs/exts/docs_build/third_party_inventories.py @@ -20,7 +20,7 @@ 'celery': 'https://docs.celeryproject.org/en/stable', 'hdfs': 'https://hdfscli.readthedocs.io/en/latest', 'jinja2': 'https://jinja.palletsprojects.com/en/master', - 'mongodb': 'https://pymongo.readthedocs.io/en/stable/', + 'mongodb': 'https://pymongo.readthedocs.io/en/3.11.3', 'pandas': 'https://pandas.pydata.org/pandas-docs/stable', 'python': 'https://docs.python.org/3', 'requests': 'https://requests.readthedocs.io/en/master', diff --git a/docs/exts/provider_init_hack.py b/docs/exts/provider_init_hack.py index 0d885591453b7..40f7fefa5b18f 100644 --- a/docs/exts/provider_init_hack.py +++ b/docs/exts/provider_init_hack.py @@ -34,17 +34,12 @@ def _create_init_py(app, config): del app del config + # This file is deleted by /docs/build_docs.py. If you are not using the script, the file will be + # deleted by pre-commit. 
with open(PROVIDER_INIT_FILE, "wt"): pass -def _delete_init_py(app, exception): - del app - del exception - if os.path.exists(PROVIDER_INIT_FILE): - os.remove(PROVIDER_INIT_FILE) - - def setup(app: Sphinx): """ Sets the plugin up and returns configuration of the plugin. @@ -53,6 +48,5 @@ def setup(app: Sphinx): :return json description of the configuration that is needed by the plugin. """ app.connect("config-inited", _create_init_py) - app.connect("build-finished", _delete_init_py) return {"version": "builtin", "parallel_read_safe": True, "parallel_write_safe": True} diff --git a/docs/exts/provider_yaml_utils.py b/docs/exts/provider_yaml_utils.py index 130084cdf87ad..a6d1ee297d560 100644 --- a/docs/exts/provider_yaml_utils.py +++ b/docs/exts/provider_yaml_utils.py @@ -23,6 +23,12 @@ import jsonschema import yaml +try: + from yaml import CSafeLoader as SafeLoader +except ImportError: + from yaml import SafeLoader # type: ignore[misc] + + ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir)) PROVIDER_DATA_SCHEMA_PATH = os.path.join(ROOT_DIR, "airflow", "provider.yaml.schema.json") @@ -53,7 +59,7 @@ def load_package_data() -> List[Dict[str, Any]]: result = [] for provider_yaml_path in get_provider_yaml_paths(): with open(provider_yaml_path) as yaml_file: - provider = yaml.safe_load(yaml_file) + provider = yaml.load(yaml_file, SafeLoader) try: jsonschema.validate(provider, schema=schema) except jsonschema.ValidationError: diff --git a/docs/integration-logos/airbyte/Airbyte.png b/docs/integration-logos/airbyte/Airbyte.png new file mode 100644 index 0000000000000..0cc1d077374e4 Binary files /dev/null and b/docs/integration-logos/airbyte/Airbyte.png differ diff --git a/docs/integration-logos/apache/cassandra-3.png b/docs/integration-logos/apache/cassandra-3.png new file mode 100644 index 0000000000000..3b4af5c68bc11 Binary files /dev/null and b/docs/integration-logos/apache/cassandra-3.png differ diff --git a/docs/integration-logos/apache/druid-1.png b/docs/integration-logos/apache/druid-1.png new file mode 100644 index 0000000000000..eb838de6e5b33 Binary files /dev/null and b/docs/integration-logos/apache/druid-1.png differ diff --git a/docs/integration-logos/apache/hadoop.png b/docs/integration-logos/apache/hadoop.png new file mode 100644 index 0000000000000..4bb7e1b8db534 Binary files /dev/null and b/docs/integration-logos/apache/hadoop.png differ diff --git a/docs/integration-logos/apache/hive.png b/docs/integration-logos/apache/hive.png new file mode 100644 index 0000000000000..0399e5f750877 Binary files /dev/null and b/docs/integration-logos/apache/hive.png differ diff --git a/docs/integration-logos/apache/pig.png b/docs/integration-logos/apache/pig.png new file mode 100644 index 0000000000000..21e11be4dd9fa Binary files /dev/null and b/docs/integration-logos/apache/pig.png differ diff --git a/docs/integration-logos/apache/pinot.png b/docs/integration-logos/apache/pinot.png new file mode 100644 index 0000000000000..79472ed8a9624 Binary files /dev/null and b/docs/integration-logos/apache/pinot.png differ diff --git a/docs/integration-logos/apache/spark.png b/docs/integration-logos/apache/spark.png new file mode 100644 index 0000000000000..776c4459a4cc0 Binary files /dev/null and b/docs/integration-logos/apache/spark.png differ diff --git a/docs/integration-logos/apache/sqoop.png b/docs/integration-logos/apache/sqoop.png new file mode 100644 index 0000000000000..b4bb262a604b5 Binary files /dev/null and b/docs/integration-logos/apache/sqoop.png differ diff 
--git a/docs/integration-logos/aws/AWS-Batch_light-bg@4x.png b/docs/integration-logos/aws/AWS-Batch_light-bg@4x.png new file mode 100644 index 0000000000000..53768d94f9829 Binary files /dev/null and b/docs/integration-logos/aws/AWS-Batch_light-bg@4x.png differ diff --git a/docs/integration-logos/aws/AWS-Glue_light-bg@4x.png b/docs/integration-logos/aws/AWS-Glue_light-bg@4x.png new file mode 100644 index 0000000000000..8249889183cd5 Binary files /dev/null and b/docs/integration-logos/aws/AWS-Glue_light-bg@4x.png differ diff --git a/docs/integration-logos/aws/AWS-Lambda_light-bg@4x.png b/docs/integration-logos/aws/AWS-Lambda_light-bg@4x.png new file mode 100644 index 0000000000000..3b67fbb1a93eb Binary files /dev/null and b/docs/integration-logos/aws/AWS-Lambda_light-bg@4x.png differ diff --git a/docs/integration-logos/aws/Amazon-Athena_light-bg@4x.png b/docs/integration-logos/aws/Amazon-Athena_light-bg@4x.png new file mode 100644 index 0000000000000..bf50342b43da1 Binary files /dev/null and b/docs/integration-logos/aws/Amazon-Athena_light-bg@4x.png differ diff --git a/docs/integration-logos/aws/Amazon-CloudWatch_light-bg@4x.png b/docs/integration-logos/aws/Amazon-CloudWatch_light-bg@4x.png new file mode 100644 index 0000000000000..9b0f3a45e716d Binary files /dev/null and b/docs/integration-logos/aws/Amazon-CloudWatch_light-bg@4x.png differ diff --git a/docs/integration-logos/aws/Amazon-DynamoDB_light-bg@4x.png b/docs/integration-logos/aws/Amazon-DynamoDB_light-bg@4x.png new file mode 100644 index 0000000000000..8d22fa7aa657e Binary files /dev/null and b/docs/integration-logos/aws/Amazon-DynamoDB_light-bg@4x.png differ diff --git a/docs/integration-logos/aws/Amazon-EC2_light-bg@4x.png b/docs/integration-logos/aws/Amazon-EC2_light-bg@4x.png new file mode 100644 index 0000000000000..d1e13b0fd2dc8 Binary files /dev/null and b/docs/integration-logos/aws/Amazon-EC2_light-bg@4x.png differ diff --git a/docs/integration-logos/aws/Amazon-EMR_light-bg@4x.png b/docs/integration-logos/aws/Amazon-EMR_light-bg@4x.png new file mode 100644 index 0000000000000..cb00bf985b71f Binary files /dev/null and b/docs/integration-logos/aws/Amazon-EMR_light-bg@4x.png differ diff --git a/docs/integration-logos/aws/Amazon-Kinesis-Data-Firehose_light-bg@4x.png b/docs/integration-logos/aws/Amazon-Kinesis-Data-Firehose_light-bg@4x.png new file mode 100644 index 0000000000000..fcfee0406e8f8 Binary files /dev/null and b/docs/integration-logos/aws/Amazon-Kinesis-Data-Firehose_light-bg@4x.png differ diff --git a/docs/integration-logos/aws/Amazon-Redshift_light-bg@4x.png b/docs/integration-logos/aws/Amazon-Redshift_light-bg@4x.png new file mode 100644 index 0000000000000..5e3f5cf13ef64 Binary files /dev/null and b/docs/integration-logos/aws/Amazon-Redshift_light-bg@4x.png differ diff --git a/docs/integration-logos/aws/Amazon-SageMaker_light-bg@4x.png b/docs/integration-logos/aws/Amazon-SageMaker_light-bg@4x.png new file mode 100644 index 0000000000000..affa812f8b9ac Binary files /dev/null and b/docs/integration-logos/aws/Amazon-SageMaker_light-bg@4x.png differ diff --git a/docs/integration-logos/aws/Amazon-Simple-Notification-Service-SNS_light-bg@4x.png b/docs/integration-logos/aws/Amazon-Simple-Notification-Service-SNS_light-bg@4x.png new file mode 100644 index 0000000000000..7f45058889992 Binary files /dev/null and b/docs/integration-logos/aws/Amazon-Simple-Notification-Service-SNS_light-bg@4x.png differ diff --git a/docs/integration-logos/aws/Amazon-Simple-Queue-Service-SQS_light-bg@4x.png 
b/docs/integration-logos/aws/Amazon-Simple-Queue-Service-SQS_light-bg@4x.png new file mode 100644 index 0000000000000..ef001894ef33e Binary files /dev/null and b/docs/integration-logos/aws/Amazon-Simple-Queue-Service-SQS_light-bg@4x.png differ diff --git a/docs/integration-logos/aws/Amazon-Simple-Storage-Service-S3_light-bg@4x.png b/docs/integration-logos/aws/Amazon-Simple-Storage-Service-S3_light-bg@4x.png new file mode 100644 index 0000000000000..769076d555706 Binary files /dev/null and b/docs/integration-logos/aws/Amazon-Simple-Storage-Service-S3_light-bg@4x.png differ diff --git a/docs/integration-logos/azure/Azure Cosmos DB.svg b/docs/integration-logos/azure/Azure Cosmos DB.svg new file mode 100644 index 0000000000000..1b6504f64627e --- /dev/null +++ b/docs/integration-logos/azure/Azure Cosmos DB.svg @@ -0,0 +1,11 @@ +[SVG markup lost in extraction: 11-line "Cosmos_DB_2" icon] diff --git a/docs/integration-logos/azure/Azure Data Factory.svg b/docs/integration-logos/azure/Azure Data Factory.svg new file mode 100644 index 0000000000000..481d3d4742462 --- /dev/null +++ b/docs/integration-logos/azure/Azure Data Factory.svg @@ -0,0 +1 @@ +[SVG markup lost in extraction: one-line "Icon-databases-126" icon] diff --git a/docs/integration-logos/azure/Azure Files.svg b/docs/integration-logos/azure/Azure Files.svg new file mode 100644 index 0000000000000..9b48467bad656 --- /dev/null +++ b/docs/integration-logos/azure/Azure Files.svg @@ -0,0 +1,8 @@ +[SVG markup lost in extraction: 8-line icon] diff --git a/docs/integration-logos/azure/Blob Storage.svg b/docs/integration-logos/azure/Blob Storage.svg new file mode 100644 index 0000000000000..0a883275ed7f5 --- /dev/null +++ b/docs/integration-logos/azure/Blob Storage.svg @@ -0,0 +1,9 @@ +[SVG markup lost in extraction: 9-line icon] diff --git a/docs/integration-logos/azure/Container Instances.svg b/docs/integration-logos/azure/Container Instances.svg new file mode 100644 index 0000000000000..e11f5e80f1a1f --- /dev/null +++ b/docs/integration-logos/azure/Container Instances.svg @@ -0,0 +1,9 @@ +[SVG markup lost in extraction: 9-line icon] diff --git a/docs/integration-logos/azure/Data Lake Storage.svg b/docs/integration-logos/azure/Data Lake Storage.svg new file mode 100644 index 0000000000000..d08dfeb329e22 --- /dev/null +++ b/docs/integration-logos/azure/Data Lake Storage.svg @@ -0,0 +1,37 @@ +[SVG markup lost in extraction: 37-line icon] diff --git a/docs/integration-logos/gcp/AI-Platform.png b/docs/integration-logos/gcp/AI-Platform.png new file mode 100644 index 0000000000000..f85ff7f8e3d50 Binary files /dev/null and b/docs/integration-logos/gcp/AI-Platform.png differ diff --git a/docs/integration-logos/gcp/BigQuery.png b/docs/integration-logos/gcp/BigQuery.png new file mode 100644 index 0000000000000..a7e0a715624aa Binary files /dev/null and b/docs/integration-logos/gcp/BigQuery.png differ diff --git a/docs/integration-logos/gcp/Cloud-AutoML.png b/docs/integration-logos/gcp/Cloud-AutoML.png new file mode 100644 index 0000000000000..b147e074b58fd Binary files /dev/null and b/docs/integration-logos/gcp/Cloud-AutoML.png differ diff --git a/docs/integration-logos/gcp/Cloud-Bigtable.png b/docs/integration-logos/gcp/Cloud-Bigtable.png new file mode 100644 index 0000000000000..afce5e05ec659 Binary files /dev/null and b/docs/integration-logos/gcp/Cloud-Bigtable.png differ diff --git a/docs/integration-logos/gcp/Cloud-Build.png b/docs/integration-logos/gcp/Cloud-Build.png new file mode 100644 index 0000000000000..d09c343e1cf6f Binary files /dev/null and b/docs/integration-logos/gcp/Cloud-Build.png differ diff --git a/docs/integration-logos/gcp/Cloud-Dataflow.png b/docs/integration-logos/gcp/Cloud-Dataflow.png new file mode 100644 index
0000000000000..c53ddf835370f Binary files /dev/null and b/docs/integration-logos/gcp/Cloud-Dataflow.png differ diff --git a/docs/integration-logos/gcp/Cloud-Dataproc.png b/docs/integration-logos/gcp/Cloud-Dataproc.png new file mode 100644 index 0000000000000..eb5ed2ce0b32c Binary files /dev/null and b/docs/integration-logos/gcp/Cloud-Dataproc.png differ diff --git a/docs/integration-logos/gcp/Cloud-Datastore.png b/docs/integration-logos/gcp/Cloud-Datastore.png new file mode 100644 index 0000000000000..43a77a8cc67e4 Binary files /dev/null and b/docs/integration-logos/gcp/Cloud-Datastore.png differ diff --git a/docs/integration-logos/gcp/Cloud-Functions.png b/docs/integration-logos/gcp/Cloud-Functions.png new file mode 100644 index 0000000000000..24188a42be478 Binary files /dev/null and b/docs/integration-logos/gcp/Cloud-Functions.png differ diff --git a/docs/integration-logos/gcp/Cloud-Memorystore.png b/docs/integration-logos/gcp/Cloud-Memorystore.png new file mode 100644 index 0000000000000..ceb3b893bd346 Binary files /dev/null and b/docs/integration-logos/gcp/Cloud-Memorystore.png differ diff --git a/docs/integration-logos/gcp/Cloud-NLP.png b/docs/integration-logos/gcp/Cloud-NLP.png new file mode 100644 index 0000000000000..a8c4308d77f23 Binary files /dev/null and b/docs/integration-logos/gcp/Cloud-NLP.png differ diff --git a/docs/integration-logos/gcp/Cloud-PubSub.png b/docs/integration-logos/gcp/Cloud-PubSub.png new file mode 100644 index 0000000000000..5b5610c95fd35 Binary files /dev/null and b/docs/integration-logos/gcp/Cloud-PubSub.png differ diff --git a/docs/integration-logos/gcp/Cloud-SQL.png b/docs/integration-logos/gcp/Cloud-SQL.png new file mode 100644 index 0000000000000..9763c557dd8f0 Binary files /dev/null and b/docs/integration-logos/gcp/Cloud-SQL.png differ diff --git a/docs/integration-logos/gcp/Cloud-Spanner.png b/docs/integration-logos/gcp/Cloud-Spanner.png new file mode 100644 index 0000000000000..88068fbb6c0d7 Binary files /dev/null and b/docs/integration-logos/gcp/Cloud-Spanner.png differ diff --git a/docs/integration-logos/gcp/Cloud-Speech-to-Text.png b/docs/integration-logos/gcp/Cloud-Speech-to-Text.png new file mode 100644 index 0000000000000..fd5ccea58ed5a Binary files /dev/null and b/docs/integration-logos/gcp/Cloud-Speech-to-Text.png differ diff --git a/docs/integration-logos/gcp/Cloud-Storage.png b/docs/integration-logos/gcp/Cloud-Storage.png new file mode 100644 index 0000000000000..3b7bb1aed2376 Binary files /dev/null and b/docs/integration-logos/gcp/Cloud-Storage.png differ diff --git a/docs/integration-logos/gcp/Cloud-Tasks.png b/docs/integration-logos/gcp/Cloud-Tasks.png new file mode 100644 index 0000000000000..39a54f00cc937 Binary files /dev/null and b/docs/integration-logos/gcp/Cloud-Tasks.png differ diff --git a/docs/integration-logos/gcp/Cloud-Text-to-Speech.png b/docs/integration-logos/gcp/Cloud-Text-to-Speech.png new file mode 100644 index 0000000000000..5822fa635264c Binary files /dev/null and b/docs/integration-logos/gcp/Cloud-Text-to-Speech.png differ diff --git a/docs/integration-logos/gcp/Cloud-Translation-API.png b/docs/integration-logos/gcp/Cloud-Translation-API.png new file mode 100644 index 0000000000000..6997d9f555d59 Binary files /dev/null and b/docs/integration-logos/gcp/Cloud-Translation-API.png differ diff --git a/docs/integration-logos/gcp/Cloud-Video-Intelligence-API.png b/docs/integration-logos/gcp/Cloud-Video-Intelligence-API.png new file mode 100644 index 0000000000000..1c6e00c162b5c Binary files /dev/null and 
b/docs/integration-logos/gcp/Cloud-Video-Intelligence-API.png differ diff --git a/docs/integration-logos/gcp/Cloud-Vision-API.png b/docs/integration-logos/gcp/Cloud-Vision-API.png new file mode 100644 index 0000000000000..28e2d63160082 Binary files /dev/null and b/docs/integration-logos/gcp/Cloud-Vision-API.png differ diff --git a/docs/integration-logos/gcp/Compute-Engine.png b/docs/integration-logos/gcp/Compute-Engine.png new file mode 100644 index 0000000000000..a4b52d92c4d0a Binary files /dev/null and b/docs/integration-logos/gcp/Compute-Engine.png differ diff --git a/docs/integration-logos/gcp/Key-Management-Service.png b/docs/integration-logos/gcp/Key-Management-Service.png new file mode 100644 index 0000000000000..46edd260a2522 Binary files /dev/null and b/docs/integration-logos/gcp/Key-Management-Service.png differ diff --git a/docs/integration-logos/gcp/Kubernetes-Engine.png b/docs/integration-logos/gcp/Kubernetes-Engine.png new file mode 100644 index 0000000000000..0e434e31a09f3 Binary files /dev/null and b/docs/integration-logos/gcp/Kubernetes-Engine.png differ diff --git a/docs/integration-logos/tableau/tableau.png b/docs/integration-logos/tableau/tableau.png new file mode 100644 index 0000000000000..4ec356cf0cde3 Binary files /dev/null and b/docs/integration-logos/tableau/tableau.png differ diff --git a/docs/integration-logos/trino/trino-og.png b/docs/integration-logos/trino/trino-og.png new file mode 100644 index 0000000000000..55bedf93dd346 Binary files /dev/null and b/docs/integration-logos/trino/trino-og.png differ diff --git a/docs/spelling_wordlist.txt b/docs/spelling_wordlist.txt index 0795107a06043..ed64a2251c0cf 100644 --- a/docs/spelling_wordlist.txt +++ b/docs/spelling_wordlist.txt @@ -1,6 +1,7 @@ Ack Acyclic Airbnb +Airbyte AirflowException Aizhamal Alphasort @@ -9,13 +10,12 @@ AnalyzeEntitiesResponse AnalyzeSentimentResponse AnalyzeSyntaxResponse Anand +Anierobi AnnotateTextResponse Ansible AppBuilder Arg Args -asctime -ashb Async Atlassian Auth @@ -123,11 +123,13 @@ Dynamodb EDITMSG ETag Eg +Elad EmrAddSteps EmrCreateJobFlow Enum Env EnvVar +Ephraim ExaConnection Exasol Failover @@ -141,9 +143,11 @@ Fileshares Filesystem Firehose Firestore +Flink FluentD Fokko Formaturas +Fspark Fundera GCS GH @@ -179,6 +183,7 @@ Hou Http HttpError HttpRequest +Huang IdP ImageAnnotatorClient Imap @@ -204,6 +209,7 @@ Json Jupyter KYLIN Kalibrr +Kalif Kamil Kaxil Kengo @@ -235,6 +241,7 @@ Lyft Maheshwari Makefile Mapreduce +Mariadb Masternode Maxime Memorystore @@ -251,6 +258,7 @@ NaN Naik Namenode Namespace +Neo4j Nextdoor Nones NotFound @@ -293,6 +301,7 @@ Pyarrow Pylint Pyspark PythonOperator +Qian Qingping Qplum Quantopian @@ -308,6 +317,7 @@ Reddit Redhat ReidentifyContentResponse Reinitialising +ResourceRequirements Riccomini Roadmap Robinhood @@ -323,6 +333,7 @@ Seki Sendgrid Siddharth SlackHook +Spark SparkPi SparkR SparkSQL @@ -334,6 +345,7 @@ Sqlite Sqoop Stackdriver Standarization +Standish StatsD Statsd StoredInfoType @@ -347,7 +359,6 @@ SubscriberClient Subtasks Sumit Systemd -Terraform TCP TLS TTY @@ -357,9 +368,11 @@ Taskfail Templated Templating Teradata +Terraform TextToSpeechClient Tez Thinknear +Timmins ToC Tomasz Tooltip @@ -384,6 +397,7 @@ Vertica Vevo VideoIntelligenceServiceClient Vikram +VolumeMount WTF WaiterModel Wasb @@ -397,9 +411,11 @@ XComs Xcom Xero Xiaodong +Xinbin Yamllint Yandex Yieldr +Yu Zego Zendesk Zhong @@ -415,6 +431,7 @@ acyclic adhoc aijamalnk airbnb +airbyte airfl airflowignore ajax @@ -472,17 +489,17 @@ autoscaling avro aws awsbatch 
+backcompat backend backends -backcompat backfill backfilled backfilling backfills backoff backport -backports backported +backports backreference backtick backticks @@ -676,11 +693,13 @@ docstring docstrings doesn doesnt +dogstatsd donot dropdown druidHook ds dsn +dstandish dttm dtypes durations @@ -692,6 +711,7 @@ ec ecb editorconfig eg +eladkal elasticsearch emr enableAutoScale @@ -705,6 +725,7 @@ env envFrom envvar eols +ephraimbuddy errno eslint etl @@ -871,6 +892,7 @@ jdbc jdk jenkins jghoman +jhtimmins jinja jira jitter @@ -924,6 +946,7 @@ licence licences lifecycle lineterminator +linux livy localExecutor localexecutor @@ -980,6 +1003,7 @@ mv mypy mysql mysqlclient +mysqldb mytaxi namenode namespace @@ -990,6 +1014,8 @@ navbar nd ndjson neighbours +neo +neo4j neq networkUri nginx @@ -1002,6 +1028,7 @@ noqa notificationChannels npm ntlm +nullable num oauth objectORfile @@ -1016,13 +1043,13 @@ openfaas oper opsgenie optimise +optionality ora -orm orchestrator orgtbl +orm os ot -optionality overridable oversubscription pagerduty @@ -1055,6 +1082,7 @@ png podName podSpec podspec +poller polyfill postMessage postfix @@ -1136,7 +1164,6 @@ repo repos reqs resetdb -ResourceRequirements resourceVersion resultset resumable @@ -1148,6 +1175,7 @@ romeoandjuliet rootcss rowid rpc +rsa rshift rst rtype @@ -1196,6 +1224,7 @@ sigv skipable sku sla +slack_sdk slackclient slas smtp @@ -1217,6 +1246,7 @@ sqlsensor sqoop src srv +ssc ssd sshHook sshtunnel @@ -1265,12 +1295,14 @@ superclass svg swp symlink -symlinks symlinking +symlinks sync'ed sys syspath systemd +tableId +tableau tableauserverclient tablefmt tagKey @@ -1310,6 +1342,7 @@ tooltips traceback tracebacks travis +trino trojan tsv ttl @@ -1367,7 +1400,6 @@ videointelligence vikramkoka virtualenv vm -VolumeMount volumeMounts wasb webProperty @@ -1390,6 +1422,7 @@ www xcom xcomarg xcomresult +xinbinhuang xml xpath xyz @@ -1400,6 +1433,7 @@ yarnpkg yml youtrack youtube +yuqian zA zendesk zhongjiajie diff --git a/images/ci/pull_request_ci_flow.md5 b/images/ci/pull_request_ci_flow.md5 index 3fe05dd3059f9..0da67b0f88fb0 100644 --- a/images/ci/pull_request_ci_flow.md5 +++ b/images/ci/pull_request_ci_flow.md5 @@ -1 +1 @@ -e3aa93e8b46ca97b4aad06de23a10ad5 images/ci/pull_request_ci_flow.mermaid +ad4af91ca126141b54e67c8741fa50ab images/ci/pull_request_ci_flow.mermaid diff --git a/images/ci/pull_request_ci_flow.mermaid b/images/ci/pull_request_ci_flow.mermaid index 2e4bf9ce78e48..c4729d3d601b1 100644 --- a/images/ci/pull_request_ci_flow.mermaid +++ b/images/ci/pull_request_ci_flow.mermaid @@ -89,8 +89,6 @@ sequenceDiagram Note over CI Build: Build docs and Note over CI Build: Spell check docs - and - Note over CI Build: Backport packages and opt Triggered? 
Note over CI Build: Tests diff --git a/images/ci/pull_request_ci_flow.png b/images/ci/pull_request_ci_flow.png index 7fec85a3d3139..04aa6b5eb695f 100644 Binary files a/images/ci/pull_request_ci_flow.png and b/images/ci/pull_request_ci_flow.png differ diff --git a/images/ci/push_ci_flow.md5 b/images/ci/push_ci_flow.md5 index edaa0cc14c7f9..021419a2e73b2 100644 --- a/images/ci/push_ci_flow.md5 +++ b/images/ci/push_ci_flow.md5 @@ -1 +1 @@ -95db6b17e315a47318eaf6260b3f0fd3 images/ci/push_ci_flow.mermaid +2e735f22a91bec2ce043289dc2f7f824 images/ci/push_ci_flow.mermaid diff --git a/images/ci/push_ci_flow.mermaid b/images/ci/push_ci_flow.mermaid index 790f800a1c24d..a7bd2d3e5b243 100644 --- a/images/ci/push_ci_flow.mermaid +++ b/images/ci/push_ci_flow.mermaid @@ -90,8 +90,6 @@ sequenceDiagram Note over CI Build: Build docs and Note over CI Build: Spell check docs - and - Note over CI Build: Backport packages and Note over CI Build: Tests and diff --git a/images/ci/push_ci_flow.png b/images/ci/push_ci_flow.png index 81c19bb44cf4c..9f791f2f86e6c 100644 Binary files a/images/ci/push_ci_flow.png and b/images/ci/push_ci_flow.png differ diff --git a/images/ci/scheduled_ci_flow.md5 b/images/ci/scheduled_ci_flow.md5 index 8ae69387116d8..0de07aae69dc7 100644 --- a/images/ci/scheduled_ci_flow.md5 +++ b/images/ci/scheduled_ci_flow.md5 @@ -1 +1 @@ -f6ea87f0b4d990fd48a9dbec2a2a4f2d images/ci/scheduled_ci_flow.mermaid +5e470a0b524d58aa8e8946b570719c0d images/ci/scheduled_ci_flow.mermaid diff --git a/images/ci/scheduled_ci_flow.mermaid b/images/ci/scheduled_ci_flow.mermaid index b892a7a6ec8b7..f982a8f9a6068 100644 --- a/images/ci/scheduled_ci_flow.mermaid +++ b/images/ci/scheduled_ci_flow.mermaid @@ -92,8 +92,6 @@ sequenceDiagram Note over CI Build: Build docs and Note over CI Build: Spell check docs - and - Note over CI Build: Backport packages and Note over CI Build: Tests and diff --git a/images/ci/scheduled_ci_flow.png b/images/ci/scheduled_ci_flow.png index 553ccfcceac59..e9bf21c0e3bd3 100644 Binary files a/images/ci/scheduled_ci_flow.png and b/images/ci/scheduled_ci_flow.png differ diff --git a/provider_packages/.gitignore b/provider_packages/.gitignore index 59b11608fc9d8..fe7af9fd806bd 100644 --- a/provider_packages/.gitignore +++ b/provider_packages/.gitignore @@ -3,4 +3,4 @@ setup.py CHANGELOG.txt README.md setup.cfg -/airflow/ +/airflow diff --git a/provider_packages/README.rst b/provider_packages/README.rst new file mode 100644 index 0000000000000..9761c5e8c11b5 --- /dev/null +++ b/provider_packages/README.rst @@ -0,0 +1,53 @@ + +.. Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + +.. http://www.apache.org/licenses/LICENSE-2.0 + +.. Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + + +Package ``apache-airflow-providers-ssh`` + +Release: ``1.0.0dev`` + + +`Secure Shell (SSH) `__ + + +Provider package +================ + +This is a provider package for ``ssh`` provider. 
All classes for this provider package +are in ``airflow.providers.ssh`` python package. + +You can find package information and changelog for the provider +in the `documentation `_. + + +Installation +============ + +You can install this package on top of an existing airflow 2.* installation via +``pip install apache-airflow-providers-ssh`` + +PIP requirements +================ + +============= ================== +PIP package Version required +============= ================== +``paramiko`` ``>=2.6.0`` +``pysftp`` ``>=0.2.9`` +``sshtunnel`` ``>=0.1.4,<0.2`` +============= ================== diff --git a/pylintrc b/pylintrc index cd9a06023bb56..c2253d7af1fe1 100644 --- a/pylintrc +++ b/pylintrc @@ -29,7 +29,7 @@ ignore-patterns= # Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the # number of processors available to use. -jobs=1 +jobs=0 # Control the amount of potential inferred values when inferring a single # object. This can help the performance when dealing with large functions or @@ -38,7 +38,7 @@ limit-inference-results=100 # List of plugins (as comma separated values of python modules names) to load, # usually to register additional checkers. -load-plugins=tests.airflow_pylint.do_not_use_asserts,tests.airflow_pylint.disable_checks_for_tests +#load-plugins=tests.airflow_pylint.do_not_use_asserts,tests.airflow_pylint.disable_checks_for_tests # Pickle collected data for later comparisons. persistent=yes diff --git a/pylintrc-tests b/pylintrc-tests new file mode 100644 index 0000000000000..8a7e56e699836 --- /dev/null +++ b/pylintrc-tests @@ -0,0 +1,602 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +[MASTER] + +# Add files or directories to the ignore list. They should be base names, not +# paths. +ignore=CVS + +# Add files or directories matching the regex patterns to the ignore list. The +# regex matches against base names, not paths. +ignore-patterns= + +# Python code to execute, usually for sys.path manipulation such as +# pygtk.require(). +#init-hook= + +# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the +# number of processors available to use. +jobs=0 + +# Control the amount of potential inferred values when inferring a single +# object. This can help the performance when dealing with large functions or +# complex, nested conditions. +limit-inference-results=100 + +# List of plugins (as comma separated values of python modules names) to load, +# usually to register additional checkers. +#load-plugins=tests.airflow_pylint.do_not_use_asserts,tests.airflow_pylint.disable_checks_for_tests + +# Pickle collected data for later comparisons. +persistent=yes + +# Specify a configuration file. 
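+# (This file is not discovered by pylint automatically; it has to be passed explicitly, +# for example: pylint --rcfile=pylintrc-tests tests/ +# which is, presumably, what the CI scripts and pre-commit hooks do.)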
+#rcfile= + +# When enabled, pylint would attempt to guess common misconfiguration and emit +# user-friendly hints instead of false-positive error messages. +suggestion-mode=yes + +# Allow loading of arbitrary C extensions. Extensions are imported into the +# active Python interpreter and may run arbitrary code. +unsafe-load-any-extension=no + + +[MESSAGES CONTROL] + +# Only show warnings with the listed confidence levels. Leave empty to show +# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED. +confidence= + +# Disable the message, report, category or checker with the given id(s). You +# can either give multiple identifiers separated by comma (,) or put this +# option multiple times (only on the command line, not in the configuration +# file where it should appear only once). You can also use "--disable=all" to +# disable everything first and then reenable specific checks. For example, if +# you want to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use "--disable=all --enable=classes +# --disable=W". +disable=print-statement, + parameter-unpacking, + unpacking-in-except, + old-raise-syntax, + backtick, + long-suffix, + old-ne-operator, + old-octal-literal, + import-star-module-level, + non-ascii-bytes-literal, + raw-checker-failed, + bad-inline-option, + locally-disabled, + file-ignored, + suppressed-message, + useless-suppression, + use-symbolic-message-instead, + apply-builtin, + basestring-builtin, + buffer-builtin, + cmp-builtin, + coerce-builtin, + execfile-builtin, + file-builtin, + long-builtin, + raw_input-builtin, + reduce-builtin, + standarderror-builtin, + unicode-builtin, + xrange-builtin, + coerce-method, + delslice-method, + getslice-method, + setslice-method, + no-absolute-import, + old-division, + dict-iter-method, + dict-view-method, + next-method-called, + metaclass-assignment, + indexing-exception, + raising-string, + reload-builtin, + oct-method, + hex-method, + nonzero-method, + cmp-method, + input-builtin, + round-builtin, + intern-builtin, + unichr-builtin, + map-builtin-not-iterating, + zip-builtin-not-iterating, + range-builtin-not-iterating, + filter-builtin-not-iterating, + using-cmp-argument, + eq-without-hash, + div-method, + idiv-method, + rdiv-method, + exception-message-attribute, + invalid-str-codec, + sys-max-int, + bad-python3-import, + next-method-defined, + xreadlines-attribute, + exception-escape, + comprehension-escape, + duplicate-code, # deemed unnecessary + abstract-method, # deemed unnecessary + keyword-arg-before-vararg, # deemed unnecessary + no-self-use, # http://pylint-messages.wikidot.com/messages:r0201 + no-else-return, # deemed unnecessary + no-else-raise, # deemed unnecessary + too-many-format-args, # Pylint fails on multiline string format + too-many-lines, # Pylint fails on too many lines and we have several cases of those + cell-var-from-loop, # Raises spurious errors + arguments-differ, # Doesn't always raise valid messages + import-error, # Requires installing Airflow environment in CI task which takes long, therefore ignored. Tests should fail anyways if deps are missing. Possibly un-ignore in the future if we ever use pre-built Docker images for CI. + fixme, # There should be a good reason for adding a TODO + pointless-statement, # Is raised on the bitshift operator. Could be disabled only on /example_dags after https://github.com/PyCQA/pylint/projects/1. 
+ ungrouped-imports, # Disabled to avoid conflict with isort import order rules, which is enabled in the project. + missing-module-docstring, + import-outside-toplevel, # We import outside toplevel to avoid cyclic imports + raise-missing-from, # We don't use raise...from + misplaced-comparison-constant, + # Those checks are disabled for tests only + missing-docstring, + no-self-use, + too-many-public-methods, + protected-access + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where +# it should appear only once). See also the "--disable" option for examples. +enable=c-extension-no-member + + +[REPORTS] + +# Python expression which should return a note less than 10 (10 is the highest +# note). You have access to the variables errors warning, statement which +# respectively contain the number of errors / warnings messages and the total +# number of statements analyzed. This is used by the global evaluation report +# (RP0004). +evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) + +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details. +#msg-template= + +# Set the output format. Available formats are text, parseable, colorized, json +# and msvs (visual studio). You can also give a reporter class, e.g. +# mypackage.mymodule.MyReporterClass. +output-format=text + +# Tells whether to display a full report or only the messages. +reports=no + +# Activate the evaluation score. +score=yes + + +[REFACTORING] + +# Maximum number of nested blocks for function / method body +max-nested-blocks=3 + +# Complete name of functions that never returns. When checking for +# inconsistent-return-statements if a never returning function is called then +# it will be considered as an explicit return statement and no message will be +# printed. +never-returning-functions=sys.exit + + +[LOGGING] + +# Format style used to check logging format string. `old` means using % +# formatting, while `new` is for `{}` formatting. +logging-format-style=old + +# Logging modules to check that the string format arguments are in logging +# function parameter format. +logging-modules=logging + + +[SPELLING] + +# Limits count of emitted suggestions for spelling mistakes. +max-spelling-suggestions=4 + +# Spelling dictionary name. Available dictionaries: none. To make it working +# install python-enchant package.. +spelling-dict= + +# List of comma separated words that should not be checked. +spelling-ignore-words= + +# A path to a file that contains private dictionary; one word per line. +spelling-private-dict-file= + +# Tells whether to store unknown words to indicated private dictionary in +# --spelling-private-dict-file option instead of raising a message. +spelling-store-unknown-words=no + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +notes=FIXME, + XXX, + TODO + + +[TYPECHECK] + +# List of decorators that produce context managers, such as +# contextlib.contextmanager. Add to this list to register other decorators that +# produce valid context managers. +contextmanager-decorators=contextlib.contextmanager + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E1101 when accessed. Python regular +# expressions are accepted. 
+generated-members= + +# Tells whether missing members accessed in mixin class should be ignored. A +# mixin class is detected if its name ends with "mixin" (case insensitive). +ignore-mixin-members=yes + +# Tells whether to warn about missing members when the owner of the attribute +# is inferred to be None. +ignore-none=yes + +# This flag controls whether pylint should warn about no-member and similar +# checks whenever an opaque object is returned when inferring. The inference +# can return multiple potential results while evaluating a Python object, but +# some branches might not be evaluated, which results in partial inference. In +# that case, it might be useful to still emit no-member and other checks for +# the rest of the inferred objects. +ignore-on-opaque-inference=yes + +# List of class names for which member attributes should not be checked (useful +# for classes with dynamically set attributes). This supports the use of +# qualified names. +ignored-classes=optparse.Values,thread._local,_thread._local,sqlalchemy.orm.scoping.scoped_session + +# List of module names for which member attributes should not be checked +# (useful for modules/projects where namespaces are manipulated during runtime +# and thus existing member attributes cannot be deduced by static analysis. It +# supports qualified module names, as well as Unix pattern matching. +ignored-modules=alembic.op,alembic.context + +# Show a hint with possible names when a member name was not found. The aspect +# of finding the hint is based on edit distance. +missing-member-hint=yes + +# The minimum edit distance a name should have in order to be considered a +# similar match for a missing member name. +missing-member-hint-distance=1 + +# The total number of similar names that should be taken in consideration when +# showing a hint for a missing member. +missing-member-max-choices=1 + + +[VARIABLES] + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid defining new builtins when possible. +additional-builtins= + +# Tells whether unused global variables should be treated as a violation. +allow-global-unused-variables=yes + +# List of strings which can identify a callback function by name. A callback +# name must start or end with one of those strings. +callbacks=cb_, + _cb + +# A regular expression matching the name of dummy variables (i.e. expected to +# not be used). +dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_|args|kwargs + +# Argument names that match this expression will be ignored. Default to name +# with leading underscore. +ignored-argument-names=_.*|^ignored_|^unused_|^kwargs|^args|^mock_.+ + +# Tells whether we should check for unused import in __init__ files. +init-import=no + +# List of qualified module names which can have objects that can redefine +# builtins. +redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io + + +[FORMAT] + +# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. +expected-line-ending-format= + +# Regexp for a line that is allowed to be longer than the limit. +ignore-long-lines=^\s*(# )?<?https?://\S+>?$ + +# Number of spaces of indent required inside a hanging or continued line. +indent-after-paren=4 + +# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 +# tab). +indent-string=' ' + +# Maximum number of characters on a single line. +max-line-length=110 + +# Maximum number of lines in a module.
+max-module-lines=1000 + +# List of optional constructs for which whitespace checking is disabled. `dict- +# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. +# `trailing-comma` allows a space between comma and closing bracket: (a, ). +# `empty-line` allows space-only lines. +no-space-check=trailing-comma, + dict-separator + +# Allow the body of a class to be on the same line as the declaration if body +# contains single statement. +single-line-class-stmt=no + +# Allow the body of an if to be on the same line as the test if there is no +# else. +single-line-if-stmt=no + + +[SIMILARITIES] + +# Ignore comments when computing similarities. +ignore-comments=yes + +# Ignore docstrings when computing similarities. +ignore-docstrings=yes + +# Ignore imports when computing similarities. +ignore-imports=no + +# Minimum lines number of a similarity. +min-similarity-lines=4 + + +[BASIC] + +# Naming style matching correct argument names. +#argument-naming-style=snake_case + +# Regular expression matching correct argument names. Overrides argument- +# naming-style. +# BasPH: Kept snake_case but allow for 1-char argument names. +argument-rgx=[a-z_][a-z0-9_]{0,30}$ + +# Naming style matching correct attribute names. +attr-naming-style=snake_case + +# Regular expression matching correct attribute names. Overrides attr-naming- +# style. +#attr-rgx= + +# Bad variable names which should always be refused, separated by a comma. +bad-names=toto, + tutu, + tata + +# Naming style matching correct class attribute names. +class-attribute-naming-style=any + +# Regular expression matching correct class attribute names. Overrides class- +# attribute-naming-style. +#class-attribute-rgx= + +# Naming style matching correct class names. +class-naming-style=PascalCase + +# Regular expression matching correct class names. Overrides class-naming- +# style. +#class-rgx= + +# Naming style matching correct constant names. +const-naming-style=any + +# Regular expression matching correct constant names. Overrides const-naming- +# style. +#const-rgx= + +# Minimum line length for functions/classes that require docstrings, shorter +# ones are exempt. +docstring-min-length=-1 + +# Naming style matching correct function names. +function-naming-style=snake_case + +# Regular expression matching correct function names. Overrides function- +# naming-style. +#function-rgx= + +# Good variable names which should always be accepted, separated by a comma. +good-names=e, + ex, + i, + j, + k, + n, + v, # Commonly used when iterating dict.items() + _, + ti, # Commonly used in Airflow as shorthand for taskinstance + op, # Commonly used in Airflow as shorthand for operator + dr, # Commonly used in Airflow as shorthand for dag run + f, # Commonly used as shorthand for file + db, # Commonly used as shorthand for database + df, # Commonly used as shorthand for DataFrame + cm, # Commonly used as shorthand for context manager + ds, # Used in Airflow templates + ts, # Used in Airflow templates + id, # Commonly used as shorthand for identifier + fd, # aka "file-descriptor" -- common in socket code + +# Include a hint for the correct naming format with invalid-name. +include-naming-hint=no + +# Naming style matching correct inline iteration names. +inlinevar-naming-style=any + +# Regular expression matching correct inline iteration names. Overrides +# inlinevar-naming-style. +#inlinevar-rgx= + +# Naming style matching correct method names. +method-naming-style=snake_case + +# Regular expression matching correct method names. 
Overrides method-naming- +# style. +#method-rgx= + +# Naming style matching correct module names. +module-naming-style=snake_case + +# Regular expression matching correct module names. Overrides module-naming-style. +# Default regex changes: +# - allow modules beginning with 0-9 (used in airflow/migrations) +# - allow up to 60 chars (airflow/migrations contains quite long names) +module-rgx=[a-z0-9_][a-z0-9_]{2,59}$ + +# Colon-delimited sets of names that determine each other's naming style when +# the name regexes allow several styles. +name-group= + +# Regular expression which should only match function or class names that do +# not require a docstring. +no-docstring-rgx=^_ + +# List of decorators that produce properties, such as abc.abstractproperty. Add +# to this list to register other decorators that produce valid properties. +# These decorators are taken in consideration only for invalid-name. +property-classes=abc.abstractproperty + +# Naming style matching correct variable names. +variable-naming-style=snake_case + +# Regular expression matching correct variable names. Overrides variable- +# naming-style. +#variable-rgx= + + +[IMPORTS] + +# Allow wildcard imports from modules that define __all__. +allow-wildcard-with-all=no + +# Analyse import fallback blocks. This can be used to support both Python 2 and +# 3 compatible code, which means that the block might have code that exists +# only in one or another interpreter, leading to false positives when analysed. +analyse-fallback-blocks=no + +# Deprecated modules which should not be used, separated by a comma. +deprecated-modules=optparse,tkinter.tix,imp + +# Create a graph of external dependencies in the given file (report RP0402 must +# not be disabled). +ext-import-graph= + +# Create a graph of every (i.e. internal and external) dependencies in the +# given file (report RP0402 must not be disabled). +import-graph= + +# Create a graph of internal dependencies in the given file (report RP0402 must +# not be disabled). +int-import-graph= + +# Force import order to recognize a module as part of the standard +# compatibility libraries. +known-standard-library= + +# Force import order to recognize a module as part of a third party library. +known-third-party=enchant + + +[CLASSES] + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods=__init__, + __new__, + setUp + +# List of member names, which should be excluded from the protected access +# warning. +exclude-protected=_asdict, + _fields, + _replace, + _source, + _make + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg=cls + +# List of valid names for the first argument in a metaclass class method. +valid-metaclass-classmethod-first-arg=cls + + +[DESIGN] + +# Maximum number of arguments for function / method. +# BasPH: choose 10 because this was 80% of the sorted list of number of arguments above 5 (Pylint default) +max-args=10 + +# Maximum number of attributes for a class (see R0902). +# BasPH: choose 15 because this was 80% of the sorted list of number of attributes above 7 (Pylint default) +max-attributes=15 + +# Maximum number of boolean expressions in an if statement. +max-bool-expr=5 + +# Maximum number of branch for function / method body. +# BasPH: choose 22 because this was 80% of the sorted list of number of attributes above 12 (Pylint default) +max-branches=22 + +# Maximum number of locals for function / method body. 
+# BasPH: choose 24 because this was 80% of the sorted list of number of locals above 15 (Pylint default) +max-locals=24 + +# Maximum number of parents for a class (see R0901). +max-parents=8 + +# Maximum number of public methods for a class (see R0904). +# BasPH: choose 27 because this was 50% of the sorted list of 30 number of public methods above 20 (Pylint default) +max-public-methods=27 + +# Maximum number of return / yield for function / method body. +max-returns=6 + +# Maximum number of statements in function / method body. +# BasPH: choose 69 because this was 80% of the sorted list of number of statements above 50 (Pylint default) +max-statements=69 + +# Minimum number of public methods for a class (see R0903). +min-public-methods=0 + + +[EXCEPTIONS] + +# Exceptions that will emit a warning when being caught. Defaults to +# "Exception". +overgeneral-exceptions=Exception diff --git a/scripts/ci/build_airflow/ci_build_airflow_package.sh b/scripts/ci/build_airflow/ci_build_airflow_package.sh index ba089f2b2e122..a3706ad9f53f6 100755 --- a/scripts/ci/build_airflow/ci_build_airflow_package.sh +++ b/scripts/ci/build_airflow/ci_build_airflow_package.sh @@ -21,5 +21,3 @@ build_airflow_packages::build_airflow_packages cd "${AIRFLOW_SOURCES}/dist" || exit 1 - -echo "Airflow packages are in dist folder" diff --git a/scripts/ci/constraints/ci_branch_constraints.sh b/scripts/ci/constraints/ci_branch_constraints.sh index 6be2d84bd185d..1f733d44b0b19 100755 --- a/scripts/ci/constraints/ci_branch_constraints.sh +++ b/scripts/ci/constraints/ci_branch_constraints.sh @@ -18,7 +18,9 @@ # shellcheck source=scripts/ci/libraries/_script_init.sh . "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh" -if [[ ${GITHUB_REF} == 'refs/heads/master' ]]; then +if [[ ${GITHUB_REF} == 'refs/heads/main' ]]; then + echo "::set-output name=branch::constraints-main" +elif [[ ${GITHUB_REF} == 'refs/heads/master' ]]; then echo "::set-output name=branch::constraints-master" elif [[ ${GITHUB_REF} == 'refs/heads/v1-10-test' ]]; then echo "::set-output name=branch::constraints-1-10" diff --git a/scripts/ci/constraints/ci_commit_constraints.sh b/scripts/ci/constraints/ci_commit_constraints.sh index 7c24dc5d757f7..7eda70f9d4d36 100755 --- a/scripts/ci/constraints/ci_commit_constraints.sh +++ b/scripts/ci/constraints/ci_commit_constraints.sh @@ -18,11 +18,11 @@ # shellcheck source=scripts/ci/libraries/_script_init.sh . "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh" -cp -v ./artifacts/constraints-*/constraints*.txt repo/ +cp -v ./files/constraints-*/constraints*.txt repo/ cd repo || exit 1 git config --local user.email "dev@airflow.apache.org" git config --local user.name "Automated GitHub Actions commit" -git diff --exit-code || git commit --all --message "Updating constraints. Build id:${CI_BUILD_ID} +git diff --color --exit-code || git commit --all --message "Updating constraints. Build id:${CI_BUILD_ID} This update in constraints is automatically committed by the CI 'constraints-push' step based on HEAD of '${CI_REF}' in '${CI_TARGET_REPO}' diff --git a/scripts/ci/constraints/ci_generate_all_constraints.sh b/scripts/ci/constraints/ci_generate_all_constraints.sh new file mode 100755 index 0000000000000..9a7a77e014600 --- /dev/null +++ b/scripts/ci/constraints/ci_generate_all_constraints.sh @@ -0,0 +1,50 @@ +#!/usr/bin/env bash +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +set -euo pipefail + + +# We cannot perform full initialization because it will be done later in the "single run" scripts, +# and some readonly variables are set there, so we only selectively reuse the parallel library we need. +LIBRARIES_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")/../libraries/" && pwd) +# shellcheck source=scripts/ci/libraries/_all_libs.sh +source "${LIBRARIES_DIR}/_all_libs.sh" + +initialization::set_output_color_variables + +export CHECK_IMAGE_FOR_REBUILD="false" +echo +echo "${COLOR_YELLOW}Skip rebuilding CI images. Assume the one we have is good!${COLOR_RESET}" +echo "${COLOR_YELLOW}You must run './breeze build-image --upgrade-to-newer-dependencies' for all Python versions before running this one!${COLOR_RESET}" +echo + +parallel::make_sure_gnu_parallel_is_installed + +parallel::make_sure_python_versions_are_specified + +echo +echo "${COLOR_BLUE}Generating all constraint files${COLOR_RESET}" +echo + +parallel::initialize_monitoring + +parallel::monitor_progress + +# shellcheck disable=SC2086 +parallel --results "${PARALLEL_MONITORED_DIR}" \ + "$( dirname "${BASH_SOURCE[0]}" )/ci_generate_constraints.sh" ::: \ + ${CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING} diff --git a/scripts/ci/constraints/ci_generate_constraints.sh b/scripts/ci/constraints/ci_generate_constraints.sh index 10a41076528ba..7e1cefa64de61 100755 --- a/scripts/ci/constraints/ci_generate_constraints.sh +++ b/scripts/ci/constraints/ci_generate_constraints.sh @@ -15,6 +15,14 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. +if [[ $1 == "" ]]; then + >&2 echo "Requires Python MAJOR.MINOR version as the first parameter" + exit 1 +fi + +export PYTHON_MAJOR_MINOR_VERSION=$1 +shift + # shellcheck source=scripts/ci/libraries/_script_init.sh .
"$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh" diff --git a/scripts/ci/docker-compose/_docker.env b/scripts/ci/docker-compose/_docker.env index 69f4547a803ff..3d239e0872a1e 100644 --- a/scripts/ci/docker-compose/_docker.env +++ b/scripts/ci/docker-compose/_docker.env @@ -17,7 +17,6 @@ AIRFLOW_CI_IMAGE AIRFLOW_EXTRAS BACKEND -BACKPORT_PACKAGES BREEZE CI CI_BUILD_ID @@ -39,6 +38,7 @@ HOST_OS HOST_HOME INIT_SCRIPT_FILE INSTALL_AIRFLOW_VERSION +GENERATE_CONSTRAINTS_MODE INSTALL_PROVIDERS_FROM_SOURCES INSTALL_PACKAGES_FROM_DIST ISSUE_ID @@ -56,6 +56,7 @@ RUN_INTEGRATION_TESTS RUN_SYSTEM_TESTS START_AIRFLOW TEST_TYPE +UPGRADE_TO_NEWER_DEPENDENCIES VERBOSE VERBOSE_COMMANDS VERSION_SUFFIX_FOR_PYPI diff --git a/scripts/ci/docker-compose/backend-mysql-port.yml b/scripts/ci/docker-compose/backend-mysql-port.yml new file mode 100644 index 0000000000000..495f5f726d999 --- /dev/null +++ b/scripts/ci/docker-compose/backend-mysql-port.yml @@ -0,0 +1,22 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +--- +version: "2.2" +services: + mysql: + ports: + - "${MYSQL_HOST_PORT}:3306" diff --git a/scripts/ci/docker-compose/backend-mysql.yml b/scripts/ci/docker-compose/backend-mysql.yml index 94b03ce4aa38c..6574dd7c02b2e 100644 --- a/scripts/ci/docker-compose/backend-mysql.yml +++ b/scripts/ci/docker-compose/backend-mysql.yml @@ -44,8 +44,5 @@ services: timeout: 10s retries: 5 restart: always - - ports: - - "${MYSQL_HOST_PORT}:3306" command: ['mysqld', '--character-set-server=utf8mb4', '--collation-server=utf8mb4_unicode_ci'] diff --git a/scripts/ci/docker-compose/backend-postgres-port.yml b/scripts/ci/docker-compose/backend-postgres-port.yml new file mode 100644 index 0000000000000..a7d78f9c72422 --- /dev/null +++ b/scripts/ci/docker-compose/backend-postgres-port.yml @@ -0,0 +1,22 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+--- +version: "2.2" +services: + postgres: + ports: + - "${POSTGRES_HOST_PORT}:5432" diff --git a/scripts/ci/docker-compose/backend-postgres.yml b/scripts/ci/docker-compose/backend-postgres.yml index be78339310871..df3615d6023e2 100644 --- a/scripts/ci/docker-compose/backend-postgres.yml +++ b/scripts/ci/docker-compose/backend-postgres.yml @@ -36,8 +36,6 @@ services: volumes: - /dev/urandom:/dev/random # Required to get non-blocking entropy source - postgres-db-volume:/var/lib/postgresql/data - ports: - - "${POSTGRES_HOST_PORT}:5432" healthcheck: test: ["CMD", "psql", "-h", "localhost", "-U", "postgres", "-c", "select 1", "airflow"] interval: 10s diff --git a/scripts/ci/docker-compose/backend-sqlite-port.yml b/scripts/ci/docker-compose/backend-sqlite-port.yml new file mode 100644 index 0000000000000..c7bbb7d7a1179 --- /dev/null +++ b/scripts/ci/docker-compose/backend-sqlite-port.yml @@ -0,0 +1,18 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +--- +version: "2.2" diff --git a/scripts/ci/docker-compose/integration-kerberos.yml b/scripts/ci/docker-compose/integration-kerberos.yml index 5223ee20a97b5..95fc8c9c79e2f 100644 --- a/scripts/ci/docker-compose/integration-kerberos.yml +++ b/scripts/ci/docker-compose/integration-kerberos.yml @@ -36,7 +36,7 @@ services: /opt/kerberos-utils/create_client.sh bob bob /root/kerberos-keytabs/airflow.keytab; /opt/kerberos-utils/create_service.sh krb5-machine-example-com airflow /root/kerberos-keytabs/airflow.keytab; - /opt/kerberos-utils/create_service.sh presto HTTP /root/kerberos-keytabs/presto.keytab; + /opt/kerberos-utils/create_service.sh trino HTTP /root/kerberos-keytabs/trino.keytab; healthcheck: test: |- python -c " @@ -79,4 +79,10 @@ volumes: networks: example.com: - external: true + name: example.com + driver: bridge + ipam: + config: + - subnet: 10.5.0.0/16 + ip_range: 10.5.0.0/16 + gateway: 10.5.0.254 diff --git a/scripts/ci/docker-compose/integration-redis.yml b/scripts/ci/docker-compose/integration-redis.yml index ab353d267ebe8..3cdf68caf18b9 100644 --- a/scripts/ci/docker-compose/integration-redis.yml +++ b/scripts/ci/docker-compose/integration-redis.yml @@ -21,7 +21,7 @@ services: image: redis:5.0.1 volumes: - /dev/urandom:/dev/random # Required to get non-blocking entropy source - - redis-db-volume:/data/presto + - redis-db-volume:/data/redis ports: - "${REDIS_HOST_PORT}:6379" healthcheck: diff --git a/scripts/ci/docker-compose/integration-statsd.yml b/scripts/ci/docker-compose/integration-statsd.yml new file mode 100644 index 0000000000000..4685392c6671f --- /dev/null +++ b/scripts/ci/docker-compose/integration-statsd.yml @@ -0,0 +1,40 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +--- +version: "2.2" +services: + statsd-exporter: + image: apache/airflow:airflow-statsd-exporter-2020.09.05-v0.17.0 + ports: + - "9125:9125" + - "9125:9125/udp" + - "29102:9102" + + grafana: + image: grafana/grafana + ports: + - "23000:3000" + + airflow: + environment: + - INTEGRATION_STATSD=true + - AIRFLOW__METRICS__STATSD_ON=True + - AIRFLOW__METRICS__STATSD_HOST=statsd-exporter + - AIRFLOW__METRICS__STATSD_PORT=9125 + depends_on: + - statsd-exporter + - grafana diff --git a/scripts/ci/docker-compose/integration-presto.yml b/scripts/ci/docker-compose/integration-trino.yml similarity index 81% rename from scripts/ci/docker-compose/integration-presto.yml rename to scripts/ci/docker-compose/integration-trino.yml index 7fce2069b31c9..3f420fb61ca17 100644 --- a/scripts/ci/docker-compose/integration-presto.yml +++ b/scripts/ci/docker-compose/integration-trino.yml @@ -17,10 +17,10 @@ --- version: "2.2" services: - presto: - image: apache/airflow:presto-2020.10.08 - container_name: presto - hostname: presto + trino: + image: apache/airflow:trino-2021.04.04 + container_name: trino + hostname: trino domainname: example.com networks: @@ -40,19 +40,19 @@ services: volumes: - /dev/urandom:/dev/random # Required to get non-blocking entropy source - ../dockerfiles/krb5-kdc-server/krb5.conf:/etc/krb5.conf:ro - - presto-db-volume:/data/presto - - kerberos-keytabs:/home/presto/kerberos-keytabs + - trino-db-volume:/data/trino + - kerberos-keytabs:/home/trino/kerberos-keytabs environment: - KRB5_CONFIG=/etc/krb5.conf - KRB5_TRACE=/dev/stderr - - KRB5_KTNAME=/home/presto/kerberos-keytabs/presto.keytab + - KRB5_KTNAME=/home/trino/kerberos-keytabs/trino.keytab airflow: environment: - - INTEGRATION_PRESTO=true + - INTEGRATION_TRINO=true depends_on: - presto: + trino: condition: service_healthy volumes: - presto-db-volume: + trino-db-volume: diff --git a/scripts/ci/docker-compose/local-all-sources.yml b/scripts/ci/docker-compose/local-all-sources.yml new file mode 100644 index 0000000000000..d584488c420ed --- /dev/null +++ b/scripts/ci/docker-compose/local-all-sources.yml @@ -0,0 +1,35 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. +--- +version: "2.2" +services: + airflow: + stdin_open: true # docker run -i + tty: true # docker run -t + # We need to mount files and directories individually because some files + # such as apache_airflow.egg-info should not be mounted from the host + # we only mount those files that it makes sense to edit while developing, + # or those that might be useful to see on the host as output of the + # tests (such as logs) + volumes: + - ../../../.bash_aliases:/root/.bash_aliases:cached + - ../../../.bash_history:/root/.bash_history:cached + - ../../../.inputrc:/root/.inputrc:cached + - ../../../tmp:/tmp:cached + - ../../../:/opt/airflow:cached + ports: + - "${WEBSERVER_HOST_PORT}:8080" diff --git a/scripts/ci/docker-compose/local.yml b/scripts/ci/docker-compose/local.yml index 641f93f172fc4..56a06f1a6d21e 100644 --- a/scripts/ci/docker-compose/local.yml +++ b/scripts/ci/docker-compose/local.yml @@ -47,6 +47,7 @@ services: - ../../../hooks:/opt/airflow/hooks:cached - ../../../logs:/root/airflow/logs:cached - ../../../pylintrc:/opt/airflow/pylintrc:cached + - ../../../pylintrc-tests:/opt/airflow/pylintrc-tests:cached - ../../../pyproject.toml:/opt/airflow/pyproject.toml:cached - ../../../pytest.ini:/opt/airflow/pytest.ini:cached - ../../../scripts:/opt/airflow/scripts:cached diff --git a/scripts/ci/dockerfiles/krb5-kdc-server/Dockerfile b/scripts/ci/dockerfiles/krb5-kdc-server/Dockerfile index 118b4db20624e..e3c906df3259f 100644 --- a/scripts/ci/dockerfiles/krb5-kdc-server/Dockerfile +++ b/scripts/ci/dockerfiles/krb5-kdc-server/Dockerfile @@ -24,7 +24,8 @@ FROM centos:7 WORKDIR /root/ # Dev stuff -RUN yum -y install curl wget +# hadolint ignore=DL3033 +RUN yum -y install curl wget && yum clean all # python RUN curl "https://bootstrap.pypa.io/get-pip.py" -o /tmp/get-pip.py && \ @@ -32,11 +33,12 @@ RUN curl "https://bootstrap.pypa.io/get-pip.py" -o /tmp/get-pip.py && \ rm /tmp/get-pip.py # supervisord -RUN pip install supervisor==3.3.3 && \ +RUN pip install --no-cache-dir supervisor==3.3.3 && \ mkdir -p /var/log/supervisord/ # kerberos server -RUN yum -y install ntp krb5-server krb5-libs +# hadolint ignore=DL3033 +RUN yum -y install ntp krb5-server krb5-libs && yum clean all # kerberos server configuration ENV KRB5_CONFIG=/etc/krb5.conf @@ -61,11 +63,11 @@ COPY supervisord.conf /etc/supervisord.conf COPY entrypoint.sh /entrypoint.sh ENTRYPOINT ["/entrypoint.sh"] -LABEL org.apache.airflow.component="krb5-kdc-server" -LABEL org.apache.airflow.krb5-kdc-server.core.version="krb5" -LABEL org.apache.airflow.airflow_bats.version="${AIRFLOW_KRB5KDCSERVER_VERSION}" -LABEL org.apache.airflow.commit_sha="${COMMIT_SHA}" -LABEL maintainer="Apache Airflow Community " +LABEL org.apache.airflow.component="krb5-kdc-server" \ + org.apache.airflow.krb5-kdc-server.core.version="krb5" \ + org.apache.airflow.airflow_bats.version="${AIRFLOW_KRB5KDCSERVER_VERSION}" \ + org.apache.airflow.commit_sha="${COMMIT_SHA}" \ + maintainer="Apache Airflow Community " # when container is starting CMD ["/usr/bin/supervisord", "-n", "-c", "/etc/supervisord.conf"] diff --git a/scripts/ci/dockerfiles/krb5-kdc-server/utils/create_service.sh b/scripts/ci/dockerfiles/krb5-kdc-server/utils/create_service.sh index 30161a3f6c5c7..c92aeab70f629 100755 --- a/scripts/ci/dockerfiles/krb5-kdc-server/utils/create_service.sh +++ b/scripts/ci/dockerfiles/krb5-kdc-server/utils/create_service.sh @@ -29,7 +29,7 @@ Usage: ${CMDNAME} Creates an account
for the service. The service name is combined with the domain to create a principal name. If your service is named -\"presto\" a principal \"presto.example.com\" will be created. +\"trino\", a principal \"trino.example.com\" will be created. The protocol can have any value, but it must be identical in the server and client configuration. For example: HTTP. diff --git a/scripts/ci/dockerfiles/presto/Dockerfile b/scripts/ci/dockerfiles/trino/Dockerfile similarity index 78% rename from scripts/ci/dockerfiles/presto/Dockerfile rename to scripts/ci/dockerfiles/trino/Dockerfile index 80ccbfd344527..080491f6a7f4e 100644 --- a/scripts/ci/dockerfiles/presto/Dockerfile +++ b/scripts/ci/dockerfiles/trino/Dockerfile @@ -14,8 +14,8 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -ARG PRESTO_VERSION="330" -FROM prestosql/presto:${PRESTO_VERSION} +ARG TRINO_VERSION="354" +FROM trinodb/trino:${TRINO_VERSION} # Obtain root privileges USER 0 @@ -23,16 +23,16 @@ USER 0 # Setup entrypoint COPY entrypoint.sh /entrypoint.sh ENTRYPOINT ["/entrypoint.sh"] -CMD ["/usr/lib/presto/bin/run-presto"] +CMD ["/usr/lib/trino/bin/run-trino"] # Expose HTTPS EXPOSE 7778 -LABEL org.apache.airflow.component="presto" -LABEL org.apache.airflow.presto.core.version="${PRESTO_VERSION}" -LABEL org.apache.airflow.airflow_bats.version="${AIRFLOW_PRESTO_VERSION}" +LABEL org.apache.airflow.component="trino" +LABEL org.apache.airflow.trino.core.version="${TRINO_VERSION}" +LABEL org.apache.airflow.airflow_trino.version="${AIRFLOW_TRINO_VERSION}" LABEL org.apache.airflow.commit_sha="${COMMIT_SHA}" LABEL maintainer="Apache Airflow Community " # Restore user -USER presto:presto +USER trino:trino diff --git a/scripts/ci/dockerfiles/presto/build_and_push.sh b/scripts/ci/dockerfiles/trino/build_and_push.sh similarity index 79% rename from scripts/ci/dockerfiles/presto/build_and_push.sh rename to scripts/ci/dockerfiles/trino/build_and_push.sh index d3cac47775b69..ea8a59d0742b0 100755 --- a/scripts/ci/dockerfiles/presto/build_and_push.sh +++ b/scripts/ci/dockerfiles/trino/build_and_push.sh @@ -21,24 +21,24 @@ DOCKERHUB_REPO=${DOCKERHUB_REPO:="airflow"} readonly DOCKERHUB_USER readonly DOCKERHUB_REPO -PRESTO_VERSION="330" -readonly PRESTO_VERSION +TRINO_VERSION="354" +readonly TRINO_VERSION -AIRFLOW_PRESTO_VERSION="2020.10.08" -readonly AIRFLOW_PRESTO_VERSION +AIRFLOW_TRINO_VERSION="2021.04.04" +readonly AIRFLOW_TRINO_VERSION COMMIT_SHA=$(git rev-parse HEAD) readonly COMMIT_SHA cd "$( dirname "${BASH_SOURCE[0]}" )" || exit 1 -TAG="${DOCKERHUB_USER}/${DOCKERHUB_REPO}:presto-${AIRFLOW_PRESTO_VERSION}" +TAG="${DOCKERHUB_USER}/${DOCKERHUB_REPO}:trino-${AIRFLOW_TRINO_VERSION}" readonly TAG docker build . \ --pull \ - --build-arg "PRESTO_VERSION=${PRESTO_VERSION}" \ - --build-arg "AIRFLOW_PRESTO_VERSION=${AIRFLOW_PRESTO_VERSION}" \ + --build-arg "TRINO_VERSION=${TRINO_VERSION}" \ + --build-arg "AIRFLOW_TRINO_VERSION=${AIRFLOW_TRINO_VERSION}" \ --build-arg "COMMIT_SHA=${COMMIT_SHA}" \ --tag "${TAG}" diff --git a/scripts/ci/dockerfiles/presto/entrypoint.sh b/scripts/ci/dockerfiles/trino/entrypoint.sh similarity index 73% rename from scripts/ci/dockerfiles/presto/entrypoint.sh rename to scripts/ci/dockerfiles/trino/entrypoint.sh index 9c8d1130beeb2..314cc5a8ee166 100755 --- a/scripts/ci/dockerfiles/presto/entrypoint.sh +++ b/scripts/ci/dockerfiles/trino/entrypoint.sh @@ -32,7 +32,7 @@ function check_service { RES=$?
set -e if [[ ${RES} == 0 ]]; then - echo "${COLOR_GREEN}OK. ${COLOR_RESET}" + echo "OK." break else echo -n "." @@ -58,27 +58,29 @@ function log() { echo -e "\u001b[32m[$(date +'%Y-%m-%dT%H:%M:%S%z')]: $*\u001b[0m" } -if [ -f /tmp/presto-initiaalized ]; then +if [ -f /tmp/trino-initialized ]; then exec /bin/sh -c "$@" fi -PRESTO_CONFIG_FILE="/usr/lib/presto/default/etc/config.properties" -JVM_CONFIG_FILE="/usr/lib/presto/default/etc/jvm.config" +TRINO_CONFIG_FILE="/etc/trino/config.properties" +JVM_CONFIG_FILE="/etc/trino/jvm.config" log "Generate self-signed SSL certificate" JKS_KEYSTORE_FILE=/tmp/ssl_keystore.jks -JKS_KEYSTORE_PASS=presto +JKS_KEYSTORE_PASS=trinodb +keytool -delete --alias "trino-ssl" -keystore "${JKS_KEYSTORE_FILE}" -storepass "${JKS_KEYSTORE_PASS}" || true + keytool \ -genkeypair \ - -alias "presto-ssl" \ + -alias "trino-ssl" \ -keyalg RSA \ -keystore "${JKS_KEYSTORE_FILE}" \ -validity 10000 \ -dname "cn=Unknown, ou=Unknown, o=Unknown, c=Unknown"\ -storepass "${JKS_KEYSTORE_PASS}" -log "Set up SSL in ${PRESTO_CONFIG_FILE}" -cat << EOF >> "${PRESTO_CONFIG_FILE}" +log "Set up SSL in ${TRINO_CONFIG_FILE}" +cat << EOF >> "${TRINO_CONFIG_FILE}" http-server.https.enabled=true http-server.https.port=7778 http-server.https.keystore.path=${JKS_KEYSTORE_FILE} @@ -86,9 +88,18 @@ http-server.https.keystore.key=${JKS_KEYSTORE_PASS} node.internal-address-source=FQDN EOF +log "Set up memory limits in ${TRINO_CONFIG_FILE}" +cat << EOF >> "${TRINO_CONFIG_FILE}" +memory.heap-headroom-per-node=128MB +query.max-memory-per-node=512MB +query.max-total-memory-per-node=512MB +EOF + +sed -i "s/Xmx.*$/Xmx640M/" "${JVM_CONFIG_FILE}" + if [[ -n "${KRB5_CONFIG=}" ]]; then - log "Set up Kerberos in ${PRESTO_CONFIG_FILE}" - cat << EOF >> "${PRESTO_CONFIG_FILE}" + log "Set up Kerberos in ${TRINO_CONFIG_FILE}" + cat << EOF >> "${TRINO_CONFIG_FILE}" http-server.https.enabled=true http-server.https.port=7778 http-server.https.keystore.path=${JKS_KEYSTORE_FILE} @@ -103,16 +114,18 @@ EOF EOF fi -log "Waiting for keytab:${KRB5_KTNAME}" -check_service "Keytab" "test -f ${KRB5_KTNAME}" 30 +if [[ -n "${KRB5_CONFIG=}" ]]; then + log "Waiting for keytab:${KRB5_KTNAME}" + check_service "Keytab" "test -f ${KRB5_KTNAME}" 30 +fi -touch /tmp/presto-initiaalized +touch /tmp/trino-initialized echo "Config: ${JVM_CONFIG_FILE}" cat "${JVM_CONFIG_FILE}" -echo "Config: ${PRESTO_CONFIG_FILE}" -cat "${PRESTO_CONFIG_FILE}" +echo "Config: ${TRINO_CONFIG_FILE}" +cat "${TRINO_CONFIG_FILE}" log "Executing cmd: ${*}" exec /bin/sh -c "${@}" diff --git a/scripts/ci/images/ci_build_dockerhub.sh b/scripts/ci/images/ci_build_dockerhub.sh index a115f9734a5f3..7a527827f69ea 100755 --- a/scripts/ci/images/ci_build_dockerhub.sh +++ b/scripts/ci/images/ci_build_dockerhub.sh @@ -111,11 +111,12 @@ else export INSTALL_PROVIDERS_FROM_SOURCES="false" export AIRFLOW_PRE_CACHED_PIP_PACKAGES="false" export DOCKER_CACHE="local" + export FORCE_PULL_BASE_PYTHON_IMAGE="true" # Name the image based on the TAG rather than based on the branch name export FORCE_AIRFLOW_PROD_BASE_TAG="${DOCKER_TAG}" export INSTALL_AIRFLOW_VERSION="${DOCKER_TAG%-python*}" export AIRFLOW_CONSTRAINTS_REFERENCE="constraints-${INSTALL_AIRFLOW_VERSION}" - + export AIRFLOW_CONSTRAINTS="constraints" # shellcheck source=scripts/ci/libraries/_script_init.sh . 
"$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh" echo diff --git a/scripts/ci/images/ci_prepare_ci_image_on_ci.sh b/scripts/ci/images/ci_prepare_ci_image_on_ci.sh index eb0d0c2af0470..ed76b482fb2f4 100755 --- a/scripts/ci/images/ci_prepare_ci_image_on_ci.sh +++ b/scripts/ci/images/ci_prepare_ci_image_on_ci.sh @@ -41,9 +41,9 @@ function build_ci_image_on_ci() { # first we pull base python image. We will need it to re-push it after master build # Becoming the new "latest" image for other builds build_images::wait_for_image_tag "${GITHUB_REGISTRY_PYTHON_BASE_IMAGE}" \ - "${python_tag_suffix}" "${PYTHON_BASE_IMAGE}" + "${python_tag_suffix}" "${AIRFLOW_PYTHON_BASE_IMAGE}" - # And then the base image + # And then the actual image build_images::wait_for_image_tag "${GITHUB_REGISTRY_AIRFLOW_CI_IMAGE}" \ ":${GITHUB_REGISTRY_PULL_IMAGE_TAG}" "${AIRFLOW_CI_IMAGE}" diff --git a/scripts/ci/images/ci_prepare_prod_image_on_ci.sh b/scripts/ci/images/ci_prepare_prod_image_on_ci.sh index 516cc06c89e0b..9b088e1486f72 100755 --- a/scripts/ci/images/ci_prepare_prod_image_on_ci.sh +++ b/scripts/ci/images/ci_prepare_prod_image_on_ci.sh @@ -33,9 +33,26 @@ function build_prod_images_on_ci() { build_images::prepare_prod_build if [[ ${USE_GITHUB_REGISTRY} == "true" && ${GITHUB_REGISTRY_WAIT_FOR_IMAGE} == "true" ]]; then + # Tries to wait for the images indefinitely + # skips further image checks - since we already have the target image + + local python_tag_suffix="" + if [[ ${GITHUB_REGISTRY_PULL_IMAGE_TAG} != "latest" ]]; then + python_tag_suffix="-${GITHUB_REGISTRY_PULL_IMAGE_TAG}" + fi + + if [[ "${WAIT_FOR_PYTHON_BASE_IMAGE=}" == "true" ]]; then + # first we pull base python image. We will need it to re-push it after master build + # Becoming the new "latest" image for other builds + build_images::wait_for_image_tag "${GITHUB_REGISTRY_PYTHON_BASE_IMAGE}" \ + "${python_tag_suffix}" "${AIRFLOW_PYTHON_BASE_IMAGE}" + fi + + # And then the actual image build_images::wait_for_image_tag "${GITHUB_REGISTRY_AIRFLOW_PROD_IMAGE}" \ ":${GITHUB_REGISTRY_PULL_IMAGE_TAG}" "${AIRFLOW_PROD_IMAGE}" + # And the prod build image if [[ "${WAIT_FOR_PROD_BUILD_IMAGE=}" == "true" ]]; then # If specified in variable - also waits for the build image build_images::wait_for_image_tag "${GITHUB_REGISTRY_AIRFLOW_PROD_BUILD_IMAGE}" \ diff --git a/scripts/ci/images/ci_run_prod_image_test.sh b/scripts/ci/images/ci_run_prod_image_test.sh new file mode 100755 index 0000000000000..3039eca88ca2d --- /dev/null +++ b/scripts/ci/images/ci_run_prod_image_test.sh @@ -0,0 +1,50 @@ +#!/usr/bin/env bash +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# shellcheck source=scripts/ci/libraries/_initialization.sh +. 
"$(dirname "${BASH_SOURCE[0]}")/../libraries/_initialization.sh" + +initialization::set_output_color_variables + +job_name=$1 +file=$2 + +set +e + +if [[ ${file} == *".sh" ]]; then + "${file}" + res=$? +elif [[ ${file} == *"Dockerfile" ]]; then + cd "$(dirname "${file}")" || exit 1 + docker build . --tag "${job_name}" + res=$? + docker rmi --force "${job_name}" +else + echo "Bad file ${file}. Should be either a Dockerfile or script" + exit 1 +fi +# Print status to status file +echo "${res}" >"${PARALLEL_JOB_STATUS}" + +echo +# print status to log +if [[ ${res} == "0" ]]; then + echo "${COLOR_GREEN}Extend PROD image test ${job_name} succeeded${COLOR_RESET}" +else + echo "${COLOR_RED}Extend PROD image test ${job_name} failed${COLOR_RESET}" +fi +echo diff --git a/scripts/ci/images/ci_test_examples_of_prod_image_building.sh b/scripts/ci/images/ci_test_examples_of_prod_image_building.sh new file mode 100755 index 0000000000000..7e045356be116 --- /dev/null +++ b/scripts/ci/images/ci_test_examples_of_prod_image_building.sh @@ -0,0 +1,91 @@ +#!/usr/bin/env bash +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# shellcheck source=scripts/ci/libraries/_script_init.sh +. "$(dirname "${BASH_SOURCE[0]}")/../libraries/_script_init.sh" + +SEMAPHORE_NAME="image_tests" +export SEMAPHORE_NAME + +DOCKER_EXAMPLES_DIR=${AIRFLOW_SOURCES}/docs/docker-stack/docker-examples/ +export DOCKER_EXAMPLES_DIR + +# Launches parallel building of images. Redirects output to log set the right directories +# $1 - name of the job +# $2 - bash file to execute in parallel +function run_image_test_job() { + local file=$1 + + local job_name=$2 + mkdir -p "${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}/${job_name}" + export JOB_LOG="${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}/${job_name}/stdout" + export PARALLEL_JOB_STATUS="${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}/${job_name}/status" + parallel --ungroup --bg --semaphore --semaphorename "${SEMAPHORE_NAME}" \ + --jobs "${MAX_PARALLEL_IMAGE_JOBS}" \ + "$(dirname "${BASH_SOURCE[0]}")/ci_run_prod_image_test.sh" "${job_name}" "${file}" >"${JOB_LOG}" 2>&1 +} + + +function test_images() { + if [[ ${CI=} == "true" ]]; then + echo + echo "Skipping the script builds on CI! " + echo "They take very long time to build." 
+ echo + else + local scripts_to_test + scripts_to_test=$(find "${DOCKER_EXAMPLES_DIR}" -type f -name '*.sh' ) + for file in ${scripts_to_test} + do + local job_name + job_name=$(basename "${file}") + run_image_test_job "${file}" "${job_name}" + done + fi + local dockerfiles_to_test + dockerfiles_to_test=$(find "${DOCKER_EXAMPLES_DIR}" -type f -name 'Dockerfile' ) + for file in ${dockerfiles_to_test} + do + local job_name + job_name="$(basename "$(dirname "${file}")")" + run_image_test_job "${file}" "${job_name}" + done + +} + +cd "${AIRFLOW_SOURCES}" || exit 1 + +docker_engine_resources::get_available_cpus_in_docker + +# Building at most 4 images in parallel helps to conserve docker image space +MAX_PARALLEL_IMAGE_JOBS=4 +export MAX_PARALLEL_IMAGE_JOBS + +parallel::make_sure_gnu_parallel_is_installed +parallel::kill_stale_semaphore_locks +parallel::initialize_monitoring + +start_end::group_start "Testing image building" + +parallel::monitor_progress + +test_images + +parallel --semaphore --semaphorename "${SEMAPHORE_NAME}" --wait +start_end::group_end + +parallel::print_job_summary_and_return_status_code diff --git a/scripts/ci/images/ci_verify_prod_image.sh b/scripts/ci/images/ci_verify_prod_image.sh deleted file mode 100755 index 89a393fe5269e..0000000000000 --- a/scripts/ci/images/ci_verify_prod_image.sh +++ /dev/null @@ -1,211 +0,0 @@ -#!/usr/bin/env bash -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -# shellcheck source=scripts/ci/libraries/_script_init.sh -. "$(dirname "${BASH_SOURCE[0]}")/../libraries/_script_init.sh" - -function run_command_in_image() { - docker run --rm \ - -e COLUMNS=180 \ - --entrypoint /bin/bash "${AIRFLOW_PROD_IMAGE}" \ - -c "${@}" -} - -FEATURES_OK="true" - -function check_feature() { - DESCRIPTION="${1}" - COMMAND=${2} - set +e - echo -n "Feature: ${DESCRIPTION} " - local output - output=$(run_command_in_image "${COMMAND}" 2>&1) - local res=$?
- if [[ ${res} == "0" ]]; then - echo "${COLOR_GREEN}OK${COLOR_RESET}" - else - echo "${COLOR_RED}NOK${COLOR_RESET}" - echo "${COLOR_BLUE}========================= OUTPUT start ============================${COLOR_RESET}" - echo "${output}" - echo "${COLOR_BLUE}========================= OUTPUT end ===========================${COLOR_RESET}" - FEATURES_OK="false" - fi - set -e -} - -function verify_prod_image_has_airflow_and_providers() { - start_end::group_start "Verify prod image: ${AIRFLOW_PROD_IMAGE}" - echo - echo "Checking if Providers are installed" - echo - - all_providers_installed_in_image=$(run_command_in_image "airflow providers list --output table") - - echo - echo "Installed providers:" - echo - echo "${all_providers_installed_in_image}" - echo - local error="false" - for provider in "${INSTALLED_PROVIDERS[@]}"; do - echo -n "Verifying if provider ${provider} installed: " - if [[ ${all_providers_installed_in_image} == *"apache-airflow-providers-${provider//./-}"* ]]; then - echo "${COLOR_GREEN}OK${COLOR_RESET}" - else - echo "${COLOR_RED}NOK${COLOR_RESET}" - error="true" - fi - done - if [[ ${error} == "true" ]]; then - echo - echo "${COLOR_RED}ERROR: Some expected providers are not installed!${COLOR_RESET}" - echo - exit 1 - else - echo - echo "${COLOR_GREEN}OK. All expected providers installed!${COLOR_RESET}" - echo - fi - start_end::group_end -} - -function verify_prod_image_dependencies() { - start_end::group_start "Checking if Airflow dependencies are non-conflicting in ${AIRFLOW_PROD_IMAGE} image." - - set +e - run_command_in_image 'pip check' - local res=$? - if [[ ${res} != "0" ]]; then - echo "${COLOR_RED}ERROR: ^^^ Some dependencies are conflicting. See instructions below on how to deal with it. ${COLOR_RESET}" - echo - build_images::inform_about_pip_check "--production " - exit ${res} - else - echo - echo "${COLOR_GREEN}OK. The ${AIRFLOW_PROD_IMAGE} image dependencies are consistent. 
${COLOR_RESET}" - echo - fi - set -e - start_end::group_end -} - -GOOGLE_IMPORTS=( - 'OpenSSL' - 'google.ads' - 'googleapiclient' - 'google.auth' - 'google_auth_httplib2' - 'google.cloud.automl' - 'google.cloud.bigquery_datatransfer' - 'google.cloud.bigtable' - 'google.cloud.container' - 'google.cloud.datacatalog' - 'google.cloud.dataproc' - 'google.cloud.dlp' - 'google.cloud.kms' - 'google.cloud.language' - 'google.cloud.logging' - 'google.cloud.memcache' - 'google.cloud.monitoring' - 'google.cloud.oslogin' - 'google.cloud.pubsub' - 'google.cloud.redis' - 'google.cloud.secretmanager' - 'google.cloud.spanner' - 'google.cloud.speech' - 'google.cloud.storage' - 'google.cloud.tasks' - 'google.cloud.texttospeech' - 'google.cloud.translate' - 'google.cloud.videointelligence' - 'google.cloud.vision' -) - -AZURE_IMPORTS=( - 'azure.batch' - 'azure.cosmos' - 'azure.datalake.store' - 'azure.identity' - 'azure.keyvault' - 'azure.kusto.data' - 'azure.mgmt.containerinstance' - 'azure.mgmt.datalake.store' - 'azure.mgmt.resource' - 'azure.storage' -) - -function verify_production_image_features() { - start_end::group_start "Verify prod image features: ${AIRFLOW_PROD_IMAGE}" - - check_feature "Import: async" "python -c 'import gevent, eventlet, greenlet'" - check_feature "Import: amazon" "python -c 'import boto3, botocore, watchtower'" - check_feature "Import: celery" "python -c 'import celery, flower, vine'" - check_feature "Import: cncf.kubernetes" "python -c 'import kubernetes, cryptography'" - check_feature "Import: docker" "python -c 'import docker'" - check_feature "Import: dask" "python -c 'import cloudpickle, distributed'" - check_feature "Import: elasticsearch" "python -c 'import elasticsearch,es.elastic, elasticsearch_dsl'" - check_feature "Import: grpc" "python -c 'import grpc, google.auth, google_auth_httplib2'" - check_feature "Import: hashicorp" "python -c 'import hvac'" - check_feature "Import: ldap" "python -c 'import ldap'" - for google_import in "${GOOGLE_IMPORTS[@]}" - do - check_feature "Import google: ${google_import}" "python -c 'import ${google_import}'" - done - for azure_import in "${AZURE_IMPORTS[@]}" - do - check_feature "Import azure: ${azure_import}" "python -c 'import ${azure_import}'" - done - check_feature "Import: mysql" "python -c 'import mysql'" - check_feature "Import: postgres" "python -c 'import psycopg2'" - check_feature "Import: redis" "python -c 'import redis'" - check_feature "Import: sendgrid" "python -c 'import sendgrid'" - check_feature "Import: sftp/ssh" "python -c 'import paramiko, pysftp, sshtunnel'" - check_feature "Import: slack" "python -c 'import slack'" - check_feature "Import: statsd" "python -c 'import statsd'" - check_feature "Import: virtualenv" "python -c 'import virtualenv'" - - if [[ ${FEATURES_OK} == "true" ]]; then - echo - echo "${COLOR_GREEN}OK. The ${AIRFLOW_PROD_IMAGE} features are all OK. 
${COLOR_RESET}" - echo - else - echo - echo "${COLOR_RED}ERROR: Some features were not ok!${COLOR_RESET}" - echo - exit 1 - fi - start_end::group_end -} - - -function pull_prod_image() { - local image_name_with_tag="${GITHUB_REGISTRY_AIRFLOW_PROD_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}" - start_end::group_start "Pulling the ${image_name_with_tag} image and tagging with ${AIRFLOW_PROD_IMAGE}" - - push_pull_remove_images::pull_image_github_dockerhub "${AIRFLOW_PROD_IMAGE}" "${image_name_with_tag}" - start_end::group_end -} - -build_images::prepare_prod_build - -pull_prod_image - -verify_prod_image_has_airflow_and_providers - -verify_production_image_features - -verify_prod_image_dependencies diff --git a/scripts/ci/images/ci_wait_for_and_verify_all_ci_images.sh b/scripts/ci/images/ci_wait_for_and_verify_all_ci_images.sh new file mode 100755 index 0000000000000..4255374309fbe --- /dev/null +++ b/scripts/ci/images/ci_wait_for_and_verify_all_ci_images.sh @@ -0,0 +1,44 @@ +#!/usr/bin/env bash +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +set -euo pipefail + +# We cannot perform full initialization because it will be done later in the "single run" scripts +# And some readonly variables are set there, therefore we only selectively reuse parallel lib needed +LIBRARIES_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")/../libraries/" && pwd) +# shellcheck source=scripts/ci/libraries/_all_libs.sh +source "${LIBRARIES_DIR}/_all_libs.sh" + +initialization::set_output_color_variables + +parallel::make_sure_gnu_parallel_is_installed + +parallel::make_sure_python_versions_are_specified + +echo +echo "${COLOR_BLUE}Waiting for all CI images to appear${COLOR_RESET}" +echo + + +parallel::initialize_monitoring + +parallel::monitor_progress + +# shellcheck disable=SC2086 +parallel --results "${PARALLEL_MONITORED_DIR}" \ + "$( dirname "${BASH_SOURCE[0]}" )/ci_wait_for_and_verify_ci_image.sh" ::: \ + ${CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING} diff --git a/scripts/ci/images/ci_wait_for_and_verify_all_prod_images.sh b/scripts/ci/images/ci_wait_for_and_verify_all_prod_images.sh new file mode 100755 index 0000000000000..08ed54b323da2 --- /dev/null +++ b/scripts/ci/images/ci_wait_for_and_verify_all_prod_images.sh @@ -0,0 +1,43 @@ +#!/usr/bin/env bash +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +set -euo pipefail + +# We cannot perform full initialization because it will be done later in the "single run" scripts +# And some readonly variables are set there, therefore we only selectively reuse parallel lib needed +LIBRARIES_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")/../libraries/" && pwd) +# shellcheck source=scripts/ci/libraries/_all_libs.sh +source "${LIBRARIES_DIR}/_all_libs.sh" + +initialization::set_output_color_variables + +parallel::make_sure_gnu_parallel_is_installed + +parallel::make_sure_python_versions_are_specified + +echo +echo "${COLOR_BLUE}Waiting for all PROD images to appear${COLOR_RESET}" +echo + +parallel::initialize_monitoring + +parallel::monitor_progress + +# shellcheck disable=SC2086 +parallel --results "${PARALLEL_MONITORED_DIR}" \ + "$( dirname "${BASH_SOURCE[0]}" )/ci_wait_for_and_verify_prod_image.sh" ::: \ + ${CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING} diff --git a/scripts/ci/images/ci_verify_ci_image.sh b/scripts/ci/images/ci_wait_for_and_verify_ci_image.sh similarity index 60% rename from scripts/ci/images/ci_verify_ci_image.sh rename to scripts/ci/images/ci_wait_for_and_verify_ci_image.sh index ee624338f6694..29daca74255f4 100755 --- a/scripts/ci/images/ci_verify_ci_image.sh +++ b/scripts/ci/images/ci_wait_for_and_verify_ci_image.sh @@ -15,40 +15,44 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. + +if [[ $1 == "" ]]; then + >&2 echo "Requires python MAJOR/MINOR version as first parameter" + exit 1 +fi + +export PYTHON_MAJOR_MINOR_VERSION=$1 +shift + + # shellcheck source=scripts/ci/libraries/_script_init.sh -. "$(dirname "${BASH_SOURCE[0]}")/../libraries/_script_init.sh" - -function verify_ci_image_dependencies() { - start_end::group_start "Checking if Airflow dependencies are non-conflicting in ${AIRFLOW_CI_IMAGE} image." - set +e - docker run --rm --entrypoint /bin/bash "${AIRFLOW_CI_IMAGE}" -c 'pip check' - local res=$? - if [[ ${res} != "0" ]]; then - echo "${COLOR_RED}ERROR: ^^^ Some dependencies are conflicting. See instructions below on how to deal with it. ${COLOR_RESET}" - echo - build_images::inform_about_pip_check "" - else - echo - echo "${COLOR_GREEN}OK. The ${AIRFLOW_PROD_IMAGE} image dependencies are consistent. ${COLOR_RESET}" - echo - fi - set -e - start_end::group_end - exit ${res} -} +. 
"$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh" function pull_ci_image() { local image_name_with_tag="${GITHUB_REGISTRY_AIRFLOW_CI_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}" start_end::group_start "Pulling ${image_name_with_tag} image" - push_pull_remove_images::pull_image_github_dockerhub "${AIRFLOW_CI_IMAGE}" "${image_name_with_tag}" start_end::group_end } +push_pull_remove_images::check_if_github_registry_wait_for_image_enabled + +start_end::group_start "Configure Docker Registry" +build_image::configure_docker_registry +start_end::group_end + +export AIRFLOW_CI_IMAGE_NAME="${BRANCH_NAME}-python${PYTHON_MAJOR_MINOR_VERSION}-ci" + +start_end::group_start "Waiting for ${AIRFLOW_CI_IMAGE_NAME} image to appear" + +push_pull_remove_images::wait_for_github_registry_image \ + "${AIRFLOW_CI_IMAGE_NAME}${GITHUB_REGISTRY_IMAGE_SUFFIX}" "${GITHUB_REGISTRY_PULL_IMAGE_TAG}" build_images::prepare_ci_build pull_ci_image -verify_ci_image_dependencies +verify_image::verify_ci_image "${AIRFLOW_CI_IMAGE}" + +start_end::group_end diff --git a/scripts/ci/images/ci_wait_for_prod_image.sh b/scripts/ci/images/ci_wait_for_and_verify_prod_image.sh similarity index 65% rename from scripts/ci/images/ci_wait_for_prod_image.sh rename to scripts/ci/images/ci_wait_for_and_verify_prod_image.sh index 84cf48125fc78..84c310e73d4d3 100755 --- a/scripts/ci/images/ci_wait_for_prod_image.sh +++ b/scripts/ci/images/ci_wait_for_and_verify_prod_image.sh @@ -15,17 +15,38 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. + +if [[ $1 == "" ]]; then + >&2 echo "Requires python MAJOR/MINOR version as first parameter" + exit 1 +fi + +export PYTHON_MAJOR_MINOR_VERSION=$1 +shift + + # shellcheck source=scripts/ci/libraries/_script_init.sh . 
"$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh" push_pull_remove_images::check_if_github_registry_wait_for_image_enabled -build_image::configure_github_docker_registry +start_end::group_start "Configure Docker Registry" +build_image::configure_docker_registry +start_end::group_end export AIRFLOW_PROD_IMAGE_NAME="${BRANCH_NAME}-python${PYTHON_MAJOR_MINOR_VERSION}" + start_end::group_start "Waiting for ${AIRFLOW_PROD_IMAGE_NAME} image to appear" push_pull_remove_images::wait_for_github_registry_image \ "${AIRFLOW_PROD_IMAGE_NAME}${GITHUB_REGISTRY_IMAGE_SUFFIX}" "${GITHUB_REGISTRY_PULL_IMAGE_TAG}" +start_end::group_end +start_end::group_start "Pulling the PROD Image" +build_images::prepare_prod_build +image_name_with_tag="${GITHUB_REGISTRY_AIRFLOW_PROD_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}" +verbosity::print_info "Pulling the ${image_name_with_tag} image and tagging with ${AIRFLOW_PROD_IMAGE}" +push_pull_remove_images::pull_image_github_dockerhub "${AIRFLOW_PROD_IMAGE}" "${image_name_with_tag}" start_end::group_end + +verify_image::verify_prod_image "${AIRFLOW_PROD_IMAGE}" diff --git a/scripts/ci/kubernetes/ci_run_kubernetes_tests.sh b/scripts/ci/kubernetes/ci_run_kubernetes_tests.sh index 7713132ef90be..fbcf66bc89f6b 100755 --- a/scripts/ci/kubernetes/ci_run_kubernetes_tests.sh +++ b/scripts/ci/kubernetes/ci_run_kubernetes_tests.sh @@ -62,7 +62,7 @@ function parse_tests_to_run() { "--durations=100" "--cov=airflow/" "--cov-config=.coveragerc" - "--cov-report=xml:files/coverage.xml" + "--cov-report=xml:files/coverage=${KIND_CLUSTER_NAME}.xml" "--color=yes" "--maxfail=50" "--pythonwarnings=ignore::DeprecationWarning" @@ -73,12 +73,12 @@ function parse_tests_to_run() { } function create_virtualenv() { - start_end::group_start "Creating virtualenv" HOST_PYTHON_VERSION=$(python3 -c 'import sys; print(f"{sys.version_info[0]}.{sys.version_info[1]}")') readonly HOST_PYTHON_VERSION - local virtualenv_path="${BUILD_CACHE_DIR}/.kubernetes_venv_${HOST_PYTHON_VERSION}" + local virtualenv_path="${BUILD_CACHE_DIR}/.kubernetes_venv/${KIND_CLUSTER_NAME}_host_python_${HOST_PYTHON_VERSION}" + mkdir -pv "${BUILD_CACHE_DIR}/.kubernetes_venv/" if [[ ! 
-d ${virtualenv_path} ]]; then echo echo "Creating virtualenv at ${virtualenv_path}" @@ -91,18 +91,14 @@ function create_virtualenv() { pip install --upgrade "pip==${AIRFLOW_PIP_VERSION}" "wheel==${WHEEL_VERSION}" pip install pytest freezegun pytest-cov \ - --constraint "https://raw.githubusercontent.com/apache/airflow/${DEFAULT_CONSTRAINTS_BRANCH}/constraints-${HOST_PYTHON_VERSION}.txt" + --constraint "https://raw.githubusercontent.com/${CONSTRAINTS_GITHUB_REPOSITORY}/${DEFAULT_CONSTRAINTS_BRANCH}/constraints-${HOST_PYTHON_VERSION}.txt" pip install -e ".[kubernetes]" \ - --constraint "https://raw.githubusercontent.com/apache/airflow/${DEFAULT_CONSTRAINTS_BRANCH}/constraints-${HOST_PYTHON_VERSION}.txt" - - start_end::group_end + --constraint "https://raw.githubusercontent.com/${CONSTRAINTS_GITHUB_REPOSITORY}/${DEFAULT_CONSTRAINTS_BRANCH}/constraints-${HOST_PYTHON_VERSION}.txt" } function run_tests() { - start_end::group_start "Running K8S tests" pytest "${pytest_args[@]}" "${tests_to_run[@]}" - start_end::group_end } cd "${AIRFLOW_SOURCES}" || exit 1 diff --git a/scripts/ci/kubernetes/ci_setup_cluster_and_deploy_airflow_to_kubernetes.sh b/scripts/ci/kubernetes/ci_setup_cluster_and_deploy_airflow_to_kubernetes.sh index ec493f81d091b..1e0fa36045dc1 100755 --- a/scripts/ci/kubernetes/ci_setup_cluster_and_deploy_airflow_to_kubernetes.sh +++ b/scripts/ci/kubernetes/ci_setup_cluster_and_deploy_airflow_to_kubernetes.sh @@ -15,9 +15,11 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. + +export SKIP_BUILDING_PROD_IMAGE="true" + # shellcheck source=scripts/ci/libraries/_script_init.sh . "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh" -set -euo pipefail traps::add_trap "kind::dump_kind_logs" EXIT HUP INT TERM @@ -25,7 +27,7 @@ kind::make_sure_kubernetes_tools_are_installed kind::get_kind_cluster_name kind::perform_kind_cluster_operation "start" build_images::prepare_prod_build -build_images::build_prod_images_with_group +build_images::build_prod_images kind::build_image_for_kubernetes_tests kind::load_image_to_kind_cluster kind::deploy_airflow_with_helm diff --git a/scripts/ci/kubernetes/ci_setup_cluster_and_run_kubernetes_tests_single_job.sh b/scripts/ci/kubernetes/ci_setup_cluster_and_run_kubernetes_tests_single_job.sh new file mode 100755 index 0000000000000..9b0d86f3ed3e2 --- /dev/null +++ b/scripts/ci/kubernetes/ci_setup_cluster_and_run_kubernetes_tests_single_job.sh @@ -0,0 +1,54 @@ +#!/usr/bin/env bash +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
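The single-job script below reports its result through the PARALLEL_JOB_STATUS file rather than through its own exit code, so the GNU parallel semaphore can launch it in the background and collect results later. A minimal sketch of that status-file contract (run_some_test is a placeholder; the real aggregation lives in _parallel.sh and may differ in detail):

    # each job writes its exit code into its own status file
    export PARALLEL_JOB_STATUS="/tmp/monitored/kubernetes-tests/job-1/status"
    ( run_some_test; echo "$?" > "${PARALLEL_JOB_STATUS}" )
    # the summary step later treats anything other than "0" as a failed job
    if [[ "$(cat "${PARALLEL_JOB_STATUS}")" == "0" ]]; then
        echo "job-1 succeeded"
    else
        echo "job-1 failed"
    fi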
+if [[ $1 == "" ]]; then + >&2 echo "Requires Kubernetes version as first parameter" + exit 1 +fi +export KUBERNETES_VERSION=$1 +shift + + +if [[ $1 == "" ]]; then + >&2 echo "Requires Python Major/Minor version as second parameter" + exit 1 +fi +export PYTHON_MAJOR_MINOR_VERSION=$1 +shift + +# Requires PARALLEL_JOB_STATUS + +if [[ -z "${PARALLEL_JOB_STATUS=}" ]]; then + echo "Needs PARALLEL_JOB_STATUS to be set" + exit 1 +fi + +echo +echo "KUBERNETES_VERSION: ${KUBERNETES_VERSION}" +echo "PYTHON_MAJOR_MINOR_VERSION: ${PYTHON_MAJOR_MINOR_VERSION}" +echo + +# shellcheck source=scripts/ci/libraries/_script_init.sh +. "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh" + +kind::get_kind_cluster_name +trap 'echo $? > "${PARALLEL_JOB_STATUS}"; kind::perform_kind_cluster_operation "stop"' EXIT HUP INT TERM + +"$( dirname "${BASH_SOURCE[0]}" )/ci_setup_cluster_and_deploy_airflow_to_kubernetes.sh" + +export CLUSTER_FORWARDED_PORT="${FORWARDED_PORT_NUMBER}" +"$( dirname "${BASH_SOURCE[0]}" )/ci_run_kubernetes_tests.sh" diff --git a/scripts/ci/kubernetes/ci_setup_clusters_and_run_kubernetes_tests_in_parallel.sh b/scripts/ci/kubernetes/ci_setup_clusters_and_run_kubernetes_tests_in_parallel.sh new file mode 100755 index 0000000000000..88aa2cd1498b1 --- /dev/null +++ b/scripts/ci/kubernetes/ci_setup_clusters_and_run_kubernetes_tests_in_parallel.sh @@ -0,0 +1,106 @@ +#!/usr/bin/env bash +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +set -euo pipefail + +# We cannot perform full initialization because it will be done later in the "single run" scripts +# And some readonly variables are set there, therefore we only selectively reuse parallel lib needed +LIBRARIES_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")/../libraries/" && pwd) +# shellcheck source=scripts/ci/libraries/_all_libs.sh +source "${LIBRARIES_DIR}/_all_libs.sh" +export SEMAPHORE_NAME="kubernetes-tests" + +function get_maximum_parallel_k8s_jobs() { + docker_engine_resources::get_available_cpus_in_docker + if [[ -n ${RUNS_ON=} && ${RUNS_ON} != *"self-hosted"* ]]; then + echo + echo "${COLOR_YELLOW}This is a GitHub public runner - for now we are forcing max parallel K8S test jobs to 1 for those${COLOR_RESET}" + echo + export MAX_PARALLEL_K8S_JOBS="1" + else + if [[ ${MAX_PARALLEL_K8S_JOBS=} != "" ]]; then + echo + echo "${COLOR_YELLOW}Maximum parallel k8s jobs forced via MAX_PARALLEL_K8S_JOBS = ${MAX_PARALLEL_K8S_JOBS}${COLOR_RESET}" + echo + else + MAX_PARALLEL_K8S_JOBS=${CPUS_AVAILABLE_FOR_DOCKER} + echo + echo "${COLOR_YELLOW}Maximum parallel k8s jobs set to number of CPUs available for Docker = ${MAX_PARALLEL_K8S_JOBS}${COLOR_RESET}" + echo + fi + fi + export MAX_PARALLEL_K8S_JOBS +} + +# Launches a single Kubernetes test job in parallel. Redirects output to the job's log file and sets up the status directories. +# $1 - kubernetes version +# $2 - python version +function run_kubernetes_test() { + local kubernetes_version=$1 + local python_version=$2 + local job="Cluster-${kubernetes_version}-python-${python_version}" + + mkdir -p "${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}/${job}" + export JOB_LOG="${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}/${job}/stdout" + export PARALLEL_JOB_STATUS="${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}/${job}/status" + echo "Starting K8S tests for kubernetes version ${kubernetes_version}, python version: ${python_version}" + parallel --ungroup --bg --semaphore --semaphorename "${SEMAPHORE_NAME}" \ + --jobs "${MAX_PARALLEL_K8S_JOBS}" \ + "$(dirname "${BASH_SOURCE[0]}")/ci_setup_cluster_and_run_kubernetes_tests_single_job.sh" \ + "${kubernetes_version}" "${python_version}" >"${JOB_LOG}" 2>&1 +} + +function run_k8s_tests_in_parallel() { + parallel::cleanup_runner + start_end::group_start "Monitoring k8s tests" + parallel::initialize_monitoring + parallel::monitor_progress + + # In case there are more kubernetes versions than python versions, we can reuse python versions, so we add the list twice here + local repeated_python_versions + # shellcheck disable=SC2206 + repeated_python_versions=(${CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING} ${CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING}) + local index=0 + for kubernetes_version in ${CURRENT_KUBERNETES_VERSIONS_AS_STRING} + do + index=$((index + 1)) + python_version=${repeated_python_versions[${index}]} + FORWARDED_PORT_NUMBER=$((38080 + index)) + export FORWARDED_PORT_NUMBER + API_SERVER_PORT=$((19090 + index)) + export API_SERVER_PORT + run_kubernetes_test "${kubernetes_version}" "${python_version}" "${@}" + done + set +e + parallel --semaphore --semaphorename "${SEMAPHORE_NAME}" --wait + parallel::kill_monitor + set -e + start_end::group_end +} + +initialization::set_output_color_variables + +parallel::make_sure_gnu_parallel_is_installed +parallel::make_sure_python_versions_are_specified +parallel::make_sure_kubernetes_versions_are_specified + +get_maximum_parallel_k8s_jobs + +run_k8s_tests_in_parallel "${@}" + +# this will exit with error code in case some of the tests failed +parallel::print_job_summary_and_return_status_code diff --git a/scripts/ci/kubernetes/kind-cluster-conf.yaml b/scripts/ci/kubernetes/kind-cluster-conf.yaml index df608208de4d6..4e891f80ddf62 100644 --- a/scripts/ci/kubernetes/kind-cluster-conf.yaml +++ b/scripts/ci/kubernetes/kind-cluster-conf.yaml @@ -16,23 +16,16 @@ # under the License. --- kind: Cluster -apiVersion: kind.sigs.k8s.io/v1alpha3 +apiVersion: kind.x-k8s.io/v1alpha4 networking: - apiServerAddress: 0.0.0.0 - apiServerPort: 19090 + ipFamily: ipv4 + apiServerAddress: "127.0.0.1" + apiServerPort: {{API_SERVER_PORT}} nodes: - role: control-plane - role: worker extraPortMappings: - containerPort: 30007 - hostPort: 8080 - listenAddress: "0.0.0.0" + hostPort: {{FORWARDED_PORT_NUMBER}} + listenAddress: "127.0.0.1" protocol: TCP -kubeadmConfigPatchesJson6902: - - group: kubeadm.k8s.io - version: v1beta2 - kind: ClusterConfiguration - patch: | - - op: add - path: /apiServer/certSANs/- - value: docker diff --git a/scripts/ci/libraries/_all_libs.sh b/scripts/ci/libraries/_all_libs.sh index 5bba06e88c76d..04e25e811a8ae 100755 --- a/scripts/ci/libraries/_all_libs.sh +++ b/scripts/ci/libraries/_all_libs.sh @@ -28,6 +28,10 @@ readonly SCRIPTS_CI_DIR . "${LIBRARIES_DIR}"/_traps.sh # shellcheck source=scripts/ci/libraries/_initialization.sh . 
"${LIBRARIES_DIR}"/_initialization.sh +# shellcheck source=scripts/ci/libraries/_parallel.sh +. "${LIBRARIES_DIR}"/_parallel.sh +# shellcheck source=scripts/ci/libraries/_docker_engine_resources.sh +. "${LIBRARIES_DIR}"/_docker_engine_resources.sh # shellcheck source=scripts/ci/libraries/_repeats.sh . "${LIBRARIES_DIR}"/_repeats.sh # shellcheck source=scripts/ci/libraries/_sanity_checks.sh @@ -56,7 +60,9 @@ readonly SCRIPTS_CI_DIR . "${LIBRARIES_DIR}"/_spinner.sh # shellcheck source=scripts/ci/libraries/_start_end.sh . "${LIBRARIES_DIR}"/_start_end.sh +# shellcheck source=scripts/ci/libraries/_testing.sh +. "${LIBRARIES_DIR}"/_testing.sh # shellcheck source=scripts/ci/libraries/_verbosity.sh . "${LIBRARIES_DIR}"/_verbosity.sh -# shellcheck source=scripts/ci/libraries/_kerberos.sh -. "${LIBRARIES_DIR}"/_kerberos.sh +# shellcheck source=scripts/ci/libraries/_verify_image.sh +. "${LIBRARIES_DIR}"/_verify_image.sh diff --git a/scripts/ci/libraries/_build_airflow_packages.sh b/scripts/ci/libraries/_build_airflow_packages.sh index 609803e27bdb7..46429308c4060 100644 --- a/scripts/ci/libraries/_build_airflow_packages.sh +++ b/scripts/ci/libraries/_build_airflow_packages.sh @@ -34,14 +34,14 @@ function build_airflow_packages::build_airflow_packages() { fi # Prepare airflow's wheel - python setup.py compile_assets "${packages[@]}" + PYTHONUNBUFFERED=1 python setup.py compile_assets "${packages[@]}" # clean-up rm -rf -- *egg-info* rm -rf -- build echo - echo "Airflow package prepared: ${PACKAGE_FORMAT}" + echo "${COLOR_GREEN}Airflow package prepared in format: ${PACKAGE_FORMAT}${COLOR_RESET}" echo start_end::group_end } diff --git a/scripts/ci/libraries/_build_images.sh b/scripts/ci/libraries/_build_images.sh index be82a6a0e203c..da015b9473cf7 100644 --- a/scripts/ci/libraries/_build_images.sh +++ b/scripts/ci/libraries/_build_images.sh @@ -26,6 +26,11 @@ function build_images::add_build_args_for_remote_install() { "--build-arg" "AIRFLOW_SOURCES_FROM=empty" "--build-arg" "AIRFLOW_SOURCES_TO=/empty" ) + if [[ ${CI} == "true" ]]; then + EXTRA_DOCKER_PROD_BUILD_FLAGS+=( + "--build-arg" "PIP_PROGRESS_BAR=off" + ) + fi if [[ -n "${AIRFLOW_CONSTRAINTS_REFERENCE}" ]]; then EXTRA_DOCKER_PROD_BUILD_FLAGS+=( "--build-arg" "AIRFLOW_CONSTRAINTS_REFERENCE=${AIRFLOW_CONSTRAINTS_REFERENCE}" @@ -84,7 +89,7 @@ function build_images::add_build_args_for_remote_install() { # Retrieves version of airflow stored in the production image (used to display the actual # Version we use if it was build from PyPI or GitHub function build_images::get_airflow_version_from_production_image() { - VERBOSE="false" docker run --entrypoint /bin/bash "${AIRFLOW_PROD_IMAGE}" -c 'echo "${AIRFLOW_VERSION}"' + docker run --entrypoint /bin/bash "${AIRFLOW_PROD_IMAGE}" -c 'echo "${AIRFLOW_VERSION}"' } # Removes the "Forced answer" (yes/no/quit) given previously, unless you specifically want to remember it. 
@@ -119,7 +124,7 @@ function build_images::forget_last_answer() { function build_images::confirm_via_terminal() { echo >"${DETECTED_TERMINAL}" echo >"${DETECTED_TERMINAL}" - echo "${COLOR_YELLOW}WARNING:Make sure that you rebased to latest master before rebuilding!${COLOR_RESET}" >"${DETECTED_TERMINAL}" + echo "${COLOR_YELLOW}WARNING: Make sure that you rebased to the latest upstream before rebuilding!${COLOR_RESET}" >"${DETECTED_TERMINAL}" echo >"${DETECTED_TERMINAL}" # Make sure to use output of tty rather than stdin/stdout when available - this way confirm # will works also in case of pre-commits (git does not pass stdin/stdout to pre-commit hooks) @@ -170,7 +175,7 @@ function build_images::confirm_image_rebuild() { elif [[ -t 0 ]]; then echo echo - echo "${COLOR_YELLOW}WARNING:Make sure that you rebased to latest master before rebuilding!${COLOR_RESET}" + echo "${COLOR_YELLOW}WARNING: Make sure that you rebased to the latest upstream before rebuilding!${COLOR_RESET}" echo # Check if this script is run interactively with stdin open and terminal attached "${AIRFLOW_SOURCES}/confirm" "${ACTION} image ${THE_IMAGE_TYPE}-python${PYTHON_MAJOR_MINOR_VERSION}" @@ -247,7 +252,7 @@ function build_images::confirm_non-empty-docker-context-files() { # We cannot use docker registry APIs as they are available only with authorisation # But this image can be pulled without authentication function build_images::build_ci_image_manifest() { - docker build \ + docker_v build \ --tag="${AIRFLOW_CI_LOCAL_MANIFEST_IMAGE}" \ -f- . </dev/null >/dev/null - if ! docker create --name "local-airflow-ci-container" "${AIRFLOW_CI_IMAGE}" 2>/dev/null; then + docker_v rm --force "local-airflow-ci-container" 2>/dev/null >/dev/null + if ! docker_v inspect "${AIRFLOW_CI_IMAGE}" 2>/dev/null >/dev/null; then verbosity::print_info verbosity::print_info "Local airflow CI image not available" verbosity::print_info @@ -274,8 +279,10 @@ function build_images::get_local_build_cache_hash() { export LOCAL_MANIFEST_IMAGE_UNAVAILABLE touch "${LOCAL_IMAGE_BUILD_CACHE_HASH_FILE}" return + fi - docker cp "local-airflow-ci-container:/build-cache-hash" \ + docker_v create --name "local-airflow-ci-container" "${AIRFLOW_CI_IMAGE}" 2>/dev/null + docker_v cp "local-airflow-ci-container:/build-cache-hash" \ "${LOCAL_IMAGE_BUILD_CACHE_HASH_FILE}" 2>/dev/null || touch "${LOCAL_IMAGE_BUILD_CACHE_HASH_FILE}" set -e @@ -298,7 +305,7 @@ function build_images::get_local_build_cache_hash() { function build_images::get_remote_image_build_cache_hash() { set +e # Pull remote manifest image - if ! 
docker_v pull "${AIRFLOW_CI_REMOTE_MANIFEST_IMAGE}" 2>/dev/null >/dev/null; then verbosity::print_info verbosity::print_info "Remote docker registry unreachable" verbosity::print_info @@ -310,11 +317,11 @@ function build_images::get_remote_image_build_cache_hash() { set -e rm -f "${REMOTE_IMAGE_CONTAINER_ID_FILE}" # Create container dump out of the manifest image without actually running it - docker create --cidfile "${REMOTE_IMAGE_CONTAINER_ID_FILE}" "${AIRFLOW_CI_REMOTE_MANIFEST_IMAGE}" + docker_v create --cidfile "${REMOTE_IMAGE_CONTAINER_ID_FILE}" "${AIRFLOW_CI_REMOTE_MANIFEST_IMAGE}" # Extract manifest and store it in local file - docker cp "$(cat "${REMOTE_IMAGE_CONTAINER_ID_FILE}"):/build-cache-hash" \ + docker_v cp "$(cat "${REMOTE_IMAGE_CONTAINER_ID_FILE}"):/build-cache-hash" \ "${REMOTE_IMAGE_BUILD_CACHE_HASH_FILE}" - docker rm --force "$(cat "${REMOTE_IMAGE_CONTAINER_ID_FILE}")" + docker_v rm --force "$(cat "${REMOTE_IMAGE_CONTAINER_ID_FILE}")" rm -f "${REMOTE_IMAGE_CONTAINER_ID_FILE}" verbosity::print_info verbosity::print_info "Remote build cache hash: '$(cat "${REMOTE_IMAGE_BUILD_CACHE_HASH_FILE}")'" @@ -368,7 +375,7 @@ function build_images::get_docker_image_names() { # CI image to build export AIRFLOW_CI_IMAGE="${DOCKERHUB_USER}/${DOCKERHUB_REPO}:${AIRFLOW_CI_BASE_TAG}" # Default CI image - export AIRFLOW_CI_PYTHON_IMAGE="${DOCKERHUB_USER}/${DOCKERHUB_REPO}:python${PYTHON_MAJOR_MINOR_VERSION}-${BRANCH_NAME}" + export AIRFLOW_PYTHON_BASE_IMAGE="${DOCKERHUB_USER}/${DOCKERHUB_REPO}:python${PYTHON_MAJOR_MINOR_VERSION}-${BRANCH_NAME}" # CI image to build export AIRFLOW_CI_IMAGE="${DOCKERHUB_USER}/${DOCKERHUB_REPO}:${AIRFLOW_CI_BASE_TAG}" @@ -395,16 +402,16 @@ function build_images::get_docker_image_names() { export BUILT_CI_IMAGE_FLAG_FILE="${BUILD_CACHE_DIR}/${BRANCH_NAME}/.built_${PYTHON_MAJOR_MINOR_VERSION}" # This is 1-1 mapping of image names of Apache Airflow stored in DockerHub vs. the same images stored - # in Github Registries (either Github Container Registry or Github Packages) + # in GitHub Registries (either GitHub Container Registry or GitHub Packages) # # We have to apply naming conventions used by the registries and keep multiple RUN_ID tags. 
We use # common suffix ('gcr-v1') to be able to switch to different set of cache images if needed - # - for example when some images gets broken (might happen with Github Actions Registries) or when + # - for example when some images gets broken (might happen with GitHub Actions Registries) or when # the storage capacity per image is reached (though it is apparently unlimited) # # Some examples: # - # In case of Github Container Registry: + # In case of GitHub Container Registry: # # * Prod Image: "apache/airflow:master-python3.8" -> "apache/airflow-master-python3.8-gcr-v1:" # * Prod build image: "apache/airflow:master-python3.8-build" -> "apache/airflow-master-python3.8-build-gcr-v1:" @@ -415,7 +422,7 @@ function build_images::get_docker_image_names() { # # "apache/airflow:python-3.6 -> "apache/airflow-python-gcr-v1:3.6-slim-buster-" # - # In case of Github Packages image must be part of the repository: + # In case of GitHub Packages image must be part of the repository: # # * Prod Image: "apache/airflow:master-python3.8" -> "apache/airflow/master-python3.8-gcr-v1:" # * Prod build image: "apache/airflow:master-python3.8-build" -> "apache/airflow/master-python3.8-build-gcr-v1:" @@ -446,29 +453,27 @@ function build_images::get_docker_image_names() { export GITHUB_REGISTRY_PYTHON_BASE_IMAGE="${image_name}${image_separator}python${GITHUB_REGISTRY_IMAGE_SUFFIX}:${PYTHON_BASE_IMAGE_VERSION}-slim-buster" export GITHUB_REGISTRY_AIRFLOW_CI_IMAGE="${image_name}${image_separator}${AIRFLOW_CI_BASE_TAG}${GITHUB_REGISTRY_IMAGE_SUFFIX}" - export GITHUB_REGISTRY_PYTHON_BASE_IMAGE="${image_name}${image_separator}python${GITHUB_REGISTRY_IMAGE_SUFFIX}:${PYTHON_BASE_IMAGE_VERSION}-slim-buster" } # If GitHub Registry is used, login to the registry using GITHUB_USERNAME and # either GITHUB_TOKEN or CONTAINER_REGISTRY_TOKEN depending on the registry. # In case Personal Access token is not set, skip logging in # Also enable experimental features of docker (we need `docker manifest` command) -function build_image::configure_github_docker_registry() { +function build_image::configure_docker_registry() { if [[ ${USE_GITHUB_REGISTRY} == "true" ]]; then - start_end::group_start "Determine Github Registry token used and login if needed" local token="" if [[ "${GITHUB_REGISTRY}" == "ghcr.io" ]]; then # For now ghcr.io can only authenticate using Personal Access Token with package access scope. # There are plans to implement GITHUB_TOKEN authentication but this is not implemented yet token="${CONTAINER_REGISTRY_TOKEN=}" - echo - echo "Using CONTAINER_REGISTRY_TOKEN!" - echo + verbosity::print_info + verbosity::print_info "Using CONTAINER_REGISTRY_TOKEN!" + verbosity::print_info elif [[ "${GITHUB_REGISTRY}" == "docker.pkg.github.com" ]]; then token="${GITHUB_TOKEN}" - echo - echo "Using GITHUB_TOKEN!" - echo + verbosity::print_info + verbosity::print_info "Using GITHUB_TOKEN!" + verbosity::print_info else echo echo "${COLOR_RED}ERROR: Bad value of '${GITHUB_REGISTRY}'. Should be either 'ghcr.io' or 'docker.pkg.github.com'!${COLOR_RESET}" @@ -476,25 +481,21 @@ function build_image::configure_github_docker_registry() { exit 1 fi if [[ -z "${token}" ]] ; then - echo - echo "Skip logging in to Github Registry. No Token available!" - echo + verbosity::print_info + verbosity::print_info "Skip logging in to GitHub Registry. No Token available!" 
+ verbosity::print_info fi if [[ -n "${token}" ]]; then - echo "${token}" | docker login \ + echo "${token}" | docker_v login \ --username "${GITHUB_USERNAME:-apache}" \ --password-stdin \ "${GITHUB_REGISTRY}" else - echo "Skip Login to GitHub Registry ${GITHUB_REGISTRY} as token is missing" + verbosity::print_info "Skip Login to GitHub Registry ${GITHUB_REGISTRY} as token is missing" fi - echo "Make sure experimental docker features are enabled" local new_config new_config=$(jq '.experimental = "enabled"' "${HOME}/.docker/config.json") echo "${new_config}" > "${HOME}/.docker/config.json" - echo "Docker config after change:" - echo "${new_config}" - start_end::group_end fi } @@ -514,7 +515,7 @@ function build_images::prepare_ci_build() { export AIRFLOW_IMAGE="${AIRFLOW_CI_IMAGE}" readonly AIRFLOW_IMAGE - build_image::configure_github_docker_registry + build_image::configure_docker_registry sanity_checks::go_to_airflow_sources permissions::fix_group_permissions } @@ -646,7 +647,7 @@ function build_images::rebuild_ci_image_if_needed_and_confirmed() { fi } -# Retrieves Github Container Registry image prefix from repository name +# Retrieves GitHub Container Registry image prefix from repository name # GitHub Container Registry stores all images at the organization level, they are just # linked to the repository via docker label - however we assume a convention where we will # add repository name to organisation separated by '-' and convert everything to lowercase @@ -688,9 +689,12 @@ function build_images::build_ci_image() { exit 1 fi EXTRA_DOCKER_CI_BUILD_FLAGS=( - "--build-arg" "AIRFLOW_CONSTRAINTS_REFERENCE=${DEFAULT_CONSTRAINTS_BRANCH}" ) - + if [[ ${CI} == "true" ]]; then + EXTRA_DOCKER_CI_BUILD_FLAGS+=( + "--build-arg" "PIP_PROGRESS_BAR=off" + ) + fi if [[ -n "${AIRFLOW_CONSTRAINTS_LOCATION}" ]]; then EXTRA_DOCKER_CI_BUILD_FLAGS+=( "--build-arg" "AIRFLOW_CONSTRAINTS_LOCATION=${AIRFLOW_CONSTRAINTS_LOCATION}" @@ -732,10 +736,9 @@ Docker building ${AIRFLOW_CI_IMAGE}. if [[ -n "${RUNTIME_APT_COMMAND}" ]]; then additional_runtime_args+=("--build-arg" "RUNTIME_APT_COMMAND=\"${RUNTIME_APT_COMMAND}\"") fi - docker build \ + docker_v build \ "${EXTRA_DOCKER_CI_BUILD_FLAGS[@]}" \ - --build-arg PYTHON_BASE_IMAGE="${PYTHON_BASE_IMAGE}" \ - --build-arg PYTHON_MAJOR_MINOR_VERSION="${PYTHON_MAJOR_MINOR_VERSION}" \ + --build-arg PYTHON_BASE_IMAGE="${AIRFLOW_PYTHON_BASE_IMAGE}" \ --build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \ --build-arg AIRFLOW_BRANCH="${BRANCH_NAME}" \ --build-arg AIRFLOW_EXTRAS="${AIRFLOW_EXTRAS}" \ @@ -753,8 +756,11 @@ Docker building ${AIRFLOW_CI_IMAGE}. --build-arg INSTALL_FROM_DOCKER_CONTEXT_FILES="${INSTALL_FROM_DOCKER_CONTEXT_FILES}" \ --build-arg UPGRADE_TO_NEWER_DEPENDENCIES="${UPGRADE_TO_NEWER_DEPENDENCIES}" \ --build-arg CONTINUE_ON_PIP_CHECK_FAILURE="${CONTINUE_ON_PIP_CHECK_FAILURE}" \ + --build-arg CONSTRAINTS_GITHUB_REPOSITORY="${CONSTRAINTS_GITHUB_REPOSITORY}" \ + --build-arg AIRFLOW_CONSTRAINTS_REFERENCE="${DEFAULT_CONSTRAINTS_BRANCH}" \ + --build-arg AIRFLOW_CONSTRAINTS="${AIRFLOW_CONSTRAINTS}" \ --build-arg AIRFLOW_IMAGE_REPOSITORY="https://github.com/${GITHUB_REPOSITORY}" \ - --build-arg AIRFLOW_IMAGE_DATE_CREATED="$(date --rfc-3339=seconds | sed 's/ /T/')" \ + --build-arg AIRFLOW_IMAGE_DATE_CREATED="$(date -u +'%Y-%m-%dT%H:%M:%SZ')" \ --build-arg BUILD_ID="${CI_BUILD_ID}" \ --build-arg COMMIT_SHA="${COMMIT_SHA}" \ "${additional_dev_args[@]}" \ @@ -766,11 +772,11 @@ Docker building ${AIRFLOW_CI_IMAGE}.
set -u if [[ -n "${DEFAULT_CI_IMAGE=}" ]]; then echo "Tagging additionally image ${AIRFLOW_CI_IMAGE} with ${DEFAULT_CI_IMAGE}" - docker tag "${AIRFLOW_CI_IMAGE}" "${DEFAULT_CI_IMAGE}" + docker_v tag "${AIRFLOW_CI_IMAGE}" "${DEFAULT_CI_IMAGE}" fi if [[ -n "${IMAGE_TAG=}" ]]; then echo "Tagging additionally image ${AIRFLOW_CI_IMAGE} with ${IMAGE_TAG}" - docker tag "${AIRFLOW_CI_IMAGE}" "${IMAGE_TAG}" + docker_v tag "${AIRFLOW_CI_IMAGE}" "${IMAGE_TAG}" fi if [[ -n ${SPIN_PID=} ]]; then kill -HUP "${SPIN_PID}" || true @@ -790,13 +796,21 @@ function build_images::prepare_prod_build() { export AIRFLOW_VERSION="${INSTALL_AIRFLOW_REFERENCE}" build_images::add_build_args_for_remote_install elif [[ -n "${INSTALL_AIRFLOW_VERSION=}" ]]; then - # When --install-airflow-version is used then the image is build from PIP package + # When --install-airflow-version is used then the image is built using the released PIP package + # For PROD image only numeric versions are allowed + if [[ ! ${INSTALL_AIRFLOW_VERSION} =~ ^[0-9\.]*$ ]]; then + echo + echo "${COLOR_RED}ERROR: Bad value for install-airflow-version: '${INSTALL_AIRFLOW_VERSION}'. Only numerical versions allowed for PROD image here!${COLOR_RESET}" + echo + exit 1 + fi EXTRA_DOCKER_PROD_BUILD_FLAGS=( "--build-arg" "AIRFLOW_INSTALLATION_METHOD=apache-airflow" - "--build-arg" "AIRFLOW_INSTALL_VERSION=${INSTALL_AIRFLOW_VERSION}" + "--build-arg" "AIRFLOW_VERSION_SPECIFICATION===${INSTALL_AIRFLOW_VERSION}" "--build-arg" "AIRFLOW_VERSION=${INSTALL_AIRFLOW_VERSION}" ) export AIRFLOW_VERSION="${INSTALL_AIRFLOW_VERSION}" + export INSTALL_PROVIDERS_FROM_SOURCES="false" build_images::add_build_args_for_remote_install else # When no airflow version/reference is specified, production image is built either from the @@ -824,7 +838,7 @@ function build_images::prepare_prod_build() { export AIRFLOW_IMAGE="${AIRFLOW_PROD_IMAGE}" readonly AIRFLOW_IMAGE - build_image::configure_github_docker_registry + build_image::configure_docker_registry AIRFLOW_BRANCH_FOR_PYPI_PRELOADING="${BRANCH_NAME}" sanity_checks::go_to_airflow_sources } @@ -838,9 +852,10 @@ function build_images::build_prod_images() { build_images::print_build_info if [[ ${SKIP_BUILDING_PROD_IMAGE} == "true" ]]; then - verbosity::print_info - verbosity::print_info "Skip building production image. Assume the one we have is good!" - verbosity::print_info + echo + echo "${COLOR_YELLOW}Skip building production image.
Assume the one we have is good!${COLOR_RESET}" + echo "${COLOR_YELLOW}You must run './breeze build-image --production-image' before, for all Python versions!${COLOR_RESET}" + echo return fi @@ -875,10 +890,9 @@ function build_images::build_prod_images() { if [[ -n "${DEV_APT_COMMAND}" ]]; then additional_dev_args+=("--build-arg" "DEV_APT_COMMAND=\"${DEV_APT_COMMAND}\"") fi - docker build \ + docker_v build \ "${EXTRA_DOCKER_PROD_BUILD_FLAGS[@]}" \ - --build-arg PYTHON_BASE_IMAGE="${PYTHON_BASE_IMAGE}" \ - --build-arg PYTHON_MAJOR_MINOR_VERSION="${PYTHON_MAJOR_MINOR_VERSION}" \ + --build-arg PYTHON_BASE_IMAGE="${AIRFLOW_PYTHON_BASE_IMAGE}" \ --build-arg INSTALL_MYSQL_CLIENT="${INSTALL_MYSQL_CLIENT}" \ --build-arg AIRFLOW_VERSION="${AIRFLOW_VERSION}" \ --build-arg AIRFLOW_BRANCH="${AIRFLOW_BRANCH_FOR_PYPI_PRELOADING}" \ @@ -897,8 +911,10 @@ function build_images::build_prod_images() { --build-arg CONTINUE_ON_PIP_CHECK_FAILURE="${CONTINUE_ON_PIP_CHECK_FAILURE}" \ --build-arg BUILD_ID="${CI_BUILD_ID}" \ --build-arg COMMIT_SHA="${COMMIT_SHA}" \ + --build-arg CONSTRAINTS_GITHUB_REPOSITORY="${CONSTRAINTS_GITHUB_REPOSITORY}" \ + --build-arg AIRFLOW_CONSTRAINTS="${AIRFLOW_CONSTRAINTS}" \ --build-arg AIRFLOW_IMAGE_REPOSITORY="https://github.com/${GITHUB_REPOSITORY}" \ - --build-arg AIRFLOW_IMAGE_DATE_CREATED="$(date --rfc-3339=seconds | sed 's/ /T/')" \ + --build-arg AIRFLOW_IMAGE_DATE_CREATED="$(date -u +'%Y-%m-%dT%H:%M:%SZ')" \ "${DOCKER_CACHE_PROD_BUILD_DIRECTIVE[@]}" \ -t "${AIRFLOW_PROD_BUILD_IMAGE}" \ --target "airflow-build-image" \ @@ -910,10 +926,9 @@ function build_images::build_prod_images() { if [[ -n "${RUNTIME_APT_COMMAND}" ]]; then additional_runtime_args+=("--build-arg" "RUNTIME_APT_COMMAND=\"${RUNTIME_APT_COMMAND}\"") fi - docker build \ + docker_v build \ "${EXTRA_DOCKER_PROD_BUILD_FLAGS[@]}" \ - --build-arg PYTHON_BASE_IMAGE="${PYTHON_BASE_IMAGE}" \ - --build-arg PYTHON_MAJOR_MINOR_VERSION="${PYTHON_MAJOR_MINOR_VERSION}" \ + --build-arg PYTHON_BASE_IMAGE="${AIRFLOW_PYTHON_BASE_IMAGE}" \ --build-arg INSTALL_MYSQL_CLIENT="${INSTALL_MYSQL_CLIENT}" \ --build-arg ADDITIONAL_AIRFLOW_EXTRAS="${ADDITIONAL_AIRFLOW_EXTRAS}" \ --build-arg ADDITIONAL_PYTHON_DEPS="${ADDITIONAL_PYTHON_DEPS}" \ @@ -934,8 +949,10 @@ function build_images::build_prod_images() { --build-arg AIRFLOW_EXTRAS="${AIRFLOW_EXTRAS}" \ --build-arg BUILD_ID="${CI_BUILD_ID}" \ --build-arg COMMIT_SHA="${COMMIT_SHA}" \ + --build-arg CONSTRAINTS_GITHUB_REPOSITORY="${CONSTRAINTS_GITHUB_REPOSITORY}" \ + --build-arg AIRFLOW_CONSTRAINTS="${AIRFLOW_CONSTRAINTS}" \ --build-arg AIRFLOW_IMAGE_REPOSITORY="https://github.com/${GITHUB_REPOSITORY}" \ - --build-arg AIRFLOW_IMAGE_DATE_CREATED="$(date --rfc-3339=seconds | sed 's/ /T/')" \ + --build-arg AIRFLOW_IMAGE_DATE_CREATED="$(date -u +'%Y-%m-%dT%H:%M:%SZ')" \ "${additional_dev_args[@]}" \ "${additional_runtime_args[@]}" \ "${DOCKER_CACHE_PROD_DIRECTIVE[@]}" \ @@ -945,20 +962,14 @@ set -u if [[ -n "${DEFAULT_PROD_IMAGE:=}" ]]; then echo "Tagging additionally image ${AIRFLOW_PROD_IMAGE} with ${DEFAULT_PROD_IMAGE}" - docker tag "${AIRFLOW_PROD_IMAGE}" "${DEFAULT_PROD_IMAGE}" + docker_v tag "${AIRFLOW_PROD_IMAGE}" "${DEFAULT_PROD_IMAGE}" fi if [[ -n "${IMAGE_TAG=}" ]]; then echo "Tagging additionally image ${AIRFLOW_PROD_IMAGE} with ${IMAGE_TAG}" - docker tag "${AIRFLOW_PROD_IMAGE}" "${IMAGE_TAG}" + docker_v tag "${AIRFLOW_PROD_IMAGE}" "${IMAGE_TAG}" fi } -function build_images::build_prod_images_with_group() { - start_end::group_start "Build PROD
images ${AIRFLOW_PROD_BUILD_IMAGE}" - build_images::build_prod_images - start_end::group_end -} - # Waits for image tag to appear in GitHub Registry, pulls it and tags with the target tag # Parameters: # $1 - image name to wait for @@ -974,11 +985,18 @@ function build_images::wait_for_image_tag() { start_end::group_start "Wait for image tag ${IMAGE_TO_WAIT_FOR}" while true; do set +e - docker pull "${IMAGE_TO_WAIT_FOR}" 2>/dev/null >/dev/null + echo "${COLOR_BLUE}Docker pull ${IMAGE_TO_WAIT_FOR} ${COLOR_RESET}" >"${OUTPUT_LOG}" + docker_v pull "${IMAGE_TO_WAIT_FOR}" >>"${OUTPUT_LOG}" 2>&1 set -e - if [[ -z "$(docker images -q "${IMAGE_TO_WAIT_FOR}" 2>/dev/null || true)" ]]; then + local image_hash + echo "${COLOR_BLUE} Docker images -q ${IMAGE_TO_WAIT_FOR}${COLOR_RESET}" >>"${OUTPUT_LOG}" + image_hash="$(docker images -q "${IMAGE_TO_WAIT_FOR}" 2>>"${OUTPUT_LOG}" || true)" + if [[ -z "${image_hash}" ]]; then echo - echo "The image ${IMAGE_TO_WAIT_FOR} is not yet available. Waiting" + echo "The image ${IMAGE_TO_WAIT_FOR} is not yet available. No local hash for the image. Waiting." + echo + echo "Last log:" + cat "${OUTPUT_LOG}" || true echo sleep 10 else @@ -988,12 +1006,12 @@ function build_images::wait_for_image_tag() { echo echo "Tagging ${IMAGE_TO_WAIT_FOR} as ${IMAGE_NAME}." echo - docker tag "${IMAGE_TO_WAIT_FOR}" "${IMAGE_NAME}" + docker_v tag "${IMAGE_TO_WAIT_FOR}" "${IMAGE_NAME}" for TARGET_TAG in "${@}"; do echo echo "Tagging ${IMAGE_TO_WAIT_FOR} as ${TARGET_TAG}." echo - docker tag "${IMAGE_TO_WAIT_FOR}" "${TARGET_TAG}" + docker_v tag "${IMAGE_TO_WAIT_FOR}" "${TARGET_TAG}" done break fi @@ -1052,7 +1070,7 @@ function build_images::build_prod_images_from_locally_built_airflow_packages() { build_airflow_packages::build_airflow_packages mv "${AIRFLOW_SOURCES}/dist/"* "${AIRFLOW_SOURCES}/docker-context-files/" - build_images::build_prod_images_with_group + build_images::build_prod_images } # Useful information for people who stumble upon a pip check failure diff --git a/scripts/ci/libraries/_docker_engine_resources.sh b/scripts/ci/libraries/_docker_engine_resources.sh new file mode 100644 index 0000000000000..04333591481e3 --- /dev/null +++ b/scripts/ci/libraries/_docker_engine_resources.sh @@ -0,0 +1,87 @@ +#!/usr/bin/env bash +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ + +function docker_engine_resources::print_overall_stats() { + echo + echo "Docker statistics" + echo + docker stats --all --no-stream --no-trunc + echo + echo "Memory statistics" + echo + docker run --rm --entrypoint /bin/sh "alpine:latest" -c "free -m" + echo + echo "Disk statistics" + echo + df -h || true +} + + +function docker_engine_resources::get_available_memory_in_docker() { + MEMORY_AVAILABLE_FOR_DOCKER=$(docker run --rm --entrypoint /bin/bash "debian:buster-slim" -c 'echo $(($(getconf _PHYS_PAGES) * $(getconf PAGE_SIZE) / (1024 * 1024)))') + echo "${COLOR_BLUE}Memory available for Docker${COLOR_RESET}: $(numfmt --to iec $((MEMORY_AVAILABLE_FOR_DOCKER * 1024 * 1024)))" + export MEMORY_AVAILABLE_FOR_DOCKER +} + +function docker_engine_resources::get_available_cpus_in_docker() { + CPUS_AVAILABLE_FOR_DOCKER=$(docker run --rm "debian:buster-slim" grep -cE 'cpu[0-9]+' /proc/stat) + echo "${COLOR_BLUE}CPUs available for Docker${COLOR_RESET}: ${CPUS_AVAILABLE_FOR_DOCKER}" + export CPUS_AVAILABLE_FOR_DOCKER +} + +function docker_engine_resources::get_available_disk_space_in_docker() { + DISK_SPACE_AVAILABLE_FOR_DOCKER=$(docker run --rm --entrypoint /bin/bash "debian:buster-slim" -c \ + 'df / | tail -1 | awk '\''{print $4}'\') + echo "${COLOR_BLUE}Disk space available for Docker${COLOR_RESET}: $(numfmt --to iec $((DISK_SPACE_AVAILABLE_FOR_DOCKER * 1024)))" + export DISK_SPACE_AVAILABLE_FOR_DOCKER +} + +function docker_engine_resources::check_enough_resources() { + local successful_resource_check="true" + if (( MEMORY_AVAILABLE_FOR_DOCKER < 4000 )) ; then + successful_resource_check="false" + echo + echo "${COLOR_RED}WARNING! Not enough memory to use breeze. At least 4GB memory is required for Docker engine to run Breeze${COLOR_RESET}" + fi + + if (( CPUS_AVAILABLE_FOR_DOCKER < 2 )) ; then + successful_resource_check="false" + echo + echo "${COLOR_RED}WARNING! Not enough CPUs to use breeze. At least 2 CPUs are required for Docker engine to run Breeze.${COLOR_RESET}" + fi + + if (( DISK_SPACE_AVAILABLE_FOR_DOCKER < 40000000 )) ; then + successful_resource_check="false" + echo + echo "${COLOR_RED}WARNING! Not enough disk space to use breeze.
At least 40GB are required for Docker engine to run Breeze.${COLOR_RESET}" + fi + + if [[ ${successful_resource_check} != "true" ]];then + echo + echo "${COLOR_RED}Please check https://github.com/apache/airflow/blob/master/BREEZE.rst#resources-required for details${COLOR_RESET}" + echo + fi +} + +function docker_engine_resources::check_all_resources() { + docker_engine_resources::get_available_memory_in_docker + docker_engine_resources::get_available_cpus_in_docker + docker_engine_resources::get_available_disk_space_in_docker + docker_engine_resources::check_enough_resources +} diff --git a/scripts/ci/libraries/_initialization.sh b/scripts/ci/libraries/_initialization.sh index e861bf1272950..191baee1bb146 100644 --- a/scripts/ci/libraries/_initialization.sh +++ b/scripts/ci/libraries/_initialization.sh @@ -148,6 +148,9 @@ function initialization::initialize_base_variables() { # If no Airflow Home defined - fallback to ${HOME}/airflow AIRFLOW_HOME_DIR=${AIRFLOW_HOME:=${HOME}/airflow} export AIRFLOW_HOME_DIR + + # Dry run - only show docker-compose and docker commands but do not execute them + export DRY_RUN_DOCKER=${DRY_RUN_DOCKER:="false"} } # Determine current branch @@ -176,13 +179,26 @@ function initialization::initialize_dockerhub_variables() { # Determine available integrations function initialization::initialize_available_integrations() { - export AVAILABLE_INTEGRATIONS="cassandra kerberos mongo openldap pinot presto rabbitmq redis" + export AVAILABLE_INTEGRATIONS="cassandra kerberos mongo openldap pinot rabbitmq redis statsd trino" } # Needs to be declared outside of function for MacOS FILES_FOR_REBUILD_CHECK=() # Determine which files trigger rebuild check +# +# !!!!!!!!!! IMPORTANT NOTE !!!!!!!!!! +# When you add files here, please make sure not to add two files +# with the same name. And if you do, make sure that files with the +# same name are stored in directories with different names. For +# example we have two package.json files here, but they are in +# directories with different names (`www` and `ui`). +# The problem is that md5 hashes of those files are all stored in +# the same `./build` cache directory, as `<directory>-<file>.md5sum` files. +# For example the md5sum of the `airflow/www/package.json` file is stored +# as `www-package.json` and that of `airflow/ui/package.json` as +# `ui-package.json` (see the sketch below). +# The file list here changes extremely rarely. +# !!!!!!!!!! IMPORTANT NOTE !!!!!!!!!!
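+#
+# A minimal sketch of how those cached hash names are derived. It mirrors the
+# "$(basename "$(dirname "${FILE}")")-$(basename "${FILE}")" expression used by
+# md5sum::calculate_file_md5sum in _md5sum.sh; the FILE value is just an example:
+#
+#   FILE="airflow/www/package.json"
+#   echo "$(basename "$(dirname "${FILE}")")-$(basename "${FILE}").md5sum"
+#   # prints: www-package.json.md5sum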
function initialization::initialize_files_for_rebuild_check() { FILES_FOR_REBUILD_CHECK+=( "setup.py" @@ -190,9 +206,10 @@ function initialization::initialize_files_for_rebuild_check() { "Dockerfile.ci" ".dockerignore" "scripts/docker/compile_www_assets.sh" + "scripts/docker/common.sh" "scripts/docker/install_additional_dependencies.sh" "scripts/docker/install_airflow.sh" - "scripts/docker/install_airflow_from_latest_master.sh" + "scripts/docker/install_airflow_from_branch_tip.sh" "scripts/docker/install_from_docker_context_files.sh" "scripts/docker/install_mysql.sh" "airflow/www/package.json" @@ -219,13 +236,21 @@ EXTRA_DOCKER_FLAGS=() function initialization::initialize_mount_variables() { # Whether necessary for airflow run local sources are mounted to docker - export MOUNT_LOCAL_SOURCES=${MOUNT_LOCAL_SOURCES:="true"} + export MOUNT_SELECTED_LOCAL_SOURCES=${MOUNT_SELECTED_LOCAL_SOURCES:="true"} + + # Whether all airflow sources are mounted to docker + export MOUNT_ALL_LOCAL_SOURCES=${MOUNT_ALL_LOCAL_SOURCES:="false"} - if [[ ${MOUNT_LOCAL_SOURCES} == "true" ]]; then + if [[ ${MOUNT_SELECTED_LOCAL_SOURCES} == "true" ]]; then verbosity::print_info verbosity::print_info "Mounting necessary host volumes to Docker" verbosity::print_info read -r -a EXTRA_DOCKER_FLAGS <<<"$(local_mounts::convert_local_mounts_to_docker_params)" + elif [[ ${MOUNT_ALL_LOCAL_SOURCES} == "true" ]]; then + verbosity::print_info + verbosity::print_info "Mounting whole airflow volume to Docker" + verbosity::print_info + EXTRA_DOCKER_FLAGS+=("-v" "${AIRFLOW_SOURCES}:/opt/airflow/:cached") else verbosity::print_info verbosity::print_info "Skip mounting host volumes to Docker" @@ -243,9 +268,13 @@ function initialization::initialize_mount_variables() { # Determine values of force settings function initialization::initialize_force_variables() { - # Whether necessary for airflow run local sources are mounted to docker + # By default we do not pull CI/PROD images. We can force-pull them when needed export FORCE_PULL_IMAGES=${FORCE_PULL_IMAGES:="false"} + # By default we do not pull python base image. We should do that only when we run upgrade check in + # CI master and when we manually refresh the images to latest versions + export FORCE_PULL_BASE_PYTHON_IMAGE="false" + # Determines whether to force build without checking if it is needed # Can be overridden by '--force-build-images' flag. export FORCE_BUILD_IMAGES=${FORCE_BUILD_IMAGES:="false"} @@ -303,7 +332,7 @@ function initialization::initialize_image_build_variables() { # Default build id export CI_BUILD_ID="${CI_BUILD_ID:="0"}" - # Default extras used for building Production image. The master of this information is in the Dockerfile + # Default extras used for building Production image. 
The canonical source of this information is in the Dockerfile DEFAULT_PROD_EXTRAS=$(grep "ARG AIRFLOW_EXTRAS=" "${AIRFLOW_SOURCES}/Dockerfile" | awk 'BEGIN { FS="=" } { print $2 }' | tr -d '"') export DEFAULT_PROD_EXTRAS @@ -388,8 +417,8 @@ function initialization::initialize_image_build_variables() { export WHEEL_VERSION # And installed from there (breeze and ci) - AIRFLOW_INSTALL_VERSION=${AIRFLOW_INSTALL_VERSION:="."} - export AIRFLOW_INSTALL_VERSION + AIRFLOW_VERSION_SPECIFICATION=${AIRFLOW_VERSION_SPECIFICATION:=""} + export AIRFLOW_VERSION_SPECIFICATION # By default no sources are copied to image AIRFLOW_SOURCES_FROM=${AIRFLOW_SOURCES_FROM:="empty"} @@ -410,6 +439,9 @@ function initialization::initialize_image_build_variables() { # Determines if airflow should be installed from a specified reference in GitHub export INSTALL_AIRFLOW_REFERENCE=${INSTALL_AIRFLOW_REFERENCE:=""} + # Determines which providers are used to generate constraints - source, pypi or no providers + export GENERATE_CONSTRAINTS_MODE=${GENERATE_CONSTRAINTS_MODE:="source-providers"} + # whether installation of Airflow should be done via PIP. You can set it to false if you have # all the binary packages (including airflow) in the docker-context-files folder and use # INSTALL_FROM_DOCKER_CONTEXT_FILES="true" to install it from there. @@ -424,29 +456,33 @@ function initialization::initialize_image_build_variables() { # direct constraints Location - can be URL or path to local file. If empty, it will be calculated # based on which Airflow version is installed and from where export AIRFLOW_CONSTRAINTS_LOCATION="${AIRFLOW_CONSTRAINTS_LOCATION:=""}" + + # Suffix for constraints. Can be: + # * 'constraints' = for constraints with PyPI released providers (default for installations) + # * 'constraints-source-providers' for constraints with source version of providers (defaults in Breeze and CI) + # * 'constraints-no-providers' for constraints without providers + export AIRFLOW_CONSTRAINTS="${AIRFLOW_CONSTRAINTS:="constraints-source-providers"}" } # Determine version suffixes used to build provider packages function initialization::initialize_provider_package_building() { # Version suffix for PyPI packaging - export VERSION_SUFFIX_FOR_PYPI="" + export VERSION_SUFFIX_FOR_PYPI="${VERSION_SUFFIX_FOR_PYPI=}" # Artifact name suffix for SVN packaging - export VERSION_SUFFIX_FOR_SVN="" - # If set to true, the backport provider packages will be built (false will build regular provider packages) - export BACKPORT_PACKAGES=${BACKPORT_PACKAGES:="false"} + export VERSION_SUFFIX_FOR_SVN="${VERSION_SUFFIX_FOR_SVN=}" } # Determine versions of kubernetes cluster and tools used function initialization::initialize_kubernetes_variables() { # Currently supported versions of Kubernetes - CURRENT_KUBERNETES_VERSIONS+=("v1.18.6" "v1.17.5" "v1.16.9") + CURRENT_KUBERNETES_VERSIONS+=("v1.20.2" "v1.19.7" "v1.18.15") export CURRENT_KUBERNETES_VERSIONS # Currently supported modes of Kubernetes CURRENT_KUBERNETES_MODES+=("image") export CURRENT_KUBERNETES_MODES # Currently supported versions of Kind - CURRENT_KIND_VERSIONS+=("v0.8.0") + CURRENT_KIND_VERSIONS+=("v0.10.0") export CURRENT_KIND_VERSIONS # Currently supported versions of Helm CURRENT_HELM_VERSIONS+=("v3.2.4") @@ -472,14 +508,18 @@ function initialization::initialize_kubernetes_variables() { # Kubectl version export KUBECTL_VERSION=${KUBERNETES_VERSION:=${DEFAULT_KUBERNETES_VERSION}} # Local Kind path - export KIND_BINARY_PATH="${BUILD_CACHE_DIR}/bin/kind" + export 
KIND_BINARY_PATH="${BUILD_CACHE_DIR}/kubernetes-bin/${KUBERNETES_VERSION}/kind" readonly KIND_BINARY_PATH # Local Helm path - export HELM_BINARY_PATH="${BUILD_CACHE_DIR}/bin/helm" + export HELM_BINARY_PATH="${BUILD_CACHE_DIR}/kubernetes-bin/${KUBERNETES_VERSION}/helm" readonly HELM_BINARY_PATH # local Kubectl path - export KUBECTL_BINARY_PATH="${BUILD_CACHE_DIR}/bin/kubectl" + export KUBECTL_BINARY_PATH="${BUILD_CACHE_DIR}/kubernetes-bin/${KUBERNETES_VERSION}/kubectl" readonly KUBECTL_BINARY_PATH + FORWARDED_PORT_NUMBER="${FORWARDED_PORT_NUMBER:="8080"}" + readonly FORWARDED_PORT_NUMBER + API_SERVER_PORT="${API_SERVER_PORT:="19090"}" + readonly API_SERVER_PORT } function initialization::initialize_git_variables() { @@ -492,12 +532,14 @@ function initialization::initialize_github_variables() { # Defaults for interacting with GitHub export USE_GITHUB_REGISTRY=${USE_GITHUB_REGISTRY:="false"} export GITHUB_REGISTRY_IMAGE_SUFFIX=${GITHUB_REGISTRY_IMAGE_SUFFIX:="-v2"} - export GITHUB_REGISTRY=${GITHUB_REGISTRY:="ghcr.io"} + export GITHUB_REGISTRY=${GITHUB_REGISTRY:="docker.pkg.github.com"} export GITHUB_REGISTRY_WAIT_FOR_IMAGE=${GITHUB_REGISTRY_WAIT_FOR_IMAGE:="false"} export GITHUB_REGISTRY_PULL_IMAGE_TAG=${GITHUB_REGISTRY_PULL_IMAGE_TAG:="latest"} export GITHUB_REGISTRY_PUSH_IMAGE_TAG=${GITHUB_REGISTRY_PUSH_IMAGE_TAG:="latest"} export GITHUB_REPOSITORY=${GITHUB_REPOSITORY:="apache/airflow"} + # Allows to override the repository which is used as source of constraints during the build + export CONSTRAINTS_GITHUB_REPOSITORY=${CONSTRAINTS_GITHUB_REPOSITORY:="apache/airflow"} # Used only in CI environment export GITHUB_TOKEN="${GITHUB_TOKEN=""}" @@ -505,7 +547,10 @@ function initialization::initialize_github_variables() { } function initialization::initialize_test_variables() { - export TEST_TYPE=${TEST_TYPE:=""} + + # In case we want to force certain test type to run, this variable should be set to this type + # Otherwise TEST_TYPEs to run will be derived from TEST_TYPES space-separated string + export FORCE_TEST_TYPE=${FORCE_TEST_TYPE:=""} } function initialization::initialize_package_variables() { @@ -525,11 +570,13 @@ function initialization::set_output_color_variables() { COLOR_RED=$'\e[31m' COLOR_RESET=$'\e[0m' COLOR_YELLOW=$'\e[33m' + COLOR_CYAN=$'\e[36m' export COLOR_BLUE export COLOR_GREEN export COLOR_RED export COLOR_RESET export COLOR_YELLOW + export COLOR_CYAN } # Common environment that is initialized by both Breeze and CI scripts @@ -581,7 +628,8 @@ DockerHub variables: Mount variables: - MOUNT_LOCAL_SOURCES: ${MOUNT_LOCAL_SOURCES} + MOUNT_SELECTED_LOCAL_SOURCES: ${MOUNT_SELECTED_LOCAL_SOURCES} + MOUNT_ALL_LOCAL_SOURCES: ${MOUNT_ALL_LOCAL_SOURCES} Force variables: @@ -640,7 +688,7 @@ Common image build variables: Production image build variables: AIRFLOW_INSTALLATION_METHOD: '${AIRFLOW_INSTALLATION_METHOD}' - AIRFLOW_INSTALL_VERSION: '${AIRFLOW_INSTALL_VERSION}' + AIRFLOW_VERSION_SPECIFICATION: '${AIRFLOW_VERSION_SPECIFICATION}' AIRFLOW_SOURCES_FROM: '${AIRFLOW_SOURCES_FROM}' AIRFLOW_SOURCES_TO: '${AIRFLOW_SOURCES_TO}' @@ -666,7 +714,7 @@ Initialization variables: Test variables: - TEST_TYPE: '${TEST_TYPE}' + TEST_TYPE: '${TEST_TYPE=}' EOF if [[ "${CI}" == "true" ]]; then @@ -690,14 +738,16 @@ EOF # we used in other scripts function initialization::get_environment_for_builds_on_ci() { if [[ ${CI:=} == "true" ]]; then + export GITHUB_REPOSITORY="${GITHUB_REPOSITORY="apache/airflow"}" export CI_TARGET_REPO="${GITHUB_REPOSITORY}" export CI_TARGET_BRANCH="${GITHUB_BASE_REF:="master"}" - 
export CI_BUILD_ID="${GITHUB_RUN_ID}" - export CI_JOB_ID="${GITHUB_JOB}" - export CI_EVENT_TYPE="${GITHUB_EVENT_NAME}" - export CI_REF="${GITHUB_REF:=}" + export CI_BUILD_ID="${GITHUB_RUN_ID="0"}" + export CI_JOB_ID="${GITHUB_JOB="0"}" + export CI_EVENT_TYPE="${GITHUB_EVENT_NAME="pull_request"}" + export CI_REF="${GITHUB_REF:="refs/head/master"}" else # CI PR settings + export GITHUB_REPOSITORY="${GITHUB_REPOSITORY="apache/airflow"}" export CI_TARGET_REPO="${CI_TARGET_REPO="apache/airflow"}" export CI_TARGET_BRANCH="${DEFAULT_BRANCH="master"}" export CI_BUILD_ID="${CI_BUILD_ID="0"}" @@ -706,8 +756,8 @@ function initialization::get_environment_for_builds_on_ci() { export CI_REF="${CI_REF="refs/head/master"}" fi - if [[ ${VERBOSE} == "true" && ${PRINT_INFO_FROM_SCRIPTS} == "true" ]]; then - initialization::summarize_build_environment + if [[ -z "${LIBRARY_PATH:-}" && -n "${LD_LIBRARY_PATH:-}" ]]; then + export LIBRARY_PATH="${LD_LIBRARY_PATH}" fi } @@ -719,10 +769,6 @@ function initialization::make_constants_read_only() { # Set the arguments as read-only readonly PYTHON_MAJOR_MINOR_VERSION - readonly WEBSERVER_HOST_PORT - readonly POSTGRES_HOST_PORT - readonly MYSQL_HOST_PORT - readonly HOST_USER_ID readonly HOST_GROUP_ID readonly HOST_HOME @@ -734,11 +780,11 @@ function initialization::make_constants_read_only() { readonly HELM_VERSION readonly KUBECTL_VERSION - readonly BACKEND readonly POSTGRES_VERSION readonly MYSQL_VERSION - readonly MOUNT_LOCAL_SOURCES + readonly MOUNT_SELECTED_LOCAL_SOURCES + readonly MOUNT_ALL_LOCAL_SOURCES readonly INSTALL_AIRFLOW_VERSION readonly INSTALL_AIRFLOW_REFERENCE @@ -813,6 +859,7 @@ function initialization::make_constants_read_only() { readonly PYTHON_BASE_IMAGE_VERSION readonly PYTHON_BASE_IMAGE + readonly AIRFLOW_PYTHON_BASE_IMAGE readonly AIRFLOW_CI_BASE_TAG readonly AIRFLOW_CI_IMAGE readonly AIRFLOW_CI_IMAGE_DEFAULT diff --git a/scripts/ci/libraries/_kerberos.sh b/scripts/ci/libraries/_kerberos.sh deleted file mode 100644 index 3592e9cc971b3..0000000000000 --- a/scripts/ci/libraries/_kerberos.sh +++ /dev/null @@ -1,57 +0,0 @@ -#!/usr/bin/env bash - -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -network_name="example.com" -readonly network_name -network="10.5.0.0/24" -readonly network -gateway="$(echo ${network} | cut -f1-3 -d'.').254" -readonly gateway - -function kerberos::_check_networks_exists { - if docker network ls --format '{{.Name}}' | grep "^${network_name}$" &> /dev/null; then - return 1 - fi - return 0 -} - -function kerberos::create_kerberos_network { - if ! 
kerberos::_check_networks_exists; then - return 0 - fi - - docker network create \ - --driver=bridge \ - --subnet="${network}" \ - --ip-range="${network}" \ - --gateway="${gateway}" \ - "${network_name}" - - echo "Create network \"${network_name}\" for Kerberos integration" -} - -function kerberos::delete_kerberos_network { - if kerberos::_check_networks_exists; then - return 0 - fi - - docker network rm "${network_name}" - - echo "Deleted network \"${network_name}\" for Kerberos integration" -} diff --git a/scripts/ci/libraries/_kind.sh b/scripts/ci/libraries/_kind.sh index 4fcf9fce9c473..bb883ecb9ac5e 100644 --- a/scripts/ci/libraries/_kind.sh +++ b/scripts/ci/libraries/_kind.sh @@ -21,26 +21,24 @@ function kind::get_kind_cluster_name() { export KIND_CLUSTER_NAME=${KIND_CLUSTER_NAME:="airflow-python-${PYTHON_MAJOR_MINOR_VERSION}-${KUBERNETES_VERSION}"} # Name of the KinD cluster to connect to when referred to via kubectl export KUBECTL_CLUSTER_NAME=kind-${KIND_CLUSTER_NAME} - export KUBECONFIG="${BUILD_CACHE_DIR}/.kube/config" - mkdir -pv "${BUILD_CACHE_DIR}/.kube/" + export KUBECONFIG="${BUILD_CACHE_DIR}/${KIND_CLUSTER_NAME}/.kube/config" + mkdir -pv "${BUILD_CACHE_DIR}/${KIND_CLUSTER_NAME}/.kube/" touch "${KUBECONFIG}" } function kind::dump_kind_logs() { - start_end::group_start "Dumping logs from KinD" + verbosity::print_info "Dumping logs from KinD" local DUMP_DIR_NAME DUMP_DIR DUMP_DIR_NAME=kind_logs_$(date "+%Y-%m-%d")_${CI_BUILD_ID}_${CI_JOB_ID} DUMP_DIR="/tmp/${DUMP_DIR_NAME}" kind --name "${KIND_CLUSTER_NAME}" export logs "${DUMP_DIR}" - start_end::group_end } function kind::make_sure_kubernetes_tools_are_installed() { - start_end::group_start "Make sure Kubernetes tools are installed" SYSTEM=$(uname -s | tr '[:upper:]' '[:lower:]') KIND_URL="https://github.com/kubernetes-sigs/kind/releases/download/${KIND_VERSION}/kind-${SYSTEM}-amd64" - mkdir -pv "${BUILD_CACHE_DIR}/bin" + mkdir -pv "${BUILD_CACHE_DIR}/kubernetes-bin/${KUBERNETES_VERSION}" if [[ -f "${KIND_BINARY_PATH}" ]]; then DOWNLOADED_KIND_VERSION=v"$(${KIND_BINARY_PATH} --version | awk '{ print $3 }')" echo "Currently downloaded kind version = ${DOWNLOADED_KIND_VERSION}" @@ -87,15 +85,17 @@ function kind::make_sure_kubernetes_tools_are_installed() { echo "Helm version ok" echo fi - PATH=${PATH}:${BUILD_CACHE_DIR}/bin - start_end::group_end + PATH=${PATH}:${BUILD_CACHE_DIR}/kubernetes-bin/${KUBERNETES_VERSION} } function kind::create_cluster() { - kind create cluster \ - --name "${KIND_CLUSTER_NAME}" \ - --config "${AIRFLOW_SOURCES}/scripts/ci/kubernetes/kind-cluster-conf.yaml" \ - --image "kindest/node:${KUBERNETES_VERSION}" + sed "s/{{FORWARDED_PORT_NUMBER}}/${FORWARDED_PORT_NUMBER}/" < \ + "${AIRFLOW_SOURCES}/scripts/ci/kubernetes/kind-cluster-conf.yaml" | \ + sed "s/{{API_SERVER_PORT}}/${API_SERVER_PORT}/" | \ + kind create cluster \ + --name "${KIND_CLUSTER_NAME}" \ + --config - \ + --image "kindest/node:${KUBERNETES_VERSION}" echo echo "Created cluster ${KIND_CLUSTER_NAME}" echo @@ -106,7 +106,7 @@ function kind::delete_cluster() { echo echo "Deleted cluster ${KIND_CLUSTER_NAME}" echo - rm -rf "${HOME}/.kube/*" + rm -rf "${BUILD_CACHE_DIR}/${KIND_CLUSTER_NAME}/.kube/" } function kind::set_current_context() { @@ -122,7 +122,6 @@ function kind::perform_kind_cluster_operation() { echo exit 1 fi - start_end::group_start "Perform KinD cluster operation: ${1}" set -u OPERATION="${1}" @@ -229,7 +228,6 @@ function kind::perform_kind_cluster_operation() { exit 1 fi fi - start_end::group_end } function 
kind::check_cluster_ready_for_airflow() { @@ -250,28 +248,20 @@ function kind::check_cluster_ready_for_airflow() { } function kind::build_image_for_kubernetes_tests() { - start_end::group_start "Build image for kubernetes tests ${AIRFLOW_PROD_IMAGE_KUBERNETES}" cd "${AIRFLOW_SOURCES}" || exit 1 - docker build --tag "${AIRFLOW_PROD_IMAGE_KUBERNETES}" . -f - </dev/null 2>&1 || true kubectl delete namespace "test-namespace" >/dev/null 2>&1 || true kubectl create namespace "${HELM_AIRFLOW_NAMESPACE}" kubectl create namespace "test-namespace" - pushd "${AIRFLOW_SOURCES}/chart" >/dev/null 2>&1 || exit 1 + + # If on CI, "pass-through" the current docker credentials from the host to be default image pull-secrets in the namespace + if [[ ${CI:=} == "true" ]]; then + local regcred + regcred=$(jq -sRn ' + .apiVersion="v1" | + .kind = "Secret" | + .type = "kubernetes.io/dockerconfigjson" | + .metadata.name="regcred" | + .data[".dockerconfigjson"] = @base64 "\(inputs)" + ' ~/.docker/config.json) + kubectl -n test-namespace apply -f - <<<"$regcred" + kubectl -n test-namespace patch serviceaccount default -p '{"imagePullSecrets": [{"name": "regcred"}]}' + + kubectl -n "${HELM_AIRFLOW_NAMESPACE}" apply -f - <<<"$regcred" + kubectl -n "${HELM_AIRFLOW_NAMESPACE}" patch serviceaccount default -p '{"imagePullSecrets": [{"name": "regcred"}]}' + fi + + local chartdir + chartdir=$(mktemp -d) + traps::add_trap "rm -rf ${chartdir}" EXIT INT HUP TERM + # Copy chart to temporary directory to allow chart deployment in parallel + # Otherwise helm deployment will fail on renaming charts to tmpcharts + cp -r "${AIRFLOW_SOURCES}/chart" "${chartdir}" + + pushd "${chartdir}/chart" >/dev/null 2>&1 || exit 1 helm repo add stable https://charts.helm.sh/stable/ helm dep update helm install airflow . 
--namespace "${HELM_AIRFLOW_NAMESPACE}" \ @@ -329,15 +335,10 @@ function kind::deploy_airflow_with_helm() { --set "config.api.enable_experimental_api=true" echo popd > /dev/null 2>&1|| exit 1 - start_end::group_end } function kind::deploy_test_kubernetes_resources() { - start_end::group_start "Deploying Airflow with Helm" - echo - echo "Deploying Custom kubernetes resources" - echo + verbosity::print_info "Deploying Custom kubernetes resources" kubectl apply -f "scripts/ci/kubernetes/volumes.yaml" --namespace default kubectl apply -f "scripts/ci/kubernetes/nodeport.yaml" --namespace airflow - start_end::group_end } diff --git a/scripts/ci/libraries/_local_mounts.sh b/scripts/ci/libraries/_local_mounts.sh index 492514059df34..9f0675243794a 100644 --- a/scripts/ci/libraries/_local_mounts.sh +++ b/scripts/ci/libraries/_local_mounts.sh @@ -43,6 +43,7 @@ function local_mounts::generate_local_mounts_list { "$prefix"hooks:/opt/airflow/hooks:cached "$prefix"logs:/root/airflow/logs:cached "$prefix"pylintrc:/opt/airflow/pylintrc:cached + "$prefix"pylintrc-tests:/opt/airflow/pylintrc-tests:cached "$prefix"pyproject.toml:/opt/airflow/pyproject.toml:cached "$prefix"pytest.ini:/opt/airflow/pytest.ini:cached "$prefix"scripts:/opt/airflow/scripts:cached diff --git a/scripts/ci/libraries/_md5sum.sh b/scripts/ci/libraries/_md5sum.sh index 7c9dd4af2d019..052e98d51bd6e 100644 --- a/scripts/ci/libraries/_md5sum.sh +++ b/scripts/ci/libraries/_md5sum.sh @@ -28,9 +28,9 @@ function md5sum::calculate_file_md5sum { mkdir -pv "${MD5SUM_CACHE_DIR}" MD5SUM=$(md5sum "${FILE}") local MD5SUM_FILE - MD5SUM_FILE="${MD5SUM_CACHE_DIR}"/$(basename "${FILE}").md5sum + MD5SUM_FILE="${MD5SUM_CACHE_DIR}"/$(basename "$(dirname "${FILE}")")-$(basename "${FILE}").md5sum local MD5SUM_FILE_NEW - MD5SUM_FILE_NEW=${CACHE_TMP_FILE_DIR}/$(basename "${FILE}").md5sum.new + MD5SUM_FILE_NEW=${CACHE_TMP_FILE_DIR}/$(basename "$(dirname "${FILE}")")-$(basename "${FILE}").md5sum.new echo "${MD5SUM}" > "${MD5SUM_FILE_NEW}" local RET_CODE=0 if [[ ! -f "${MD5SUM_FILE}" ]]; then @@ -40,7 +40,13 @@ function md5sum::calculate_file_md5sum { diff "${MD5SUM_FILE_NEW}" "${MD5SUM_FILE}" >/dev/null RES=$? if [[ "${RES}" != "0" ]]; then - verbosity::print_info "The md5sum changed for ${FILE}" + verbosity::print_info "The md5sum changed for ${FILE}: was $(cat "${MD5SUM_FILE}") now it is $(cat "${MD5SUM_FILE_NEW}")" + if [[ ${CI} == "true" ]]; then + echo "${COLOR_RED}The file has changed: ${FILE}${COLOR_RESET}" + echo "${COLOR_BLUE}==============================${COLOR_RESET}" + cat "${FILE}" + echo "${COLOR_BLUE}==============================${COLOR_RESET}" + fi RET_CODE=1 fi fi @@ -56,12 +62,12 @@ function md5sum::move_file_md5sum { local MD5SUM_FILE local MD5SUM_CACHE_DIR="${BUILD_CACHE_DIR}/${BRANCH_NAME}/${PYTHON_MAJOR_MINOR_VERSION}/${THE_IMAGE_TYPE}" mkdir -pv "${MD5SUM_CACHE_DIR}" - MD5SUM_FILE="${MD5SUM_CACHE_DIR}"/$(basename "${FILE}").md5sum + MD5SUM_FILE="${MD5SUM_CACHE_DIR}"/$(basename "$(dirname "${FILE}")")-$(basename "${FILE}").md5sum local MD5SUM_FILE_NEW - MD5SUM_FILE_NEW=${CACHE_TMP_FILE_DIR}/$(basename "${FILE}").md5sum.new + MD5SUM_FILE_NEW=${CACHE_TMP_FILE_DIR}/$(basename "$(dirname "${FILE}")")-$(basename "${FILE}").md5sum.new if [[ -f "${MD5SUM_FILE_NEW}" ]]; then mv "${MD5SUM_FILE_NEW}" "${MD5SUM_FILE}" - verbosity::print_info "Updated md5sum file ${MD5SUM_FILE} for ${FILE}." 
+ verbosity::print_info "Updated md5sum file ${MD5SUM_FILE} for ${FILE}: $(cat "${MD5SUM_FILE}")" fi } diff --git a/scripts/ci/libraries/_parallel.sh b/scripts/ci/libraries/_parallel.sh new file mode 100644 index 0000000000000..935f4658ec4f0 --- /dev/null +++ b/scripts/ci/libraries/_parallel.sh @@ -0,0 +1,219 @@ +#!/usr/bin/env bash +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + + +# Require SEMAPHORE_NAME + +function parallel::initialize_monitoring() { + PARALLEL_MONITORED_DIR="$(mktemp -d)" + export PARALLEL_MONITORED_DIR +} + +function parallel::make_sure_gnu_parallel_is_installed() { + start_end::group_start "Making sure GNU Parallels is installed" + echo + echo "Making sure you have GNU parallel installed" + echo + echo "You might need to provide root password if you do not have it" + echo + (command -v parallel || apt install parallel || sudo apt install parallel || brew install parallel) >/dev/null + start_end::group_end "Making sure GNU Parallels is installed" +} + +function parallel::kill_stale_semaphore_locks() { + local pid + echo + echo "${COLOR_BLUE}Killing stale semaphore locks${COLOR_RESET}" + echo + for s in "${HOME}/.parallel/semaphores/id-${SEMAPHORE_NAME}/"*@* + do + pid="${s%%@*}" + if [[ ${pid} != "-*" ]]; then + kill -15 -- -"$(basename "${s%%@*}")" 2>/dev/null || true + rm -f "${s}" 2>/dev/null + fi + done +} + + +# Periodical loop to print summary of all the processes run by parallel +function parallel::monitor_loop() { + trap 'exit 0' TERM + echo + echo "Start monitoring of parallel execution in ${PARALLEL_MONITORED_DIR} directory." + echo + local progress_report_number=1 + local start_time + local end_time + start_time=${SECONDS} + while true + do + echo + echo "${COLOR_YELLOW}########### Monitoring progress start: ${progress_report_number} #################${COLOR_RESET}" + echo + echo "${COLOR_BLUE}########### STATISTICS #################" + docker_engine_resources::print_overall_stats + echo "########### STATISTICS #################${COLOR_RESET}" + for directory in "${PARALLEL_MONITORED_DIR}"/*/* + do + parallel_process=$(basename "${directory}") + + echo "${COLOR_BLUE}### The last lines for ${parallel_process} process: ${directory}/stdout ###${COLOR_RESET}" + echo + tail -2 "${directory}/stdout" || true + echo + echo + done + echo + echo "${COLOR_YELLOW}########### Monitoring progress end: ${progress_report_number} #################${COLOR_RESET}" + echo + end_time=${SECONDS} + echo "${COLOR_YELLOW}############## $((end_time - start_time)) seconds passed since start ####################### ${COLOR_RESET}" + sleep 10 + progress_report_number=$((progress_report_number + 1)) + done +} + +# Monitors progress of parallel execution and periodically summarizes stdout entries created by +# the parallel execution. 
Sets PARALLEL_MONITORED_DIR which should be passed as --results +# parameter to GNU parallel execution. +function parallel::monitor_progress() { + echo "Parallel results are stored in: ${PARALLEL_MONITORED_DIR}" + parallel::monitor_loop 2>/dev/null & + + # shellcheck disable=SC2034 + PARALLEL_MONITORING_PID=$! + # shellcheck disable=SC2016 + traps::add_trap 'parallel::kill_monitor' EXIT +} + + +function parallel::kill_monitor() { + kill ${PARALLEL_MONITORING_PID} >/dev/null 2>&1 || true +} + +# Outputs logs for successful test type +# $1 test type +function parallel::output_log_for_successful_job(){ + local job=$1 + local log_dir="${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}/${job}" + start_end::group_start "${COLOR_GREEN}Output for successful ${job}${COLOR_RESET}" + echo "${COLOR_GREEN}##### The ${job} succeeded ##### ${COLOR_RESET}" + echo + cat "${log_dir}"/stdout + echo + echo "${COLOR_GREEN}##### The ${job} succeeded ##### ${COLOR_RESET}" + echo + start_end::group_end +} + +# Outputs logs for failed test type +# $1 test type +function parallel::output_log_for_failed_job(){ + local job=$1 + local log_dir="${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}/${job}" + start_end::group_start "${COLOR_RED}Output for failed ${job}${COLOR_RESET}" + echo "${COLOR_RED}##### The ${job} failed ##### ${COLOR_RESET}" + echo + cat "${log_dir}"/stdout + echo + echo + echo "${COLOR_RED}##### The ${job} failed ##### ${COLOR_RESET}" + echo + start_end::group_end +} + +# Prints summary of jobs and returns status: +# 0 - all jobs succeeded (SKIPPED_FAILED_JOBS is not counted) +# >0 - number of failed jobs (except Quarantine) +function parallel::print_job_summary_and_return_status_code() { + local return_code="0" + local job + for job_path in "${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}/"* + do + job="$(basename "${job_path}")" + status=$(cat "${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}/${job}/status" || true) + if [[ ${status} == "0" ]]; then + parallel::output_log_for_successful_job "${job}" + else + parallel::output_log_for_failed_job "${job}" + # SKIPPED_FAILED_JOB failure does not trigger whole test failure + if [[ ${SKIPPED_FAILED_JOB=} != "${job}" ]]; then + return_code=$((return_code + 1)) + fi + fi + done + return "${return_code}" +} + +function parallel::kill_all_running_docker_containers() { + echo + echo "${COLOR_BLUE}Kill all running docker containers${COLOR_RESET}" + echo + # shellcheck disable=SC2046 + docker kill $(docker ps -q) || true +} + +function parallel::system_prune_docker() { + echo + echo "${COLOR_BLUE}System-prune docker${COLOR_RESET}" + echo + docker_v system prune --force --volumes + echo +}
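+
+# A minimal sketch of how these helpers are meant to be combined with GNU
+# parallel semaphores. The job name ("Core") and the command are hypothetical
+# placeholders - the real callers live in the CI scripts that source this
+# library. Each job is expected to leave its stdout and its exit code in the
+# layout that parallel::monitor_loop and
+# parallel::print_job_summary_and_return_status_code read:
+#
+#   export SEMAPHORE_NAME="tests"
+#   parallel::make_sure_gnu_parallel_is_installed
+#   parallel::initialize_monitoring
+#   parallel::monitor_progress
+#   job_dir="${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}/Core"
+#   mkdir -p "${job_dir}"
+#   parallel --semaphore --id "${SEMAPHORE_NAME}" --jobs 2 \
+#       "run_core_tests >${job_dir}/stdout 2>&1; echo \$? >${job_dir}/status"
+#   parallel --semaphore --id "${SEMAPHORE_NAME}" --wait
+#   parallel::print_job_summary_and_return_status_code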
+ +# Cleans up runner before test execution. +# * Kills all running docker containers +# * System prune to clean all the temporary/unnamed images and left-over volumes +# * Print information about available space and memory +# * Kills stale semaphore locks +function parallel::cleanup_runner() { + start_end::group_start "Cleanup runner" + parallel::kill_all_running_docker_containers + parallel::system_prune_docker + docker_engine_resources::get_available_memory_in_docker + docker_engine_resources::get_available_cpus_in_docker + docker_engine_resources::get_available_disk_space_in_docker + docker_engine_resources::print_overall_stats + parallel::kill_stale_semaphore_locks + start_end::group_end +} + +function parallel::make_sure_python_versions_are_specified() { + if [[ -z "${CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING=}" ]]; then + echo + echo "${COLOR_RED}The CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING variable must be set and list the Python versions to use!${COLOR_RESET}" + echo + exit 1 + fi + echo + echo "${COLOR_BLUE}Running parallel builds for these Python versions: ${CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING}${COLOR_RESET}" + echo +} + +function parallel::make_sure_kubernetes_versions_are_specified() { + if [[ -z "${CURRENT_KUBERNETES_VERSIONS_AS_STRING=}" ]]; then + echo + echo "${COLOR_RED}The CURRENT_KUBERNETES_VERSIONS_AS_STRING variable must be set and list the K8S versions to use!${COLOR_RESET}" + echo + exit 1 + fi + echo + echo "${COLOR_BLUE}Running parallel builds for these Kubernetes versions: ${CURRENT_KUBERNETES_VERSIONS_AS_STRING}${COLOR_RESET}" + echo +} diff --git a/scripts/ci/libraries/_push_pull_remove_images.sh b/scripts/ci/libraries/_push_pull_remove_images.sh index 4c57ebf9e8c9f..5b16240428810 100644 --- a/scripts/ci/libraries/_push_pull_remove_images.sh +++ b/scripts/ci/libraries/_push_pull_remove_images.sh @@ -25,7 +25,7 @@ function push_pull_remove_images::push_image_with_retries() { set +e echo echo "Trying to push the image ${1}. Number of try: ${try_num}" - docker push "${1}" + docker_v push "${1}" local res=$? set -e if [[ ${res} != "0" ]]; then @@ -61,7 +61,7 @@ function push_pull_remove_images::pull_image_if_not_present_or_forced() { echo echo "Pulling the image ${IMAGE_TO_PULL}" echo - docker pull "${IMAGE_TO_PULL}" + docker_v pull "${IMAGE_TO_PULL}" EXIT_VALUE="$?" if [[ ${EXIT_VALUE} != "0" && ${FAIL_ON_GITHUB_DOCKER_PULL_ERROR} == "true" ]]; then echo @@ -97,44 +97,69 @@ function push_pull_remove_images::pull_image_github_dockerhub() { set +e if push_pull_remove_images::pull_image_if_not_present_or_forced "${GITHUB_IMAGE}"; then # Tag the image to be the DockerHub one - docker tag "${GITHUB_IMAGE}" "${DOCKERHUB_IMAGE}" + docker_v tag "${GITHUB_IMAGE}" "${DOCKERHUB_IMAGE}" else push_pull_remove_images::pull_image_if_not_present_or_forced "${DOCKERHUB_IMAGE}" fi set -e } +# Rebuilds python base image from the latest available Python version +function push_pull_remove_images::rebuild_python_base_image() { + echo + echo "Rebuilding ${AIRFLOW_PYTHON_BASE_IMAGE} from latest ${PYTHON_BASE_IMAGE}" + echo + docker_v pull "${PYTHON_BASE_IMAGE}" + echo "FROM ${PYTHON_BASE_IMAGE}" | \ + docker_v build \ + --label "org.opencontainers.image.source=https://github.com/${GITHUB_REPOSITORY}" \ + -t "${AIRFLOW_PYTHON_BASE_IMAGE}" - +} + +# Pulls the base Python image.
This image is used as base for CI and PROD images, depending on the parameters used: +# +# * if FORCE_PULL_BASE_PYTHON_IMAGE != false, then it rebuild the image using latest Python image available +# and adds `org.opencontainers.image.source` label to it, so that it is linked to Airflow +# repository when we push it to GHCR registry +# * Otherwise it pulls the Python base image from either GitHub registry or from DockerHub +# depending on USE_GITHUB_REGISTRY variable. In case we pull specific build image (via suffix) +# it will pull the right image using the specified suffix +function push_pull_remove_images::pull_base_python_image() { + if [[ ${FORCE_PULL_BASE_PYTHON_IMAGE} == "true" ]] ; then + push_pull_remove_images::rebuild_python_base_image + return + fi + echo + echo "Docker pulling base python image. Upgrade to newer deps: ${UPGRADE_TO_NEWER_DEPENDENCIES}" + echo + if [[ -n ${DETECTED_TERMINAL=} ]]; then + echo -n "Docker pulling base python image. Upgrade to newer deps: ${UPGRADE_TO_NEWER_DEPENDENCIES} +" > "${DETECTED_TERMINAL}" + fi + if [[ ${USE_GITHUB_REGISTRY} == "true" ]]; then + PYTHON_TAG_SUFFIX="" + if [[ ${GITHUB_REGISTRY_PULL_IMAGE_TAG} != "latest" ]]; then + PYTHON_TAG_SUFFIX="-${GITHUB_REGISTRY_PULL_IMAGE_TAG}" + fi + push_pull_remove_images::pull_image_github_dockerhub "${AIRFLOW_PYTHON_BASE_IMAGE}" \ + "${GITHUB_REGISTRY_PYTHON_BASE_IMAGE}${PYTHON_TAG_SUFFIX}" + else + docker_v pull "${AIRFLOW_PYTHON_BASE_IMAGE}" + fi +} + # Pulls CI image in case caching strategy is "pulled" and the image needs to be pulled function push_pull_remove_images::pull_ci_images_if_needed() { + local python_image_hash + python_image_hash=$(docker images -q "${AIRFLOW_PYTHON_BASE_IMAGE}" 2> /dev/null || true) + if [[ -z "${python_image_hash=}" || "${FORCE_PULL_IMAGES}" == "true" || \ + ${FORCE_PULL_BASE_PYTHON_IMAGE} == "true" ]]; then + push_pull_remove_images::pull_base_python_image + fi if [[ "${DOCKER_CACHE}" == "pulled" ]]; then - local python_image_hash - python_image_hash=$(docker images -q "${AIRFLOW_CI_PYTHON_IMAGE}" 2> /dev/null || true) - if [[ -z "${python_image_hash=}" ]]; then - FORCE_PULL_IMAGES="true" - fi - if [[ "${FORCE_PULL_IMAGES}" == "true" ]]; then - echo - echo "Force pull base image ${PYTHON_BASE_IMAGE}" - echo - if [[ -n ${DETECTED_TERMINAL=} ]]; then - echo -n " -Docker pulling ${PYTHON_BASE_IMAGE}. - " > "${DETECTED_TERMINAL}" - fi - if [[ ${USE_GITHUB_REGISTRY} == "true" ]]; then - PYTHON_TAG_SUFFIX="" - if [[ ${GITHUB_REGISTRY_PULL_IMAGE_TAG} != "latest" ]]; then - PYTHON_TAG_SUFFIX="-${GITHUB_REGISTRY_PULL_IMAGE_TAG}" - fi - push_pull_remove_images::pull_image_github_dockerhub "${PYTHON_BASE_IMAGE}" "${GITHUB_REGISTRY_PYTHON_BASE_IMAGE}${PYTHON_TAG_SUFFIX}" - else - docker pull "${AIRFLOW_CI_PYTHON_IMAGE}" - docker tag "${AIRFLOW_CI_PYTHON_IMAGE}" "${PYTHON_BASE_IMAGE}" - fi - echo - fi if [[ ${USE_GITHUB_REGISTRY} == "true" ]]; then - push_pull_remove_images::pull_image_github_dockerhub "${AIRFLOW_CI_IMAGE}" "${GITHUB_REGISTRY_AIRFLOW_CI_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}" + push_pull_remove_images::pull_image_github_dockerhub "${AIRFLOW_CI_IMAGE}" \ + "${GITHUB_REGISTRY_AIRFLOW_CI_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}" else push_pull_remove_images::pull_image_if_not_present_or_forced "${AIRFLOW_CI_IMAGE}" fi @@ -144,28 +169,20 @@ Docker pulling ${PYTHON_BASE_IMAGE}. 
# Pulls PROD image in case caching strategy is "pulled" and the image needs to be pulled function push_pull_remove_images::pull_prod_images_if_needed() { + local python_image_hash + python_image_hash=$(docker images -q "${AIRFLOW_PYTHON_BASE_IMAGE}" 2> /dev/null || true) + if [[ -z "${python_image_hash=}" || "${FORCE_PULL_IMAGES}" == "true" || \ + ${FORCE_PULL_BASE_PYTHON_IMAGE} == "true" ]]; then + push_pull_remove_images::pull_base_python_image + fi if [[ "${DOCKER_CACHE}" == "pulled" ]]; then - if [[ "${FORCE_PULL_IMAGES}" == "true" ]]; then - echo - echo "Force pull base image ${PYTHON_BASE_IMAGE}" - echo - if [[ ${USE_GITHUB_REGISTRY} == "true" ]]; then - PYTHON_TAG_SUFFIX="" - if [[ ${GITHUB_REGISTRY_PULL_IMAGE_TAG} != "latest" ]]; then - PYTHON_TAG_SUFFIX="-${GITHUB_REGISTRY_PULL_IMAGE_TAG}" - fi - push_pull_remove_images::pull_image_github_dockerhub "${PYTHON_BASE_IMAGE}" "${GITHUB_REGISTRY_PYTHON_BASE_IMAGE}${PYTHON_TAG_SUFFIX}" - else - docker pull "${AIRFLOW_CI_PYTHON_IMAGE}" - docker tag "${AIRFLOW_CI_PYTHON_IMAGE}" "${PYTHON_BASE_IMAGE}" - fi - echo - fi if [[ ${USE_GITHUB_REGISTRY} == "true" ]]; then # "Build" segment of production image - push_pull_remove_images::pull_image_github_dockerhub "${AIRFLOW_PROD_BUILD_IMAGE}" "${GITHUB_REGISTRY_AIRFLOW_PROD_BUILD_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}" + push_pull_remove_images::pull_image_github_dockerhub "${AIRFLOW_PROD_BUILD_IMAGE}" \ + "${GITHUB_REGISTRY_AIRFLOW_PROD_BUILD_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}" # "Main" segment of production image - push_pull_remove_images::pull_image_github_dockerhub "${AIRFLOW_PROD_IMAGE}" "${GITHUB_REGISTRY_AIRFLOW_PROD_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}" + push_pull_remove_images::pull_image_github_dockerhub "${AIRFLOW_PROD_IMAGE}" \ + "${GITHUB_REGISTRY_AIRFLOW_PROD_IMAGE}:${GITHUB_REGISTRY_PULL_IMAGE_TAG}" else push_pull_remove_images::pull_image_if_not_present_or_forced "${AIRFLOW_PROD_BUILD_IMAGE}" push_pull_remove_images::pull_image_if_not_present_or_forced "${AIRFLOW_PROD_IMAGE}" @@ -175,47 +192,48 @@ function push_pull_remove_images::pull_prod_images_if_needed() { # Pushes Ci images and the manifest to the registry in DockerHub. 
function push_pull_remove_images::push_ci_images_to_dockerhub() { + push_pull_remove_images::push_image_with_retries "${AIRFLOW_PYTHON_BASE_IMAGE}" push_pull_remove_images::push_image_with_retries "${AIRFLOW_CI_IMAGE}" - docker tag "${AIRFLOW_CI_LOCAL_MANIFEST_IMAGE}" "${AIRFLOW_CI_REMOTE_MANIFEST_IMAGE}" + docker_v tag "${AIRFLOW_CI_LOCAL_MANIFEST_IMAGE}" "${AIRFLOW_CI_REMOTE_MANIFEST_IMAGE}" push_pull_remove_images::push_image_with_retries "${AIRFLOW_CI_REMOTE_MANIFEST_IMAGE}" if [[ -n ${DEFAULT_CI_IMAGE=} ]]; then # Only push default image to DockerHub registry if it is defined push_pull_remove_images::push_image_with_retries "${DEFAULT_CI_IMAGE}" fi - # Also push python image so that we use the same image as the CI image it was built with - docker tag "${PYTHON_BASE_IMAGE}" "${AIRFLOW_CI_PYTHON_IMAGE}" - push_pull_remove_images::push_image_with_retries "${AIRFLOW_CI_PYTHON_IMAGE}" +} + + +# Push image to GitHub registry with the push tag: +# "${GITHUB_RUN_ID}" - in case of pull-request triggered 'workflow_run' builds +# "latest" - in case of push builds +# Push python image to GitHub registry with the push tag: +# X.Y-slim-buster-"${GITHUB_RUN_ID}" - in case of pull-request triggered 'workflow_run' builds +# X.Y-slim-buster - in case of push builds +function push_pull_remove_images::push_python_image_to_github() { + PYTHON_TAG_SUFFIX="" + if [[ ${GITHUB_REGISTRY_PUSH_IMAGE_TAG} != "latest" ]]; then + PYTHON_TAG_SUFFIX="-${GITHUB_REGISTRY_PUSH_IMAGE_TAG}" + fi + docker_v tag "${AIRFLOW_PYTHON_BASE_IMAGE}" \ + "${GITHUB_REGISTRY_PYTHON_BASE_IMAGE}${PYTHON_TAG_SUFFIX}" + push_pull_remove_images::push_image_with_retries \ + "${GITHUB_REGISTRY_PYTHON_BASE_IMAGE}${PYTHON_TAG_SUFFIX}" } # Pushes Ci images and their tags to registry in GitHub function push_pull_remove_images::push_ci_images_to_github() { - # Push image to GitHub registry with the push tag: - # "${GITHUB_RUN_ID}" - in case of pull-request triggered 'workflow_run' builds - # "latest" - in case of push builds + if [[ "${PUSH_PYTHON_BASE_IMAGE=}" != "false" ]]; then + push_pull_remove_images::push_python_image_to_github + fi AIRFLOW_CI_TAGGED_IMAGE="${GITHUB_REGISTRY_AIRFLOW_CI_IMAGE}:${GITHUB_REGISTRY_PUSH_IMAGE_TAG}" - docker tag "${AIRFLOW_CI_IMAGE}" "${AIRFLOW_CI_TAGGED_IMAGE}" + docker_v tag "${AIRFLOW_CI_IMAGE}" "${AIRFLOW_CI_TAGGED_IMAGE}" push_pull_remove_images::push_image_with_retries "${AIRFLOW_CI_TAGGED_IMAGE}" if [[ -n ${GITHUB_SHA=} ]]; then # Also push image to GitHub registry with commit SHA AIRFLOW_CI_SHA_IMAGE="${GITHUB_REGISTRY_AIRFLOW_CI_IMAGE}:${COMMIT_SHA}" - docker tag "${AIRFLOW_CI_IMAGE}" "${AIRFLOW_CI_SHA_IMAGE}" + docker_v tag "${AIRFLOW_CI_IMAGE}" "${AIRFLOW_CI_SHA_IMAGE}" push_pull_remove_images::push_image_with_retries "${AIRFLOW_CI_SHA_IMAGE}" fi - # Push python image to GitHub registry with the push tag: - # X.Y-slim-buster-"${GITHUB_RUN_ID}" - in case of pull-request triggered 'workflow_run' builds - # X.Y-slim-buster - in case of push builds - PYTHON_TAG_SUFFIX="" - if [[ ${GITHUB_REGISTRY_PUSH_IMAGE_TAG} != "latest" ]]; then - PYTHON_TAG_SUFFIX="-${GITHUB_REGISTRY_PUSH_IMAGE_TAG}" - fi - - # Label the python image for GCR, so that it is linked to the current project it is build in - echo "FROM ${PYTHON_BASE_IMAGE}" | \ - docker build --label "org.opencontainers.image.source=https://github.com/${GITHUB_REPOSITORY}" \ - -t "${GITHUB_REGISTRY_PYTHON_BASE_IMAGE}${PYTHON_TAG_SUFFIX}" - - - push_pull_remove_images::push_image_with_retries \ - "${GITHUB_REGISTRY_PYTHON_BASE_IMAGE}${PYTHON_TAG_SUFFIX}" } 
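A minimal sketch of the tag names produced by push_pull_remove_images::push_python_image_to_github above. All concrete values are hypothetical and merely follow the naming convention described earlier in this file; GITHUB_REGISTRY_PYTHON_BASE_IMAGE already carries the X.Y-slim-buster tag, so the run id is appended as a tag suffix:

    GITHUB_REGISTRY_PYTHON_BASE_IMAGE="ghcr.io/apache/airflow-python-v2:3.6-slim-buster"
    GITHUB_REGISTRY_PUSH_IMAGE_TAG="565533261"  # hypothetical workflow_run id; "latest" for push builds
    PYTHON_TAG_SUFFIX=""
    if [[ ${GITHUB_REGISTRY_PUSH_IMAGE_TAG} != "latest" ]]; then
        PYTHON_TAG_SUFFIX="-${GITHUB_REGISTRY_PUSH_IMAGE_TAG}"
    fi
    echo "${GITHUB_REGISTRY_PYTHON_BASE_IMAGE}${PYTHON_TAG_SUFFIX}"
    # prints: ghcr.io/apache/airflow-python-v2:3.6-slim-buster-565533261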
@@ -230,6 +248,7 @@ function push_pull_remove_images::push_ci_images() { # Pushes PROD image to registry in DockerHub function push_pull_remove_images::push_prod_images_to_dockerhub () { + push_pull_remove_images::push_image_with_retries "${AIRFLOW_PYTHON_BASE_IMAGE}" # Prod image push_pull_remove_images::push_image_with_retries "${AIRFLOW_PROD_IMAGE}" if [[ -n ${DEFAULT_PROD_IMAGE=} ]]; then @@ -241,23 +260,23 @@ function push_pull_remove_images::push_prod_images_to_dockerhub () { } # Pushes PROD image to and their tags to registry in GitHub +# Push image to GitHub registry with the chosen push tag +# the PUSH tag might be: +# "${GITHUB_RUN_ID}" - in case of pull-request triggered 'workflow_run' builds +# "latest" - in case of push builds function push_pull_remove_images::push_prod_images_to_github () { - # Push image to GitHub registry with chosen push tag - # the PUSH tag might be: - # "${GITHUB_RUN_ID}" - in case of pull-request triggered 'workflow_run' builds - # "latest" - in case of push builds AIRFLOW_PROD_TAGGED_IMAGE="${GITHUB_REGISTRY_AIRFLOW_PROD_IMAGE}:${GITHUB_REGISTRY_PUSH_IMAGE_TAG}" - docker tag "${AIRFLOW_PROD_IMAGE}" "${AIRFLOW_PROD_TAGGED_IMAGE}" + docker_v tag "${AIRFLOW_PROD_IMAGE}" "${AIRFLOW_PROD_TAGGED_IMAGE}" push_pull_remove_images::push_image_with_retries "${GITHUB_REGISTRY_AIRFLOW_PROD_IMAGE}:${GITHUB_REGISTRY_PUSH_IMAGE_TAG}" if [[ -n ${COMMIT_SHA=} ]]; then # Also push image to GitHub registry with commit SHA AIRFLOW_PROD_SHA_IMAGE="${GITHUB_REGISTRY_AIRFLOW_PROD_IMAGE}:${COMMIT_SHA}" - docker tag "${AIRFLOW_PROD_IMAGE}" "${AIRFLOW_PROD_SHA_IMAGE}" + docker_v tag "${AIRFLOW_PROD_IMAGE}" "${AIRFLOW_PROD_SHA_IMAGE}" push_pull_remove_images::push_image_with_retries "${AIRFLOW_PROD_SHA_IMAGE}" fi # Also push prod build image AIRFLOW_PROD_BUILD_TAGGED_IMAGE="${GITHUB_REGISTRY_AIRFLOW_PROD_BUILD_IMAGE}:${GITHUB_REGISTRY_PUSH_IMAGE_TAG}" - docker tag "${AIRFLOW_PROD_BUILD_IMAGE}" "${AIRFLOW_PROD_BUILD_TAGGED_IMAGE}" + docker_v tag "${AIRFLOW_PROD_BUILD_IMAGE}" "${AIRFLOW_PROD_BUILD_TAGGED_IMAGE}" push_pull_remove_images::push_image_with_retries "${AIRFLOW_PROD_BUILD_TAGGED_IMAGE}" } @@ -272,74 +291,73 @@ function push_pull_remove_images::push_prod_images() { fi } -# waits for an image to be available in GitHub Packages -function push_pull_remove_images::wait_for_image_in_github_packages() { +# waits for an image to be available in GitHub Packages. Should be run with `set +e` +# The build automatically determines which registry to use based on the images available +function push_pull_remove_images::check_for_image_in_github_packages() { local github_repository_lowercase github_repository_lowercase="$(echo "${GITHUB_REPOSITORY}" |tr '[:upper:]' '[:lower:]')" local github_api_endpoint - github_api_endpoint="https://${GITHUB_REGISTRY}/v2/${github_repository_lowercase}" + github_api_endpoint="https://docker.pkg.github.com/v2/${github_repository_lowercase}" local image_name_in_github_registry="${1}" local image_tag_in_github_registry=${2} - - echo - echo "Waiting for ${GITHUB_REPOSITORY}/${image_name_in_github_registry}:${image_tag_in_github_registry} image" - echo - - GITHUB_API_CALL="${github_api_endpoint}/${image_name_in_github_registry}/manifests/${image_tag_in_github_registry}" - while true; do - http_status=$(curl --silent --output "${OUTPUT_LOG}" --write-out "%{http_code}" \ - --connect-timeout 60 --max-time 60 \ - -X GET "${GITHUB_API_CALL}" -u "${GITHUB_USERNAME}:${GITHUB_TOKEN}") - if [[ ${http_status} == "200" ]]; then - echo "${COLOR_GREEN}OK.
${COLOR_RESET}" - break - else - echo "${COLOR_YELLOW}Still waiting - status code ${http_status}!${COLOR_RESET}" - cat "${OUTPUT_LOG}" - fi - sleep 60 - done - verbosity::print_info "Found ${image_name_in_github_registry}:${image_tag_in_github_registry} image" + local image_to_wait_for=${GITHUB_REPOSITORY}/${image_name_in_github_registry}:${image_tag_in_github_registry} + local github_api_call + github_api_call="${github_api_endpoint}/${image_name_in_github_registry}/manifests/${image_tag_in_github_registry}" + echo "GitHub Packages: checking for ${image_to_wait_for} via ${github_api_call}!" + http_status=$(curl --silent --output "${OUTPUT_LOG}" --write-out "%{http_code}" \ + --connect-timeout 60 --max-time 60 \ + -X GET "${github_api_call}" -u "${GITHUB_USERNAME}:${GITHUB_TOKEN}") + if [[ ${http_status} == "200" ]]; then + echo "Image: ${image_to_wait_for} found in GitHub Packages: ${COLOR_GREEN}OK. ${COLOR_RESET}" + echo "::set-output name=githubRegistry::docker.pkg.github.com" + echo + echo "Setting githubRegistry output to docker.pkg.github.com" + echo + return 0 + else + cat "${OUTPUT_LOG}" + echo "${COLOR_YELLOW}Still waiting. Status code ${http_status}!${COLOR_RESET}" + return 1 + fi } - -# waits for an image to be available in GitHub Container Registry -function push_pull_remove_images::wait_for_image_in_github_container_registry() { +# waits for an image to be available in GitHub Container Registry. Should be run with `set +e` +function push_pull_remove_images::check_for_image_in_github_container_registry() { local image_name_in_github_registry="${1}" local image_tag_in_github_registry=${2} - local image_to_wait_for="${GITHUB_REGISTRY}/${GITHUB_REPOSITORY}-${image_name_in_github_registry}:${image_tag_in_github_registry}" - echo - echo "Waiting for ${GITHUB_REGISTRY}/${GITHUB_REPOSITORY}-${image_name_in_github_registry}:${image_tag_in_github_registry} image" - echo + local image_to_wait_for="ghcr.io/${GITHUB_REPOSITORY}-${image_name_in_github_registry}:${image_tag_in_github_registry}" + echo "GitHub Container Registry: checking for ${image_to_wait_for} via docker manifest inspect!" + docker_v manifest inspect "${image_to_wait_for}" + local res=$? + if [[ ${res} == "0" ]]; then + echo "Image: ${image_to_wait_for} found in Container Registry: ${COLOR_GREEN}OK.${COLOR_RESET}" + echo + echo "Setting githubRegistry output to ghcr.io" + echo + echo "::set-output name=githubRegistry::ghcr.io" + return 0 + else + echo "${COLOR_YELLOW}Still waiting. Not found!${COLOR_RESET}" + return 1 + fi +} + +# waits for an image to be available in the GitHub registry +function push_pull_remove_images::wait_for_github_registry_image() { set +e - while true; do - docker manifest inspect "${image_to_wait_for}" - local res=$? 
- if [[ ${res} == "0" ]]; then - echo "${COLOR_GREEN}OK.${COLOR_RESET}" + echo "Waiting for GitHub registry image:" "${@}" + while true + do + if push_pull_remove_images::check_for_image_in_github_container_registry "${@}"; then + break + fi + if push_pull_remove_images::check_for_image_in_github_packages "${@}"; then break - else - echo "${COLOR_YELLOW}Still waiting for ${image_to_wait_for}!${COLOR_RESET}" fi sleep 30 done set -e - verbosity::print_info "Found ${image_name_in_github_registry}:${image_tag_in_github_registry} image" -} - -# waits for an image to be available in the GitHub registry -function push_pull_remove_images::wait_for_github_registry_image() { - if [[ ${GITHUB_REGISTRY} == "ghcr.io" ]]; then - push_pull_remove_images::wait_for_image_in_github_container_registry "${@}" - elif [[ ${GITHUB_REGISTRY} == "docker.pkg.github.com" ]]; then - push_pull_remove_images::wait_for_image_in_github_packages "${@}" - else - echo - echo "${COLOR_RED}ERROR: Bad value of '${GITHUB_REGISTRY}'. Should be either 'ghcr.io' or 'docker.pkg.github.com'!${COLOR_RESET}" - echo - exit 1 - fi } function push_pull_remove_images::check_if_github_registry_wait_for_image_enabled() { diff --git a/scripts/ci/libraries/_runs.sh b/scripts/ci/libraries/_runs.sh index 5f4569f38bc96..45c4c2bcbc1ee 100644 --- a/scripts/ci/libraries/_runs.sh +++ b/scripts/ci/libraries/_runs.sh @@ -19,7 +19,7 @@ # Docker command to build documentation function runs::run_docs() { start_end::group_start "Run build docs" - docker run "${EXTRA_DOCKER_FLAGS[@]}" -t \ + docker_v run "${EXTRA_DOCKER_FLAGS[@]}" -t \ -e "GITHUB_ACTIONS=${GITHUB_ACTIONS="false"}" \ --entrypoint "/usr/local/bin/dumb-init" \ "${AIRFLOW_CI_IMAGE}" \ @@ -30,7 +30,7 @@ function runs::run_docs() { # Docker command to generate constraint files.
function runs::run_generate_constraints() { start_end::group_start "Run generate constraints" - docker run "${EXTRA_DOCKER_FLAGS[@]}" \ + docker_v run "${EXTRA_DOCKER_FLAGS[@]}" \ --entrypoint "/usr/local/bin/dumb-init" \ "${AIRFLOW_CI_IMAGE}" \ "--" "/opt/airflow/scripts/in_container/run_generate_constraints.sh" @@ -40,7 +40,7 @@ function runs::run_generate_constraints() { # Docker command to prepare airflow packages function runs::run_prepare_airflow_packages() { start_end::group_start "Run prepare airflow packages" - docker run "${EXTRA_DOCKER_FLAGS[@]}" \ + docker_v run "${EXTRA_DOCKER_FLAGS[@]}" \ --entrypoint "/usr/local/bin/dumb-init" \ -t \ -v "${AIRFLOW_SOURCES}:/opt/airflow" \ @@ -53,7 +53,7 @@ function runs::run_prepare_airflow_packages() { # Docker command to prepare provider packages function runs::run_prepare_provider_packages() { # No group here - groups are added internally - docker run "${EXTRA_DOCKER_FLAGS[@]}" \ + docker_v run "${EXTRA_DOCKER_FLAGS[@]}" \ --entrypoint "/usr/local/bin/dumb-init" \ -t \ -v "${AIRFLOW_SOURCES}:/opt/airflow" \ @@ -62,12 +62,12 @@ function runs::run_prepare_provider_packages() { } # Docker command to generate release notes for provider packages -function runs::run_prepare_provider_readme() { +function runs::run_prepare_provider_documentation() { # No group here - groups are added internally - docker run "${EXTRA_DOCKER_FLAGS[@]}" \ + docker_v run "${EXTRA_DOCKER_FLAGS[@]}" \ --entrypoint "/usr/local/bin/dumb-init" \ -t \ -v "${AIRFLOW_SOURCES}:/opt/airflow" \ "${AIRFLOW_CI_IMAGE}" \ - "--" "/opt/airflow/scripts/in_container/run_prepare_provider_readme.sh" "${@}" + "--" "/opt/airflow/scripts/in_container/run_prepare_provider_documentation.sh" "${@}" } diff --git a/scripts/ci/libraries/_script_init.sh b/scripts/ci/libraries/_script_init.sh index cb930a62297d4..90e6404cf0dc6 100755 --- a/scripts/ci/libraries/_script_init.sh +++ b/scripts/ci/libraries/_script_init.sh @@ -52,4 +52,9 @@ start_end::group_start "Make constants read-only" initialization::make_constants_read_only start_end::group_end +# Work around occasional unexplained failure on CI. Clear file flags on +# STDOUT (which is connected to a tmp file by GitHub Runner). +# The one error I did see: BlockingIOError: [Errno 11] write could not complete without blocking +[[ "$CI" == "true" ]] && python3 -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)" + traps::add_trap start_end::script_end EXIT HUP INT TERM diff --git a/scripts/ci/libraries/_start_end.sh b/scripts/ci/libraries/_start_end.sh index 008c430f9d03a..d2d6a61ce42ea 100644 --- a/scripts/ci/libraries/_start_end.sh +++ b/scripts/ci/libraries/_start_end.sh @@ -16,22 +16,26 @@ # specific language governing permissions and limitations # under the License. 
-# Starts group for Github Actions - makes logs much more readable +# Starts group for GitHub Actions - makes logs much more readable function start_end::group_start { - if [[ ${GITHUB_ACTIONS=} == "true" ]]; then - echo "::group::${1}" - else - echo - echo "${1}" - echo + if [[ ${PRINT_INFO_FROM_SCRIPTS} != "false" ]]; then + if [[ ${GITHUB_ACTIONS=} == "true" ]]; then + echo "::group::${1}" + else + echo + echo "${1}" + echo + fi fi } -# Ends group for Github Actions +# Ends group for GitHub Actions function start_end::group_end { - if [[ ${GITHUB_ACTIONS=} == "true" ]]; then - echo -e "\033[0m" # Disable any colors set in the group - echo "::endgroup::" + if [[ ${PRINT_INFO_FROM_SCRIPTS} != "false" ]]; then + if [[ ${GITHUB_ACTIONS=} == "true" ]]; then + echo -e "\033[0m" # Disable any colors set in the group + echo "::endgroup::" + fi fi } @@ -43,14 +47,14 @@ function start_end::group_end { # function start_end::script_start { verbosity::print_info - verbosity::print_info "Running $(basename "$0")" + verbosity::print_info "Running '${COLOR_GREEN}$(basename "$0")${COLOR_RESET}'" verbosity::print_info - verbosity::print_info "Log is redirected to ${OUTPUT_LOG}" + verbosity::print_info "${COLOR_BLUE}Log is redirected to '${OUTPUT_LOG}'${COLOR_RESET}" verbosity::print_info if [[ ${VERBOSE_COMMANDS:="false"} == "true" ]]; then verbosity::print_info - verbosity::print_info "Variable VERBOSE_COMMANDS Set to \"true\"" - verbosity::print_info "You will see a lot of output" + verbosity::print_info "${COLOR_BLUE}Variable VERBOSE_COMMANDS set to \"true\"${COLOR_RESET}" + verbosity::print_info "${COLOR_BLUE}You will see a lot of output${COLOR_RESET}" verbosity::print_info set -x else @@ -65,16 +69,16 @@ function start_end::script_start { } function start_end::dump_container_logs() { - start_end::group_start "Dumping container logs ${container}" + start_end::group_start "${COLOR_BLUE}Dumping container logs ${container}${COLOR_RESET}" local container="${1}" local dump_file dump_file=${AIRFLOW_SOURCES}/files/container_logs_${container}_$(date "+%Y-%m-%d")_${CI_BUILD_ID}_${CI_JOB_ID}.log - echo "###########################################################################################" + echo "${COLOR_BLUE}###########################################################################################${COLOR_RESET}" echo " Dumping logs from ${container} container" - echo "###########################################################################################" - docker logs "${container}" > "${dump_file}" + echo "${COLOR_BLUE}###########################################################################################${COLOR_RESET}" + docker_v logs "${container}" > "${dump_file}" echo " Container ${container} logs dumped to ${dump_file}" - echo "###########################################################################################" + echo "${COLOR_BLUE}###########################################################################################${COLOR_RESET}" start_end::group_end } @@ -106,9 +110,9 @@ function start_end::script_end { start_end::dump_container_logs "${container}" done fi - verbosity::print_info "###########################################################################################" - verbosity::print_info " EXITING WITH STATUS CODE ${exit_code}" - verbosity::print_info "###########################################################################################" + verbosity::print_info
"${COLOR_RED}###########################################################################################${COLOR_RESET}" + verbosity::print_info "${COLOR_RED} EXITING WITH STATUS CODE ${exit_code}${COLOR_RESET}" + verbosity::print_info "${COLOR_RED}###########################################################################################${COLOR_RESET}" fi if [[ ${VERBOSE_COMMANDS:="false"} == "true" ]]; then set +x @@ -120,11 +124,15 @@ function start_end::script_end { END_SCRIPT_TIME=$(date +%s) RUN_SCRIPT_TIME=$((END_SCRIPT_TIME-START_SCRIPT_TIME)) - if [[ ${BREEZE:=} != "true" ]]; then + if [[ ${BREEZE:=} != "true" && ${RUN_TESTS=} != "true" ]]; then verbosity::print_info - verbosity::print_info "Finished the script $(basename "$0")" - verbosity::print_info "Elapsed time spent in the script: ${RUN_SCRIPT_TIME} seconds" - verbosity::print_info "Exit code ${exit_code}" + verbosity::print_info "Finished the script ${COLOR_GREEN}$(basename "$0")${COLOR_RESET}" + verbosity::print_info "Elapsed time spent in the script: ${COLOR_BLUE}${RUN_SCRIPT_TIME} seconds${COLOR_RESET}" + if [[ ${exit_code} == "0" ]]; then + verbosity::print_info "Exit code ${COLOR_GREEN}${exit_code}${COLOR_RESET}" + else + verbosity::print_info "Exit code ${COLOR_RED}${exit_code}${COLOR_RESET}" + fi verbosity::print_info fi } diff --git a/scripts/ci/libraries/_testing.sh b/scripts/ci/libraries/_testing.sh new file mode 100644 index 0000000000000..638daf5e88c8e --- /dev/null +++ b/scripts/ci/libraries/_testing.sh @@ -0,0 +1,116 @@ +#!/usr/bin/env bash +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +export MEMORY_REQUIRED_FOR_INTEGRATION_TEST_PARALLEL_RUN=33000 + +function testing::skip_tests_if_requested(){ + if [[ -f ${BUILD_CACHE_DIR}/.skip_tests ]]; then + echo + echo "Skipping running tests !!!!!" 
+ echo + exit + fi +} + +function testing::get_docker_compose_local() { + DOCKER_COMPOSE_LOCAL+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/files.yml") + if [[ ${MOUNT_SELECTED_LOCAL_SOURCES} == "true" ]]; then + DOCKER_COMPOSE_LOCAL+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/local.yml") + fi + if [[ ${MOUNT_ALL_LOCAL_SOURCES} == "true" ]]; then + DOCKER_COMPOSE_LOCAL+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/local-all-sources.yml") + fi + + if [[ ${GITHUB_ACTIONS} == "true" ]]; then + DOCKER_COMPOSE_LOCAL+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/ga.yml") + fi + + if [[ ${FORWARD_CREDENTIALS} == "true" ]]; then + DOCKER_COMPOSE_LOCAL+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/forward-credentials.yml") + fi + + if [[ -n ${INSTALL_AIRFLOW_VERSION=} || -n ${INSTALL_AIRFLOW_REFERENCE=} ]]; then + DOCKER_COMPOSE_LOCAL+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/remove-sources.yml") + fi + readonly DOCKER_COMPOSE_LOCAL +} + +function testing::get_maximum_parallel_test_jobs() { + docker_engine_resources::get_available_cpus_in_docker + if [[ -n ${RUNS_ON=} && ${RUNS_ON} != *"self-hosted"* ]]; then + echo + echo "${COLOR_YELLOW}This is a GitHub public runner - for now we are forcing max parallel Quarantined test jobs to 1 on such runners${COLOR_RESET}" + echo + export MAX_PARALLEL_QUARANTINED_TEST_JOBS="1" + else + if [[ ${MAX_PARALLEL_QUARANTINED_TEST_JOBS=} != "" ]]; then + echo + echo "${COLOR_YELLOW}Maximum parallel Quarantined test jobs forced via MAX_PARALLEL_QUARANTINED_TEST_JOBS = ${MAX_PARALLEL_QUARANTINED_TEST_JOBS}${COLOR_RESET}" + echo + else + MAX_PARALLEL_QUARANTINED_TEST_JOBS=${CPUS_AVAILABLE_FOR_DOCKER} + echo + echo "${COLOR_YELLOW}Maximum parallel Quarantined test jobs set to number of CPUs available for Docker = ${MAX_PARALLEL_QUARANTINED_TEST_JOBS}${COLOR_RESET}" + echo + fi + + fi + + if [[ ${MAX_PARALLEL_TEST_JOBS=} != "" ]]; then + echo + echo "${COLOR_YELLOW}Maximum parallel test jobs forced via MAX_PARALLEL_TEST_JOBS = ${MAX_PARALLEL_TEST_JOBS}${COLOR_RESET}" + echo + else + MAX_PARALLEL_TEST_JOBS=${CPUS_AVAILABLE_FOR_DOCKER} + echo + echo "${COLOR_YELLOW}Maximum parallel test jobs set to number of CPUs available for Docker = ${MAX_PARALLEL_TEST_JOBS}${COLOR_RESET}" + echo + fi + export MAX_PARALLEL_TEST_JOBS +} + +function testing::get_test_types_to_run() { + if [[ -n "${FORCE_TEST_TYPE=}" ]]; then + # Handle case where test type is forced from outside + export TEST_TYPES="${FORCE_TEST_TYPE}" + fi + + if [[ -z "${TEST_TYPES=}" ]]; then + TEST_TYPES="Core Providers API CLI Integration Other WWW" + echo + echo "Test types not specified. Adding all: ${TEST_TYPES}" + echo + fi + + if [[ -z "${FORCE_TEST_TYPE=}" ]]; then + # Add Postgres/MySQL special test types in case we are running several test types + if [[ ${BACKEND} == "postgres" && ${TEST_TYPES} != "Quarantined" ]]; then + TEST_TYPES="${TEST_TYPES} Postgres" + echo + echo "Added Postgres. Tests to run: ${TEST_TYPES}" + echo + fi + if [[ ${BACKEND} == "mysql" && ${TEST_TYPES} != "Quarantined" ]]; then + TEST_TYPES="${TEST_TYPES} MySQL" + echo + echo "Added MySQL.
Tests to run: ${TEST_TYPES}" + echo + fi + fi + readonly TEST_TYPES +} diff --git a/scripts/ci/libraries/_verbosity.sh b/scripts/ci/libraries/_verbosity.sh index 30bc6efb22219..68b356da39939 100644 --- a/scripts/ci/libraries/_verbosity.sh +++ b/scripts/ci/libraries/_verbosity.sh @@ -37,15 +37,22 @@ function verbosity::restore_exit_on_error_status() { } # In case "VERBOSE" is set to "true" (--verbose flag in Breeze) all docker commands run will be -# printed before execution -function docker { +# printed before execution. In case the DRY_RUN_DOCKER flag is set to "true", +# the command to execute is shown instead of being executed +function docker_v { + if [[ ${DRY_RUN_DOCKER=} != "false" ]]; then + echo + echo "${COLOR_CYAN}docker" "${@}" "${COLOR_RESET}" + echo + return + fi verbosity::store_exit_on_error_status if [[ ${VERBOSE:="false"} == "true" && \ # do not print echo if VERBOSE_COMMAND is set (set -x does it already) ${VERBOSE_COMMANDS:=} != "true" && \ # And when generally printing info is disabled ${PRINT_INFO_FROM_SCRIPTS} == "true" ]]; then - >&2 echo "docker" "${@}" + >&2 echo "${COLOR_CYAN}docker ${*} ${COLOR_RESET}" fi if [[ ${PRINT_INFO_FROM_SCRIPTS} == "false" ]]; then ${DOCKER_BINARY_PATH} "${@}" >>"${OUTPUT_LOG}" 2>&1 diff --git a/scripts/ci/libraries/_verify_image.sh b/scripts/ci/libraries/_verify_image.sh new file mode 100644 index 0000000000000..b0060ac17e3f2 --- /dev/null +++ b/scripts/ci/libraries/_verify_image.sh @@ -0,0 +1,298 @@ +#!/usr/bin/env bash +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +function verify_image::run_command_in_image() { + docker_v run --rm \ + -e COLUMNS=180 \ + --entrypoint /bin/bash "${DOCKER_IMAGE}" \ + -c "${@}" +} + +IMAGE_VALID="true" + +function verify_image::check_command() { + DESCRIPTION="${1}" + COMMAND=${2} + set +e + echo -n "Feature: ${DESCRIPTION} " + local output + output=$(verify_image::run_command_in_image "${COMMAND}" 2>&1) + local res=$?
+ if [[ ${res} == "0" ]]; then + echo "${COLOR_GREEN}OK${COLOR_RESET}" + else + echo "${COLOR_RED}NOK${COLOR_RESET}" + echo "${COLOR_BLUE}========================= OUTPUT start ============================${COLOR_RESET}" + echo "${output}" + echo "${COLOR_BLUE}========================= OUTPUT end ===========================${COLOR_RESET}" + IMAGE_VALID="false" + fi + set -e +} + +function verify_image::verify_prod_image_has_airflow_and_providers() { + start_end::group_start "Verify prod image: ${DOCKER_IMAGE}" + echo + echo "Checking if Providers are installed" + echo + + all_providers_installed_in_image=$(verify_image::run_command_in_image "airflow providers list --output table") + + echo + echo "Installed providers:" + echo + echo "${all_providers_installed_in_image}" + echo + local error="false" + for provider in "${INSTALLED_PROVIDERS[@]}"; do + echo -n "Verifying if provider ${provider} installed: " + if [[ ${all_providers_installed_in_image} == *"apache-airflow-providers-${provider//./-}"* ]]; then + echo "${COLOR_GREEN}OK${COLOR_RESET}" + else + echo "${COLOR_RED}NOK${COLOR_RESET}" + error="true" + fi + done + if [[ ${error} == "true" ]]; then + echo + echo "${COLOR_RED}ERROR: Some expected providers are not installed!${COLOR_RESET}" + echo + IMAGE_VALID="false" + else + echo + echo "${COLOR_GREEN}OK. All expected providers installed!${COLOR_RESET}" + echo + fi + start_end::group_end +} + +function verify_image::verify_ci_image_dependencies() { + start_end::group_start "Checking if Airflow dependencies are non-conflicting in ${DOCKER_IMAGE} image." + set +e + docker_v run --rm --entrypoint /bin/bash "${DOCKER_IMAGE}" -c 'pip check' + local res=$? + if [[ ${res} != "0" ]]; then + echo "${COLOR_RED}ERROR: ^^^ Some dependencies are conflicting. See instructions below on how to deal with it. ${COLOR_RESET}" + echo + build_images::inform_about_pip_check "" + IMAGE_VALID="false" + else + echo + echo "${COLOR_GREEN}OK. The ${DOCKER_IMAGE} image dependencies are consistent. ${COLOR_RESET}" + echo + fi + set -e + start_end::group_end +} + +function verify_image::verify_ci_image_has_dist_folder() { + start_end::group_start "Verify CI image dist folder (compiled www assets): ${DOCKER_IMAGE}" + + verify_image::check_command "Dist folder" '[ -f /opt/airflow/airflow/www/static/dist/manifest.json ] || exit 1' + + start_end::group_end +} + + +function verify_image::verify_prod_image_dependencies() { + start_end::group_start "Checking if Airflow dependencies are non-conflicting in ${DOCKER_IMAGE} image." + + set +e + verify_image::run_command_in_image 'pip check' + local res=$? + if [[ ${res} != "0" ]]; then + echo "${COLOR_RED}ERROR: ^^^ Some dependencies are conflicting. See instructions below on how to deal with it. ${COLOR_RESET}" + echo + build_images::inform_about_pip_check "--production " + IMAGE_VALID="false" + else + echo + echo "${COLOR_GREEN}OK. The ${DOCKER_IMAGE} image dependencies are consistent. 
${COLOR_RESET}" + echo + fi + set -e + start_end::group_end +} + +GOOGLE_IMPORTS=( + 'OpenSSL' + 'google.ads' + 'googleapiclient' + 'google.auth' + 'google_auth_httplib2' + 'google.cloud.automl' + 'google.cloud.bigquery_datatransfer' + 'google.cloud.bigtable' + 'google.cloud.container' + 'google.cloud.datacatalog' + 'google.cloud.dataproc' + 'google.cloud.dlp' + 'google.cloud.kms' + 'google.cloud.language' + 'google.cloud.logging' + 'google.cloud.memcache' + 'google.cloud.monitoring' + 'google.cloud.oslogin' + 'google.cloud.pubsub' + 'google.cloud.redis' + 'google.cloud.secretmanager' + 'google.cloud.spanner' + 'google.cloud.speech' + 'google.cloud.storage' + 'google.cloud.tasks' + 'google.cloud.texttospeech' + 'google.cloud.translate' + 'google.cloud.videointelligence' + 'google.cloud.vision' +) + +AZURE_IMPORTS=( + 'azure.batch' + 'azure.cosmos' + 'azure.datalake.store' + 'azure.identity' + 'azure.keyvault' + 'azure.kusto.data' + 'azure.mgmt.containerinstance' + 'azure.mgmt.datalake.store' + 'azure.mgmt.resource' + 'azure.storage' +) + +function verify_image::verify_production_image_python_modules() { + start_end::group_start "Verify prod image features: ${DOCKER_IMAGE}" + + verify_image::check_command "Import: async" "python -c 'import gevent, eventlet, greenlet'" + verify_image::check_command "Import: amazon" "python -c 'import boto3, botocore, watchtower'" + verify_image::check_command "Import: celery" "python -c 'import celery, flower, vine'" + verify_image::check_command "Import: cncf.kubernetes" "python -c 'import kubernetes, cryptography'" + verify_image::check_command "Import: docker" "python -c 'import docker'" + verify_image::check_command "Import: dask" "python -c 'import cloudpickle, distributed'" + verify_image::check_command "Import: elasticsearch" "python -c 'import elasticsearch,es.elastic, elasticsearch_dsl'" + verify_image::check_command "Import: grpc" "python -c 'import grpc, google.auth, google_auth_httplib2'" + verify_image::check_command "Import: hashicorp" "python -c 'import hvac'" + verify_image::check_command "Import: ldap" "python -c 'import ldap'" + for google_import in "${GOOGLE_IMPORTS[@]}" + do + verify_image::check_command "Import google: ${google_import}" "python -c 'import ${google_import}'" + done + for azure_import in "${AZURE_IMPORTS[@]}" + do + verify_image::check_command "Import azure: ${azure_import}" "python -c 'import ${azure_import}'" + done + verify_image::check_command "Import: mysql" "python -c 'import mysql'" + verify_image::check_command "Import: postgres" "python -c 'import psycopg2'" + verify_image::check_command "Import: redis" "python -c 'import redis'" + verify_image::check_command "Import: sendgrid" "python -c 'import sendgrid'" + verify_image::check_command "Import: sftp/ssh" "python -c 'import paramiko, pysftp, sshtunnel'" + verify_image::check_command "Import: slack" "python -c 'import slack_sdk'" + verify_image::check_command "Import: statsd" "python -c 'import statsd'" + verify_image::check_command "Import: virtualenv" "python -c 'import virtualenv'" + + start_end::group_end +} + +function verify_image::verify_prod_image_as_root() { + start_end::group_start "Checking if the image can be run as root." + set +e + echo "Checking airflow as root" + local output + local res + output=$(docker_v run --rm --user 0 "${DOCKER_IMAGE}" "airflow" "info" 2>&1) + res=$? 
+ if [[ ${res} == "0" ]]; then + echo "${COLOR_GREEN}OK${COLOR_RESET}" + else + echo "${COLOR_RED}NOK${COLOR_RESET}" + echo "${COLOR_BLUE}========================= OUTPUT start ============================${COLOR_RESET}" + echo "${output}" + echo "${COLOR_BLUE}========================= OUTPUT end ===========================${COLOR_RESET}" + IMAGE_VALID="false" + fi + + echo "Checking root container with custom PYTHONPATH" + local tmp_dir + tmp_dir="$(mktemp -d)" + touch "${tmp_dir}/__init__.py" + echo 'print("Awesome")' >> "${tmp_dir}/awesome.py" + output=$(docker_v run \ + --rm \ + -e "PYTHONPATH=${tmp_dir}" \ + -v "${tmp_dir}:${tmp_dir}" \ + --user 0 "${DOCKER_IMAGE}" \ + "python" "-c" "import awesome" \ + 2>&1) + res=$? + if [[ ${res} == "0" ]]; then + echo "${COLOR_GREEN}OK${COLOR_RESET}" + else + echo "${COLOR_RED}NOK${COLOR_RESET}" + echo "${COLOR_BLUE}========================= OUTPUT start ============================${COLOR_RESET}" + echo "${output}" + echo "${COLOR_BLUE}========================= OUTPUT end ===========================${COLOR_RESET}" + IMAGE_VALID="false" + fi + rm -rf "${tmp_dir}" + set -e +} + +function verify_image::verify_production_image_has_dist_folder() { + start_end::group_start "Verify prod image has dist folder (compiled www assets): ${DOCKER_IMAGE}" + # shellcheck disable=SC2016 + verify_image::check_command "Dist folder" '[ -f $(python -m site --user-site)/airflow/www/static/dist/manifest.json ] || exit 1' + + start_end::group_end +} + +function verify_image::display_result { + if [[ ${IMAGE_VALID} == "true" ]]; then + echo + echo "${COLOR_GREEN}OK. The ${DOCKER_IMAGE} features are all OK. ${COLOR_RESET}" + echo + else + echo + echo "${COLOR_RED}ERROR: Some features were not ok!${COLOR_RESET}" + echo + exit 1 + fi +} + +function verify_image::verify_prod_image { + IMAGE_VALID="true" + DOCKER_IMAGE="${1}" + verify_image::verify_prod_image_has_airflow_and_providers + + verify_image::verify_production_image_python_modules + + verify_image::verify_prod_image_dependencies + + verify_image::verify_prod_image_as_root + + verify_image::verify_production_image_has_dist_folder + + verify_image::display_result +} + +function verify_image::verify_ci_image { + IMAGE_VALID="true" + DOCKER_IMAGE="${1}" + verify_image::verify_ci_image_dependencies + + verify_image::verify_ci_image_has_dist_folder + + verify_image::display_result +} diff --git a/scripts/ci/mysql/conf.d/airflow.cnf b/scripts/ci/mysql/conf.d/airflow.cnf index 9b266a2bb5ebf..a5ab88c5b3836 100644 --- a/scripts/ci/mysql/conf.d/airflow.cnf +++ b/scripts/ci/mysql/conf.d/airflow.cnf @@ -22,3 +22,10 @@ explicit_defaults_for_timestamp = 1 secure_file_priv = "/var/lib/mysql" local_infile = 1 innodb_print_all_deadlocks = 1 + +# Optimizes memory usage during tests - by default it is 132 MB but we need far less than that +innodb_buffer_pool_size = 32M + +# Performance schema monitoring on its own uses a lot of memory.
We save ~130 MB by disabling it +performance_schema = OFF +performance-schema-instrument='memory/%=COUNTED' diff --git a/scripts/ci/pre_commit/pre_commit_breeze_cmd_line.sh b/scripts/ci/pre_commit/pre_commit_breeze_cmd_line.sh index 66955a89b6a6b..7a92900af98c7 100755 --- a/scripts/ci/pre_commit/pre_commit_breeze_cmd_line.sh +++ b/scripts/ci/pre_commit/pre_commit_breeze_cmd_line.sh @@ -49,13 +49,16 @@ readonly FORCE_SCREEN_WIDTH export VERBOSE="false" readonly VERBOSE +export BREEZE_REDIRECT="false" + ./breeze help-all | sed 's/^/ /' | sed 's/ *$//' >>"${TMP_FILE}" -MAX_LEN=$(awk '{ print length($0); }' "${TMP_FILE}" | sort -n | tail -1 ) +MAX_LEN_FOUND=$(awk '{ print length($0); }' "${TMP_FILE}" | sort -n | tail -1 ) +MAX_LEN_EXPECTED=$((MAX_SCREEN_WIDTH + 2)) # 2 spaces added in front of the width for .rst formatting -if (( MAX_LEN > MAX_SCREEN_WIDTH + 2 )); then - cat "${TMP_FILE}" +if (( MAX_LEN_FOUND > MAX_LEN_EXPECTED )); then + awk "length(\$0) > ${MAX_LEN_EXPECTED}" <"${TMP_FILE}" echo echo "ERROR! Some lines in generate breeze help-all command are too long. See above ^^" echo diff --git a/scripts/ci/pre_commit/pre_commit_check_order_setup.py b/scripts/ci/pre_commit/pre_commit_check_order_setup.py index 169482e8dd144..333e316e673cc 100755 --- a/scripts/ci/pre_commit/pre_commit_check_order_setup.py +++ b/scripts/ci/pre_commit/pre_commit_check_order_setup.py @@ -47,18 +47,12 @@ def _check_list_sorted(the_list: List[str], message: str) -> None: print(f"{message} [red]NOK[/]") print() errors.append( - f"ERROR in {message}. First wrongly sorted element" f" {the_list[i]}. Should be {sorted_list[i]}" + f"ERROR in {message}. First wrongly sorted element {repr(the_list[i])}. Should " + f"be {repr(sorted_list[i])}" ) -def setup() -> str: - setup_py_file_path = abspath(os.path.join(dirname(__file__), os.pardir, os.pardir, os.pardir, 'setup.py')) - with open(setup_py_file_path) as setup_file: - setup_context = setup_file.read() - return setup_context - - -def check_main_dependent_group(setup_context: str) -> None: +def check_main_dependent_group(setup_contents: str) -> None: """ Test for an order of dependencies groups between mark '# Start dependencies group' and '# End dependencies group' in setup.py @@ -67,7 +61,7 @@ def check_main_dependent_group(setup_context: str) -> None: pattern_main_dependent_group = re.compile( '# Start dependencies group\n(.*)# End dependencies group', re.DOTALL ) - main_dependent_group = pattern_main_dependent_group.findall(setup_context)[0] + main_dependent_group = pattern_main_dependent_group.findall(setup_contents)[0] pattern_sub_dependent = re.compile(r' = \[.*?]\n', re.DOTALL) main_dependent = pattern_sub_dependent.sub(',', main_dependent_group) @@ -75,26 +69,17 @@ def check_main_dependent_group(setup_context: str) -> None: src = main_dependent.strip(',').split(',') _check_list_sorted(src, "Order of dependencies") + for group in src: + check_sub_dependent_group(group) + -def check_sub_dependent_group(setup_context: str) -> None: +def check_sub_dependent_group(group_name: str) -> None: r""" Test for an order of each dependencies groups declare like `^dependent_group_name = [.*?]\n` in setup.py """ - pattern_dependent_group_name = re.compile(r'^(\w+) = \[', re.MULTILINE) - dependent_group_names = pattern_dependent_group_name.findall(setup_context) - - pattern_dependent_version = re.compile(r'[~|><=;].*') - - for group_name in dependent_group_names: - print(f"[blue]Checking dependency group {group_name}[/]") - pattern_sub_dependent = re.compile(fr'{group_name} = 
\[(.*?)]\n', re.DOTALL) - sub_dependent = pattern_sub_dependent.findall(setup_context)[0] - pattern_dependent = re.compile(r"'(.*?)'") - dependent = pattern_dependent.findall(sub_dependent) - - src = [pattern_dependent_version.sub('', p) for p in dependent] - _check_list_sorted(src, f"Order of dependency group: {group_name}") + print(f"[blue]Checking dependency group {group_name}[/]") + _check_list_sorted(getattr(setup, group_name), f"Order of dependency group: {group_name}") def check_alias_dependent_group(setup_context: str) -> None: @@ -111,52 +96,15 @@ def check_alias_dependent_group(setup_context: str) -> None: _check_list_sorted(src, f"Order of alias dependencies group: {dependent}") -def check_provider_requirements(setup_context: str) -> None: - """ - Test for an order of dependencies in PROVIDERS_REQUIREMENTS in setup.py - """ - print("[blue]Checking providers_requirements[/]") - pattern_providers_requirements = re.compile(r'PROVIDERS_REQUIREMENTS: [^{]*\{(.*?)}\n', re.DOTALL) - providers_requirements = pattern_providers_requirements.findall(setup_context)[0] - pattern_dependent = re.compile("'(.*?)'") - src = pattern_dependent.findall(providers_requirements) - _check_list_sorted(src, "Order of dependencies in: providers_require") - - -def check_extras_require(setup_context: str) -> None: - """ - Test for an order of dependencies in EXTRAS_REQUIREMENTS in setup.py - """ - print("[blue]Checking extras_requirements[/]") - pattern_extras_requires = re.compile(r'EXTRAS_REQUIREMENTS: [^{]*{(.*?)}\n', re.DOTALL) - extras_requires = pattern_extras_requires.findall(setup_context)[0] - pattern_dependent = re.compile(r"'(.*?)'") - src = pattern_dependent.findall(extras_requires) - _check_list_sorted(src, "Order of dependencies in: extras_require") +def check_variable_order(var_name: str) -> None: + print(f"[blue]Checking {var_name}[/]") + var = getattr(setup, var_name) -def check_extras_deprecated_aliases(setup_context: str) -> None: - """ - Test for an order of dependencies in EXTRAS_DEPRECATED_ALIASES in setup.py - """ - print("[blue]Checking extras deprecated aliases[/]") - pattern_extras_deprecated_aliases = re.compile(r'EXTRAS_DEPRECATED_ALIASES: [^{]*{(.*?)}\n', re.DOTALL) - extras_deprecated_aliases = pattern_extras_deprecated_aliases.findall(setup_context)[0] - pattern_dependent = re.compile("'(.*?)',") - src = pattern_dependent.findall(extras_deprecated_aliases) - _check_list_sorted(src, "Order of dependencies in: extras_deprecated_aliases") - - -def check_preinstalled_providers(setup_context: str) -> None: - """ - Test for an order of providers in PREINSTALLED_PROVIDERS in setup.py - """ - print("[blue]Checking preinstalled providers[/]") - pattern_preinstalled_providers = re.compile(r'PREINSTALLED_PROVIDERS = \[(.*?)]\n', re.DOTALL) - preinstalled_providers = pattern_preinstalled_providers.findall(setup_context)[0] - pattern_dependent = re.compile("'(.*?)',") - src = pattern_dependent.findall(preinstalled_providers) - _check_list_sorted(src, "Order of dependencies in: preinstalled_providers") + if isinstance(var, dict): + _check_list_sorted(list(var.keys()), f"Order of dependencies in: {var_name}") + else: + _check_list_sorted(var, f"Order of dependencies in: {var_name}") def check_install_and_setup_requires() -> None: @@ -180,14 +128,17 @@ def check_install_and_setup_requires() -> None: if __name__ == '__main__': - setup_context_main = setup() - check_main_dependent_group(setup_context_main) - check_alias_dependent_group(setup_context_main) - 
check_sub_dependent_group(setup_context_main) - check_provider_requirements(setup_context_main) - check_extras_require(setup_context_main) - check_extras_deprecated_aliases(setup_context_main) - check_preinstalled_providers(setup_context_main) + import setup + + with open(setup.__file__) as setup_file: + file_contents = setup_file.read() + check_main_dependent_group(file_contents) + check_alias_dependent_group(file_contents) + check_variable_order("PROVIDERS_REQUIREMENTS") + check_variable_order("CORE_EXTRAS_REQUIREMENTS") + check_variable_order("ADDITIONAL_EXTRAS_REQUIREMENTS") + check_variable_order("EXTRAS_DEPRECATED_ALIASES") + check_variable_order("PREINSTALLED_PROVIDERS") check_install_and_setup_requires() print() diff --git a/scripts/ci/pre_commit/pre_commit_check_pre_commit_hook_names.py b/scripts/ci/pre_commit/pre_commit_check_pre_commit_hook_names.py index fae42c8e112de..5e16f425a68c6 100755 --- a/scripts/ci/pre_commit/pre_commit_check_pre_commit_hook_names.py +++ b/scripts/ci/pre_commit/pre_commit_check_pre_commit_hook_names.py @@ -24,6 +24,11 @@ import yaml +try: + from yaml import CSafeLoader as SafeLoader +except ImportError: + from yaml import SafeLoader # type: ignore[no-redef] + def main() -> int: parser = argparse.ArgumentParser() @@ -34,7 +39,7 @@ def main() -> int: retval = 0 with open('.pre-commit-config.yaml', 'rb') as f: - content = yaml.safe_load(f) + content = yaml.load(f, SafeLoader) errors = get_errors(content, max_length) if errors: retval = 1 diff --git a/scripts/ci/pre_commit/pre_commit_check_provider_yaml_files.py b/scripts/ci/pre_commit/pre_commit_check_provider_yaml_files.py index 9e6cb4a44ee50..35c80e938d830 100755 --- a/scripts/ci/pre_commit/pre_commit_check_provider_yaml_files.py +++ b/scripts/ci/pre_commit/pre_commit_check_provider_yaml_files.py @@ -29,6 +29,11 @@ import yaml from tabulate import tabulate +try: + from yaml import CSafeLoader as SafeLoader +except ImportError: + from yaml import SafeLoader # type: ignore[no-redef] + if __name__ != "__main__": raise Exception( "This file is intended to be executed as an executable program. You cannot use it as a module." 
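A minimal standalone sketch of the SafeLoader fallback added to the pre-commit hooks above; PyYAML only provides CSafeLoader when it was built against libyaml, and the C loader parses the same documents considerably faster:

python3 - <<'EOF'
import yaml

try:
    # C implementation - present only if PyYAML was compiled with libyaml
    from yaml import CSafeLoader as SafeLoader
except ImportError:
    from yaml import SafeLoader

# Same result either way, only the parsing speed differs
print(yaml.load("a: 1\nlist: [2, 3]", SafeLoader))  # {'a': 1, 'list': [2, 3]}
EOF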
@@ -60,7 +65,7 @@ def _load_package_data(package_paths: Iterable[str]): result = {} for provider_yaml_path in package_paths: with open(provider_yaml_path) as yaml_file: - provider = yaml.safe_load(yaml_file) + provider = yaml.load(yaml_file, SafeLoader) rel_path = os.path.relpath(provider_yaml_path, ROOT_DIR) try: jsonschema.validate(provider, schema=schema) @@ -271,6 +276,7 @@ def check_invalid_integration(yaml_files: Dict[str, Dict]): def check_doc_files(yaml_files: Dict[str, Dict]): print("Checking doc files") current_doc_urls = [] + current_logo_urls = [] for provider in yaml_files.values(): if 'integrations' in provider: current_doc_urls.extend( @@ -279,6 +285,9 @@ def check_doc_files(yaml_files: Dict[str, Dict]): if 'how-to-guide' in guides for guide in guides['how-to-guide'] ) + current_logo_urls.extend( + integration['logo'] for integration in provider['integrations'] if 'logo' in integration + ) if 'transfers' in provider: current_doc_urls.extend( op['how-to-guide'] for op in provider['transfers'] if 'how-to-guide' in op @@ -293,9 +302,15 @@ def check_doc_files(yaml_files: Dict[str, Dict]): "/docs/" + os.path.relpath(f, start=DOCS_DIR) for f in glob(f"{DOCS_DIR}/apache-airflow-providers-*/operators.rst", recursive=True) } + expected_logo_urls = { + "/" + os.path.relpath(f, start=DOCS_DIR) + for f in glob(f"{DOCS_DIR}/integration-logos/**/*", recursive=True) + if os.path.isfile(f) + } try: assert_sets_equal(set(expected_doc_urls), set(current_doc_urls)) + assert_sets_equal(set(expected_logo_urls), set(current_logo_urls)) except AssertionError as ex: print(ex) sys.exit(1) diff --git a/scripts/ci/pre_commit/pre_commit_check_setup_extra_packages_ref.py b/scripts/ci/pre_commit/pre_commit_check_setup_extra_packages_ref.py index aee846235e257..8acf7db52f991 100755 --- a/scripts/ci/pre_commit/pre_commit_check_setup_extra_packages_ref.py +++ b/scripts/ci/pre_commit/pre_commit_check_setup_extra_packages_ref.py @@ -24,7 +24,7 @@ import re import sys from os.path import dirname -from typing import Dict, List +from typing import Dict, List, Set from rich import print from rich.console import Console @@ -41,7 +41,6 @@ add_all_provider_packages, EXTRAS_DEPRECATED_ALIASES, EXTRAS_REQUIREMENTS, - EXTRAS_WITH_PROVIDERS, PROVIDERS_REQUIREMENTS, PREINSTALLED_PROVIDERS, ) @@ -53,32 +52,24 @@ def get_file_content(*path_elements: str) -> str: return file_to_read.read() -def get_extras_from_setup() -> Dict[str, str]: - """ - Returns a dict of regular extras from setup (with value = '' for non-provider extra and '*' for - provider extra - """ - all_regular_extras = set(EXTRAS_REQUIREMENTS.keys()) - set(EXTRAS_DEPRECATED_ALIASES.keys()) - setup_extra_dict = {} - for setup_regular_extra in all_regular_extras: - setup_extra_dict[setup_regular_extra] = '*' if setup_regular_extra in EXTRAS_WITH_PROVIDERS else '' - return setup_extra_dict +def get_extras_from_setup() -> Set[str]: + """Returns a set of regular (non-deprecated) extras from setup.""" + return set(EXTRAS_REQUIREMENTS.keys()) - set(EXTRAS_DEPRECATED_ALIASES.keys()) -def get_regular_extras_from_docs() -> Dict[str, str]: +def get_extras_from_docs() -> Set[str]: """ - Returns a dict of regular extras from doce (with value = '' for non-provider extra and '*' for provider extra + Returns a set of extras from docs.
""" docs_content = get_file_content(DOCS_FILE) extras_section_regex = re.compile( - rf'\|[^|]+\|.*pip install .apache-airflow\[({PY_IDENTIFIER})][^|]+\|[^|]+\|\s+(\*?)\s+\|', + rf'\|[^|]+\|.*pip install .apache-airflow\[({PY_IDENTIFIER})][^|]+\|[^|]+\|', re.MULTILINE, ) - doc_extra_dict = {} - for doc_regular_extra in extras_section_regex.findall(docs_content): - doc_extra_dict[doc_regular_extra[0]] = doc_regular_extra[1] - return doc_extra_dict + doc_extra_set: Set[str] = set() + for doc_extra in extras_section_regex.findall(docs_content): + doc_extra_set.add(doc_extra) + return doc_extra_set def get_preinstalled_providers_from_docs() -> List[str]: @@ -87,7 +78,7 @@ def get_preinstalled_providers_from_docs() -> List[str]: """ docs_content = get_file_content(DOCS_FILE) preinstalled_section_regex = re.compile( - rf'\|\s*({PY_IDENTIFIER})\s*\|[^|]+pip install[^|]+\|[^|]+\|[^|]+\|\s+\*\s+\|$', + rf'\|\s*({PY_IDENTIFIER})\s*\|[^|]+pip install[^|]+\|[^|]+\|\s+\*\s+\|$', re.MULTILINE, ) return preinstalled_section_regex.findall(docs_content) @@ -111,36 +102,31 @@ def get_deprecated_extras_from_docs() -> Dict[str, str]: return deprecated_extras -def check_regular_extras(console: Console) -> bool: +def check_extras(console: Console) -> bool: """ - Checks if regular extras match setup vs. doc. + Checks if non-deprecated extras match setup vs. doc. :param console: print table there in case of errors :return: True if all ok, False otherwise """ - regular_extras_table = Table() - regular_extras_table.add_column("NAME", justify="right", style="cyan") - regular_extras_table.add_column("SETUP", justify="center", style="magenta") - regular_extras_table.add_column("SETUP_PROVIDER", justify="center", style="magenta") - regular_extras_table.add_column("DOCS", justify="center", style="yellow") - regular_extras_table.add_column("DOCS_PROVIDER", justify="center", style="yellow") - regular_setup_extras = get_extras_from_setup() - regular_docs_extras = get_regular_extras_from_docs() - for extra in regular_setup_extras.keys(): - if extra not in regular_docs_extras: - regular_extras_table.add_row(extra, "V", regular_setup_extras[extra], "", "") - elif regular_docs_extras[extra] != regular_setup_extras[extra]: - regular_extras_table.add_row( - extra, "V", regular_setup_extras[extra], "V", regular_docs_extras[extra] - ) - for extra in regular_docs_extras.keys(): - if extra not in regular_setup_extras: - regular_extras_table.add_row(extra, "", "", "V", regular_docs_extras[extra]) - if regular_extras_table.row_count != 0: + extras_table = Table() + extras_table.add_column("NAME", justify="right", style="cyan") + extras_table.add_column("SETUP", justify="center", style="magenta") + extras_table.add_column("DOCS", justify="center", style="yellow") + non_deprecated_setup_extras = get_extras_from_setup() + non_deprecated_docs_extras = get_extras_from_docs() + for extra in non_deprecated_setup_extras: + if extra not in non_deprecated_docs_extras: + extras_table.add_row(extra, "V", "") + for extra in non_deprecated_docs_extras: + if extra not in non_deprecated_setup_extras: + extras_table.add_row(extra, "", "V") + if extras_table.row_count != 0: print( f"""\ [red bold]ERROR!![/red bold] -The "[bold]EXTRAS_REQUIREMENTS[/bold]" and "[bold]PROVIDERS_REQUIREMENTS[/bold]" +The "[bold]CORE_EXTRAS_REQUIREMENTS[/bold]", "[bold]ADDITIONAL_PROVIDERS_REQUIREMENTS[/bold]", and + "[bold]PROVIDERS_REQUIREMENTS[/bold]" sections in the setup file: [bold yellow]{SETUP_PY_FILE}[/bold yellow] should be synchronized with the "Extra Packages 
Reference" in the documentation file: [bold yellow]{DOCS_FILE}[/bold yellow]. @@ -149,13 +135,12 @@ def check_regular_extras(console: Console) -> bool: * are used but are not documented, * are documented but not used, - * or have different provider flag in documentation/setup file. [bold]Please synchronize setup/documentation files![/bold] """ ) - console.print(regular_extras_table) + console.print(extras_table) return False return True @@ -261,7 +246,7 @@ def check_preinstalled_extras(console: Console) -> bool: # force adding all provider package dependencies, to check providers status add_all_provider_packages() main_console = Console() - status.append(check_regular_extras(main_console)) + status.append(check_extras(main_console)) status.append(check_deprecated_extras(main_console)) status.append(check_preinstalled_extras(main_console)) diff --git a/scripts/ci/images/ci_wait_for_ci_image.sh b/scripts/ci/pre_commit/pre_commit_sort_spelling_wordlist.sh similarity index 67% rename from scripts/ci/images/ci_wait_for_ci_image.sh rename to scripts/ci/pre_commit/pre_commit_sort_spelling_wordlist.sh index d75f149250af9..9b309b9f83d0e 100755 --- a/scripts/ci/images/ci_wait_for_ci_image.sh +++ b/scripts/ci/pre_commit/pre_commit_sort_spelling_wordlist.sh @@ -15,18 +15,18 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. + # shellcheck source=scripts/ci/libraries/_script_init.sh . "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh" -push_pull_remove_images::check_if_github_registry_wait_for_image_enabled - -build_image::configure_github_docker_registry +SPELLING_WORDLIST="${AIRFLOW_SOURCES}/docs/spelling_wordlist.txt" +readonly SPELLING_WORDLIST -export AIRFLOW_CI_IMAGE_NAME="${BRANCH_NAME}-python${PYTHON_MAJOR_MINOR_VERSION}-ci" +export LC_ALL=C -start_end::group_start "Waiting for ${AIRFLOW_CI_IMAGE_NAME} image to appear" +temp_file=$(mktemp) +< "${SPELLING_WORDLIST}" sort | uniq > "${temp_file}" -push_pull_remove_images::wait_for_github_registry_image \ - "${AIRFLOW_CI_IMAGE_NAME}${GITHUB_REGISTRY_IMAGE_SUFFIX}" "${GITHUB_REGISTRY_PULL_IMAGE_TAG}" +cat "${temp_file}" > "${SPELLING_WORDLIST}" -start_end::group_end +rm "${temp_file}" diff --git a/scripts/ci/provider_packages/ci_install_and_test_provider_packages.sh b/scripts/ci/provider_packages/ci_install_and_test_provider_packages.sh index 4be2d66085ac1..51575ebe80cdf 100755 --- a/scripts/ci/provider_packages/ci_install_and_test_provider_packages.sh +++ b/scripts/ci/provider_packages/ci_install_and_test_provider_packages.sh @@ -15,7 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -export MOUNT_LOCAL_SOURCES="false" +export MOUNT_SELECTED_LOCAL_SOURCES="false" # shellcheck source=scripts/ci/libraries/_script_init.sh . 
"$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh" @@ -29,8 +29,9 @@ fi function run_test_package_import_all_classes() { # Groups are added internally - docker run "${EXTRA_DOCKER_FLAGS[@]}" \ + docker_v run "${EXTRA_DOCKER_FLAGS[@]}" \ --entrypoint "/usr/local/bin/dumb-init" \ + -t \ -v "${AIRFLOW_SOURCES}/setup.py:/airflow_sources/setup.py:cached" \ -v "${AIRFLOW_SOURCES}/setup.cfg:/airflow_sources/setup.cfg:cached" \ -v "${AIRFLOW_SOURCES}/airflow/__init__.py:/airflow_sources/airflow/__init__.py:cached" \ diff --git a/scripts/ci/provider_packages/ci_prepare_provider_readmes.sh b/scripts/ci/provider_packages/ci_prepare_provider_documentation.sh similarity index 95% rename from scripts/ci/provider_packages/ci_prepare_provider_readmes.sh rename to scripts/ci/provider_packages/ci_prepare_provider_documentation.sh index 82135364bdbb8..e1b7fd56bbf91 100755 --- a/scripts/ci/provider_packages/ci_prepare_provider_readmes.sh +++ b/scripts/ci/provider_packages/ci_prepare_provider_documentation.sh @@ -20,4 +20,4 @@ build_images::prepare_ci_build build_images::rebuild_ci_image_if_needed_with_group -runs::run_prepare_provider_readme "$@" +runs::run_prepare_provider_documentation "$@" diff --git a/scripts/ci/selective_ci_checks.sh b/scripts/ci/selective_ci_checks.sh index 8746f98c9e256..d6397149449ae 100755 --- a/scripts/ci/selective_ci_checks.sh +++ b/scripts/ci/selective_ci_checks.sh @@ -42,17 +42,15 @@ else FULL_TESTS_NEEDED_LABEL="false" fi -function check_upgrade_to_newer_dependencies() { +function check_upgrade_to_newer_dependencies_needed() { # shellcheck disable=SC2153 - if [[ "${UPGRADE_TO_NEWER_DEPENDENCIES}" == "true" || - ${EVENT_NAME} == 'push' || ${EVENT_NAME} == "scheduled" ]]; then + if [[ "${UPGRADE_TO_NEWER_DEPENDENCIES}" != "false" || + ${GITHUB_EVENT_NAME} == 'push' || ${GITHUB_EVENT_NAME} == "scheduled" ]]; then # Trigger upgrading to latest constraints where label is set or when # SHA of the merge commit triggers rebuilding layer in the docker image # Each build that upgrades to latest constraints will get truly latest constraints, not those # Cached in the image this way upgrade_to_newer_dependencies="${INCOMING_COMMIT_SHA}" - else - upgrade_to_newer_dependencies="false" fi } @@ -63,14 +61,16 @@ function output_all_basic_variables() { initialization::ga_output all-python-versions \ "$(initialization::parameters_to_json "${ALL_PYTHON_MAJOR_MINOR_VERSIONS[@]}")" initialization::ga_output python-versions-list-as-string "${CURRENT_PYTHON_MAJOR_MINOR_VERSIONS[*]}" + initialization::ga_output kubernetes-versions-list-as-string "${CURRENT_KUBERNETES_VERSIONS[*]}" else initialization::ga_output python-versions \ "$(initialization::parameters_to_json "${DEFAULT_PYTHON_MAJOR_MINOR_VERSION}")" - # this will work as long as DEFAULT_PYTHON_MAJOR_VERSION is the same master/v1-10 + # this will work as long as DEFAULT_PYTHON_MAJOR_VERSION is the same on HEAD and v1-10 # all-python-versions are used in BuildImage Workflow initialization::ga_output all-python-versions \ "$(initialization::parameters_to_json "${DEFAULT_PYTHON_MAJOR_MINOR_VERSION}")" initialization::ga_output python-versions-list-as-string "${DEFAULT_PYTHON_MAJOR_MINOR_VERSION}" + initialization::ga_output kubernetes-versions-list-as-string "${DEFAULT_KUBERNETES_VERSION}" fi initialization::ga_output default-python-version "${DEFAULT_PYTHON_MAJOR_MINOR_VERSION}" @@ -124,7 +124,12 @@ function output_all_basic_variables() { initialization::ga_output sqlite-exclude '[]' fi + + initialization::ga_output 
default-helm-version "${HELM_VERSION}" initialization::ga_output kubernetes-exclude '[]' + + initialization::ga_output default-branch "${DEFAULT_BRANCH}" + } function get_changed_files() { @@ -200,8 +205,12 @@ function set_upgrade_to_newer_dependencies() { initialization::ga_output upgrade-to-newer-dependencies "${@}" } - -ALL_TESTS="Always Core Other API CLI Providers WWW Integration Heisentests" +if [[ ${DEFAULT_BRANCH} == "master" ]]; then + ALL_TESTS="Always API Core Other CLI Providers WWW Integration" +else + # Skip Provider tests when the current default branch is not master + ALL_TESTS="Always API Core Other CLI WWW Integration" +fi readonly ALL_TESTS function set_outputs_run_everything_and_exit() { @@ -216,7 +225,7 @@ function set_outputs_run_everything_and_exit() { set_basic_checks_only "false" set_docs_build "true" set_image_build "true" - set_upgrade_to_newer_dependencies "${INCOMING_COMMIT_SHA}" + set_upgrade_to_newer_dependencies "${upgrade_to_newer_dependencies}" exit } @@ -546,6 +555,8 @@ function get_count_kubernetes_files() { local pattern_array=( "^chart" "^kubernetes_tests" + "^airflow/providers/cncf/kubernetes/" + "^tests/providers/cncf/kubernetes/" ) show_changed_files COUNT_KUBERNETES_CHANGED_FILES=$(count_changed_files) @@ -588,11 +599,18 @@ function calculate_test_types_to_run() { SELECTED_TESTS="${SELECTED_TESTS} CLI" kubernetes_tests_needed="true" fi - if [[ ${COUNT_PROVIDERS_CHANGED_FILES} != "0" ]]; then + + if [[ ${DEFAULT_BRANCH} == "master" ]]; then + if [[ ${COUNT_PROVIDERS_CHANGED_FILES} != "0" ]]; then + echo + echo "Adding Providers to selected files as ${COUNT_PROVIDERS_CHANGED_FILES} Provider files changed" + echo + SELECTED_TESTS="${SELECTED_TESTS} Providers" + fi + else echo - echo "Adding Providers to selected files as ${COUNT_PROVIDERS_CHANGED_FILES} Provider files changed" + echo "Provider tests are not added because they are only run on the master branch." echo - SELECTED_TESTS="${SELECTED_TESTS} Providers" fi if [[ ${COUNT_WWW_CHANGED_FILES} != "0" ]]; then echo @@ -600,20 +618,22 @@ function calculate_test_types_to_run() { echo SELECTED_TESTS="${SELECTED_TESTS} WWW" fi - initialization::ga_output test-types "Always Integration Heisentests ${SELECTED_TESTS}" + initialization::ga_output test-types "Always Integration ${SELECTED_TESTS}" fi start_end::group_end } -start_end::group_start "Check if COMMIT_SHA passed" + + +upgrade_to_newer_dependencies="false" if (($# < 1)); then echo - echo "No Commit SHA - running all tests (likely direct master merge, or scheduled run)!" + echo "No Commit SHA - running all tests (likely direct merge, or scheduled run)!" echo INCOMING_COMMIT_SHA="" readonly INCOMING_COMMIT_SHA - # override FULL_TESTS_NEEDED_LABEL in master/scheduled run + # override FULL_TESTS_NEEDED_LABEL in main/scheduled run FULL_TESTS_NEEDED_LABEL="true" readonly FULL_TESTS_NEEDED_LABEL output_all_basic_variables @@ -621,11 +641,14 @@ if (($# < 1)); then else INCOMING_COMMIT_SHA="${1}" readonly INCOMING_COMMIT_SHA + echo + echo "Commit SHA passed: ${INCOMING_COMMIT_SHA}!"
+ echo + readonly FULL_TESTS_NEEDED_LABEL fi -start_end::group_end -check_upgrade_to_newer_dependencies -readonly FULL_TESTS_NEEDED_LABEL +check_upgrade_to_newer_dependencies_needed + output_all_basic_variables image_build_needed="false" diff --git a/scripts/ci/static_checks/bats_tests.sh b/scripts/ci/static_checks/bats_tests.sh index e54e5b133cd0e..eaf9171c20b88 100755 --- a/scripts/ci/static_checks/bats_tests.sh +++ b/scripts/ci/static_checks/bats_tests.sh @@ -53,7 +53,7 @@ function run_bats_tests() { # deduplicate FS=" " read -r -a bats_arguments <<< "$(tr ' ' '\n' <<< "${bats_arguments[@]}" | sort -u | tr '\n' ' ' )" if [[ ${#@} == "0" ]]; then # Run all tests docker run --workdir /airflow -v "$(pwd):/airflow" --rm \ apache/airflow:bats-2020.09.05-1.2.1 --tap /airflow/tests/bats/ elif [[ ${#bats_arguments} == "0" ]]; then diff --git a/scripts/ci/static_checks/check_license.sh b/scripts/ci/static_checks/check_license.sh index 8698bc97ee3c6..d3a8be743c481 100755 --- a/scripts/ci/static_checks/check_license.sh +++ b/scripts/ci/static_checks/check_license.sh @@ -31,7 +31,7 @@ function run_check_license() { echo "Running license checks. This can take a while." # We mount ALL airflow files for the licence check. We want to check them all! - if ! docker run -v "${AIRFLOW_SOURCES}:/opt/airflow" -t \ + if ! docker_v run -v "${AIRFLOW_SOURCES}:/opt/airflow" -t \ --user "$(id -ur):$(id -gr)" \ --rm --env-file "${AIRFLOW_SOURCES}/scripts/ci/docker-compose/_docker.env" \ apache/airflow:apache-rat-2020.07.10-0.13 \ diff --git a/scripts/ci/static_checks/flake8.sh b/scripts/ci/static_checks/flake8.sh index 322ab9e53f700..1c5440c134b40 100755 --- a/scripts/ci/static_checks/flake8.sh +++ b/scripts/ci/static_checks/flake8.sh @@ -20,12 +20,12 @@ function run_flake8() { if [[ "${#@}" == "0" ]]; then - docker run "${EXTRA_DOCKER_FLAGS[@]}" \ + docker_v run "${EXTRA_DOCKER_FLAGS[@]}" \ --entrypoint "/usr/local/bin/dumb-init" \ "${AIRFLOW_CI_IMAGE}" \ "--" "/opt/airflow/scripts/in_container/run_flake8.sh" else - docker run "${EXTRA_DOCKER_FLAGS[@]}" \ + docker_v run "${EXTRA_DOCKER_FLAGS[@]}" \ --entrypoint "/usr/local/bin/dumb-init" \ "${AIRFLOW_CI_IMAGE}" \ "--" "/opt/airflow/scripts/in_container/run_flake8.sh" "${@}" diff --git a/scripts/ci/static_checks/in_container_bats_tests.sh b/scripts/ci/static_checks/in_container_bats_tests.sh index a7c0121c0c101..fa4eacd86ab21 100644 --- a/scripts/ci/static_checks/in_container_bats_tests.sh +++ b/scripts/ci/static_checks/in_container_bats_tests.sh @@ -20,13 +20,13 @@ function run_in_container_bats_tests() { if [[ "${#@}" == "0" ]]; then - docker run "${EXTRA_DOCKER_FLAGS[@]}" \ + docker_v run "${EXTRA_DOCKER_FLAGS[@]}" \ --entrypoint "/opt/bats/bin/bats" \ "-v" "$(pwd):/airflow" \ "${AIRFLOW_CI_IMAGE}" \ --tap "tests/bats/in_container/" else - docker run "${EXTRA_DOCKER_FLAGS[@]}" \ + docker_v run "${EXTRA_DOCKER_FLAGS[@]}" \ --entrypoint "/opt/bats/bin/bats" \ "-v" "$(pwd):/airflow" \ "${AIRFLOW_CI_IMAGE}" \ diff --git a/scripts/ci/static_checks/lint_dockerfile.sh b/scripts/ci/static_checks/lint_dockerfile.sh index f22005ddb4a61..38327f775a2a3 100755 --- a/scripts/ci/static_checks/lint_dockerfile.sh +++ b/scripts/ci/static_checks/lint_dockerfile.sh @@ -19,16 +19,17 @@ .
"$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh" function run_docker_lint() { + IMAGE_NAME="hadolint/hadolint:v1.22.1-8-gf0ef28b-alpine" if [[ "${#@}" == "0" ]]; then echo echo "Running docker lint for all Dockerfiles" echo # shellcheck disable=SC2046 - docker run \ + docker_v run \ -v "$(pwd):/root" \ -w "/root" \ --rm \ - "hadolint/hadolint:v1.18.0-6-ga0d655d-debian" "/bin/hadolint" $(git ls-files| grep 'Dockerfile') + "${IMAGE_NAME}" "/bin/hadolint" $(git ls-files| grep 'Dockerfile') echo echo "Hadolint completed with no errors" echo @@ -36,11 +37,11 @@ function run_docker_lint() { echo echo "Running docker lint for $*" echo - docker run \ + docker_v run \ -v "$(pwd):/root" \ -w "/root" \ --rm \ - "hadolint/hadolint:v1.18.0-6-ga0d655d-debian" "/bin/hadolint" "${@}" + "${IMAGE_NAME}" "/bin/hadolint" "${@}" echo echo "Hadolint completed with no errors" echo diff --git a/scripts/ci/static_checks/mypy.sh b/scripts/ci/static_checks/mypy.sh index a7257a9633cdf..7ebbd6340ff06 100755 --- a/scripts/ci/static_checks/mypy.sh +++ b/scripts/ci/static_checks/mypy.sh @@ -26,7 +26,7 @@ function run_mypy() { files=("$@") fi - docker run "${EXTRA_DOCKER_FLAGS[@]}" \ + docker_v run "${EXTRA_DOCKER_FLAGS[@]}" \ --entrypoint "/usr/local/bin/dumb-init" \ "-v" "${AIRFLOW_SOURCES}/.mypy_cache:/opt/airflow/.mypy_cache" \ "${AIRFLOW_CI_IMAGE}" \ diff --git a/scripts/ci/static_checks/pylint.sh b/scripts/ci/static_checks/pylint.sh index 39a2964b9760f..c69498e3832d2 100755 --- a/scripts/ci/static_checks/pylint.sh +++ b/scripts/ci/static_checks/pylint.sh @@ -20,12 +20,12 @@ function run_pylint() { if [[ "${#@}" == "0" ]]; then - docker run "${EXTRA_DOCKER_FLAGS[@]}" \ + docker_v run "${EXTRA_DOCKER_FLAGS[@]}" \ --entrypoint "/usr/local/bin/dumb-init" \ "${AIRFLOW_CI_IMAGE}" \ "--" "/opt/airflow/scripts/in_container/run_pylint.sh" else - docker run "${EXTRA_DOCKER_FLAGS[@]}" \ + docker_v run "${EXTRA_DOCKER_FLAGS[@]}" \ --entrypoint "/usr/local/bin/dumb-init" \ "${AIRFLOW_CI_IMAGE}" \ "--" "/opt/airflow/scripts/in_container/run_pylint.sh" "${@}" @@ -36,14 +36,21 @@ build_images::prepare_ci_build build_images::rebuild_ci_image_if_needed +# Bug: Pylint only looks at PYLINTRC if it can't find a file in the _default_ +# locations, meaning we can't use this env var to over-ride it +args=() + +if [[ -n "${PYLINTRC:-}" ]]; then + args=(--rcfile "${PYLINTRC}") +fi + if [[ "${#@}" != "0" ]]; then pylint::filter_out_files_from_pylint_todo_list "$@" if [[ "${#FILTERED_FILES[@]}" == "0" ]]; then echo "Filtered out all files. Skipping pylint." - else - run_pylint "${FILTERED_FILES[@]}" + exit 0 fi -else - run_pylint + args+=("${FILTERED_FILES[@]}") fi +run_pylint "${args[@]}" diff --git a/scripts/ci/static_checks/refresh_pylint_todo.sh b/scripts/ci/static_checks/refresh_pylint_todo.sh index 05dce881d4e37..52474b74e6db0 100755 --- a/scripts/ci/static_checks/refresh_pylint_todo.sh +++ b/scripts/ci/static_checks/refresh_pylint_todo.sh @@ -21,7 +21,7 @@ export FORCE_ANSWER_TO_QUESTIONS="quit" . 
"$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh" function refresh_pylint_todo() { - docker run "${EXTRA_DOCKER_FLAGS[@]}" \ + docker_v run "${EXTRA_DOCKER_FLAGS[@]}" \ "${AIRFLOW_CI_IMAGE}" \ "/opt/airflow/scripts/in_container/refresh_pylint_todo.sh" } diff --git a/scripts/ci/static_checks/run_basic_static_checks.sh b/scripts/ci/static_checks/run_basic_static_checks.sh index e017bec93b1df..fb5256e78d70e 100755 --- a/scripts/ci/static_checks/run_basic_static_checks.sh +++ b/scripts/ci/static_checks/run_basic_static_checks.sh @@ -35,8 +35,10 @@ fi COMMIT_SHA="${1}" shift -python -m pip install pre-commit \ - --constraint "https://raw.githubusercontent.com/apache/airflow/${DEFAULT_CONSTRAINTS_BRANCH}/constraints-${PYTHON_MAJOR_MINOR_VERSION}.txt" +python -m pip install --user pre-commit \ + --constraint "https://raw.githubusercontent.com/${CONSTRAINTS_GITHUB_REPOSITORY}/${DEFAULT_CONSTRAINTS_BRANCH}/constraints-${PYTHON_MAJOR_MINOR_VERSION}.txt" + +export PATH=~/.local/bin:${PATH} if [[ $# == "0" ]]; then pre-commit run --all-files --show-diff-on-failure --color always \ diff --git a/scripts/ci/static_checks/run_static_checks.sh b/scripts/ci/static_checks/run_static_checks.sh index 4ceb66882b60a..7f520e0a2d448 100755 --- a/scripts/ci/static_checks/run_static_checks.sh +++ b/scripts/ci/static_checks/run_static_checks.sh @@ -29,8 +29,10 @@ build_images::prepare_ci_build build_images::rebuild_ci_image_if_needed -python -m pip install pre-commit \ - --constraint "https://raw.githubusercontent.com/apache/airflow/${DEFAULT_CONSTRAINTS_BRANCH}/constraints-${PYTHON_MAJOR_MINOR_VERSION}.txt" +python -m pip install --user pre-commit \ + --constraint "https://raw.githubusercontent.com/${CONSTRAINTS_GITHUB_REPOSITORY}/${DEFAULT_CONSTRAINTS_BRANCH}/constraints-${PYTHON_MAJOR_MINOR_VERSION}.txt" + +export PATH=~/.local/bin:${PATH} if [[ $# == "0" ]]; then pre-commit run --all-files --show-diff-on-failure --color always diff --git a/scripts/ci/testing/ci_run_airflow_testing.sh b/scripts/ci/testing/ci_run_airflow_testing.sh index 38b000cd0c2eb..fa8c0448267e3 100755 --- a/scripts/ci/testing/ci_run_airflow_testing.sh +++ b/scripts/ci/testing/ci_run_airflow_testing.sh @@ -15,167 +15,118 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. + +# Enable automated tests execution +RUN_TESTS="true" +export RUN_TESTS + +SKIPPED_FAILED_JOB="Quarantined" +export SKIPPED_FAILED_JOB + +SEMAPHORE_NAME="tests" +export SEMAPHORE_NAME + # shellcheck source=scripts/ci/libraries/_script_init.sh . "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh" -DOCKER_COMPOSE_LOCAL=() -if [[ -f ${BUILD_CACHE_DIR}/.skip_tests ]]; then - echo - echo "Skipping running tests !!!!!" 
- echo - exit -fi -function run_airflow_testing_in_docker() { - set +u - set +e - local exit_code - for try_num in {1..5} +# Starts test types in parallel +# test_types_to_run - list of test types (it's not an array, it is a space-separated list) +# ${@} - additional arguments to pass to test execution +function run_test_types_in_parallel() { + start_end::group_start "Monitoring tests: ${test_types_to_run}" + parallel::monitor_progress + mkdir -p "${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}" + for TEST_TYPE in ${test_types_to_run} do - echo - echo "Making sure docker-compose is down and remnants removed" - echo - docker-compose --log-level INFO -f "${SCRIPTS_CI_DIR}/docker-compose/base.yml" \ - down --remove-orphans --volumes --timeout 10 - echo - echo "System-prune docker" - echo - docker system prune --force --volumes - echo - echo "Check available space" - echo - df --human - echo - echo "Check available memory" - echo - free --human - echo - echo "Starting try number ${try_num}" - echo - if [[ " ${ENABLED_INTEGRATIONS} " =~ " kerberos " ]]; then - echo "Creating Kerberos network" - kerberos::create_kerberos_network - else - echo "Skip creating kerberos network" - fi - docker-compose --log-level INFO \ - -f "${SCRIPTS_CI_DIR}/docker-compose/base.yml" \ - -f "${SCRIPTS_CI_DIR}/docker-compose/backend-${BACKEND}.yml" \ - "${INTEGRATIONS[@]}" \ - "${DOCKER_COMPOSE_LOCAL[@]}" \ - run airflow "${@}" - exit_code=$? - if [[ " ${INTEGRATIONS[*]} " =~ " kerberos " ]]; then - echo "Delete kerberos network" - kerberos::delete_kerberos_network - fi - if [[ ${exit_code} == "254" && ${try_num} != "5" ]]; then - echo - echo "Failed try num ${try_num}. Sleeping 5 seconds for retry" - echo - sleep 5 - continue - else - break - fi + export TEST_TYPE + mkdir -p "${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}/${TEST_TYPE}" + export JOB_LOG="${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}/${TEST_TYPE}/stdout" + export PARALLEL_JOB_STATUS="${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}/${TEST_TYPE}/status" + # After 25 mins each test job gets SIGTERM, another SIGTERM 200ms later and SIGKILL 200ms after that + # shellcheck disable=SC2086 + parallel --ungroup --bg --semaphore --semaphorename "${SEMAPHORE_NAME}" \ + --jobs "${MAX_PARALLEL_TEST_JOBS}" --timeout 1500 \ + "$( dirname "${BASH_SOURCE[0]}" )/ci_run_single_airflow_test_in_docker.sh" "${@}" >${JOB_LOG} 2>&1 done - if [[ ${TEST_TYPE:=} == "Quarantined" ]]; then - if [[ ${exit_code} == "1" ]]; then - echo - echo "Some Quarantined tests failed. but we recorded it in an issue" - echo - exit_code="0" - else - echo - echo "All Quarantined tests succeeded" - echo - fi - fi - set -u - set -e - return "${exit_code}" + parallel --semaphore --semaphorename "${SEMAPHORE_NAME}" --wait + parallel::kill_monitor + start_end::group_end } -function prepare_tests_to_run() { - DOCKER_COMPOSE_LOCAL+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/files.yml") - if [[ ${MOUNT_LOCAL_SOURCES} == "true" ]]; then - DOCKER_COMPOSE_LOCAL+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/local.yml") - fi +# Runs all test types in parallel depending on the number of CPUs available +# We monitor their progress, display it and summarize the results when finished. +# +# In case there is not enough memory (MEMORY_REQUIRED_FOR_INTEGRATION_TEST_PARALLEL_RUN) available for +# the docker engine, the integration tests (which take a lot of memory for all the integrations) +# are run sequentially after all other tests were run in parallel.
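+#
+# Illustrative example of that gate (the numbers are made up, not part of the change): with
+# MEMORY_AVAILABLE_FOR_DOCKER=16000000000 (~16 GB) and
+# MEMORY_REQUIRED_FOR_INTEGRATION_TEST_PARALLEL_RUN=33000000000 (~32 GB),
+# "Integration" is stripped from the types run in parallel and is executed on its own
+# after parallel::cleanup_runner has freed resources again.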
+# +# Input: +# * TEST_TYPES - contains all test types that should be executed +# * MEMORY_REQUIRED_FOR_INTEGRATION_TEST_PARALLEL_RUN - memory in bytes required to run integration tests +# in parallel to other tests +# * MEMORY_AVAILABLE_FOR_DOCKER - memory that is available in docker (set by cleanup_runners) +# +function run_all_test_types_in_parallel() { + parallel::cleanup_runner - if [[ ${GITHUB_ACTIONS} == "true" ]]; then - DOCKER_COMPOSE_LOCAL+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/ga.yml") - fi + start_end::group_start "Determine how to run the tests" + echo + echo "${COLOR_YELLOW}Running maximum ${MAX_PARALLEL_TEST_JOBS} test types in parallel${COLOR_RESET}" + echo - if [[ ${FORWARD_CREDENTIALS} == "true" ]]; then - DOCKER_COMPOSE_LOCAL+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/forward-credentials.yml") + local run_integration_tests_separately="false" + # shellcheck disable=SC2153 + local test_types_to_run=${TEST_TYPES} + + if [[ ${test_types_to_run} == *"Integration"* ]]; then + if (( MEMORY_AVAILABLE_FOR_DOCKER < MEMORY_REQUIRED_FOR_INTEGRATION_TEST_PARALLEL_RUN )) ; then + # In case of Integration tests - they need more resources (Memory) thus we only run them in + # parallel if we have more than 32 GB memory available. Otherwise we run them sequentially + # after cleaning up the memory and stopping all docker instances + echo "" + echo "${COLOR_YELLOW}There is not enough memory to run Integration test in parallel${COLOR_RESET}" + echo "${COLOR_YELLOW} Available memory: ${MEMORY_AVAILABLE_FOR_DOCKER}${COLOR_RESET}" + echo "${COLOR_YELLOW} Required memory: ${MEMORY_REQUIRED_FOR_INTEGRATION_TEST_PARALLEL_RUN}${COLOR_RESET}" + echo "" + echo "${COLOR_YELLOW}Integration tests will be run separately at the end after cleaning up docker${COLOR_RESET}" + echo "" + # Remove Integration from list of tests to run in parallel + test_types_to_run="${test_types_to_run//Integration/}" + run_integration_tests_separately="true" + fi fi + set +e + start_end::group_end - if [[ -n ${INSTALL_AIRFLOW_VERSION=} || -n ${INSTALL_AIRFLOW_REFERENCE} ]]; then - DOCKER_COMPOSE_LOCAL+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/remove-sources.yml") - fi - readonly DOCKER_COMPOSE_LOCAL + parallel::initialize_monitoring - if [[ -n "${TEST_TYPE=}" ]]; then - # Handle case where test type is passed from outside - export TEST_TYPES="${TEST_TYPE}" + run_test_types_in_parallel "${@}" + if [[ ${run_integration_tests_separately} == "true" ]]; then + parallel::cleanup_runner + test_types_to_run="Integration" + run_test_types_in_parallel "${@}" fi + set -e + # this will exit with error code in case some of the non-Quarantined tests failed + parallel::print_job_summary_and_return_status_code +} - if [[ -z "${TEST_TYPES=}" ]]; then - TEST_TYPES="Core Providers API CLI Integration Other WWW Heisentests" - echo - echo "Test types not specified. 
Running all: ${TEST_TYPES}" - echo - fi - if [[ -n "${TEST_TYPE=}" ]]; then - # Add Postgres/MySQL special test types in case we are running several test types - if [[ ${BACKEND} == "postgres" ]]; then - TEST_TYPES="${TEST_TYPES} Postgres" - fi - if [[ ${BACKEND} == "mysql" ]]; then - TEST_TYPES="${TEST_TYPES} MySQL" - fi - fi - readonly TEST_TYPES -} +testing::skip_tests_if_requested build_images::prepare_ci_build build_images::rebuild_ci_image_if_needed_with_group -prepare_tests_to_run +parallel::make_sure_gnu_parallel_is_installed +testing::get_maximum_parallel_test_jobs -for TEST_TYPE in ${TEST_TYPES} -do - start_end::group_start "Running tests ${TEST_TYPE}" +testing::get_test_types_to_run - INTEGRATIONS=() - export INTEGRATIONS +testing::get_docker_compose_local - if [[ ${TEST_TYPE:=} == "Integration" ]]; then - export ENABLED_INTEGRATIONS="${AVAILABLE_INTEGRATIONS}" - export RUN_INTEGRATION_TESTS="${AVAILABLE_INTEGRATIONS}" - else - export ENABLED_INTEGRATIONS="" - export RUN_INTEGRATION_TESTS="" - fi - - for _INT in ${ENABLED_INTEGRATIONS} - do - INTEGRATIONS+=("-f") - INTEGRATIONS+=("${SCRIPTS_CI_DIR}/docker-compose/integration-${_INT}.yml") - done - - export TEST_TYPE - - echo "**********************************************************************************************" - echo - echo " TEST_TYPE: ${TEST_TYPE}, ENABLED INTEGRATIONS: ${ENABLED_INTEGRATIONS}" - echo - echo "**********************************************************************************************" - - run_airflow_testing_in_docker "${@}" - start_end::group_end -done +run_all_test_types_in_parallel "${@}" diff --git a/scripts/ci/testing/ci_run_quarantined_tests.sh b/scripts/ci/testing/ci_run_quarantined_tests.sh new file mode 100755 index 0000000000000..57e4aca97db75 --- /dev/null +++ b/scripts/ci/testing/ci_run_quarantined_tests.sh @@ -0,0 +1,90 @@ +#!/usr/bin/env bash +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +set -euo pipefail + +# Enable automated tests execution +RUN_TESTS="true" +export RUN_TESTS + +SKIPPED_FAILED_JOB="Quarantined" +export SKIPPED_FAILED_JOB + +SEMAPHORE_NAME="tests" +export SEMAPHORE_NAME + +# shellcheck source=scripts/ci/libraries/_script_init.sh +. 
"$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh" + +initialization::set_output_color_variables + +BACKEND_TEST_TYPES=(mysql postgres sqlite) + +# Starts test types in parallel +# test_types_to_run - list of test types (it's not an array, it is space-separate list) +# ${@} - additional arguments to pass to test execution +function run_quarantined_backend_tests_in_parallel() { + start_end::group_start "Determining how to run the tests" + echo + echo "${COLOR_YELLOW}Running maximum ${MAX_PARALLEL_QUARANTINED_TEST_JOBS} test types in parallel${COLOR_RESET}" + echo + start_end::group_end + start_end::group_start "Monitoring Quarantined tests : ${BACKEND_TEST_TYPES[*]}" + parallel::initialize_monitoring + parallel::monitor_progress + mkdir -p "${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}" + TEST_TYPE="Quarantined" + export TEST_TYPE + for BACKEND in "${BACKEND_TEST_TYPES[@]}" + do + export BACKEND + mkdir -p "${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}/${BACKEND}" + mkdir -p "${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}/${BACKEND}" + export JOB_LOG="${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}/${BACKEND}/stdout" + export PARALLEL_JOB_STATUS="${PARALLEL_MONITORED_DIR}/${SEMAPHORE_NAME}/${BACKEND}/status" + # Each test job will get SIGTERM followed by SIGTERM 200ms later and SIGKILL 200ms later after 25 mins + # shellcheck disable=SC2086 + parallel --ungroup --bg --semaphore --semaphorename "${SEMAPHORE_NAME}" \ + --jobs "${MAX_PARALLEL_QUARANTINED_TEST_JOBS}" --timeout 1500 \ + "$( dirname "${BASH_SOURCE[0]}" )/ci_run_single_airflow_test_in_docker.sh" "${@}" >${JOB_LOG} 2>&1 + done + parallel --semaphore --semaphorename "${SEMAPHORE_NAME}" --wait + parallel::kill_monitor + start_end::group_end +} + +testing::skip_tests_if_requested + +build_images::prepare_ci_build + +build_images::rebuild_ci_image_if_needed_with_group + +parallel::make_sure_gnu_parallel_is_installed + +testing::get_maximum_parallel_test_jobs + +testing::get_docker_compose_local + +run_quarantined_backend_tests_in_parallel "${@}" + +set +e + +parallel::print_job_summary_and_return_status_code + +echo "Those are quarantined tests so failure of those does not fail the whole build!" +echo "Please look above for the output of failed tests to fix them!" +echo diff --git a/scripts/ci/testing/ci_run_single_airflow_test_in_docker.sh b/scripts/ci/testing/ci_run_single_airflow_test_in_docker.sh new file mode 100755 index 0000000000000..0bf415f012c13 --- /dev/null +++ b/scripts/ci/testing/ci_run_single_airflow_test_in_docker.sh @@ -0,0 +1,188 @@ +#!/usr/bin/env bash +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# Skip printing groups in CI +PRINT_INFO_FROM_SCRIPTS="false" +# shellcheck source=scripts/ci/libraries/_script_init.sh +. 
"$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh" + +PRINT_INFO_FROM_SCRIPTS="true" +export PRINT_INFO_FROM_SCRIPTS + +DOCKER_COMPOSE_LOCAL=() +INTEGRATIONS=() + +function prepare_tests() { + DOCKER_COMPOSE_LOCAL+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/files.yml") + if [[ ${MOUNT_SELECTED_LOCAL_SOURCES} == "true" ]]; then + DOCKER_COMPOSE_LOCAL+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/local.yml") + fi + if [[ ${MOUNT_ALL_LOCAL_SOURCES} == "true" ]]; then + DOCKER_COMPOSE_LOCAL+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/local-all-sources.yml") + fi + + if [[ ${GITHUB_ACTIONS=} == "true" ]]; then + DOCKER_COMPOSE_LOCAL+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/ga.yml") + fi + + if [[ ${FORWARD_CREDENTIALS} == "true" ]]; then + DOCKER_COMPOSE_LOCAL+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/forward-credentials.yml") + fi + + if [[ -n ${INSTALL_AIRFLOW_VERSION=} || -n ${INSTALL_AIRFLOW_REFERENCE} ]]; then + DOCKER_COMPOSE_LOCAL+=("-f" "${SCRIPTS_CI_DIR}/docker-compose/remove-sources.yml") + fi + readonly DOCKER_COMPOSE_LOCAL + + if [[ ${TEST_TYPE:=} == "Integration" ]]; then + export ENABLED_INTEGRATIONS="${AVAILABLE_INTEGRATIONS}" + export RUN_INTEGRATION_TESTS="${AVAILABLE_INTEGRATIONS}" + else + export ENABLED_INTEGRATIONS="" + export RUN_INTEGRATION_TESTS="" + fi + + for _INT in ${ENABLED_INTEGRATIONS} + do + INTEGRATIONS+=("-f") + INTEGRATIONS+=("${SCRIPTS_CI_DIR}/docker-compose/integration-${_INT}.yml") + done + + readonly INTEGRATIONS + + echo "**********************************************************************************************" + echo + echo " TEST_TYPE: ${TEST_TYPE}, ENABLED INTEGRATIONS: ${ENABLED_INTEGRATIONS}" + echo + echo "**********************************************************************************************" +} + +# Runs airflow testing in docker container +# You need to set variable TEST_TYPE - test type to run +# "${@}" - extra arguments to pass to docker command +function run_airflow_testing_in_docker() { + set +u + set +e + local exit_code + echo + echo "Semaphore grabbed. Running tests for ${TEST_TYPE}" + echo + for try_num in {1..5} + do + echo + echo "Starting try number ${try_num}" + echo + echo + echo "Making sure docker-compose is down and remnants removed" + echo + docker-compose --log-level INFO -f "${SCRIPTS_CI_DIR}/docker-compose/base.yml" \ + --project-name "airflow-${TEST_TYPE}-${BACKEND}" \ + down --remove-orphans \ + --volumes --timeout 10 + docker-compose --log-level INFO \ + -f "${SCRIPTS_CI_DIR}/docker-compose/base.yml" \ + -f "${SCRIPTS_CI_DIR}/docker-compose/backend-${BACKEND}.yml" \ + "${INTEGRATIONS[@]}" \ + "${DOCKER_COMPOSE_LOCAL[@]}" \ + --project-name "airflow-${TEST_TYPE}-${BACKEND}" \ + run airflow "${@}" + exit_code=$? + docker-compose --log-level INFO -f "${SCRIPTS_CI_DIR}/docker-compose/base.yml" \ + --project-name "airflow-${TEST_TYPE}-${BACKEND}" \ + down --remove-orphans \ + --volumes --timeout 10 + if [[ ${exit_code} == "254" && ${try_num} != "5" ]]; then + echo + echo "Failed try num ${try_num}. 
Sleeping 5 seconds for retry" + echo + sleep 5 + continue + else + break + fi + done + set -u + set -e + if [[ ${exit_code} != "0" ]]; then + EXTRA_ARGS="" + if [[ ${BACKEND} == "postgres" ]]; then + EXTRA_ARGS="--postgres-version ${POSTGRES_VERSION} " + elif [[ ${BACKEND} == "mysql" ]]; then + EXTRA_ARGS="--mysql-version ${MYSQL_VERSION} " + fi + echo "${COLOR_RED}***********************************************************************************************${COLOR_RESET}" + echo "${COLOR_RED}*${COLOR_RESET}" + echo "${COLOR_RED}* ERROR! Some tests failed, unfortunately. Those might be transient errors,${COLOR_RESET}" + echo "${COLOR_RED}* but usually you have to fix something.${COLOR_RESET}" + echo "${COLOR_RED}* See the above log for details.${COLOR_RESET}" + echo "${COLOR_RED}*${COLOR_RESET}" + echo "${COLOR_RED}***********************************************************************************************${COLOR_RESET}" + echo """ +* You can easily reproduce the failed tests on your dev machine. +* +* When you have the source branch checked out locally: +* +* Run all tests: +* +* ./breeze --backend ${BACKEND} ${EXTRA_ARGS}--python ${PYTHON_MAJOR_MINOR_VERSION} --db-reset --skip-mounting-local-sources --test-type ${TEST_TYPE} tests +* +* Enter docker shell: +* +* ./breeze --backend ${BACKEND} ${EXTRA_ARGS}--python ${PYTHON_MAJOR_MINOR_VERSION} --db-reset --skip-mounting-local-sources --test-type ${TEST_TYPE} shell +*""" + if [[ -n "${GITHUB_REGISTRY_PULL_IMAGE_TAG=}" ]]; then + echo """ +* When you do not have sources: +* +* Run all tests: +* +* ./breeze --github-image-id ${GITHUB_REGISTRY_PULL_IMAGE_TAG} --backend ${BACKEND} ${EXTRA_ARGS}--python ${PYTHON_MAJOR_MINOR_VERSION} --db-reset --skip-mounting-local-sources --test-type ${TEST_TYPE} tests +* +* Enter docker shell: +* +* ./breeze --github-image-id ${GITHUB_REGISTRY_PULL_IMAGE_TAG} --backend ${BACKEND} ${EXTRA_ARGS}--python ${PYTHON_MAJOR_MINOR_VERSION} --db-reset --skip-mounting-local-sources --test-type ${TEST_TYPE} shell +*""" + fi + echo """ +* +* NOTE!
Once you are in the docker shell, you can run failed test with: +* +* pytest [TEST_NAME] +* +* You can copy the test name from the output above +* +***********************************************************************************************""" + + fi + + echo ${exit_code} > "${PARALLEL_JOB_STATUS}" + + if [[ ${exit_code} == 0 ]]; then + echo + echo "${COLOR_GREEN}Test type: ${TEST_TYPE} succeeded.${COLOR_RESET}" + echo + else + echo + echo "${COLOR_RED}Test type: ${TEST_TYPE} failed.${COLOR_RESET}" + echo + fi + return "${exit_code}" +} + +prepare_tests + +run_airflow_testing_in_docker "${@}" diff --git a/scripts/ci/tools/ci_clear_tmp.sh b/scripts/ci/tools/ci_clear_tmp.sh index d367967e1ce1d..bef3fa5289375 100755 --- a/scripts/ci/tools/ci_clear_tmp.sh +++ b/scripts/ci/tools/ci_clear_tmp.sh @@ -27,7 +27,7 @@ sanity_checks::sanitize_mounted_files read -r -a EXTRA_DOCKER_FLAGS <<<"$(local_mounts::convert_local_mounts_to_docker_params)" -docker run --entrypoint /bin/bash "${EXTRA_DOCKER_FLAGS[@]}" \ +docker_v run --entrypoint /bin/bash "${EXTRA_DOCKER_FLAGS[@]}" \ --rm \ --env-file "${AIRFLOW_SOURCES}/scripts/ci/docker-compose/_docker.env" \ "${AIRFLOW_CI_IMAGE}" \ diff --git a/scripts/ci/tools/ci_fix_ownership.sh b/scripts/ci/tools/ci_fix_ownership.sh index 2d57d65155236..56463d211b375 100755 --- a/scripts/ci/tools/ci_fix_ownership.sh +++ b/scripts/ci/tools/ci_fix_ownership.sh @@ -33,7 +33,7 @@ sanity_checks::sanitize_mounted_files read -r -a EXTRA_DOCKER_FLAGS <<<"$(local_mounts::convert_local_mounts_to_docker_params)" -docker run --entrypoint /bin/bash "${EXTRA_DOCKER_FLAGS[@]}" \ +docker_v run --entrypoint /bin/bash "${EXTRA_DOCKER_FLAGS[@]}" \ --rm \ --env-file "${AIRFLOW_SOURCES}/scripts/ci/docker-compose/_docker.env" \ "${AIRFLOW_CI_IMAGE}" \ diff --git a/scripts/ci/tools/ci_free_space_on_ci.sh b/scripts/ci/tools/ci_free_space_on_ci.sh index e3518952cdc59..a337545b77a07 100755 --- a/scripts/ci/tools/ci_free_space_on_ci.sh +++ b/scripts/ci/tools/ci_free_space_on_ci.sh @@ -18,8 +18,15 @@ # shellcheck source=scripts/ci/libraries/_script_init.sh . "$( dirname "${BASH_SOURCE[0]}" )/../libraries/_script_init.sh" +echo "${COLOR_BLUE}Disable swap${COLOR_RESET}" sudo swapoff -a sudo rm -f /swapfile + +echo "${COLOR_BLUE}Cleaning apt${COLOR_RESET}" sudo apt clean -docker system prune --all --force + +echo "${COLOR_BLUE}Pruning docker${COLOR_RESET}" +docker_v system prune --all --force --volumes + +echo "${COLOR_BLUE}Free disk space ${COLOR_RESET}" df -h diff --git a/scripts/ci/tools/verify_docker_image.sh b/scripts/ci/tools/verify_docker_image.sh new file mode 100755 index 0000000000000..3ef5e3e0e03e1 --- /dev/null +++ b/scripts/ci/tools/verify_docker_image.sh @@ -0,0 +1,57 @@ +#!/usr/bin/env bash +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
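+#
+# Example invocations (the image tags below are illustrative - any locally built image works):
+#
+#   ./scripts/ci/tools/verify_docker_image.sh CI apache/airflow:master-python3.6-ci
+#   ./scripts/ci/tools/verify_docker_image.sh PROD apache/airflow:2.0.1
+#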
+# shellcheck source=scripts/ci/libraries/_script_init.sh +. "$(dirname "${BASH_SOURCE[0]}")/../libraries/_script_init.sh" + +usage() { +local cmdname +cmdname="$(basename -- "$0")" + +cat << EOF +Usage: ${cmdname} <IMAGE_TYPE> <IMAGE_NAME> + +Verify the user-specified docker image. + +Image Type can be one of the two values: CI or PROD + +EOF +} + + +if [[ "$#" -ne 2 ]]; then + >&2 echo "You must provide two arguments - image type [PROD/CI] and image name." + usage + exit 1 +fi + +IMAGE_TYPE="${1}" +IMAGE_NAME="${2}" + +if ! docker image inspect "${IMAGE_NAME}" &>/dev/null; then + >&2 echo "Image '${IMAGE_NAME}' doesn't exist in the local registry." + exit 1 +fi + +if [ "$(echo "${IMAGE_TYPE}" | tr '[:lower:]' '[:upper:]')" = "PROD" ]; then + verify_image::verify_prod_image "${IMAGE_NAME}" +elif [ "$(echo "${IMAGE_TYPE}" | tr '[:lower:]' '[:upper:]')" = "CI" ]; then + verify_image::verify_ci_image "${IMAGE_NAME}" +else + >&2 echo "Unsupported image type. Supported values: PROD, CI" + exit 1 +fi diff --git a/scripts/docker/common.sh b/scripts/docker/common.sh new file mode 100755 index 0000000000000..28307e3333050 --- /dev/null +++ b/scripts/docker/common.sh @@ -0,0 +1,63 @@ +#!/usr/bin/env bash +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +set -euo pipefail + +test -v INSTALL_MYSQL_CLIENT +test -v AIRFLOW_INSTALL_USER_FLAG +test -v AIRFLOW_REPO +test -v AIRFLOW_BRANCH +test -v AIRFLOW_PIP_VERSION + +set -x + +function common::get_airflow_version_specification() { + if [[ -z ${AIRFLOW_VERSION_SPECIFICATION} + && -n ${AIRFLOW_VERSION} + && ${AIRFLOW_INSTALLATION_METHOD} != "." ]]; then + AIRFLOW_VERSION_SPECIFICATION="==${AIRFLOW_VERSION}" + fi +} + +function common::get_constraints_location() { + # auto-detect the Airflow constraints reference and location + if [[ -z "${AIRFLOW_CONSTRAINTS_REFERENCE}" ]]; then + if [[ ${AIRFLOW_VERSION} =~ [^0-9]*1[^0-9]*10[^0-9]([0-9]*) ]]; then + # All types of references/versions match this regexp for 1.10 series + # for example v1_10_test, 1.10.10, 1.10.9 etc.
${BASH_REMATCH[1]} matches the last + # minor digit of the version and its length is 0 for v1_10_test, 1 for 1.10.9 and 2 for 1.10.10+ + AIRFLOW_MINOR_VERSION_NUMBER=${BASH_REMATCH[1]} + if [[ ${#AIRFLOW_MINOR_VERSION_NUMBER} == "0" ]]; then + # For v1_10_* branches use constraints-1-10 branch + AIRFLOW_CONSTRAINTS_REFERENCE=constraints-1-10 + else + AIRFLOW_CONSTRAINTS_REFERENCE=constraints-${AIRFLOW_VERSION} + fi + elif [[ ${AIRFLOW_VERSION} =~ v?2.* ]]; then + AIRFLOW_CONSTRAINTS_REFERENCE=constraints-${AIRFLOW_VERSION} + else + AIRFLOW_CONSTRAINTS_REFERENCE=${DEFAULT_CONSTRAINTS_BRANCH} + fi + fi + + if [[ -z ${AIRFLOW_CONSTRAINTS_LOCATION} ]]; then + local constraints_base="https://raw.githubusercontent.com/${CONSTRAINTS_GITHUB_REPOSITORY}/${AIRFLOW_CONSTRAINTS_REFERENCE}" + local python_version + python_version="$(python --version 2>/dev/stdout | cut -d " " -f 2 | cut -d "." -f 1-2)" + AIRFLOW_CONSTRAINTS_LOCATION="${constraints_base}/${AIRFLOW_CONSTRAINTS}-${python_version}.txt" + fi +} diff --git a/scripts/docker/compile_www_assets.sh b/scripts/docker/compile_www_assets.sh index b3eb13eb453ec..9a8fef8d191ec 100755 --- a/scripts/docker/compile_www_assets.sh +++ b/scripts/docker/compile_www_assets.sh @@ -17,8 +17,7 @@ # under the License. # shellcheck disable=SC2086 set -euo pipefail - -test -v PYTHON_MAJOR_MINOR_VERSION +set -x # Installs additional dependencies passed as Argument to the Docker build command function compile_www_assets() { @@ -29,22 +28,20 @@ function compile_www_assets() { md5sum_file="static/dist/sum.md5" readonly md5sum_file local airflow_site_package - airflow_site_package="/root/.local/lib/python${PYTHON_MAJOR_MINOR_VERSION}/site-packages/airflow" + airflow_site_package="$(python -m site --user-site)/airflow" local www_dir="" if [[ -f "${airflow_site_package}/www_rbac/package.json" ]]; then www_dir="${airflow_site_package}/www_rbac" elif [[ -f "${airflow_site_package}/www/package.json" ]]; then www_dir="${airflow_site_package}/www" fi - if [[ -n "${www_dir}" ]]; then - pushd ${www_dir} || exit 1 - yarn install --frozen-lockfile --no-cache - yarn run prod - find package.json yarn.lock static/css static/js -type f | sort | xargs md5sum > "${md5sum_file}" - rm -rf "${www_dir}/node_modules" - rm -vf "${www_dir}"/{package.json,yarn.lock,.eslintignore,.eslintrc,.stylelintignore,.stylelintrc,compile_assets.sh,webpack.config.js} - popd || exit 1 - fi + pushd ${www_dir} || exit 1 + yarn install --frozen-lockfile --no-cache + yarn run prod + find package.json yarn.lock static/css static/js -type f | sort | xargs md5sum > "${md5sum_file}" + rm -rf "${www_dir}/node_modules" + rm -vf "${www_dir}"/{package.json,yarn.lock,.eslintignore,.eslintrc,.stylelintignore,.stylelintrc,compile_assets.sh,webpack.config.js} + popd || exit 1 } compile_www_assets diff --git a/scripts/docker/install_additional_dependencies.sh b/scripts/docker/install_additional_dependencies.sh index 9c77da042d90f..7925ac8a03ece 100755 --- a/scripts/docker/install_additional_dependencies.sh +++ b/scripts/docker/install_additional_dependencies.sh @@ -25,6 +25,8 @@ test -v AIRFLOW_INSTALL_USER_FLAG test -v AIRFLOW_PIP_VERSION test -v CONTINUE_ON_PIP_CHECK_FAILURE +set -x + # Installs additional dependencies passed as Argument to the Docker build command function install_additional_dependencies() { if [[ "${UPGRADE_TO_NEWER_DEPENDENCIES}" != "false" ]]; then diff --git a/scripts/docker/install_airflow.sh b/scripts/docker/install_airflow.sh index 91f38604cd74c..bfcc7e91b5549 100755 ---
a/scripts/docker/install_airflow.sh +++ b/scripts/docker/install_airflow.sh @@ -20,20 +20,14 @@ # AIRFLOW_INSTALLATION_METHOD - determines where to install airflow from: # "." - installs airflow from local sources # "apache-airflow" - installs airflow from PyPI 'apache-airflow' package -# AIRFLOW_INSTALL_VERSION - optionally specify version to install +# AIRFLOW_VERSION_SPECIFICATION - optional specification for the Airflow version to install +# (for example ==2.0.2 or <3.0.0) # UPGRADE_TO_NEWER_DEPENDENCIES - determines whether eager-upgrade should be performed with the # dependencies (with EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS added) # # shellcheck disable=SC2086 -set -euo pipefail - -test -v AIRFLOW_INSTALLATION_METHOD -test -v AIRFLOW_INSTALL_EDITABLE_FLAG -test -v AIRFLOW_INSTALL_USER_FLAG -test -v INSTALL_MYSQL_CLIENT -test -v UPGRADE_TO_NEWER_DEPENDENCIES -test -v CONTINUE_ON_PIP_CHECK_FAILURE -test -v AIRFLOW_CONSTRAINTS_LOCATION +# shellcheck source=scripts/docker/common.sh +. "$( dirname "${BASH_SOURCE[0]}" )/common.sh" function install_airflow() { # Sanity check for editable installation mode. @@ -54,18 +48,16 @@ function install_airflow() { echo # eager upgrade pip install ${AIRFLOW_INSTALL_USER_FLAG} --upgrade --upgrade-strategy eager \ - "${AIRFLOW_INSTALLATION_METHOD}[${AIRFLOW_EXTRAS}]${AIRFLOW_INSTALL_VERSION}" \ + "${AIRFLOW_INSTALLATION_METHOD}[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}" \ ${EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS} if [[ -n "${AIRFLOW_INSTALL_EDITABLE_FLAG}" ]]; then # Remove airflow and reinstall it using editable flag # We can only do it when we install airflow from sources pip uninstall apache-airflow --yes pip install ${AIRFLOW_INSTALL_EDITABLE_FLAG} \ - "${AIRFLOW_INSTALLATION_METHOD}[${AIRFLOW_EXTRAS}]${AIRFLOW_INSTALL_VERSION}" + "${AIRFLOW_INSTALLATION_METHOD}[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}" fi - # Work around to install azure-storage-blob - pip uninstall azure-storage azure-storage-blob azure-storage-file --yes - pip install azure-storage-blob azure-storage-file + # make sure correct PIP version is used pip install ${AIRFLOW_INSTALL_USER_FLAG} --upgrade "pip==${AIRFLOW_PIP_VERSION}" pip check || ${CONTINUE_ON_PIP_CHECK_FAILURE} @@ -74,21 +66,23 @@ function install_airflow() { echo Installing all packages with constraints and upgrade if needed echo pip install ${AIRFLOW_INSTALL_USER_FLAG} ${AIRFLOW_INSTALL_EDITABLE_FLAG} \ - "${AIRFLOW_INSTALLATION_METHOD}[${AIRFLOW_EXTRAS}]${AIRFLOW_INSTALL_VERSION}" \ + "${AIRFLOW_INSTALLATION_METHOD}[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}" \ --constraint "${AIRFLOW_CONSTRAINTS_LOCATION}" # make sure correct PIP version is used pip install ${AIRFLOW_INSTALL_USER_FLAG} --upgrade "pip==${AIRFLOW_PIP_VERSION}" # then upgrade if needed without using constraints to account for new limits in setup.py pip install ${AIRFLOW_INSTALL_USER_FLAG} --upgrade --upgrade-strategy only-if-needed \ ${AIRFLOW_INSTALL_EDITABLE_FLAG} \ - "${AIRFLOW_INSTALLATION_METHOD}[${AIRFLOW_EXTRAS}]${AIRFLOW_INSTALL_VERSION}" \ # Work around to install azure-storage-blob - pip uninstall azure-storage azure-storage-blob azure-storage-file --yes - pip install azure-storage-blob azure-storage-file + "${AIRFLOW_INSTALLATION_METHOD}[${AIRFLOW_EXTRAS}]${AIRFLOW_VERSION_SPECIFICATION}" \ # make sure correct PIP version is used pip install ${AIRFLOW_INSTALL_USER_FLAG} --upgrade "pip==${AIRFLOW_PIP_VERSION}" pip check || ${CONTINUE_ON_PIP_CHECK_FAILURE} fi + } +common::get_airflow_version_specification + 
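+# For illustration (assumed values, not part of the change): with AIRFLOW_VERSION=2.0.2 and an
+# installation method other than ".", common::get_airflow_version_specification yields
+# AIRFLOW_VERSION_SPECIFICATION="==2.0.2", so the pip specifier above expands to something like
+# "apache-airflow[async,postgres]==2.0.2".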
+common::get_constraints_location + install_airflow diff --git a/scripts/docker/install_airflow_from_latest_master.sh b/scripts/docker/install_airflow_from_branch_tip.sh similarity index 80% rename from scripts/docker/install_airflow_from_latest_master.sh rename to scripts/docker/install_airflow_from_branch_tip.sh index ca7b315f73f1e..6e34d05159b43 100755 --- a/scripts/docker/install_airflow_from_latest_master.sh +++ b/scripts/docker/install_airflow_from_branch_tip.sh @@ -17,7 +17,7 @@ # under the License. # shellcheck disable=SC2086 -# Installs Airflow from latest master. This is pure optimisation. It is done because we do not want +# Installs Airflow from $AIRFLOW_BRANCH tip. This is pure optimisation. It is done because we do not want # to reinstall all dependencies from scratch when setup.py changes. Problem with Docker caching is that # when a file is changed, when added to docker context, it invalidates the cache and it causes Docker # build to reinstall all dependencies from scratch. This can take a loooooot of time. Therefore we install @@ -26,23 +26,18 @@ # # If INSTALL_MYSQL_CLIENT is set to false, mysql extra is removed # -set -euo pipefail +# shellcheck source=scripts/docker/common.sh +. "$( dirname "${BASH_SOURCE[0]}" )/common.sh" -test -v INSTALL_MYSQL_CLIENT -test -v AIRFLOW_INSTALL_USER_FLAG -test -v AIRFLOW_REPO -test -v AIRFLOW_BRANCH -test -v AIRFLOW_CONSTRAINTS_LOCATION -test -v AIRFLOW_PIP_VERSION -function install_airflow_from_latest_master() { +function install_airflow_from_branch_tip() { echo - echo Installing airflow from latest master. It is used to cache dependencies + echo "Installing airflow from ${AIRFLOW_BRANCH}. It is used to cache dependencies" echo if [[ ${INSTALL_MYSQL_CLIENT} != "true" ]]; then AIRFLOW_EXTRAS=${AIRFLOW_EXTRAS/mysql,} fi - # Install latest master set of dependencies using constraints \ + # Install latest set of dependencies using constraints pip install ${AIRFLOW_INSTALL_USER_FLAG} \ "https://github.com/${AIRFLOW_REPO}/archive/${AIRFLOW_BRANCH}.tar.gz#egg=apache-airflow[${AIRFLOW_EXTRAS}]" \ --constraint "${AIRFLOW_CONSTRAINTS_LOCATION}" @@ -55,4 +50,6 @@ function install_airflow_from_latest_master() { pip uninstall --yes apache-airflow } -install_airflow_from_latest_master +common::get_constraints_location + +install_airflow_from_branch_tip diff --git a/scripts/docker/install_from_docker_context_files.sh b/scripts/docker/install_from_docker_context_files.sh index 6ff53b2e60ab2..d1982cf771bc0 100755 --- a/scripts/docker/install_from_docker_context_files.sh +++ b/scripts/docker/install_from_docker_context_files.sh @@ -22,18 +22,13 @@ # The packages are prepared from current sources and placed in the 'docker-context-files folder # Then both airflow and provider packages are installed using those packages rather than # PyPI -set -euo pipefail - -test -v AIRFLOW_EXTRAS -test -v AIRFLOW_INSTALL_USER_FLAG -test -v AIRFLOW_CONSTRAINTS_LOCATION -test -v AIRFLOW_PIP_VERSION -test -v CONTINUE_ON_PIP_CHECK_FAILURE -test -v EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS -test -v UPGRADE_TO_NEWER_DEPENDENCIES - +# shellcheck source=scripts/docker/common.sh +. 
"$( dirname "${BASH_SOURCE[0]}" )/common.sh" function install_airflow_and_providers_from_docker_context_files(){ + if [[ ${INSTALL_MYSQL_CLIENT} != "true" ]]; then + AIRFLOW_EXTRAS=${AIRFLOW_EXTRAS/mysql,} + fi # Find Apache Airflow packages in docker-context files local reinstalling_apache_airflow_package reinstalling_apache_airflow_package=$(ls \ @@ -67,10 +62,17 @@ function install_airflow_and_providers_from_docker_context_files(){ echo echo Force re-installing airflow and providers from local files with constraints and upgrade if needed echo + if [[ ${AIRFLOW_CONSTRAINTS_LOCATION} == "/"* ]]; then + grep -ve '^apache-airflow' <"${AIRFLOW_CONSTRAINTS_LOCATION}" > /tmp/constraints.txt + else + # Remove provider packages from constraint files because they are locally prepared + curl -L "${AIRFLOW_CONSTRAINTS_LOCATION}" | grep -ve '^apache-airflow' > /tmp/constraints.txt + fi # force reinstall airflow + provider package local files with constraints + upgrade if needed pip install ${AIRFLOW_INSTALL_USER_FLAG} --force-reinstall \ ${reinstalling_apache_airflow_package} ${reinstalling_apache_airflow_providers_packages} \ - --constraint "${AIRFLOW_CONSTRAINTS_LOCATION}" + --constraint /tmp/constraints.txt + rm /tmp/constraints.txt # make sure correct PIP version is used \ pip install ${AIRFLOW_INSTALL_USER_FLAG} --upgrade "pip==${AIRFLOW_PIP_VERSION}" # then upgrade if needed without using constraints to account for new limits in setup.py @@ -102,5 +104,7 @@ install_all_other_packages_from_docker_context_files() { fi } +common::get_constraints_location + install_airflow_and_providers_from_docker_context_files install_all_other_packages_from_docker_context_files diff --git a/scripts/docker/install_mysql.sh b/scripts/docker/install_mysql.sh index 877e56c84523d..534ed9804f944 100755 --- a/scripts/docker/install_mysql.sh +++ b/scripts/docker/install_mysql.sh @@ -17,6 +17,7 @@ # under the License. set -euo pipefail +set -x declare -a packages MYSQL_VERSION="8.0" diff --git a/scripts/in_container/_in_container_utils.sh b/scripts/in_container/_in_container_utils.sh index d2910322e13a1..ad3083eb268e3 100644 --- a/scripts/in_container/_in_container_utils.sh +++ b/scripts/in_container/_in_container_utils.sh @@ -16,6 +16,9 @@ # specific language governing permissions and limitations # under the License. +OPTIONAL_VERBOSE_FLAG=() +PROVIDER_PACKAGES_DIR="${AIRFLOW_SOURCES}/dev/provider_packages" + ####################################################################################################### # # Adds trap to the traps already set. @@ -51,16 +54,6 @@ function assert_in_container() { } function in_container_script_start() { - OUTPUT_PRINTED_ONLY_ON_ERROR=$(mktemp) - export OUTPUT_PRINTED_ONLY_ON_ERROR - readonly OUTPUT_PRINTED_ONLY_ON_ERROR - - if [[ ${VERBOSE=} == "true" && ${GITHUB_ACTIONS=} != "true" ]]; then - echo - echo "Output is redirected to ${OUTPUT_PRINTED_ONLY_ON_ERROR} and will be printed on error only" - echo - fi - if [[ ${VERBOSE_COMMANDS:="false"} == "true" ]]; then set -x fi @@ -71,23 +64,9 @@ function in_container_script_end() { EXIT_CODE=$? if [[ ${EXIT_CODE} != 0 ]]; then if [[ "${PRINT_INFO_FROM_SCRIPTS="true"}" == "true" ]]; then - if [[ -f "${OUTPUT_PRINTED_ONLY_ON_ERROR}" ]]; then - echo "###########################################################################################" - echo - echo "${COLOR_BLUE} EXIT CODE: ${EXIT_CODE} in container (See above for error message). Below is the output of the last action! 
${COLOR_RESET}" - echo - echo "${COLOR_BLUE}*** BEGINNING OF THE LAST COMMAND OUTPUT *** ${COLOR_RESET}" - cat "${OUTPUT_PRINTED_ONLY_ON_ERROR}" - echo "${COLOR_BLUE}*** END OF THE LAST COMMAND OUTPUT *** ${COLOR_RESET}" - echo - echo "${COLOR_BLUE} EXIT CODE: ${EXIT_CODE} in container. The actual error might be above the output! ${COLOR_RESET}" - echo - echo "###########################################################################################" - else - echo "########################################################################################################################" - echo "${COLOR_BLUE} [IN CONTAINER] EXITING ${0} WITH EXIT CODE ${EXIT_CODE} ${COLOR_RESET}" - echo "########################################################################################################################" - fi + echo "########################################################################################################################" + echo "${COLOR_BLUE} [IN CONTAINER] EXITING ${0} WITH EXIT CODE ${EXIT_CODE} ${COLOR_RESET}" + echo "########################################################################################################################" fi fi @@ -134,34 +113,21 @@ function in_container_cleanup_pycache() { function in_container_fix_ownership() { if [[ ${HOST_OS:=} == "Linux" ]]; then DIRECTORIES_TO_FIX=( - "/tmp" "/files" "/root/.aws" "/root/.azure" "/root/.config/gcloud" "/root/.docker" - "${AIRFLOW_SOURCES}" + "/opt/airflow/logs" + "/opt/airflow/docs" ) - if [[ ${VERBOSE} == "true" ]]; then - echo "Fixing ownership of mounted files" - fi - sudo find "${DIRECTORIES_TO_FIX[@]}" -print0 -user root 2>/dev/null | - sudo xargs --null chown "${HOST_USER_ID}.${HOST_GROUP_ID}" --no-dereference || - true >/dev/null 2>&1 - if [[ ${VERBOSE} == "true" ]]; then - echo "Fixed ownership of mounted files" - fi + find "${DIRECTORIES_TO_FIX[@]}" -print0 -user root 2>/dev/null | + xargs --null chown "${HOST_USER_ID}.${HOST_GROUP_ID}" --no-dereference || true >/dev/null 2>&1 fi } function in_container_clear_tmp() { - if [[ ${VERBOSE} == "true" ]]; then - echo "Cleaning ${AIRFLOW_SOURCES}/tmp from the container" - fi rm -rf /tmp/* - if [[ ${VERBOSE} == "true" ]]; then - echo "Cleaned ${AIRFLOW_SOURCES}/tmp from the container" - fi } function in_container_go_to_airflow_sources() { @@ -282,7 +248,7 @@ function install_airflow_from_wheel() { >&2 echo exit 4 fi - pip install "${airflow_package}${1}" >"${OUTPUT_PRINTED_ONLY_ON_ERROR}" 2>&1 + pip install "${airflow_package}${extras}" } function install_airflow_from_sdist() { @@ -299,43 +265,25 @@ function install_airflow_from_sdist() { >&2 echo exit 4 fi - pip install "${airflow_package}${1}" >"${OUTPUT_PRINTED_ONLY_ON_ERROR}" 2>&1 -} - -function reinstall_azure_storage_blob() { - group_start "Reinstalls azure-storage-blob (temporary workaround)" - # Reinstall azure-storage-blob here until https://github.com/apache/airflow/pull/12188 is solved - # Azure-storage-blob need to be reinstalled to overwrite azure-storage-blob installed by old version - # of the `azure-storage` library - echo - echo "Reinstalling azure-storage-blob" - echo - pip uninstall azure-storage azure-storage-blob azure-storage-file --yes - pip install azure-storage-blob azure-storage-file --no-deps --force-reinstall - group_end + pip install "${airflow_package}${extras}" } function install_remaining_dependencies() { group_start "Installs all remaining dependencies that are not installed by '${AIRFLOW_EXTRAS}' " - pip install apache-beam[gcp] >"${OUTPUT_PRINTED_ONLY_ON_ERROR}" 
2>&1 + pip install apache-beam[gcp] group_end } function uninstall_airflow() { - echo - echo "Uninstalling airflow" - echo pip uninstall -y apache-airflow || true - echo - echo "Remove all AIRFLOW_HOME remnants" - echo find /root/airflow/ -type f -print0 | xargs -0 rm -f -- } +function uninstall_all_pip_packages() { + pip uninstall -y -r <(pip freeze) +} + function uninstall_providers() { - echo - echo "Uninstalling all provider packages" - echo local provider_packages_to_uninstall provider_packages_to_uninstall=$(pip freeze | grep apache-airflow-providers || true) if [[ -n ${provider_packages_to_uninstall} ]]; then @@ -350,13 +298,51 @@ function uninstall_airflow_and_providers() { function install_released_airflow_version() { local version="${1}" - local extras="${2}" echo - echo "Installing released ${version} version of airflow with extras ${extras}" + echo "Installing released ${version} version of airflow without extras" echo rm -rf "${AIRFLOW_SOURCES}"/*.egg-info - pip install --upgrade "apache-airflow${extras}==${version}" >"${OUTPUT_PRINTED_ONLY_ON_ERROR}" 2>&1 + pip install --upgrade "apache-airflow==${version}" +} + +function install_local_airflow_with_eager_upgrade() { + local extras + extras="${1}" + # we add eager requirements to make sure to take into account limitations that will allow us to + # install all providers + # shellcheck disable=SC2086 + pip install -e ".${extras}" ${EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS} \ + --upgrade --upgrade-strategy eager +} + + +function install_all_providers_from_pypi_with_eager_upgrade() { + ALL_PROVIDERS_PACKAGES=$(python -c 'import setup; print(setup.get_all_provider_packages())') + local packages_to_install=() + local provider_package + local res + for provider_package in ${ALL_PROVIDERS_PACKAGES} + do + echo -n "Checking if ${provider_package} is available in PyPI: " + res=$(curl --head -s -o /dev/null -w "%{http_code}" "https://pypi.org/project/${provider_package}/") + if [[ ${res} == "200" ]]; then + packages_to_install+=( "${provider_package}" ) + echo "${COLOR_GREEN}OK${COLOR_RESET}" + else + echo "${COLOR_YELLOW}Skipped${COLOR_RESET}" + fi + done + echo "Installing provider packages: ${packages_to_install[*]}" + # we add eager requirements to make sure to take into account limitations that will allow us to + # install all providers. We install only those packages that are available in PyPI - we might + # Have some new providers in the works and they might not yet be simply available in PyPI + # Installing it with Airflow makes sure that the version of package that matches current + # Airflow requirements will be used. + # shellcheck disable=SC2086 + pip install -e . 
"${packages_to_install[@]}" ${EAGER_UPGRADE_ADDITIONAL_REQUIREMENTS} \ + --upgrade --upgrade-strategy eager + } function install_all_provider_packages_from_wheels() { @@ -364,7 +350,7 @@ function install_all_provider_packages_from_wheels() { echo "Installing all provider packages from wheels" echo uninstall_providers - pip install /dist/apache_airflow*providers_*.whl >"${OUTPUT_PRINTED_ONLY_ON_ERROR}" 2>&1 + pip install /dist/apache_airflow*providers_*.whl } function install_all_provider_packages_from_sdist() { @@ -372,28 +358,21 @@ function install_all_provider_packages_from_sdist() { echo "Installing all provider packages from .tar.gz" echo uninstall_providers - pip install /dist/apache-airflow-*providers-*.tar.gz >"${OUTPUT_PRINTED_ONLY_ON_ERROR}" 2>&1 + pip install /dist/apache-airflow-*providers-*.tar.gz } function setup_provider_packages() { - if [[ ${BACKPORT_PACKAGES:=} == "true" ]]; then - export PACKAGE_TYPE="backport" - export PACKAGE_PREFIX_UPPERCASE="BACKPORT_" - export PACKAGE_PREFIX_LOWERCASE="backport_" - export PACKAGE_PREFIX_HYPHEN="backport-" - else - export PACKAGE_TYPE="regular" - export PACKAGE_PREFIX_UPPERCASE="" - export PACKAGE_PREFIX_LOWERCASE="" - export PACKAGE_PREFIX_HYPHEN="" + export PACKAGE_TYPE="regular" + export PACKAGE_PREFIX_UPPERCASE="" + export PACKAGE_PREFIX_LOWERCASE="" + export PACKAGE_PREFIX_HYPHEN="" + if [[ ${VERBOSE} == "true" ]]; then + OPTIONAL_VERBOSE_FLAG+=("--verbose") fi readonly PACKAGE_TYPE readonly PACKAGE_PREFIX_UPPERCASE readonly PACKAGE_PREFIX_LOWERCASE readonly PACKAGE_PREFIX_HYPHEN - - readonly BACKPORT_PACKAGES - export BACKPORT_PACKAGES } function verify_suffix_versions_for_package_preparation() { @@ -487,6 +466,12 @@ ${COLOR_RESET} group_end } +function install_supported_pip_version() { + group_start "Install supported PIP version ${AIRFLOW_PIP_VERSION}" + pip install --upgrade "pip==${AIRFLOW_PIP_VERSION}" + group_end +} + function filename_to_python_module() { # Turn the file name into a python package name file="$1" @@ -540,7 +525,74 @@ function in_container_set_colors() { export COLOR_YELLOW } -# Starts group for Github Actions - makes logs much more readable + +function check_missing_providers() { + PACKAGE_ERROR="false" + + pushd "${AIRFLOW_SOURCES}/airflow/providers" >/dev/null 2>&1 || exit 1 + + LIST_OF_DIRS_FILE=$(mktemp) + find . -type d | sed 's!./!!; s!/!.!g' | grep -E 'hooks|operators|sensors|secrets|utils' \ + > "${LIST_OF_DIRS_FILE}" + + popd >/dev/null 2>&1 || exit 1 + + # Check if all providers are included + for PACKAGE in "${PROVIDER_PACKAGES[@]}" + do + if ! grep -E "^${PACKAGE}" <"${LIST_OF_DIRS_FILE}" >/dev/null; then + echo "The package ${PACKAGE} is not available in providers dir" + PACKAGE_ERROR="true" + fi + sed -i "/^${PACKAGE}.*/d" "${LIST_OF_DIRS_FILE}" + done + + if [[ ${PACKAGE_ERROR} == "true" ]]; then + echo + echo "ERROR! Some packages from ${PROVIDER_PACKAGES_DIR}/prepare_provider_packages.py are missing in providers dir" + exit 1 + fi + + if [[ $(wc -l < "${LIST_OF_DIRS_FILE}") != "0" ]]; then + echo "ERROR! 
Some folders from providers package are not defined" + echo " Please add them to ${PROVIDER_PACKAGES_DIR}/prepare_provider_packages.py:" + echo + cat "${LIST_OF_DIRS_FILE}" + echo + + rm "$LIST_OF_DIRS_FILE" + exit 1 + fi + rm "$LIST_OF_DIRS_FILE" +} + +function get_providers_to_act_on() { + group_start "Get all providers" + if [[ -z "$*" ]]; then + while IFS='' read -r line; do PROVIDER_PACKAGES+=("$line"); done < <( + python3 "${PROVIDER_PACKAGES_DIR}/prepare_provider_packages.py" \ + "${OPTIONAL_BACKPORT_FLAG[@]}" \ + list-providers-packages + ) + else + if [[ "${1}" == "--help" ]]; then + echo + echo "Builds all provider packages." + echo + echo "You can provide list of packages to build out of:" + echo + python3 "${PROVIDER_PACKAGES_DIR}/prepare_provider_packages.py" \ + list-providers-packages \ + | tr '\n ' ' ' | fold -w 100 -s + echo + echo + exit + fi + fi + group_end +} + +# Starts group for GitHub Actions - makes logs much more readable function group_start { if [[ ${GITHUB_ACTIONS=} == "true" ]]; then echo "::group::${1}" @@ -551,7 +603,7 @@ function group_start { fi } -# Ends group for Github Actions +# Ends group for GitHub Actions function group_end { if [[ ${GITHUB_ACTIONS=} == "true" ]]; then echo -e "\033[0m" # Disable any colors set in the group diff --git a/scripts/in_container/check_environment.sh b/scripts/in_container/check_environment.sh index ec0fcf98d1471..22c6fe58d2092 100755 --- a/scripts/in_container/check_environment.sh +++ b/scripts/in_container/check_environment.sh @@ -24,7 +24,7 @@ DISABLED_INTEGRATIONS="" # We want to avoid misleading messages and perform only forward lookup of the service IP address. # Netcat when run without -n performs both forward and reverse lookup and fails if the reverse # lookup name does not match the original name even if the host is reachable via IP. This happens -# randomly with docker-compose in Github Actions. +# randomly with docker-compose in GitHub Actions. # Since we are not using reverse lookup elsewhere, we can perform forward lookup in python # And use the IP in NC and add '-n' switch to disable any DNS use. 
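# A rough sketch of that idea (illustrative only, not the literal run_nc helper code):
#   ip=$(python -c "import socket; print(socket.gethostbyname('mysql'))")
#   nc -zvvn "${ip}" 3306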
# Even if this message might be harmless, it might hide the real reason for the problem
@@ -160,17 +160,17 @@ check_integration "MongoDB" "mongo" "run_nc mongo 27017" 50
 check_integration "Redis" "redis" "run_nc redis 6379" 50
 check_integration "Cassandra" "cassandra" "run_nc cassandra 9042" 50
 check_integration "OpenLDAP" "openldap" "run_nc openldap 389" 50
-check_integration "Presto (HTTP)" "presto" "run_nc presto 8080" 50
-check_integration "Presto (HTTPS)" "presto" "run_nc presto 7778" 50
-check_integration "Presto (API)" "presto" \
-    "curl --max-time 1 http://presto:8080/v1/info/ | grep '\"starting\":false'" 50
+check_integration "Trino (HTTP)" "trino" "run_nc trino 8080" 50
+check_integration "Trino (HTTPS)" "trino" "run_nc trino 7778" 50
+check_integration "Trino (API)" "trino" \
+    "curl --max-time 1 http://trino:8080/v1/info/ | grep '\"starting\":false'" 50
 check_integration "Pinot (HTTP)" "pinot" "run_nc pinot 9000" 50
 CMD="curl --max-time 1 -X GET 'http://pinot:9000/health' -H 'accept: text/plain' | grep OK"
-check_integration "Presto (Controller API)" "pinot" "${CMD}" 50
+check_integration "Pinot (Controller API)" "pinot" "${CMD}" 50
 CMD="curl --max-time 1 -X GET 'http://pinot:9000/pinot-controller/admin' -H 'accept: text/plain' | grep GOOD"
-check_integration "Presto (Controller API)" "pinot" "${CMD}" 50
+check_integration "Pinot (Controller API)" "pinot" "${CMD}" 50
 CMD="curl --max-time 1 -X GET 'http://pinot:8000/health' -H 'accept: text/plain' | grep OK"
-check_integration "Presto (Broker API)" "pinot" "${CMD}" 50
+check_integration "Pinot (Broker API)" "pinot" "${CMD}" 50
 check_integration "RabbitMQ" "rabbitmq" "run_nc rabbitmq 5672" 50

 echo "-----------------------------------------------------------------------------------------------"
diff --git a/scripts/in_container/entrypoint_ci.sh b/scripts/in_container/entrypoint_ci.sh
index 1c740dfc9dd4d..16aabbb07fee2 100755
--- a/scripts/in_container/entrypoint_ci.sh
+++ b/scripts/in_container/entrypoint_ci.sh
@@ -69,7 +69,9 @@ if [[ -z ${INSTALL_AIRFLOW_VERSION=} ]]; then
     echo
     echo "Using already installed airflow version"
     echo
-    "${AIRFLOW_SOURCES}/airflow/www/ask_for_recompile_assets_if_needed.sh"
+    pushd "${AIRFLOW_SOURCES}/airflow/www/" >/dev/null
+    ./ask_for_recompile_assets_if_needed.sh
+    popd >/dev/null
     # Cleanup the logs, tmp when entering the environment
     sudo rm -rf "${AIRFLOW_SOURCES}"/logs/*
     sudo rm -rf "${AIRFLOW_SOURCES}"/tmp/*
@@ -96,9 +98,9 @@ elif [[ ${INSTALL_AIRFLOW_VERSION} == "sdist" ]]; then
     uninstall_providers
 else
     echo
-    echo "Install airflow from PyPI including [${AIRFLOW_EXTRAS}] extras"
+    echo "Install airflow from PyPI without extras"
     echo
-    install_released_airflow_version "${INSTALL_AIRFLOW_VERSION}" "[${AIRFLOW_EXTRAS}]"
+    install_released_airflow_version "${INSTALL_AIRFLOW_VERSION}"
 fi
 if [[ ${INSTALL_PACKAGES_FROM_DIST=} == "true" ]]; then
     echo
@@ -208,44 +210,39 @@ if [[ "${RUN_TESTS}" != "true" ]]; then
 fi
 set -u

-export RESULT_LOG_FILE="/files/test_result.xml"
-
-if [[ "${GITHUB_ACTIONS}" == "true" ]]; then
-    EXTRA_PYTEST_ARGS=(
-        "--verbosity=0"
-        "--strict-markers"
-        "--durations=100"
-        "--cov=airflow/"
-        "--cov-config=.coveragerc"
-        "--cov-report=xml:/files/coverage.xml"
-        "--color=yes"
-        "--maxfail=50"
-        "--pythonwarnings=ignore::DeprecationWarning"
-        "--pythonwarnings=ignore::PendingDeprecationWarning"
-        "--junitxml=${RESULT_LOG_FILE}"
-        # timeouts in seconds for individual tests
-        "--setup-timeout=20"
-        "--execution-timeout=60"
-        "--teardown-timeout=20"
-        # Only display summary for non-expected case
-        # f - failed
-        # E - error
-        # X - xpassed (passed even if expected to fail)
-        # The following cases are not displayed:
-        # s - skipped
-        # x - xfailed (expected to fail and failed)
-        # p - passed
-        # P - passed with output
-        "-rfEX"
-    )
-    if [[ "${TEST_TYPE}" != "Helm" ]]; then
-        EXTRA_PYTEST_ARGS+=(
-            "--with-db-init"
-        )
-    fi
-else
-    EXTRA_PYTEST_ARGS=(
-        "-rfEX"
+export RESULT_LOG_FILE="/files/test_result-${TEST_TYPE}-${BACKEND}.xml"
+
+EXTRA_PYTEST_ARGS=(
+    "--verbosity=0"
+    "--strict-markers"
+    "--durations=100"
+    "--cov=airflow/"
+    "--cov-config=.coveragerc"
+    "--cov-report=xml:/files/coverage-${TEST_TYPE}-${BACKEND}.xml"
+    "--color=yes"
+    "--maxfail=50"
+    "--pythonwarnings=ignore::DeprecationWarning"
+    "--pythonwarnings=ignore::PendingDeprecationWarning"
+    "--junitxml=${RESULT_LOG_FILE}"
+    # timeouts in seconds for individual tests
+    "--setup-timeout=20"
+    "--execution-timeout=60"
+    "--teardown-timeout=20"
+    # Only display summary for non-expected case
+    # f - failed
+    # E - error
+    # X - xpassed (passed even if expected to fail)
+    # The following cases are not displayed:
+    # s - skipped
+    # x - xfailed (expected to fail and failed)
+    # p - passed
+    # P - passed with output
+    "-rfEX"
+)
+
+if [[ "${TEST_TYPE}" != "Helm" ]]; then
+    EXTRA_PYTEST_ARGS+=(
+        "--with-db-init"
     )
 fi
@@ -319,7 +316,7 @@ else
     elif [[ ${TEST_TYPE:=""} == "All" || ${TEST_TYPE} == "Quarantined" || \
             ${TEST_TYPE} == "Always" || \
             ${TEST_TYPE} == "Postgres" || ${TEST_TYPE} == "MySQL" || \
-            ${TEST_TYPE} == "Heisentests" || ${TEST_TYPE} == "Long" || \
+            ${TEST_TYPE} == "Long" || \
             ${TEST_TYPE} == "Integration" ]]; then
         SELECTED_TESTS=("${ALL_TESTS[@]}")
     else
@@ -344,11 +341,6 @@ elif [[ ${TEST_TYPE:=""} == "Long" ]]; then
         "-m" "long_running"
         "--include-long-running"
     )
-elif [[ ${TEST_TYPE:=""} == "Heisentests" ]]; then
-    EXTRA_PYTEST_ARGS+=(
-        "-m" "heisentests"
-        "--include-heisentests"
-    )
 elif [[ ${TEST_TYPE:=""} == "Postgres" ]]; then
     EXTRA_PYTEST_ARGS+=(
        "--backend"
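Parametrizing the result files by test type and backend keeps parallel CI jobs from overwriting each other's reports. A small sketch of the resulting paths (values illustrative):

    # Two jobs running different combinations write distinct files:
    TEST_TYPE="Providers" BACKEND="mysql"
    echo "/files/test_result-${TEST_TYPE}-${BACKEND}.xml"   # test_result-Providers-mysql.xml
    echo "/files/coverage-${TEST_TYPE}-${BACKEND}.xml"      # coverage-Providers-mysql.xml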
diff --git a/scripts/in_container/prod/entrypoint_prod.sh b/scripts/in_container/prod/entrypoint_prod.sh
index 00bec5814df4b..4ca8a756764c2 100755
--- a/scripts/in_container/prod/entrypoint_prod.sh
+++ b/scripts/in_container/prod/entrypoint_prod.sh
@@ -15,27 +15,63 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-
 # Might be empty
 AIRFLOW_COMMAND="${1}"

 set -euo pipefail

-# We want to avoid misleading messages and perform only forward lookup of the service IP address.
-# Netcat when run without -n performs both forward and reverse lookup and fails if the reverse
-# lookup name does not match the original name even if the host is reachable via IP. This happens
-# randomly with docker-compose in Github Actions.
-# Since we are not using reverse lookup elsewhere, we can perform forward lookup in python
-# And use the IP in NC and add '-n' switch to disable any DNS use.
-# Even if this message might be harmless, it might hide the real reason for the problem
-# Which is the long time needed to start some services, seeing this message might be totally misleading
-# when you try to analyse the problem, that's why it's best to avoid it,
+function run_check_with_retries {
+    local cmd
+    cmd="${1}"
+    local countdown
+    countdown="${CONNECTION_CHECK_MAX_COUNT}"
+
+    while true
+    do
+        set +e
+        local last_check_result
+        local res
+        last_check_result=$(eval "${cmd} 2>&1")
+        res=$?
+        set -e
+        if [[ ${res} == 0 ]]; then
+            echo
+            break
+        else
+            echo -n "."
+            countdown=$((countdown-1))
+        fi
+        if [[ ${countdown} == 0 ]]; then
+            echo
+            echo "ERROR! Maximum number of retries (${CONNECTION_CHECK_MAX_COUNT}) reached."
+            echo
+            echo "Last check result:"
+            echo "$ ${cmd}"
+            echo "${last_check_result}"
+            echo
+            exit 1
+        else
+            sleep "${CONNECTION_CHECK_SLEEP_TIME}"
+        fi
+    done
+}
+
 function run_nc() {
-    local host=${1}
-    local port=${2}
+    # Checks if it is possible to connect to the host using netcat.
+    #
+    # We want to avoid misleading messages and perform only forward lookup of the service IP address.
+    # Netcat when run without -n performs both forward and reverse lookup and fails if the reverse
+    # lookup name does not match the original name even if the host is reachable via IP. This happens
+    # randomly with docker-compose in GitHub Actions.
+    # Since we are not using reverse lookup elsewhere, we can perform forward lookup in python
+    # and use the IP in NC, adding the '-n' switch to disable any DNS use.
+    # Even if this message might be harmless, it might hide the real reason for the problem,
+    # which is the long time needed to start some services. Seeing this message might be totally
+    # misleading when you try to analyse the problem, so it is best to avoid it.
+    local host="${1}"
+    local port="${2}"
     local ip
     ip=$(python -c "import socket; print(socket.gethostbyname('${host}'))")
-
     nc -zvvn "${ip}" "${port}"
 }

@@ -47,78 +83,49 @@ function wait_for_connection {
     # It tries `CONNECTION_CHECK_MAX_COUNT` times and sleeps `CONNECTION_CHECK_SLEEP_TIME` between checks
     local connection_url
     connection_url="${1}"
-
     local detected_backend=""
     local detected_host=""
     local detected_port=""
+    # Auto-detect DB parameters
+    # Examples:
+    #  postgres://YourUserName:password@YourHostname:5432/YourDatabaseName
+    #  postgres://YourUserName:@YourHostname:/YourDatabaseName
+    #  postgres://YourUserName@YourHostname/YourDatabaseName
+    [[ ${connection_url} =~ ([^:]*)://([^:@]*):?([^@]*)@?([^/:]*):?([0-9]*)/([^\?]*)\??(.*) ]] && \
+        detected_backend=${BASH_REMATCH[1]} &&
+        # Not used USER match
+        # Not used PASSWORD match
+        detected_host=${BASH_REMATCH[4]} &&
+        detected_port=${BASH_REMATCH[5]} &&
+        # Not used SCHEMA match
+        # Not used PARAMS match

-    if [[ ${connection_url} != sqlite* ]]; then
-        # Auto-detect DB parameters
-        [[ ${connection_url} =~ ([^:]*)://([^:]*[@.*]?):([^@]*)@?([^/:]*):?([0-9]*)/([^\?]*)\??(.*) ]] && \
-            detected_backend=${BASH_REMATCH[1]} &&
-            # Not used USER match
-            # Not used PASSWORD match
-            detected_host=${BASH_REMATCH[4]} &&
-            detected_port=${BASH_REMATCH[5]} &&
-            # Not used SCHEMA match
-            # Not used PARAMS match
-
-        echo BACKEND="${BACKEND:=${detected_backend}}"
-        readonly BACKEND
-
-        if [[ -z "${detected_port=}" ]]; then
-            if [[ ${BACKEND} == "postgres"* ]]; then
-                detected_port=5432
-            elif [[ ${BACKEND} == "mysql"* ]]; then
-                detected_port=3306
-            elif [[ ${BACKEND} == "redis"* ]]; then
-                detected_port=6379
-            elif [[ ${BACKEND} == "amqp"* ]]; then
-                detected_port=5672
-            fi
-        fi
+    echo BACKEND="${BACKEND:=${detected_backend}}"
+    readonly BACKEND

-        detected_host=${detected_host:="localhost"}
-
-        # Allow the DB parameters to be overridden by environment variable
-        echo DB_HOST="${DB_HOST:=${detected_host}}"
-        readonly DB_HOST
-
-        echo DB_PORT="${DB_PORT:=${detected_port}}"
-        readonly DB_PORT
-        local countdown
-        countdown="${CONNECTION_CHECK_MAX_COUNT}"
-        while true
-        do
-            set +e
-            local last_check_result
-            local res
-            last_check_result=$(run_nc "${DB_HOST}" "${DB_PORT}" >/dev/null 2>&1)
-            res=$?
-            set -e
-            if [[ ${res} == 0 ]]; then
-                echo
-                break
-            else
-                echo -n "."
-                countdown=$((countdown-1))
-            fi
-            if [[ ${countdown} == 0 ]]; then
-                echo
-                echo "ERROR! Maximum number of retries (${CONNECTION_CHECK_MAX_COUNT}) reached."
-                echo "       while checking ${BACKEND} connection."
-                echo
-                echo "Last check result:"
-                echo
-                echo "${last_check_result}"
-                echo
-                exit 1
-            else
-                sleep "${CONNECTION_CHECK_SLEEP_TIME}"
-            fi
-        done
+    if [[ -z "${detected_port=}" ]]; then
+        if [[ ${BACKEND} == "postgres"* ]]; then
+            detected_port=5432
+        elif [[ ${BACKEND} == "mysql"* ]]; then
+            detected_port=3306
+        elif [[ ${BACKEND} == "redis"* ]]; then
+            detected_port=6379
+        elif [[ ${BACKEND} == "amqp"* ]]; then
+            detected_port=5672
+        fi
     fi
+
+    detected_host=${detected_host:="localhost"}
+
+    # Allow the DB parameters to be overridden by environment variable
+    echo DB_HOST="${DB_HOST:=${detected_host}}"
+    readonly DB_HOST
+
+    echo DB_PORT="${DB_PORT:=${detected_port}}"
+    readonly DB_PORT
+    run_check_with_retries "run_nc ${DB_HOST@Q} ${DB_PORT@Q}"
 }

 function create_www_user() {
@@ -173,14 +180,37 @@ function create_system_user_if_missing() {
     fi
 }

+function set_pythonpath_for_root_user() {
+    # Airflow is installed as a local user application, which means that if the container is running
+    # as root, the application is not available, because Python then only loads system-wide applications.
+    # This also adds applications installed as the local user "airflow".
+    if [[ $UID == "0" ]]; then
+        local python_major_minor
+        python_major_minor="$(python --version | cut -d " " -f 2 | cut -d "." -f 1-2)"
+        export PYTHONPATH="${AIRFLOW_USER_HOME_DIR}/.local/lib/python${python_major_minor}/site-packages:${PYTHONPATH:-}"
+        >&2 echo "The container is run as root user. For security, consider using a regular user account."
+    fi
+}
+
 function wait_for_airflow_db() {
-    # Verifies connection to the Airflow DB
-    if [[ -n "${AIRFLOW__CORE__SQL_ALCHEMY_CONN_CMD=}" ]]; then
-        wait_for_connection "$(eval "${AIRFLOW__CORE__SQL_ALCHEMY_CONN_CMD}")"
+    # Check if Airflow has a command to check the connection to the database.
+    if airflow db check --help >/dev/null 2>&1; then
+        run_check_with_retries "airflow db check"
     else
-        # if no DB configured - use sqlite db by default
-        AIRFLOW__CORE__SQL_ALCHEMY_CONN="${AIRFLOW__CORE__SQL_ALCHEMY_CONN:="sqlite:///${AIRFLOW_HOME}/airflow.db"}"
-        wait_for_connection "${AIRFLOW__CORE__SQL_ALCHEMY_CONN}"
+        # Verifies the connection to the Airflow DB by guessing the database address based on
+        # environment variables, then uses netcat to check that the host is reachable.
+        # This is only needed for Airflow 1.10, which has no built-in command to check the db connection.
+        local connection_url
+        if [[ -n "${AIRFLOW__CORE__SQL_ALCHEMY_CONN_CMD=}" ]]; then
+            connection_url="$(eval "${AIRFLOW__CORE__SQL_ALCHEMY_CONN_CMD}")"
+        else
+            # if no DB configured - use sqlite db by default
+            connection_url="${AIRFLOW__CORE__SQL_ALCHEMY_CONN:="sqlite:///${AIRFLOW_HOME}/airflow.db"}"
+        fi
+        # SQLite doesn't require a remote connection, so we don't have to wait.
+        if [[ ${connection_url} != sqlite* ]]; then
+            wait_for_connection "${connection_url}"
+        fi
     fi
 }

@@ -213,6 +243,47 @@ function exec_to_bash_or_python_command_if_specified() {
     fi
 }

+function check_uid_gid() {
+    if [[ $(id -g) == "0" ]]; then
+        return
+    fi
+    if [[ $(id -u) == "50000" ]]; then
+        >&2 echo
+        >&2 echo "WARNING! You should run the image with GID (Group ID) set to 0"
+        >&2 echo "         even if you use 'airflow' user (UID=50000)"
+        >&2 echo
+        >&2 echo " You started the image with UID=$(id -u) and GID=$(id -g)"
+        >&2 echo
+        >&2 echo " This is to make sure you can run the image with an arbitrary UID in the future."
+        >&2 echo
+        >&2 echo " See more about it in the Airflow's docker image documentation"
+        >&2 echo "     http://airflow.apache.org/docs/docker-stack/entrypoint"
+        >&2 echo
+        # We still allow the image to run with `airflow` user.
+        return
+    else
+        >&2 echo
+        >&2 echo "ERROR! You should run the image with GID=0"
+        >&2 echo
+        >&2 echo " You started the image with UID=$(id -u) and GID=$(id -g)"
+        >&2 echo
+        >&2 echo "The image should always be run with GID (Group ID) set to 0 regardless of the UID used."
+        >&2 echo " This is to make sure you can run the image with an arbitrary UID."
+        >&2 echo
+        >&2 echo " See more about it in the Airflow's docker image documentation"
+        >&2 echo "     http://airflow.apache.org/docs/docker-stack/entrypoint"
+        # This will not work so we fail hard
+        exit 1
+    fi
+}
+
+check_uid_gid
+
+# Set umask to 0002 to make all the directories created by the current user group-writeable
+# This allows the same directories to be writeable for any arbitrary user the image will be
+# run with, when the directory is created on a mounted volume and when that volume is later
+# reused with a different UID (but with GID=0)
+umask 0002

 CONNECTION_CHECK_MAX_COUNT=${CONNECTION_CHECK_MAX_COUNT:=20}
 readonly CONNECTION_CHECK_MAX_COUNT
@@ -221,7 +292,10 @@ CONNECTION_CHECK_SLEEP_TIME=${CONNECTION_CHECK_SLEEP_TIME:=3}
 readonly CONNECTION_CHECK_SLEEP_TIME

 create_system_user_if_missing
-wait_for_airflow_db
+set_pythonpath_for_root_user
+if [[ "${CONNECTION_CHECK_MAX_COUNT}" -gt "0" ]]; then
+    wait_for_airflow_db
+fi

 if [[ -n "${_AIRFLOW_DB_UPGRADE=}" ]] ; then
     upgrade_db
@@ -247,7 +321,8 @@ if [[ ${AIRFLOW_COMMAND} == "airflow" ]]; then
 fi

 # Note: the broker backend configuration concerns only a subset of Airflow components
-if [[ ${AIRFLOW_COMMAND} =~ ^(scheduler|celery|worker|flower)$ ]]; then
+if [[ ${AIRFLOW_COMMAND} =~ ^(scheduler|celery|worker|flower)$ ]] \
+    && [[ "${CONNECTION_CHECK_MAX_COUNT}" -gt "0" ]]; then
    wait_for_celery_backend "${@}"
 fi
diff --git a/scripts/ci/images/ci_wait_for_all_ci_images.sh b/scripts/in_container/run_anything.sh
similarity index 73%
rename from scripts/ci/images/ci_wait_for_all_ci_images.sh
rename to scripts/in_container/run_anything.sh
index 2451a880fd58a..233cb475b9b22 100755
--- a/scripts/ci/images/ci_wait_for_all_ci_images.sh
+++ b/scripts/in_container/run_anything.sh
@@ -15,12 +15,4 @@
 # KIND, either express or implied. See the License for the
 # specific language governing permissions and limitations
 # under the License.
-echo
-echo "Waiting for all CI images to appear: ${CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING}"
-echo
-
-for PYTHON_MAJOR_MINOR_VERSION in ${CURRENT_PYTHON_MAJOR_MINOR_VERSIONS_AS_STRING}
-do
-    export PYTHON_MAJOR_MINOR_VERSION
-    "$( dirname "${BASH_SOURCE[0]}" )/ci_wait_for_ci_image.sh"
-done
+"${@}"
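The GID=0 rule enforced above follows the arbitrary-UID convention (as on OpenShift): any UID may run the image as long as its primary group is root, which together with the 0002 umask keeps created directories group-writeable. A hedged example of a conforming invocation (UID and image tag are illustrative):

    # Arbitrary UID with the required GID=0: passes check_uid_gid.
    docker run --rm -u 12345:0 apache/airflow:2.0.2 airflow version
    # UID 50000 with a non-zero GID only triggers the warning path.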
"$( dirname "${BASH_SOURCE[0]}" )/_in_container_script_init.sh" -reinstall_azure_storage_blob - echo echo "Starting the tests with those pytest arguments:" "${@}" echo @@ -33,66 +31,12 @@ set +x if [[ "${RES}" == "0" && ${CI:="false"} == "true" ]]; then echo "All tests successful" cp .coverage /files -elif [[ "${RES}" != "0" ]]; then - EXTRA_ARGS="" - if [[ ${BACKEND} == "postgres" ]]; then - EXTRA_ARGS="--postgres-version ${POSTGRES_VERSION} " - elif [[ ${BACKEND} == "mysql" ]]; then - EXTRA_ARGS="--mysql-version ${MYSQL_VERSION} " - fi - echo """ -${COLOR_RED}ERROR: -*********************************************************************************************** -* -* ERROR! Some tests failed, unfortunately. Those might be transient errors, -* but usually you have to fix something. -* See the above log for details. -* -*********************************************************************************************** -* You can easily reproduce the failed tests on your dev machine/ -* -* When you have the source branch checked out locally: -* -* Run all tests: -* -* ./breeze --backend ${BACKEND} ${EXTRA_ARGS}--python ${PYTHON_MAJOR_MINOR_VERSION} --db-reset --skip-mounting-local-sources --test-type ${TEST_TYPE} tests -* -* Enter docker shell: -* -* ./breeze --backend ${BACKEND} ${EXTRA_ARGS}--python ${PYTHON_MAJOR_MINOR_VERSION} --db-reset --skip-mounting-local-sources --test-type ${TEST_TYPE} shell -* -""" - if [[ -n "${GITHUB_REGISTRY_PULL_IMAGE_TAG=}" ]]; then - echo """ -* When you do not have sources: -* -* Run all tests: -* -* ./breeze --github-image-id ${GITHUB_REGISTRY_PULL_IMAGE_TAG} --backend ${BACKEND} ${EXTRA_ARGS}--python ${PYTHON_MAJOR_MINOR_VERSION} --db-reset --skip-mounting-local-sources --test-type ${TEST_TYPE} tests -* -* Enter docker shell: -* -* ./breeze --github-image-id ${GITHUB_REGISTRY_PULL_IMAGE_TAG} --backend ${BACKEND} ${EXTRA_ARGS}--python ${PYTHON_MAJOR_MINOR_VERSION} --db-reset --skip-mounting-local-sources --test-type ${TEST_TYPE} shell -* -""" - fi - echo """ -* -* NOTE! Once you are in the docker shell, you can run failed test with: -* -* pytest [TEST_NAME] -* -* You can copy the test name from the output above -* -*********************************************************************************************** -${COLOR_RESET} -""" fi MAIN_GITHUB_REPOSITORY="apache/airflow" if [[ ${TEST_TYPE:=} == "Quarantined" ]]; then - if [[ ${GITHUB_REPOSITORY} == "${MAIN_GITHUB_REPOSITORY}" ]]; then + if [[ ${GITHUB_REPOSITORY=} == "${MAIN_GITHUB_REPOSITORY}" ]]; then if [[ ${RES} == "1" || ${RES} == "0" ]]; then echo echo "Pytest exited with ${RES} result. Updating Quarantine Issue!" diff --git a/scripts/in_container/run_fix_ownership.sh b/scripts/in_container/run_fix_ownership.sh index eaaee77a692fc..d9e98ff168a8d 100755 --- a/scripts/in_container/run_fix_ownership.sh +++ b/scripts/in_container/run_fix_ownership.sh @@ -15,7 +15,7 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. -# shellcheck source=scripts/in_container/_in_container_script_init.sh -. "$( dirname "${BASH_SOURCE[0]}" )/_in_container_script_init.sh" +# shellcheck source=scripts/in_container/_in_container_utils.sh +. 
"$( dirname "${BASH_SOURCE[0]}" )/_in_container_utils.sh" in_container_fix_ownership diff --git a/scripts/in_container/run_generate_constraints.sh b/scripts/in_container/run_generate_constraints.sh index a6ea8ef4d8d22..d71fc9d9a8bac 100755 --- a/scripts/in_container/run_generate_constraints.sh +++ b/scripts/in_container/run_generate_constraints.sh @@ -20,15 +20,47 @@ CONSTRAINTS_DIR="/files/constraints-${PYTHON_MAJOR_MINOR_VERSION}" -LATEST_CONSTRAINT_FILE="${CONSTRAINTS_DIR}/original-constraints-${PYTHON_MAJOR_MINOR_VERSION}.txt" -CURRENT_CONSTRAINT_FILE="${CONSTRAINTS_DIR}/constraints-${PYTHON_MAJOR_MINOR_VERSION}.txt" +if [[ ${GENERATE_CONSTRAINTS_MODE} == "no-providers" ]]; then + AIRFLOW_CONSTRAINTS="constraints-no-providers" + NO_PROVIDERS_EXTRAS=$(python -c 'import setup; print(",".join(setup.CORE_EXTRAS_REQUIREMENTS.keys()))') + echo + echo "UnInstall All PIP packages." + echo + uninstall_all_pip_packages + echo + echo "Install airflow with [${NO_PROVIDERS_EXTRAS}] extras only (uninstall all packages first)." + echo + install_local_airflow_with_eager_upgrade "[${NO_PROVIDERS_EXTRAS}]" +elif [[ ${GENERATE_CONSTRAINTS_MODE} == "source-providers" ]]; then + AIRFLOW_CONSTRAINTS="constraints-source-providers" + echo + echo "Providers are already installed from sources." + echo +elif [[ ${GENERATE_CONSTRAINTS_MODE} == "pypi-providers" ]]; then + AIRFLOW_CONSTRAINTS="constraints" + echo + echo "Install all providers from PyPI so that they are included in the constraints." + echo + install_all_providers_from_pypi_with_eager_upgrade +else + echo + echo "${COLOR_RED}Error! GENERATE_CONSTRAINTS_MODE has wrong value: '${GENERATE_CONSTRAINTS_MODE}' ${COLOR_RESET}" + echo + exit 1 +fi + +readonly AIRFLOW_CONSTRAINTS + +LATEST_CONSTRAINT_FILE="${CONSTRAINTS_DIR}/original-${AIRFLOW_CONSTRAINTS}-${PYTHON_MAJOR_MINOR_VERSION}.txt" +CURRENT_CONSTRAINT_FILE="${CONSTRAINTS_DIR}/${AIRFLOW_CONSTRAINTS}-${PYTHON_MAJOR_MINOR_VERSION}.txt" mkdir -pv "${CONSTRAINTS_DIR}" -CONSTRAINTS_LOCATION="https://raw.githubusercontent.com/apache/airflow/${DEFAULT_CONSTRAINTS_BRANCH}/constraints-${PYTHON_MAJOR_MINOR_VERSION}.txt" +CONSTRAINTS_LOCATION="https://raw.githubusercontent.com/${CONSTRAINTS_GITHUB_REPOSITORY}/${DEFAULT_CONSTRAINTS_BRANCH}/${AIRFLOW_CONSTRAINTS}-${PYTHON_MAJOR_MINOR_VERSION}.txt" readonly CONSTRAINTS_LOCATION -curl --connect-timeout 60 --max-time 60 "${CONSTRAINTS_LOCATION}" --output "${LATEST_CONSTRAINT_FILE}" +touch "${LATEST_CONSTRAINT_FILE}" +curl --connect-timeout 60 --max-time 60 "${CONSTRAINTS_LOCATION}" --output "${LATEST_CONSTRAINT_FILE}" || true echo echo "Freezing constraints to ${CURRENT_CONSTRAINT_FILE}" diff --git a/scripts/in_container/run_install_and_test_provider_packages.sh b/scripts/in_container/run_install_and_test_provider_packages.sh index b3ee63b8ca98c..f6d31b6cbb4de 100755 --- a/scripts/in_container/run_install_and_test_provider_packages.sh +++ b/scripts/in_container/run_install_and_test_provider_packages.sh @@ -67,9 +67,9 @@ function install_airflow_as_specified() { uninstall_providers else echo - echo "Install airflow from PyPI including [${AIRFLOW_EXTRAS}] extras" + echo "Install airflow from PyPI without extras" echo - install_released_airflow_version "${INSTALL_AIRFLOW_VERSION}" "[${AIRFLOW_EXTRAS}]" + install_released_airflow_version "${INSTALL_AIRFLOW_VERSION}" uninstall_providers fi group_end @@ -95,7 +95,7 @@ function discover_all_provider_packages() { # Columns is to force it wider, so it doesn't wrap at 80 characters COLUMNS=180 airflow providers list - local 
expected_number_of_providers=61 + local expected_number_of_providers=66 local actual_number_of_providers actual_providers=$(airflow providers list --output yaml | grep package_name) actual_number_of_providers=$(wc -l <<<"$actual_providers") @@ -118,7 +118,7 @@ function discover_all_hooks() { group_start "Listing available hooks via 'airflow providers hooks'" COLUMNS=180 airflow providers hooks - local expected_number_of_hooks=59 + local expected_number_of_hooks=63 local actual_number_of_hooks actual_number_of_hooks=$(airflow providers hooks --output table | grep -c "| apache" | xargs) if [[ ${actual_number_of_hooks} != "${expected_number_of_hooks}" ]]; then @@ -157,7 +157,7 @@ function discover_all_connection_form_widgets() { COLUMNS=180 airflow providers widgets - local expected_number_of_widgets=19 + local expected_number_of_widgets=25 local actual_number_of_widgets actual_number_of_widgets=$(airflow providers widgets --output table | grep -c ^extra) if [[ ${actual_number_of_widgets} != "${expected_number_of_widgets}" ]]; then @@ -176,7 +176,7 @@ function discover_all_field_behaviours() { group_start "Listing connections with custom behaviours via 'airflow providers behaviours'" COLUMNS=180 airflow providers behaviours - local expected_number_of_connections_with_behaviours=11 + local expected_number_of_connections_with_behaviours=12 local actual_number_of_connections_with_behaviours actual_number_of_connections_with_behaviours=$(airflow providers behaviours --output table | grep -v "===" | \ grep -v field_behaviours | grep -cv "^ " | xargs) @@ -197,14 +197,11 @@ setup_provider_packages verify_parameters install_airflow_as_specified install_remaining_dependencies -reinstall_azure_storage_blob install_provider_packages import_all_provider_classes -if [[ ${BACKPORT_PACKAGES} != "true" ]]; then - discover_all_provider_packages - discover_all_hooks - discover_all_connection_form_widgets - discover_all_field_behaviours - discover_all_extra_links -fi +discover_all_provider_packages +discover_all_hooks +discover_all_connection_form_widgets +discover_all_field_behaviours +discover_all_extra_links diff --git a/scripts/in_container/run_prepare_provider_documentation.sh b/scripts/in_container/run_prepare_provider_documentation.sh new file mode 100755 index 0000000000000..71a22917dc8b1 --- /dev/null +++ b/scripts/in_container/run_prepare_provider_documentation.sh @@ -0,0 +1,123 @@ +#!/usr/bin/env bash +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# shellcheck source=scripts/in_container/_in_container_script_init.sh +. 
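Each GENERATE_CONSTRAINTS_MODE value above selects a differently named constraints file, so the three sets can live side by side on the same constraints branch. A minimal sketch of the mapping (paths follow the in-container layout used above, Python version illustrative):

    # PYTHON_MAJOR_MINOR_VERSION=3.6 assumed for illustration:
    # no-providers      -> /files/constraints-3.6/constraints-no-providers-3.6.txt
    # source-providers  -> /files/constraints-3.6/constraints-source-providers-3.6.txt
    # pypi-providers    -> /files/constraints-3.6/constraints-3.6.txt
    export GENERATE_CONSTRAINTS_MODE="pypi-providers"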
"$( dirname "${BASH_SOURCE[0]}" )/_in_container_script_init.sh" + +function import_all_provider_classes() { + group_start "Importing all classes" + python3 "${AIRFLOW_SOURCES}/dev/import_all_classes.py" --path "airflow/providers" + group_end +} + +function verify_provider_packages_named_properly() { + python3 "${PROVIDER_PACKAGES_DIR}/prepare_provider_packages.py" \ + verify-provider-classes +} + +function run_prepare_documentation() { + local prepared_documentation=() + local skipped_documentation=() + local error_documentation=() + + # Delete the remote, so that we fetch it and update it once, not once per package we build! + git remote rm apache-https-for-providers 2>/dev/null || : + + local provider_package + for provider_package in "${PROVIDER_PACKAGES[@]}" + do + set +e + local res + # There is a separate group created in logs for each provider package + python3 "${PROVIDER_PACKAGES_DIR}/prepare_provider_packages.py" \ + update-package-documentation \ + --version-suffix "${TARGET_VERSION_SUFFIX}" \ + --no-git-update \ + "${OPTIONAL_VERBOSE_FLAG[@]}" \ + "${OPTIONAL_RELEASE_VERSION_ARGUMENT[@]}" \ + "${provider_package}" + res=$? + if [[ ${res} == "64" ]]; then + skipped_documentation+=("${provider_package}") + continue + echo "${COLOR_YELLOW}Skipping provider package '${provider_package}'${COLOR_RESET}" + fi + if [[ ${res} != "0" ]]; then + echo "${COLOR_RED}Error when generating provider package '${provider_package}'${COLOR_RESET}" + error_documentation+=("${provider_package}") + continue + fi + prepared_documentation+=("${provider_package}") + set -e + done + echo "${COLOR_BLUE}===================================================================================${COLOR_RESET}" + echo + echo "Summary of prepared documentations:" + echo + if [[ "${#prepared_documentation[@]}" != "0" ]]; then + echo "${COLOR_GREEN} Success:${COLOR_RESET}" + echo "${prepared_documentation[@]}" | fold -w 100 + fi + if [[ "${#skipped_documentation[@]}" != "0" ]]; then + echo "${COLOR_YELLOW} Skipped:${COLOR_RESET}" + echo "${skipped_documentation[@]}" | fold -w 100 + fi + if [[ "${#error_documentation[@]}" != "0" ]]; then + echo "${COLOR_RED} Errors:${COLOR_RESET}" + echo "${error_documentation[@]}" | fold -w 100 + fi + echo + echo "${COLOR_BLUE}===================================================================================${COLOR_RESET}" + if [[ ${#error_documentation[@]} != "0" ]]; then + echo + echo "${COLOR_RED}There were errors when preparing documentation. Exiting! ${COLOR_RESET}" + exit 1 + fi +} + + +setup_provider_packages + +cd "${AIRFLOW_SOURCES}" || exit 1 + +export PYTHONPATH="${AIRFLOW_SOURCES}" + +verify_suffix_versions_for_package_preparation + +install_supported_pip_version + +# install extra packages missing in devel_ci +# TODO: remove it when devel_all == devel_ci +install_remaining_dependencies + +import_all_provider_classes +verify_provider_packages_named_properly + +OPTIONAL_RELEASE_VERSION_ARGUMENT=() +if [[ $# != "0" && ${1} =~ ^[0-9][0-9][0-9][0-9]\.[0-9][0-9]\.[0-9][0-9]$ ]]; then + OPTIONAL_RELEASE_VERSION_ARGUMENT+=("--release-version" "${1}") + shift +fi + +PROVIDER_PACKAGES=("${@}") +get_providers_to_act_on "${@}" + +run_prepare_documentation + +echo +echo "${COLOR_GREEN}All good! 
Airflow Provider's documentation generated!${COLOR_RESET}" +echo diff --git a/scripts/in_container/run_prepare_provider_packages.sh b/scripts/in_container/run_prepare_provider_packages.sh index 3d68914958a68..7ddcd3a328f83 100755 --- a/scripts/in_container/run_prepare_provider_packages.sh +++ b/scripts/in_container/run_prepare_provider_packages.sh @@ -18,175 +18,145 @@ # shellcheck source=scripts/in_container/_in_container_script_init.sh . "$( dirname "${BASH_SOURCE[0]}" )/_in_container_script_init.sh" -setup_provider_packages - -cd "${AIRFLOW_SOURCES}/provider_packages" || exit 1 - -PREPARE_PROVIDER_PACKAGES_PY="${AIRFLOW_SOURCES}/dev/provider_packages/prepare_provider_packages.py" -readonly PREPARE_PROVIDER_PACKAGES_PY - -verify_suffix_versions_for_package_preparation - -function check_missing_providers() { - PACKAGE_ERROR="false" +function copy_sources() { + group_start "Copy sources" + echo "===================================================================================" + echo " Copying sources for provider packages" + echo "===================================================================================" + pushd "${AIRFLOW_SOURCES}" + rm -rf "provider_packages/airflow" + cp -r airflow "provider_packages" + popd - pushd "${AIRFLOW_SOURCES}/airflow/providers" >/dev/null 2>&1 || exit 1 + group_end +} - LIST_OF_DIRS_FILE=$(mktemp) - find . -type d | sed 's!./!!; s!/!.!g' | grep -E 'hooks|operators|sensors|secrets|utils' \ - > "${LIST_OF_DIRS_FILE}" - popd >/dev/null 2>&1 || exit 1 +function build_provider_packages() { + rm -rf dist/* + local package_format_args=() + if [[ ${PACKAGE_FORMAT=} != "" ]]; then + package_format_args=("--package-format" "${PACKAGE_FORMAT}") + fi - # Check if all providers are included - for PACKAGE in "${PROVIDER_PACKAGES[@]}" - do - if ! grep -E "^${PACKAGE}" <"${LIST_OF_DIRS_FILE}" >/dev/null; then - echo "The package ${PACKAGE} is not available in providers dir" - PACKAGE_ERROR="true" - fi - sed -i "/^${PACKAGE}.*/d" "${LIST_OF_DIRS_FILE}" - done + local prepared_packages=() + local skipped_packages=() + local error_packages=() - if [[ ${PACKAGE_ERROR} == "true" ]]; then + echo "-----------------------------------------------------------------------------------" + if [[ "${VERSION_SUFFIX_FOR_PYPI}" == '' && "${VERSION_SUFFIX_FOR_SVN}" == '' + && ${FILE_VERSION_SUFFIX} == '' ]]; then echo - echo "ERROR! Some packages from dev/provider_packages/prepare_provider_packages.py are missing in providers dir" - exit 1 - fi - - if [[ $(wc -l < "${LIST_OF_DIRS_FILE}") != "0" ]]; then - echo "ERROR! 
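The script accepts an optional YYYY.MM.DD release version followed by an optional list of provider ids; with no arguments every provider is processed. A hedged usage sketch (date and package names are illustrative):

    # Prepare documentation for two providers for a given release date:
    ./scripts/in_container/run_prepare_provider_documentation.sh 2021.03.03 amazon google
    # Prepare documentation for all providers:
    ./scripts/in_container/run_prepare_provider_documentation.sh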
Some folders from providers package are not defined" - echo " Please add them to dev/provider_packages/prepare_provider_packages.py:" + echo "Preparing official version of provider with no suffixes" echo - cat "${LIST_OF_DIRS_FILE}" + elif [[ ${FILE_VERSION_SUFFIX} != '' ]]; then + echo + echo " Preparing release candidate of providers with file version suffix only (resulting file will be renamed): ${FILE_VERSION_SUFFIX}" + echo + elif [[ "${VERSION_SUFFIX_FOR_PYPI}" == '' ]]; then + echo + echo " Package Version of providers of set for SVN version): ${TARGET_VERSION_SUFFIX}" + echo + elif [[ "${VERSION_SUFFIX_FOR_SVN}" == '' ]]; then + echo + echo " Package Version of providers suffix set for PyPI version: ${TARGET_VERSION_SUFFIX}" echo - - rm "$LIST_OF_DIRS_FILE" - exit 1 - fi - rm "$LIST_OF_DIRS_FILE" -} - -function copy_sources() { - if [[ ${BACKPORT_PACKAGES} == "true" ]]; then - group_start "Copy and refactor sources" - echo "===================================================================================" - echo " Copying sources and refactoring code for backport provider packages" - echo "===================================================================================" - else - group_start "Copy sources" - echo "===================================================================================" - echo " Copying sources for provider packages" - echo "===================================================================================" - fi - - python3 "${AIRFLOW_SOURCES}/dev/provider_packages/refactor_provider_packages.py" - - group_end -} - - -function get_providers_to_act_on() { - group_start "Get all providers" - if [[ -z "$*" ]]; then - if [[ ${BACKPORT_PACKAGES} == "true" ]]; then - list_subcmd="list-backportable-packages" - else - list_subcmd="list-providers-packages" - fi - while IFS='' read -r line; do PROVIDER_PACKAGES+=("$line"); done < <( - python3 "${PREPARE_PROVIDER_PACKAGES_PY}" "$list_subcmd" - ) - - if [[ "$BACKPORT_PACKAGES" != "true" ]]; then - # Don't check for missing packages when we are building backports -- we have filtered some out, - # and the non-backport build will check for any missing. - check_missing_providers - fi else - if [[ "${1}" == "--help" ]]; then - echo - echo "Builds all provider packages." - echo - echo "You can provide list of packages to build out of:" - echo - python3 "${PREPARE_PROVIDER_PACKAGES_PY}" list-providers-packages | tr '\n ' ' ' | fold -w 100 -s - echo - echo - exit - fi + # Both SV/PYPI are set to the same version here! + echo + echo " Pre-release version (alpha beta) suffix set in both SVN/PyPI: ${TARGET_VERSION_SUFFIX}" + echo fi - group_end -} + echo "-----------------------------------------------------------------------------------" -function build_provider_packages() { - rm -rf dist/* + # Delete the remote, so that we fetch it and update it once, not once per package we build! 
+ git remote rm apache-https-for-providers 2>/dev/null || : - for PROVIDER_PACKAGE in "${PROVIDER_PACKAGES[@]}" + local provider_package + for provider_package in "${PROVIDER_PACKAGES[@]}" do - group_start " Preparing ${PACKAGE_TYPE} package ${PROVIDER_PACKAGE} format: ${PACKAGE_FORMAT}" rm -rf -- *.egg-info build/ - LOG_FILE=$(mktemp) - python3 "${PREPARE_PROVIDER_PACKAGES_PY}" --version-suffix "${VERSION_SUFFIX_FOR_PYPI}" \ - generate-setup-files "${PROVIDER_PACKAGE}" - if [[ "${VERSION_SUFFIX_FOR_PYPI}" == '' && "${VERSION_SUFFIX_FOR_SVN}" == '' - && ${FILE_VERSION_SUFFIX} == '' ]]; then - echo - echo "Preparing official version" - echo - elif [[ ${FILE_VERSION_SUFFIX} != '' ]]; then - echo - echo " Preparing release candidate with file version suffix only: ${FILE_VERSION_SUFFIX}" - echo - elif [[ "${VERSION_SUFFIX_FOR_PYPI}" == '' ]]; then - echo - echo " Package Version for SVN release candidate: ${TARGET_VERSION_SUFFIX}" - echo - elif [[ "${VERSION_SUFFIX_FOR_SVN}" == '' ]]; then - echo - echo " Package Version for PyPI release candidate: ${TARGET_VERSION_SUFFIX}" - echo - else - # Both SV/PYPI are set to the same version here! - echo - echo " Pre-release version: ${TARGET_VERSION_SUFFIX}" - echo + local res + set +e + python3 "${PROVIDER_PACKAGES_DIR}/prepare_provider_packages.py" \ + generate-setup-files \ + "${OPTIONAL_VERBOSE_FLAG[@]}" \ + --no-git-update \ + --version-suffix "${VERSION_SUFFIX_FOR_PYPI}" \ + "${provider_package}" + res=$? + set -e + if [[ ${res} == "64" ]]; then + skipped_packages+=("${provider_package}") + continue + fi + if [[ ${res} != "0" ]]; then + error_packages+=("${provider_package}") + continue fi - echo "-----------------------------------------------------------------------------------" set +e package_suffix="" - if [[ -z "${VERSION_SUFFIX_FOR_SVN}" && -n ${VERSION_SUFFIX_FOR_PYPI} ]]; then - # only adds suffix to setup.py if version suffix for PyPI is set but the SVN one is not + if [[ -z "${VERSION_SUFFIX_FOR_SVN}" && -n ${VERSION_SUFFIX_FOR_PYPI} || + -n "${VERSION_SUFFIX_FOR_SVN}" && -n "${VERSION_SUFFIX_FOR_PYPI}" ]]; then + # only adds suffix to setup.py if version suffix for PyPI is set but the SVN one is not set + # (so when rc is prepared) + # or when they are both set (so when we prepare alpha/beta/dev) package_suffix="${VERSION_SUFFIX_FOR_PYPI}" fi - python3 "${PREPARE_PROVIDER_PACKAGES_PY}" --version-suffix "${package_suffix}" \ - --packages "${PROVIDER_PACKAGE}">"${LOG_FILE}" 2>&1 - RES="${?}" + python3 "${PROVIDER_PACKAGES_DIR}/prepare_provider_packages.py" \ + build-provider-packages \ + "${OPTIONAL_VERBOSE_FLAG[@]}" \ + --no-git-update \ + --version-suffix "${package_suffix}" \ + "${package_format_args[@]}" \ + "${provider_package}" + res=$? 
set -e - if [[ ${RES} != "0" ]]; then - cat "${LOG_FILE}" - exit "${RES}" + if [[ ${res} == "64" ]]; then + skipped_packages+=("${provider_package}") + continue + fi + if [[ ${res} != "0" ]]; then + error_packages+=("${provider_package}") + echo "${COLOR_RED}Error when preparing ${provider_package} package${COLOR_RESET}" + continue fi - echo "===================================================================================" - echo "${COLOR_GREEN}OK Prepared ${PACKAGE_TYPE} package ${PROVIDER_PACKAGE} format ${PACKAGE_FORMAT}${COLOR_RESET}" - echo "===================================================================================" - group_end + prepared_packages+=("${provider_package}") done + echo "${COLOR_BLUE}===================================================================================${COLOR_RESET}" + echo + echo "Summary of prepared packages:" + echo + if [[ "${#prepared_packages[@]}" != "0" ]]; then + echo "${COLOR_GREEN} Prepared:${COLOR_RESET}" + echo "${prepared_packages[*]}" | fold -w 100 + fi + if [[ "${#skipped_packages[@]}" != "0" ]]; then + echo "${COLOR_YELLOW} Skipped:${COLOR_RESET}" + echo "${skipped_packages[*]}" | fold -w 100 + fi + if [[ "${#error_packages[@]}" != "0" ]]; then + echo "${COLOR_RED} Errors:${COLOR_RESET}" + echo "${error_packages[*]}" | fold -w 100 + fi + echo + echo "${COLOR_BLUE}===================================================================================${COLOR_RESET}" + if [[ ${#error_packages[@]} != "0" ]]; then + echo + echo "${COLOR_RED}There were errors when preparing packages. Exiting! ${COLOR_RESET}" + exit 1 + fi } function rename_packages_if_needed() { - group_start "Renaming packages if needed" - cd "${AIRFLOW_SOURCES}" || exit 1 - pushd dist >/dev/null 2>&1 || exit 1 - if [[ -n "${FILE_VERSION_SUFFIX}" ]]; then # In case we have FILE_VERSION_SUFFIX we rename prepared files if [[ "${PACKAGE_FORMAT}" == "sdist" || "${PACKAGE_FORMAT}" == "both" ]]; then for FILE in *.tar.gz do - mv "${FILE}" "${FILE//\.tar\.gz/${FILE_VERSION_SUFFIX}-bin.tar.gz}" + mv "${FILE}" "${FILE//\.tar\.gz/${FILE_VERSION_SUFFIX}.tar.gz}" done fi if [[ "${PACKAGE_FORMAT}" == "wheel" || "${PACKAGE_FORMAT}" == "both" ]]; then @@ -196,18 +166,25 @@ function rename_packages_if_needed() { done fi fi - popd >/dev/null - echo - echo "${COLOR_GREEN}OK Airflow packages are prepared in dist folder${COLOR_RESET}" - echo - - group_end } -PROVIDER_PACKAGES=("${@}") +install_remaining_dependencies +setup_provider_packages + +cd "${PROVIDER_PACKAGES_DIR}" || exit 1 + +verify_suffix_versions_for_package_preparation +install_supported_pip_version + +PROVIDER_PACKAGES=("${@}") get_providers_to_act_on "${@}" + copy_sources build_provider_packages rename_packages_if_needed + +echo +echo "${COLOR_GREEN}All good! Airflow packages are prepared in dist folder${COLOR_RESET}" +echo diff --git a/scripts/in_container/run_prepare_provider_readme.sh b/scripts/in_container/run_prepare_provider_readme.sh deleted file mode 100755 index 69c42feefbdfa..0000000000000 --- a/scripts/in_container/run_prepare_provider_readme.sh +++ /dev/null @@ -1,56 +0,0 @@ -#!/usr/bin/env bash -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. 
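With exit code 64 treated as "skip" and other non-zero codes collected as errors, one bad provider no longer aborts the whole batch. A hedged example of driving a release-candidate build (suffix, format and package list are illustrative):

    # Build wheels for selected providers with an rc1 suffix on the PyPI versions:
    export PACKAGE_FORMAT="wheel"
    export VERSION_SUFFIX_FOR_PYPI="rc1"
    ./scripts/in_container/run_prepare_provider_packages.sh http salesforce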
diff --git a/scripts/in_container/run_prepare_provider_readme.sh b/scripts/in_container/run_prepare_provider_readme.sh
deleted file mode 100755
index 69c42feefbdfa..0000000000000
--- a/scripts/in_container/run_prepare_provider_readme.sh
+++ /dev/null
@@ -1,56 +0,0 @@
-#!/usr/bin/env bash
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-# shellcheck source=scripts/in_container/_in_container_script_init.sh
-. "$( dirname "${BASH_SOURCE[0]}" )/_in_container_script_init.sh"
-
-setup_provider_packages
-
-cd "${AIRFLOW_SOURCES}" || exit 1
-
-# install extra packages missing in devel_ci
-export PYTHONPATH="${AIRFLOW_SOURCES}"
-
-verify_suffix_versions_for_package_preparation
-
-pip install --upgrade "pip==${AIRFLOW_PIP_VERSION}"
-
-# TODO: remove it when devel_all == devel_ci
-install_remaining_dependencies
-reinstall_azure_storage_blob
-
-cd "${AIRFLOW_SOURCES}/provider_packages" || exit 1
-
-python3 "${AIRFLOW_SOURCES}/dev/provider_packages/prepare_provider_packages.py" \
-    --version-suffix "${TARGET_VERSION_SUFFIX}" \
-    update-package-release-notes "$@"
-
-AIRFLOW_PROVIDER_README_TGZ_FILE="/files/airflow-readme-$(date +"%Y-%m-%d-%H.%M.%S").tar.gz"
-
-cd "${AIRFLOW_SOURCES}" || exit 1
-
-find airflow/providers \( \
-    -name "${PACKAGE_PREFIX_UPPERCASE}PROVIDERS_CHANGES*" \
-    -o -name "${PACKAGE_PREFIX_UPPERCASE}README.md" \
-    -o -name "${PACKAGE_PREFIX_UPPERCASE}setup.py" \
-    -o -name "${PACKAGE_PREFIX_UPPERCASE}setup.cfg" \
-    \) \
-    -print0 | \
-    tar --null --no-recursion -cvzf "${AIRFLOW_PROVIDER_README_TGZ_FILE}" -T -
-
-echo
-echo "Airflow readme for ${PACKAGE_TYPE} provider packages format ${PACKAGE_FORMAT} are tar-gzipped in ${AIRFLOW_PROVIDER_README_TGZ_FILE}"
-echo
diff --git a/scripts/in_container/run_pylint.sh b/scripts/in_container/run_pylint.sh
index d62a1b50d7017..f1b8306e349ea 100755
--- a/scripts/in_container/run_pylint.sh
+++ b/scripts/in_container/run_pylint.sh
@@ -19,7 +19,7 @@
 . "$( dirname "${BASH_SOURCE[0]}" )/_in_container_script_init.sh"

 # Pylint is _very_ unhappy with implicit namespaces, so for this test only, we need to make it not
-trap "rm airflow/providers/__init__.py" EXIT
+trap "rm -f airflow/providers/__init__.py" EXIT

 touch airflow/providers/__init__.py

@@ -43,7 +43,7 @@ if [[ ${#@} == "0" ]]; then
         -name "*.py" \
         -not -name 'webserver_config.py' | \
         grep ".*.py$" | \
-        grep -vFf scripts/ci/pylint_todo.txt | xargs pylint --output-format=colorized
+        grep -vFf scripts/ci/pylint_todo.txt | sort | xargs pylint -j 0 --output-format=colorized
 else
     /usr/local/bin/pylint --output-format=colorized "$@"
 fi
diff --git a/scripts/tools/generate-integrations-json.py b/scripts/tools/generate-integrations-json.py
new file mode 100755
index 0000000000000..6fb4d86da2669
--- /dev/null
+++ b/scripts/tools/generate-integrations-json.py
@@ -0,0 +1,84 @@
+#!/usr/bin/env python
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import json
+import os
+import re
+import shutil
+
+# pylint: disable=no-name-in-module
+from docs.exts.provider_yaml_utils import load_package_data
+
+# pylint: enable=no-name-in-module
+
+AIRFLOW_SITE_DIR = os.environ.get('AIRFLOW_SITE_DIRECTORY')
+ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir))
+DOCS_DIR = os.path.join(ROOT_DIR, 'docs')
+
+if __name__ != "__main__":
+    raise SystemExit(
+        "This file is intended to be executed as an executable program. You cannot use it as a module. "
+        "To run this script, run the ./generate-integrations-json.py command"
+    )
+
+if not (
+    AIRFLOW_SITE_DIR
+    and os.path.isdir(AIRFLOW_SITE_DIR)
+    and os.path.isdir(os.path.join(AIRFLOW_SITE_DIR, 'docs-archive'))
+):
+    raise SystemExit(
+        'Before using this script, set the environment variable AIRFLOW_SITE_DIRECTORY. This variable '
+        'should contain the path to the airflow-site repository directory. '
+        '${AIRFLOW_SITE_DIRECTORY}/docs-archive must exist.'
+    )
+
+ALL_PROVIDER_YAMLS = load_package_data()
+
+result_integrations = []
+for provider_info in ALL_PROVIDER_YAMLS:
+    for integration in provider_info.get('integrations', []):
+        doc_url = integration.get("how-to-guide")
+        if doc_url:
+            doc_url = doc_url[0].strip()
+            doc_url = re.sub(f'/{provider_info["package-name"]}/', r"\g<0>stable/", doc_url)
+            doc_url = re.sub(r'\.rst', '.html', doc_url)
+        else:
+            doc_url = f"/docs/{provider_info['package-name'].lower()}/stable/index.html"
+        logo = integration.get("logo")
+
+        result = {
+            'name': integration['integration-name'],
+            'url': doc_url,
+        }
+        if logo:
+            result['logo'] = logo
+        result_integrations.append(result)
+
+result_integrations = sorted(result_integrations, key=lambda x: x['name'].lower())
+with open(os.path.join(AIRFLOW_SITE_DIR, 'landing-pages/site/static/integrations.json'), 'w') as f:
+    f.write(
+        json.dumps(
+            result_integrations,
+            indent=4,
+        )
+    )
+
+shutil.copytree(
+    src=os.path.join(DOCS_DIR, 'integration-logos'),
+    dst=os.path.join(AIRFLOW_SITE_DIR, 'landing-pages/site/static/integration-logos'),
+    dirs_exist_ok=True,
+)
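The script above exits early unless it can find a checked-out airflow-site repository. A minimal usage sketch (the local path is illustrative; run it from the Airflow repository root so the docs.exts import resolves):

    export AIRFLOW_SITE_DIRECTORY="${HOME}/code/airflow-site"
    python scripts/tools/generate-integrations-json.py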
diff --git a/setup.cfg b/setup.cfg
index b0f13c30a6571..ac103baeda9ab 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -82,6 +82,7 @@ install_requires =
     alembic>=1.2, <2.0
     argcomplete~=1.10
     attrs>=20.0, <21.0
+    blinker
    cached_property~=1.5
     # cattrs >= 1.1.0 dropped support for Python 3.6
     cattrs>=1.0, <1.1.0;python_version<="3.6"
@@ -92,7 +93,7 @@ install_requires =
     cryptography>=0.9.3
     dill>=0.2.2, <0.4
     flask>=1.1.0, <2.0
-    flask-appbuilder~=3.1.1
+    flask-appbuilder~=3.1,>=3.1.1
     flask-caching>=1.5.0, <2.0.0
     flask-login>=0.3, <0.5
     flask-wtf>=0.14.3, <0.15
@@ -109,7 +110,12 @@ install_requires =
     markdown>=2.5.2, <4.0
     markupsafe>=1.1.1, <2.0
     marshmallow-oneofschema>=2.0.1
-    pandas>=0.17.1, <2.0
+    # Numpy stopped releasing 3.6 binaries for 1.20.* series.
+    numpy<1.20;python_version<"3.7"
+    numpy;python_version>="3.7"
+    # Pandas stopped releasing 3.6 binaries for 1.2.* series.
+    pandas>=0.17.1, <1.2;python_version<"3.7"
+    pandas>=0.17.1, <2.0;python_version>="3.7"
     pendulum~=2.0
     pep562~=1.0;python_version<"3.7"
     psutil>=4.2.0, <6.0.0
@@ -130,7 +136,8 @@ install_requires =
     requests>=2.20.0
     rich==9.2.0
     setproctitle>=1.1.8, <2
-    sqlalchemy>=1.3.18, <2
+    # SQLAlchemy 1.4 breaks sqlalchemy-utils https://github.com/kvesteri/sqlalchemy-utils/issues/505
+    sqlalchemy>=1.3.18, <1.4
     sqlalchemy_jsonfield~=1.0
     tabulate>=0.7.5, <0.9
     tenacity~=6.2.0
diff --git a/setup.py b/setup.py
index 9e49c2d774965..0ae7bd080af5b 100644
--- a/setup.py
+++ b/setup.py
@@ -21,10 +21,11 @@
 import os
 import subprocess
 import unittest
+from copy import deepcopy
 from distutils import log
 from os.path import dirname, relpath
 from textwrap import wrap
-from typing import Dict, List, Set, Tuple
+from typing import Dict, List, Tuple

 from setuptools import Command, Distribution, find_namespace_packages, setup
 from setuptools.command.develop import develop as develop_orig
@@ -38,7 +39,7 @@

 logger = logging.getLogger(__name__)

-version = '2.0.1'
+version = '2.0.2'

 my_dir = dirname(__file__)

@@ -168,7 +169,7 @@ def write_version(filename: str = os.path.join(*[my_dir, "airflow", "git_version
     :param str filename: Destination file to write
     """
-    text = "{}".format(git_version(version))
+    text = f"{git_version(version)}"
     with open(filename, 'w') as file:
         file.write(text)

@@ -193,8 +194,7 @@ def get_sphinx_theme_version() -> str:
 # If you change this mark you should also change ./scripts/ci/check_order_setup.py
 # Start dependencies group
 amazon = [
-    'boto3>=1.15.0,<1.16.0',
-    'botocore>=1.18.0,<1.19.0',
+    'boto3>=1.15.0,<1.18.0',
     'watchtower~=0.7.3',
 ]
 apache_beam = [
@@ -216,8 +216,11 @@ def get_sphinx_theme_version() -> str:
     'azure-keyvault>=4.1.0',
     'azure-kusto-data>=0.0.43,<0.1',
     'azure-mgmt-containerinstance>=1.5.0,<2.0',
+    'azure-mgmt-datafactory>=1.0.0,<2.0',
     'azure-mgmt-datalake-store>=0.5.0',
     'azure-mgmt-resource>=2.2.0',
+    'azure-storage-blob>=12.7.0',
+    'azure-storage-common>=2.1.0',
     'azure-storage-file>=2.1.0',
 ]
 cassandra = [
@@ -234,7 +237,12 @@ def get_sphinx_theme_version() -> str:
 cloudant = [
     'cloudant>=2.0',
 ]
-dask = ['cloudpickle>=1.4.1, <1.5.0', 'distributed>=2.11.1, <2.20']
+dask = [
+    'cloudpickle>=1.4.1, <1.5.0',
+    'dask<2021.3.1;python_version<"3.7"',  # dask stopped supporting python 3.6 in 2021.3.1 version
+    'dask>=2.9.0;python_version>="3.7"',
+    'distributed>=2.11.1, <2.20',
+]
 databricks = [
     'requests>=2.20.0, <3',
 ]
@@ -242,7 +250,8 @@ def get_sphinx_theme_version() -> str:
     'datadog>=0.14.0',
 ]
 doc = [
-    'sphinx>=2.1.2',
+    # Sphinx is limited to < 3.5.0 because of https://github.com/sphinx-doc/sphinx/issues/8880
+    'sphinx>=2.1.2, <3.5.0',
     f'sphinx-airflow-theme{get_sphinx_theme_version()}',
     'sphinx-argparse>=0.1.13',
     'sphinx-autoapi==1.0.0',
@@ -278,36 +287,41 @@ def get_sphinx_theme_version() -> str:
 google = [
     'PyOpenSSL',
     'google-ads>=4.0.0,<8.0.0',
+    'google-api-core>=1.25.1,<2.0.0',
     'google-api-python-client>=1.6.0,<2.0.0',
     'google-auth>=1.0.0,<2.0.0',
     'google-auth-httplib2>=0.0.1',
-    'google-cloud-automl>=0.4.0,<2.0.0',
-    'google-cloud-bigquery-datatransfer>=0.4.0,<2.0.0',
+    'google-cloud-automl>=2.1.0,<3.0.0',
+    'google-cloud-bigquery-datatransfer>=3.0.0,<4.0.0',
     'google-cloud-bigtable>=1.0.0,<2.0.0',
     'google-cloud-container>=0.1.1,<2.0.0',
-    'google-cloud-datacatalog>=0.5.0, <0.8',  # TODO: we should migrate to 1.0 likely and add <2.0.0 then
-    'google-cloud-dataproc>=1.0.1,<2.0.0',
+    'google-cloud-datacatalog>=3.0.0,<4.0.0',
+    'google-cloud-dataproc>=2.2.0,<3.0.0',
     'google-cloud-dlp>=0.11.0,<2.0.0',
-    'google-cloud-kms>=1.2.1,<2.0.0',
+    'google-cloud-kms>=2.0.0,<3.0.0',
     'google-cloud-language>=1.1.1,<2.0.0',
-    'google-cloud-logging>=1.14.0,<2.0.0',
+    'google-cloud-logging>=2.1.1,<3.0.0',
     'google-cloud-memcache>=0.2.0',
-    'google-cloud-monitoring>=0.34.0,<2.0.0',
-    'google-cloud-os-login>=1.0.0,<2.0.0',
-    'google-cloud-pubsub>=1.0.0,<2.0.0',
-    'google-cloud-redis>=0.3.0,<2.0.0',
+    'google-cloud-monitoring>=2.0.0,<3.0.0',
+    'google-cloud-os-login>=2.0.0,<3.0.0',
+    'google-cloud-pubsub>=2.0.0,<3.0.0',
+    'google-cloud-redis>=2.0.0,<3.0.0',
     'google-cloud-secret-manager>=0.2.0,<2.0.0',
     'google-cloud-spanner>=1.10.0,<2.0.0',
     'google-cloud-speech>=0.36.3,<2.0.0',
-    'google-cloud-storage>=1.16,<2.0.0',
-    'google-cloud-tasks>=1.2.1,<2.0.0',
+    'google-cloud-storage>=1.30,<2.0.0',
+    'google-cloud-tasks>=2.0.0,<3.0.0',
     'google-cloud-texttospeech>=0.4.0,<2.0.0',
     'google-cloud-translate>=1.5.0,<2.0.0',
     'google-cloud-videointelligence>=1.7.0,<2.0.0',
     'google-cloud-vision>=0.35.2,<2.0.0',
+    'google-cloud-workflows>=0.1.0,<2.0.0',
     'grpcio-gcp>=0.2.2',
     'json-merge-patch~=0.2',
-    'pandas-gbq',
+    # pandas-gbq 0.15.0 release broke google provider's bigquery import
+    # _check_google_client_version (airflow/providers/google/cloud/hooks/bigquery.py:49)
+    'pandas-gbq<0.15.0',
+    'plyvel',
 ]
 grpc = [
     'google-auth>=1.0.0, <2.0.0dev',
@@ -357,8 +371,9 @@ def get_sphinx_theme_version() -> str:
 ]
 mysql = [
     'mysql-connector-python>=8.0.11, <=8.0.22',
-    'mysqlclient>=1.3.6,<1.4',
+    'mysqlclient>=1.3.6,<3',
 ]
+neo4j = ['neo4j>=4.2.1']
 odbc = [
     'pyodbc',
 ]
@@ -382,7 +397,7 @@ def get_sphinx_theme_version() -> str:
     'pinotdb>0.1.2,<1.0.0',
 ]
 plexus = [
-    'arrow>=0.16.0',
+    'arrow>=0.16.0,<1.0.0',
 ]
 postgres = [
     'psycopg2-binary>=2.7.4',
@@ -399,6 +414,7 @@ def get_sphinx_theme_version() -> str:
 ]
 salesforce = [
     'simple-salesforce>=1.0.0',
+    'tableauserverclient',
 ]
 samba = [
     'pysmbclient>=0.1.3',
@@ -415,23 +431,10 @@ def get_sphinx_theme_version() -> str:
 ]
 singularity = ['spython>=0.0.56']
 slack = [
-    'slackclient>=2.0.0,<3.0.0',
+    'slack_sdk>=3.0.0,<4.0.0',
 ]
 snowflake = [
-    # The `azure` provider uses legacy `azure-storage` library, where `snowflake` uses the
-    # newer and more stable versions of those libraries. Most of `azure` operators and hooks work
-    # fine together with `snowflake` because the deprecated library does not overlap with the
-    # new libraries except the `blob` classes. So while `azure` works fine for most cases
-    # blob is the only exception
-    # Solution to that is being worked on in https://github.com/apache/airflow/pull/12188
-    # once it is merged, we can move those two back to `azure` extra.
-    'azure-core>=1.10.0',
-    'azure-storage-blob',
-    'azure-storage-common',
-    # Snowflake conector > 2.3.8 is needed because it has vendored urrllib3 and requests libraries which
-    # are monkey-patched. In earlier versions of the library, monkeypatching the libraries by snowflake
-    # caused other providers to fail (Google, Amazon etc.)
-    'snowflake-connector-python>=2.3.8',
+    'snowflake-connector-python>=2.4.1',
     'snowflake-sqlalchemy>=1.1.0',
 ]
 spark = [
@@ -446,11 +449,12 @@ def get_sphinx_theme_version() -> str:
     'statsd>=3.3.0, <4.0',
 ]
 tableau = [
-    'tableauserverclient~=0.12',
+    'tableauserverclient',
 ]
 telegram = [
     'python-telegram-bot==13.0',
 ]
+trino = ['trino']
 vertica = [
     'vertica-python>=0.5.1',
 ]
@@ -472,6 +476,7 @@ def get_sphinx_theme_version() -> str:
 # End dependencies group

 devel = [
+    'aws_xray_sdk',
     'beautifulsoup4~=4.7.1',
     'black',
     'blinker',
@@ -489,28 +494,26 @@ def get_sphinx_theme_version() -> str:
     'ipdb',
     'jira',
     'jsonpath-ng',
-    # HACK: Moto is not compatible with newer versions
-    # See: https://github.com/spulec/moto/issues/3535
-    'mock<4.0.3',
+    'jsondiff',
     'mongomock',
-    'moto',
+    'moto~=2.0',
     'mypy==0.770',
     'parameterized',
     'paramiko',
     'pipdeptree',
     'pre-commit',
-    'pylint==2.6.0',
+    'pylint>=2.7.0',
     'pysftp',
-    'pytest',
+    'pytest~=6.0',
     'pytest-cov',
     'pytest-instafail',
-    'pytest-rerunfailures',
+    'pytest-rerunfailures~=9.1',
     'pytest-timeouts',
     'pytest-xdist',
+    'python-jose',
     'pywinrm',
     'qds-sdk>=1.9.6',
     'requests_mock',
-    'testfixtures',
     'wheel',
     'yamllint',
 ]
@@ -520,7 +523,9 @@ def get_sphinx_theme_version() -> str:

 # Dict of all providers which are part of the Apache Airflow repository together with their requirements
 PROVIDERS_REQUIREMENTS: Dict[str, List[str]] = {
+    'airbyte': [],
     'amazon': amazon,
+    'apache.beam': apache_beam,
     'apache.cassandra': cassandra,
     'apache.druid': druid,
     'apache.hdfs': hdfs,
@@ -556,6 +561,7 @@ def get_sphinx_theme_version() -> str:
     'microsoft.winrm': winrm,
     'mongo': mongo,
     'mysql': mysql,
+    'neo4j': neo4j,
     'odbc': odbc,
     'openfaas': [],
     'opsgenie': [],
@@ -577,20 +583,31 @@ def get_sphinx_theme_version() -> str:
     'snowflake': snowflake,
     'sqlite': [],
     'ssh': ssh,
+    'tableau': tableau,
     'telegram': telegram,
+    'trino': trino,
     'vertica': vertica,
     'yandex': yandex,
     'zendesk': zendesk,
 }

-# Those are all extras which do not have own 'providers'
-EXTRAS_REQUIREMENTS: Dict[str, List[str]] = {
+# Those are all additional extras which do not have their own 'providers'
+# The 'apache.atlas' and 'apache.webhdfs' are extras that provide additional libraries
+# but they do not have separate providers (yet?), they are merely there to add extra libraries
+# that can be used in custom python/bash operators.
+ADDITIONAL_EXTRAS_REQUIREMENTS: Dict[str, List[str]] = {
     'apache.atlas': atlas,
-    'apache.beam': apache_beam,
     'apache.webhdfs': webhdfs,
+}
+
+
+# Those are extras that are extensions of the 'core' Airflow. They provide additional features
+# to the Airflow core. They do not have separate providers because they do not have any operators/hooks etc.
+CORE_EXTRAS_REQUIREMENTS: Dict[str, List[str]] = {
     'async': async_packages,
+    'celery': celery,  # also has provider, but it extends the core with the Celery executor
     'cgroups': cgroups,
+    'cncf.kubernetes': kubernetes,  # also has provider, but it extends the core with the KubernetesExecutor
     'dask': dask,
     'github_enterprise': flask_oauth,
     'google_auth': flask_oauth,
@@ -600,11 +617,13 @@ def get_sphinx_theme_version() -> str:
     'rabbitmq': rabbitmq,
     'sentry': sentry,
     'statsd': statsd,
-    'tableau': tableau,
     'virtualenv': virtualenv,
 }

+EXTRAS_REQUIREMENTS: Dict[str, List[str]] = deepcopy(CORE_EXTRAS_REQUIREMENTS)
+
+
 def add_extras_for_all_providers() -> None:
     """
     Adds extras for all providers.
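The celery and cncf.kubernetes entries stay in the core extras because they gate executors rather than just hooks, while still pulling in the matching provider package on installation. A hedged illustration (version pin illustrative):

    # Installs the Celery executor stack plus apache-airflow-providers-celery:
    pip install "apache-airflow[celery]==2.0.2"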
@@ -615,7 +634,14 @@ def add_extras_for_all_providers() -> None: EXTRAS_REQUIREMENTS[provider_name] = provider_requirement +def add_additional_extras() -> None: + """Adds extras for all additional extras.""" + for extra_name, extra_requirement in ADDITIONAL_EXTRAS_REQUIREMENTS.items(): + EXTRAS_REQUIREMENTS[extra_name] = extra_requirement + + add_extras_for_all_providers() +add_additional_extras() ############################################################################################################# # The whole section can be removed in Airflow 3.0 as those old aliases are deprecated in 2.* series @@ -691,8 +717,10 @@ def add_extras_for_all_deprecated_aliases() -> None: 'microsoft.mssql', 'mongo', 'mysql', + 'neo4j', 'postgres', 'presto', + 'trino', 'vertica', ] @@ -721,11 +749,15 @@ def add_extras_for_all_deprecated_aliases() -> None: ] ) -# Those packages are excluded because they break tests (downgrading mock and few other requirements) -# and they are not needed to run our test suite. This can be removed as soon as we get non-conflicting -# requirements for the apache-beam as well. This waits for azure fixes: +# Those packages are excluded because they break tests and they are not needed to run our test suite. +# This can be removed as soon as we get non-conflicting +# requirements for the apache-beam as well. # -# * Azure: https://github.com/apache/airflow/issues/11968 +# Currently Apache Beam has very narrow and old dependencies for 'mock' package which +# are required only for our tests. +# once https://github.com/apache/beam/pull/14328 is solved and new version of apache-beam is released +# we will be able to remove this exclusion and get rid of `install_remaining_dependencies` +# function in `scripts/in_container`. # PACKAGES_EXCLUDED_FOR_CI = [ 'apache-beam', @@ -781,11 +813,6 @@ def sort_extras_requirements() -> Dict[str, List[str]]: EXTRAS_REQUIREMENTS = sort_extras_requirements() -# A set that keeps all extras that install some providers. -# It is used by pre-commit that verifies if documentation in docs/apache-airflow/extra-packages-ref.rst -# are synchronized. -EXTRAS_WITH_PROVIDERS: Set[str] = set() - # Those providers are pre-installed always when airflow is installed. # Those providers do not have dependency on airflow2.0 because that would lead to circular dependencies. # This is not a problem for PIP but some tools (pipdeptree) show those as a warning. @@ -808,6 +835,11 @@ def get_provider_package_from_package_id(package_id: str): return f"apache-airflow-providers-{package_suffix}" +def get_all_provider_packages(): + """Returns all provider packages configured in setup.py""" + return " ".join([get_provider_package_from_package_id(package) for package in PROVIDERS_REQUIREMENTS]) + + class AirflowDistribution(Distribution): """ The setuptools.Distribution subclass with Airflow specific behaviour @@ -839,14 +871,50 @@ def parse_config_files(self, *args, **kwargs): # pylint: disable=signature-diff ) -def add_provider_packages_to_extras_requirements(extra: str, providers: List[str]) -> None: +def replace_extra_requirement_with_provider_packages(extra: str, providers: List[str]) -> None: """ - Adds provider packages to requirements of extra. + Replaces extra requirement with provider package. The intention here is that when + the provider is added as dependency of extra, there is no need to add the dependencies + separately. 
This is not needed and even harmful, because in case of future versions of + the provider, the requirements might change, so hard-coding requirements from the version + that was available at the release time might cause dependency conflicts in the future. + + Say, for example, that you have the salesforce provider with these dependencies: + + { 'salesforce': ['simple-salesforce>=1.0.0', 'tableauserverclient'] } + + Initially the ['salesforce'] extra has those requirements and it works like that when you install + it with INSTALL_PROVIDERS_FROM_SOURCES set to `true` (during development). However, when + the production installation is used, the dependencies are changed: + + { 'salesforce': ['apache-airflow-providers-salesforce'] } + + And then the 'apache-airflow-providers-salesforce' package has these 'install_requires' dependencies: + ['simple-salesforce>=1.0.0', 'tableauserverclient'] + + So transitively the 'salesforce' extra has all the requirements it needs, and in case the provider + changes its dependencies, they will transitively change as well. + + In the constraint mechanism we save both the provider versions and their dependencies' + versions, which means that installation using constraints is repeatable. + + :param extra: Name of the extra to add providers to + :param providers: list of provider ids + """ + EXTRAS_REQUIREMENTS[extra] = [ + get_provider_package_from_package_id(package_name) for package_name in providers + ] + + +def add_provider_packages_to_extra_requirements(extra: str, providers: List[str]) -> None: + """ + Adds provider packages as requirements to extra. This is used to add provider packages as requirements + to the "bulk" kind of extras. Those bulk extras do not have the detailed 'extra' requirements as + initial values, so instead of replacing them (see previous function) we can extend them. :param extra: Name of the extra to add providers to :param providers: list of provider ids """ - EXTRAS_WITH_PROVIDERS.add(extra) EXTRAS_REQUIREMENTS[extra].extend( [get_provider_package_from_package_id(package_name) for package_name in providers] ) @@ -863,12 +931,14 @@ def add_all_provider_packages() -> None: """ for provider in ALL_PROVIDERS: - add_provider_packages_to_extras_requirements(provider, [provider]) - add_provider_packages_to_extras_requirements("all", ALL_PROVIDERS) - add_provider_packages_to_extras_requirements("devel_ci", ALL_PROVIDERS) - add_provider_packages_to_extras_requirements("devel_all", ALL_PROVIDERS) - add_provider_packages_to_extras_requirements("all_dbs", ALL_DB_PROVIDERS) - add_provider_packages_to_extras_requirements("devel_hadoop", ["apache.hdfs", "apache.hive", "presto"]) + replace_extra_requirement_with_provider_packages(provider, [provider]) + add_provider_packages_to_extra_requirements("all", ALL_PROVIDERS) + add_provider_packages_to_extra_requirements("devel_ci", ALL_PROVIDERS) + add_provider_packages_to_extra_requirements("devel_all", ALL_PROVIDERS) + add_provider_packages_to_extra_requirements("all_dbs", ALL_DB_PROVIDERS) + add_provider_packages_to_extra_requirements( + "devel_hadoop", ["apache.hdfs", "apache.hive", "presto", "trino"] + ) class Develop(develop_orig): diff --git a/tests/airflow_pylint/disable_checks_for_tests.py b/tests/airflow_pylint/disable_checks_for_tests.py deleted file mode 100644 index cbdf9b2ce936f..0000000000000 --- a/tests/airflow_pylint/disable_checks_for_tests.py +++ /dev/null @@ -1,65 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements.
See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - - -from astroid import MANAGER, scoped_nodes -from pylint.lint import PyLinter - -DISABLED_CHECKS_FOR_TESTS = ( - "missing-docstring, no-self-use, too-many-public-methods, protected-access, do-not-use-asserts" -) - - -def register(_: PyLinter): - """ - Skip registering any plugin. This is not a real plugin - we only need it to register transform before - running pylint. - - :param _: - :return: - """ - - -def transform(mod): - """ - It's a small hack but one that gives us a lot of speedup in pylint tests. We are replacing the first - line of the file with pylint-disable (or update existing one) when file name start with `test_` or - (for providers) when it is the full path of the package (both cases occur in pylint) - - :param mod: astroid module - :return: None - """ - if ( - mod.name.startswith("test_") - or mod.name.startswith("tests.") - or mod.name.startswith("kubernetes_tests.") - or mod.name.startswith("chart.") - ): - decoded_lines = mod.stream().read().decode("utf-8").split("\n") - if decoded_lines[0].startswith("# pylint: disable="): - decoded_lines[0] = decoded_lines[0] + " " + DISABLED_CHECKS_FOR_TESTS - elif decoded_lines[0].startswith("#") or decoded_lines[0].strip() == "": - decoded_lines[0] = "# pylint: disable=" + DISABLED_CHECKS_FOR_TESTS - else: - raise Exception( - f"The first line of module {mod.name} is not a comment or empty. " f"Please make sure it is!" 
- ) - # pylint will read from `.file_bytes` attribute later when tokenization - mod.file_bytes = "\n".join(decoded_lines).encode("utf-8") - - -MANAGER.register_transform(scoped_nodes.Module, transform) diff --git a/tests/api_connexion/endpoints/test_dag_endpoint.py b/tests/api_connexion/endpoints/test_dag_endpoint.py index 146720c87d001..6041b5fe1c257 100644 --- a/tests/api_connexion/endpoints/test_dag_endpoint.py +++ b/tests/api_connexion/endpoints/test_dag_endpoint.py @@ -75,7 +75,13 @@ def setUpClass(cls) -> None: access_control={'TestGranularDag': [permissions.ACTION_CAN_EDIT, permissions.ACTION_CAN_READ]}, ) - with DAG(cls.dag_id, start_date=datetime(2020, 6, 15), doc_md="details", params={"foo": 1}) as dag: + with DAG( + cls.dag_id, + start_date=datetime(2020, 6, 15), + doc_md="details", + params={"foo": 1}, + tags=['example'], + ) as dag: DummyOperator(task_id=cls.task_id) with DAG(cls.dag2_id, start_date=datetime(2020, 6, 15)) as dag2: # no doc_md @@ -212,7 +218,7 @@ def test_should_respond_200(self): "is_paused": None, "is_subdag": False, "orientation": "LR", - "owners": [], + "owners": ['airflow'], "params": {"foo": 1}, "schedule_interval": { "__type": "TimeDelta", @@ -221,7 +227,7 @@ def test_should_respond_200(self): "seconds": 0, }, "start_date": "2020-06-15T00:00:00+00:00", - "tags": None, + "tags": [{'name': 'example'}], "timezone": "Timezone('UTC')", } assert response.json == expected @@ -244,7 +250,7 @@ def test_should_response_200_with_doc_md_none(self): "is_paused": None, "is_subdag": False, "orientation": "LR", - "owners": [], + "owners": ['airflow'], "params": {}, "schedule_interval": { "__type": "TimeDelta", @@ -253,7 +259,7 @@ def test_should_response_200_with_doc_md_none(self): "seconds": 0, }, "start_date": "2020-06-15T00:00:00+00:00", - "tags": None, + "tags": [], "timezone": "Timezone('UTC')", } assert response.json == expected @@ -276,7 +282,7 @@ def test_should_response_200_for_null_start_date(self): "is_paused": None, "is_subdag": False, "orientation": "LR", - "owners": [], + "owners": ['airflow'], "params": {}, "schedule_interval": { "__type": "TimeDelta", @@ -285,7 +291,7 @@ def test_should_response_200_for_null_start_date(self): "seconds": 0, }, "start_date": None, - "tags": None, + "tags": [], "timezone": "Timezone('UTC')", } assert response.json == expected @@ -313,7 +319,7 @@ def test_should_respond_200_serialized(self): "is_paused": None, "is_subdag": False, "orientation": "LR", - "owners": [], + "owners": ['airflow'], "params": {"foo": 1}, "schedule_interval": { "__type": "TimeDelta", @@ -322,7 +328,7 @@ def test_should_respond_200_serialized(self): "seconds": 0, }, "start_date": "2020-06-15T00:00:00+00:00", - "tags": None, + "tags": [{'name': 'example'}], "timezone": "Timezone('UTC')", } response = client.get( @@ -349,11 +355,11 @@ def test_should_respond_200_serialized(self): 'is_paused': None, 'is_subdag': False, 'orientation': 'LR', - 'owners': [], + 'owners': ['airflow'], "params": {"foo": 1}, 'schedule_interval': {'__type': 'TimeDelta', 'days': 1, 'microseconds': 0, 'seconds': 0}, 'start_date': '2020-06-15T00:00:00+00:00', - 'tags': None, + 'tags': [{'name': 'example'}], 'timezone': "Timezone('UTC')", } assert response.json == expected diff --git a/tests/api_connexion/endpoints/test_task_instance_endpoint.py b/tests/api_connexion/endpoints/test_task_instance_endpoint.py index 84c957fb1643d..4f8028e21f0c7 100644 --- a/tests/api_connexion/endpoints/test_task_instance_endpoint.py +++ b/tests/api_connexion/endpoints/test_task_instance_endpoint.py @@ 
-166,6 +166,37 @@ def test_should_respond_200(self, session): "unixname": getpass.getuser(), } + @provide_session + def test_should_respond_200_with_task_state_in_removed(self, session): + self.create_task_instances(session, task_instances=[{"state": State.REMOVED}], update_extras=True) + response = self.client.get( + "/api/v1/dags/example_python_operator/dagRuns/TEST_DAG_RUN_ID/taskInstances/print_the_context", + environ_overrides={"REMOTE_USER": "test"}, + ) + assert response.status_code == 200 + assert response.json == { + "dag_id": "example_python_operator", + "duration": 10000.0, + "end_date": "2020-01-03T00:00:00+00:00", + "execution_date": "2020-01-01T00:00:00+00:00", + "executor_config": "{}", + "hostname": "", + "max_tries": 0, + "operator": "PythonOperator", + "pid": 100, + "pool": "default_pool", + "pool_slots": 1, + "priority_weight": 6, + "queue": "default_queue", + "queued_when": None, + "sla_miss": None, + "start_date": "2020-01-02T00:00:00+00:00", + "state": "removed", + "task_id": "print_the_context", + "try_number": 0, + "unixname": getpass.getuser(), + } + @provide_session def test_should_respond_200_task_instance_with_sla(self, session): self.create_task_instances(session) diff --git a/tests/api_connexion/schemas/test_dag_schema.py b/tests/api_connexion/schemas/test_dag_schema.py index 2811bf34008a4..96aba1f2ec506 100644 --- a/tests/api_connexion/schemas/test_dag_schema.py +++ b/tests/api_connexion/schemas/test_dag_schema.py @@ -107,6 +107,7 @@ def test_serialize(self): orientation="LR", default_view="duration", params={"foo": 1}, + tags=['example1', 'example2'], ) schema = DAGDetailSchema() expected = { @@ -126,7 +127,7 @@ def test_serialize(self): 'params': {'foo': 1}, 'schedule_interval': {'__type': 'TimeDelta', 'days': 1, 'seconds': 0, 'microseconds': 0}, 'start_date': '2020-06-19T00:00:00+00:00', - 'tags': None, + 'tags': [{'name': "example1"}, {'name': "example2"}], 'timezone': "Timezone('UTC')", } assert schema.dump(dag) == expected diff --git a/tests/cli/commands/test_connection_command.py b/tests/cli/commands/test_connection_command.py index ae78892ff755f..b974e15abd5b8 100644 --- a/tests/cli/commands/test_connection_command.py +++ b/tests/cli/commands/test_connection_command.py @@ -100,6 +100,10 @@ class TestCliListConnections(unittest.TestCase): 'sqlite_default', 'sqlite', ), + ( + 'trino_default', + 'trino', + ), ( 'vertica_default', 'vertica', diff --git a/tests/cli/commands/test_info_command.py b/tests/cli/commands/test_info_command.py index 7fad6e83dcbfa..e169c5cf65426 100644 --- a/tests/cli/commands/test_info_command.py +++ b/tests/cli/commands/test_info_command.py @@ -119,14 +119,14 @@ def test_should_read_logging_configuration(self): with conf_vars( { ('logging', 'remote_logging'): 'True', - ('logging', 'remote_base_log_folder'): 'stackdriver://logs-name', + ('logging', 'remote_base_log_folder'): 's3://logs-name', } ): importlib.reload(airflow_local_settings) configure_logging() instance = info_command.ConfigInfo(info_command.NullAnonymizer()) text = capture_show_output(instance) - assert "stackdriver" in text + assert "S3TaskHandler" in text def tearDown(self) -> None: importlib.reload(airflow_local_settings) diff --git a/tests/cli/commands/test_kubernetes_command.py b/tests/cli/commands/test_kubernetes_command.py index 8ae2eef052f79..707eb554c7cb6 100644 --- a/tests/cli/commands/test_kubernetes_command.py +++ b/tests/cli/commands/test_kubernetes_command.py @@ -55,6 +55,13 @@ def test_generate_dag_yaml(self): class 
TestCleanUpPodsCommand(unittest.TestCase): + label_selector = kubernetes.client.V1LabelSelector( + match_expressions=[ + kubernetes.client.V1LabelSelectorRequirement(key=label, operator="Exists") + for label in ['dag_id', 'task_id', 'execution_date', 'try_number', 'airflow_version'] + ] + ) + @classmethod def setUpClass(cls): cls.parser = cli_parser.get_parser() @@ -79,7 +86,9 @@ def test_running_pods_are_not_cleaned(self, load_incluster_config, list_namespac kubernetes_command.cleanup_pods( self.parser.parse_args(['kubernetes', 'cleanup-pods', '--namespace', 'awesome-namespace']) ) - list_namespaced_pod.assert_called_once_with(namespace='awesome-namespace', limit=500) + list_namespaced_pod.assert_called_once_with( + namespace='awesome-namespace', limit=500, label_selector=self.label_selector + ) delete_pod.assert_not_called() load_incluster_config.assert_called_once() @@ -98,7 +107,9 @@ def test_cleanup_succeeded_pods(self, load_incluster_config, list_namespaced_pod kubernetes_command.cleanup_pods( self.parser.parse_args(['kubernetes', 'cleanup-pods', '--namespace', 'awesome-namespace']) ) - list_namespaced_pod.assert_called_once_with(namespace='awesome-namespace', limit=500) + list_namespaced_pod.assert_called_once_with( + namespace='awesome-namespace', limit=500, label_selector=self.label_selector + ) delete_pod.assert_called_with('dummy', 'awesome-namespace') load_incluster_config.assert_called_once() @@ -120,7 +131,9 @@ def test_no_cleanup_failed_pods_wo_restart_policy_never( kubernetes_command.cleanup_pods( self.parser.parse_args(['kubernetes', 'cleanup-pods', '--namespace', 'awesome-namespace']) ) - list_namespaced_pod.assert_called_once_with(namespace='awesome-namespace', limit=500) + list_namespaced_pod.assert_called_once_with( + namespace='awesome-namespace', limit=500, label_selector=self.label_selector + ) delete_pod.assert_not_called() load_incluster_config.assert_called_once() @@ -142,7 +155,9 @@ def test_cleanup_failed_pods_w_restart_policy_never( kubernetes_command.cleanup_pods( self.parser.parse_args(['kubernetes', 'cleanup-pods', '--namespace', 'awesome-namespace']) ) - list_namespaced_pod.assert_called_once_with(namespace='awesome-namespace', limit=500) + list_namespaced_pod.assert_called_once_with( + namespace='awesome-namespace', limit=500, label_selector=self.label_selector + ) delete_pod.assert_called_with('dummy3', 'awesome-namespace') load_incluster_config.assert_called_once() @@ -162,7 +177,9 @@ def test_cleanup_evicted_pods(self, load_incluster_config, list_namespaced_pod, kubernetes_command.cleanup_pods( self.parser.parse_args(['kubernetes', 'cleanup-pods', '--namespace', 'awesome-namespace']) ) - list_namespaced_pod.assert_called_once_with(namespace='awesome-namespace', limit=500) + list_namespaced_pod.assert_called_once_with( + namespace='awesome-namespace', limit=500, label_selector=self.label_selector + ) delete_pod.assert_called_with('dummy4', 'awesome-namespace') load_incluster_config.assert_called_once() @@ -182,7 +199,9 @@ def test_cleanup_api_exception_continue(self, load_incluster_config, list_namesp kubernetes_command.cleanup_pods( self.parser.parse_args(['kubernetes', 'cleanup-pods', '--namespace', 'awesome-namespace']) ) - list_namespaced_pod.assert_called_once_with(namespace='awesome-namespace', limit=500) + list_namespaced_pod.assert_called_once_with( + namespace='awesome-namespace', limit=500, label_selector=self.label_selector + ) load_incluster_config.assert_called_once() @mock.patch('airflow.cli.commands.kubernetes_command._delete_pod') @@ 
-204,8 +223,13 @@ def test_list_pod_with_continue_token(self, load_incluster_config, list_namespac self.parser.parse_args(['kubernetes', 'cleanup-pods', '--namespace', 'awesome-namespace']) ) calls = [ - call.first(namespace='awesome-namespace', limit=500), - call.second(namespace='awesome-namespace', limit=500, _continue='dummy-token'), + call.first(namespace='awesome-namespace', limit=500, label_selector=self.label_selector), + call.second( + namespace='awesome-namespace', + limit=500, + label_selector=self.label_selector, + _continue='dummy-token', + ), ] list_namespaced_pod.assert_has_calls(calls) delete_pod.assert_called_with('dummy', 'awesome-namespace') diff --git a/tests/cli/commands/test_task_command.py b/tests/cli/commands/test_task_command.py index a011ee6308ed7..84d8162dde97e 100644 --- a/tests/cli/commands/test_task_command.py +++ b/tests/cli/commands/test_task_command.py @@ -62,6 +62,7 @@ def setUpClass(cls): cls.dagbag = DagBag(include_examples=True) cls.parser = cli_parser.get_parser() + @pytest.mark.skip(reason="This test hangs in v2-0-test branch") def test_cli_list_tasks(self): for dag_id in self.dagbag.dags: args = self.parser.parse_args(['tasks', 'list', dag_id]) @@ -313,6 +314,8 @@ def test_local_run(self): assert state == State.SUCCESS +# For this test memory spins out of control on Python 3.6. TODO(potiuk): FIXME +@pytest.mark.quarantined class TestLogsfromTaskRunCommand(unittest.TestCase): def setUp(self) -> None: self.dag_id = "test_logging_dag" diff --git a/tests/cli/test_cli_parser.py b/tests/cli/test_cli_parser.py index 1c2e2aaab2e6d..0ba5dff290388 100644 --- a/tests/cli/test_cli_parser.py +++ b/tests/cli/test_cli_parser.py @@ -144,6 +144,16 @@ def test_commands_and_command_group_sections(self): assert "Commands" in stdout assert "Groups" in stdout + def test_dag_parser_commands_and_command_group_sections(self): + parser = cli_parser.get_parser(dag_parser=True) + + with contextlib.redirect_stdout(io.StringIO()) as stdout: + with self.assertRaises(SystemExit): + parser.parse_args(['--help']) + stdout = stdout.getvalue() + self.assertIn("Commands", stdout) + self.assertIn("Groups", stdout) + def test_should_display_help(self): parser = cli_parser.get_parser() @@ -160,6 +170,22 @@ def test_should_display_help(self): with pytest.raises(SystemExit): parser.parse_args([*cmd_args, '--help']) + def test_dag_cli_should_display_help(self): + parser = cli_parser.get_parser(dag_parser=True) + + all_command_as_args = [ + command_as_args + for top_command in cli_parser.dag_cli_commands + for command_as_args in ( + [[top_command.name]] + if isinstance(top_command, cli_parser.ActionCommand) + else [[top_command.name, nested_command.name] for nested_command in top_command.subcommands] + ) + ] + for cmd_args in all_command_as_args: + with self.assertRaises(SystemExit): + parser.parse_args([*cmd_args, '--help']) + def test_positive_int(self): assert 1 == cli_parser.positive_int('1') diff --git a/tests/conftest.py b/tests/conftest.py index 98a7d1f5b0478..de7e903f8c520 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -128,7 +128,7 @@ def pytest_addoption(parser): action="append", metavar="INTEGRATIONS", help="only run tests matching integration specified: " - "[cassandra,kerberos,mongo,openldap,presto,rabbitmq,redis]. ", + "[cassandra,kerberos,mongo,openldap,rabbitmq,redis,statsd,trino]. ", ) group.addoption( "--backend", @@ -152,11 +152,6 @@ def pytest_addoption(parser): action="store_true", help="Includes quarantined tests (marked with quarantined marker). 
They are skipped by default.", ) - group.addoption( - "--include-heisentests", - action="store_true", - help="Includes heisentests (marked with heisentests marker). They are skipped by default.", - ) allowed_trace_sql_columns_list = ",".join(ALLOWED_TRACE_SQL_COLUMNS) group.addoption( "--trace-sql", @@ -242,9 +237,6 @@ def pytest_configure(config): config.addinivalue_line( "markers", "quarantined: mark test that are in quarantine (i.e. flaky, need to be isolated and fixed)" ) - config.addinivalue_line( - "markers", "heisentests: mark test that should be run in isolation due to resource consumption" - ) config.addinivalue_line( "markers", "credential_file(name): mark tests that require credential file in CREDENTIALS_DIR" ) @@ -314,14 +306,6 @@ def skip_quarantined_test(item): ) -def skip_heisen_test(item): - for _ in item.iter_markers(name="heisentests"): - pytest.skip( - "The test is skipped because it has heisentests marker. " - "And --include-heisentests flag is passed to pytest. {item}".format(item=item) - ) - - def skip_if_integration_disabled(marker, item): integration_name = marker.args[0] environment_variable_name = "INTEGRATION_" + integration_name.upper() @@ -378,7 +362,6 @@ def pytest_runtest_setup(item): include_long_running = item.config.getoption("--include-long-running") include_quarantined = item.config.getoption("--include-quarantined") - include_heisentests = item.config.getoption("--include-heisentests") for marker in item.iter_markers(name="integration"): skip_if_integration_disabled(marker, item) @@ -397,8 +380,6 @@ def pytest_runtest_setup(item): skip_long_running_test(item) if not include_quarantined: skip_quarantined_test(item) - if not include_heisentests: - skip_heisen_test(item) skip_if_credential_file_missing(item) skip_if_airflow_2_test(item) diff --git a/tests/core/test_configuration.py b/tests/core/test_configuration.py index 4e32c5e947dcf..a64407e70bd3c 100644 --- a/tests/core/test_configuration.py +++ b/tests/core/test_configuration.py @@ -19,6 +19,7 @@ import os import re import tempfile +import textwrap import unittest import warnings from collections import OrderedDict @@ -28,7 +29,6 @@ from airflow import configuration from airflow.configuration import ( - DEFAULT_CONFIG, AirflowConfigException, AirflowConfigParser, conf, @@ -561,8 +561,17 @@ def test_command_from_env(self): assert test_cmdenv_conf.get('testcmdenv', 'notacommand') == 'OK' def test_parameterized_config_gen(self): + config = textwrap.dedent( + """ + [core] + dags_folder = {AIRFLOW_HOME}/dags + sql_alchemy_conn = sqlite:///{AIRFLOW_HOME}/airflow.db + parallelism = 32 + fernet_key = {FERNET_KEY} + """ + ) - cfg = parameterized_config(DEFAULT_CONFIG) + cfg = parameterized_config(config) # making sure some basic building blocks are present: assert "[core]" in cfg diff --git a/tests/core/test_providers_manager.py b/tests/core/test_providers_manager.py index 4c03984077db9..7299971749eb0 100644 --- a/tests/core/test_providers_manager.py +++ b/tests/core/test_providers_manager.py @@ -21,7 +21,9 @@ from airflow.providers_manager import ProvidersManager ALL_PROVIDERS = [ + 'apache-airflow-providers-airbyte', 'apache-airflow-providers-amazon', + 'apache-airflow-providers-apache-beam', 'apache-airflow-providers-apache-cassandra', 'apache-airflow-providers-apache-druid', 'apache-airflow-providers-apache-hdfs', @@ -57,6 +59,7 @@ 'apache-airflow-providers-microsoft-winrm', 'apache-airflow-providers-mongo', 'apache-airflow-providers-mysql', + 'apache-airflow-providers-neo4j', 
'apache-airflow-providers-odbc', 'apache-airflow-providers-openfaas', 'apache-airflow-providers-opsgenie', @@ -79,7 +82,9 @@ # 'apache-airflow-providers-snowflake', 'apache-airflow-providers-sqlite', 'apache-airflow-providers-ssh', + 'apache-airflow-providers-tableau', 'apache-airflow-providers-telegram', + 'apache-airflow-providers-trino', 'apache-airflow-providers-vertica', 'apache-airflow-providers-yandex', 'apache-airflow-providers-zendesk', @@ -122,6 +127,7 @@ 'mongo', 'mssql', 'mysql', + 'neo4j', 'odbc', 'oracle', 'pig_cli', @@ -142,6 +148,7 @@ 'sqoop', 'ssh', 'tableau', + 'trino', 'vault', 'vertica', 'wasb', diff --git a/tests/core/test_stats.py b/tests/core/test_stats.py index 428192b8b0d23..83169e2935b24 100644 --- a/tests/core/test_stats.py +++ b/tests/core/test_stats.py @@ -31,17 +31,7 @@ class CustomStatsd(statsd.StatsClient): - incr_calls = 0 - - def __init__(self, host=None, port=None, prefix=None): - super().__init__() - - def incr(self, stat, count=1, rate=1): # pylint: disable=unused-argument - CustomStatsd.incr_calls += 1 - - @classmethod - def _reset(cls): - cls.incr_calls = 0 + pass class InvalidCustomStatsd: @@ -50,25 +40,14 @@ class InvalidCustomStatsd: statsd.StatsClient. """ - incr_calls = 0 - def __init__(self, host=None, port=None, prefix=None): pass - def incr(self, stat, count=1, rate=1): # pylint: disable=unused-argument - InvalidCustomStatsd.incr_calls += 1 - - @classmethod - def _reset(cls): - cls.incr_calls = 0 - class TestStats(unittest.TestCase): def setUp(self): - self.statsd_client = Mock() + self.statsd_client = Mock(spec=statsd.StatsClient) self.stats = SafeStatsdLogger(self.statsd_client) - CustomStatsd._reset() - InvalidCustomStatsd._reset() def test_increment_counter_with_valid_name(self): self.stats.incr('test_stats_run') @@ -86,49 +65,56 @@ def test_stat_name_must_only_include_allowed_characters(self): self.stats.incr('test/$tats') self.statsd_client.assert_not_called() - @conf_vars({('metrics', 'statsd_on'): 'True'}) - @mock.patch("statsd.StatsClient") - def test_does_send_stats_using_statsd(self, mock_statsd): - importlib.reload(airflow.stats) - airflow.stats.Stats.incr("dummy_key") - mock_statsd.return_value.incr.assert_called_once_with('dummy_key', 1, 1) + def test_timer(self): + with self.stats.timer("dummy_timer"): + pass + self.statsd_client.timer.assert_called_once_with('dummy_timer') - @conf_vars({('metrics', 'statsd_on'): 'True'}) - @mock.patch("datadog.DogStatsd") - def test_does_not_send_stats_using_dogstatsd(self, mock_dogstatsd): - importlib.reload(airflow.stats) - airflow.stats.Stats.incr("dummy_key") - mock_dogstatsd.return_value.assert_not_called() + def test_empty_timer(self): + with self.stats.timer(): + pass + self.statsd_client.timer.assert_not_called() - @conf_vars( - { - ("metrics", "statsd_on"): "True", - ("metrics", "statsd_custom_client_path"): "tests.core.test_stats.CustomStatsd", - } - ) - def test_load_custom_statsd_client(self): + def test_timing(self): + self.stats.timing("dummy_timer", 123) + self.statsd_client.timing.assert_called_once_with('dummy_timer', 123) + + def test_gauge(self): + self.stats.gauge("dummy", 123) + self.statsd_client.gauge.assert_called_once_with('dummy', 123, 1, False) + + def test_decr(self): + self.stats.decr("dummy") + self.statsd_client.decr.assert_called_once_with('dummy', 1, 1) + + def test_enabled_by_config(self): + """Test that enabling this sets the right instance properties""" + with conf_vars({('metrics', 'statsd_on'): 'True'}): + importlib.reload(airflow.stats) + assert 
isinstance(airflow.stats.Stats.statsd, statsd.StatsClient) + assert not hasattr(airflow.stats.Stats, 'dogstatsd') + # Avoid side-effects importlib.reload(airflow.stats) - assert 'CustomStatsd' == type(airflow.stats.Stats.statsd).__name__ # noqa: E721 - @conf_vars( - { - ("metrics", "statsd_on"): "True", - ("metrics", "statsd_custom_client_path"): "tests.core.test_stats.CustomStatsd", - } - ) - def test_does_use_custom_statsd_client(self): + def test_load_custom_statsd_client(self): + with conf_vars( + { + ("metrics", "statsd_on"): "True", + ("metrics", "statsd_custom_client_path"): f"{__name__}.CustomStatsd", + } + ): + importlib.reload(airflow.stats) + assert isinstance(airflow.stats.Stats.statsd, CustomStatsd) + # Avoid side-effects importlib.reload(airflow.stats) - airflow.stats.Stats.incr("dummy_key") - assert airflow.stats.Stats.statsd.incr_calls == 1 - @conf_vars( - { - ("metrics", "statsd_on"): "True", - ("metrics", "statsd_custom_client_path"): "tests.core.test_stats.InvalidCustomStatsd", - } - ) def test_load_invalid_custom_stats_client(self): - with pytest.raises( + with conf_vars( + { + ("metrics", "statsd_on"): "True", + ("metrics", "statsd_custom_client_path"): f"{__name__}.InvalidCustomStatsd", + } + ), pytest.raises( AirflowConfigException, match=re.escape( 'Your custom Statsd client must extend the statsd.' @@ -136,15 +122,16 @@ def test_load_invalid_custom_stats_client(self): ), ): importlib.reload(airflow.stats) - - def tearDown(self) -> None: - # To avoid side-effect + airflow.stats.Stats.incr("dummy_key") importlib.reload(airflow.stats) class TestDogStats(unittest.TestCase): def setUp(self): - self.dogstatsd_client = Mock() + pytest.importorskip('datadog') + from datadog import DogStatsd + + self.dogstatsd_client = Mock(spec=DogStatsd) self.dogstatsd = SafeDogStatsdLogger(self.dogstatsd_client) def test_increment_counter_with_valid_name_with_dogstatsd(self): @@ -165,48 +152,72 @@ def test_stat_name_must_only_include_allowed_characters_with_dogstatsd(self): self.dogstatsd.incr('test/$tats') self.dogstatsd_client.assert_not_called() - @conf_vars({('metrics', 'statsd_datadog_enabled'): 'True'}) - @mock.patch("datadog.DogStatsd") - def test_does_send_stats_using_dogstatsd_when_dogstatsd_on(self, mock_dogstatsd): - importlib.reload(airflow.stats) - airflow.stats.Stats.incr("dummy_key") - mock_dogstatsd.return_value.increment.assert_called_once_with( + def test_does_send_stats_using_dogstatsd_when_dogstatsd_on(self): + self.dogstatsd.incr("dummy_key") + self.dogstatsd_client.increment.assert_called_once_with( metric='dummy_key', sample_rate=1, tags=[], value=1 ) - @conf_vars({('metrics', 'statsd_datadog_enabled'): 'True'}) - @mock.patch("datadog.DogStatsd") - def test_does_send_stats_using_dogstatsd_with_tags(self, mock_dogstatsd): - importlib.reload(airflow.stats) - airflow.stats.Stats.incr("dummy_key", 1, 1, ['key1:value1', 'key2:value2']) - mock_dogstatsd.return_value.increment.assert_called_once_with( + def test_does_send_stats_using_dogstatsd_with_tags(self): + self.dogstatsd.incr("dummy_key", 1, 1, ['key1:value1', 'key2:value2']) + self.dogstatsd_client.increment.assert_called_once_with( metric='dummy_key', sample_rate=1, tags=['key1:value1', 'key2:value2'], value=1 ) - @conf_vars({('metrics', 'statsd_on'): 'True', ('metrics', 'statsd_datadog_enabled'): 'True'}) - @mock.patch("datadog.DogStatsd") - def test_does_send_stats_using_dogstatsd_when_statsd_and_dogstatsd_both_on(self, mock_dogstatsd): - importlib.reload(airflow.stats) - airflow.stats.Stats.incr("dummy_key") - 
mock_dogstatsd.return_value.increment.assert_called_once_with( + def test_does_send_stats_using_dogstatsd_when_statsd_and_dogstatsd_both_on(self): + self.dogstatsd.incr("dummy_key") + self.dogstatsd_client.increment.assert_called_once_with( metric='dummy_key', sample_rate=1, tags=[], value=1 ) - @conf_vars({('metrics', 'statsd_on'): 'True', ('metrics', 'statsd_datadog_enabled'): 'True'}) - @mock.patch("statsd.StatsClient") - def test_does_not_send_stats_using_statsd_when_statsd_and_dogstatsd_both_on(self, mock_statsd): + def test_timer(self): + with self.dogstatsd.timer("dummy_timer"): + pass + self.dogstatsd_client.timed.assert_called_once_with('dummy_timer', tags=[]) + + def test_empty_timer(self): + with self.dogstatsd.timer(): + pass + self.dogstatsd_client.timed.assert_not_called() + + def test_timing(self): + self.dogstatsd.timing("dummy_timer", 123) + self.dogstatsd_client.timing.assert_called_once_with(metric='dummy_timer', value=123, tags=[]) + + def test_gauge(self): + self.dogstatsd.gauge("dummy", 123) + self.dogstatsd_client.gauge.assert_called_once_with(metric='dummy', sample_rate=1, value=123, tags=[]) + + def test_decr(self): + self.dogstatsd.decr("dummy") + self.dogstatsd_client.decrement.assert_called_once_with( + metric='dummy', sample_rate=1, value=1, tags=[] + ) + + def test_enabled_by_config(self): + """Test that enabling this sets the right instance properties""" + from datadog import DogStatsd + + with conf_vars({('metrics', 'statsd_datadog_enabled'): 'True'}): + importlib.reload(airflow.stats) + assert isinstance(airflow.stats.Stats.dogstatsd, DogStatsd) + assert not hasattr(airflow.stats.Stats, 'statsd') + # Avoid side-effects importlib.reload(airflow.stats) - airflow.stats.Stats.incr("dummy_key") - mock_statsd.return_value.assert_not_called() - def tearDown(self) -> None: - # To avoid side-effect + def test_does_not_send_stats_using_statsd_when_statsd_and_dogstatsd_both_on(self): + from datadog import DogStatsd + + with conf_vars({('metrics', 'statsd_on'): 'True', ('metrics', 'statsd_datadog_enabled'): 'True'}): + importlib.reload(airflow.stats) + assert isinstance(airflow.stats.Stats.dogstatsd, DogStatsd) + assert not hasattr(airflow.stats.Stats, 'statsd') importlib.reload(airflow.stats) class TestStatsWithAllowList(unittest.TestCase): def setUp(self): - self.statsd_client = Mock() + self.statsd_client = Mock(spec=statsd.StatsClient) self.stats = SafeStatsdLogger(self.statsd_client, AllowListValidator("stats_one, stats_two")) def test_increment_counter_with_allowed_key(self): @@ -224,7 +235,10 @@ def test_not_increment_counter_if_not_allowed(self): class TestDogStatsWithAllowList(unittest.TestCase): def setUp(self): - self.dogstatsd_client = Mock() + pytest.importorskip('datadog') + from datadog import DogStatsd + + self.dogstatsd_client = Mock(spec=DogStatsd) self.dogstats = SafeDogStatsdLogger(self.dogstatsd_client, AllowListValidator("stats_one, stats_two")) def test_increment_counter_with_allowed_key(self): diff --git a/tests/executors/kubernetes_executor_template_files/basic_template.yaml b/tests/executors/kubernetes_executor_template_files/basic_template.yaml new file mode 100644 index 0000000000000..1fb00f2ddfa6c --- /dev/null +++ b/tests/executors/kubernetes_executor_template_files/basic_template.yaml @@ -0,0 +1,34 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +--- +kind: Pod +apiVersion: v1 +metadata: + name: dummy-name-dont-delete + namespace: dummy-name-dont-delete + labels: + mylabel: foo +spec: + containers: + - name: base + image: dummy-name-dont-delete + securityContext: + runAsUser: 50000 + fsGroup: 50000 + imagePullSecrets: + - name: airflow-registry + schedulerName: default-scheduler diff --git a/tests/executors/test_celery_executor.py b/tests/executors/test_celery_executor.py index 944fa49c4b932..4f930073f8bfc 100644 --- a/tests/executors/test_celery_executor.py +++ b/tests/executors/test_celery_executor.py @@ -414,7 +414,9 @@ class TestBulkStateFetcher(unittest.TestCase): def test_should_support_kv_backend(self, mock_mget): with _prepare_app(): mock_backend = BaseKeyValueStoreBackend(app=celery_executor.app) - with mock.patch.object(celery_executor.app, 'backend', mock_backend): + with mock.patch.object(celery_executor.app, 'backend', mock_backend), self.assertLogs( + "airflow.executors.celery_executor.BulkStateFetcher", level="DEBUG" + ) as cm: fetcher = BulkStateFetcher() result = fetcher.get_many( [ @@ -429,6 +431,9 @@ def test_should_support_kv_backend(self, mock_mget): mock_mget.assert_called_once_with(mock.ANY) assert result == {'123': ('SUCCESS', None), '456': ("PENDING", None)} + assert [ + 'DEBUG:airflow.executors.celery_executor.BulkStateFetcher:Fetched 2 state(s) for 2 task(s)' + ] == cm.output @mock.patch("celery.backends.database.DatabaseBackend.ResultSession") @pytest.mark.integration("redis") @@ -438,21 +443,26 @@ def test_should_support_db_backend(self, mock_session): with _prepare_app(): mock_backend = DatabaseBackend(app=celery_executor.app, url="sqlite3://") - with mock.patch.object(celery_executor.app, 'backend', mock_backend): + with mock.patch.object(celery_executor.app, 'backend', mock_backend), self.assertLogs( + "airflow.executors.celery_executor.BulkStateFetcher", level="DEBUG" + ) as cm: mock_session = mock_backend.ResultSession.return_value # pylint: disable=no-member mock_session.query.return_value.filter.return_value.all.return_value = [ mock.MagicMock(**{"to_dict.return_value": {"status": "SUCCESS", "task_id": "123"}}) ] - fetcher = BulkStateFetcher() - result = fetcher.get_many( - [ - mock.MagicMock(task_id="123"), - mock.MagicMock(task_id="456"), - ] - ) + fetcher = BulkStateFetcher() + result = fetcher.get_many( + [ + mock.MagicMock(task_id="123"), + mock.MagicMock(task_id="456"), + ] + ) assert result == {'123': ('SUCCESS', None), '456': ("PENDING", None)} + assert [ + 'DEBUG:airflow.executors.celery_executor.BulkStateFetcher:Fetched 2 state(s) for 2 task(s)' + ] == cm.output @pytest.mark.integration("redis") @pytest.mark.integration("rabbitmq") @@ -461,7 +471,9 @@ def test_should_support_base_backend(self): with _prepare_app(): mock_backend = mock.MagicMock(autospec=BaseBackend) - with mock.patch.object(celery_executor.app, 'backend', mock_backend): + with mock.patch.object(celery_executor.app, 'backend', mock_backend), 
self.assertLogs( + "airflow.executors.celery_executor.BulkStateFetcher", level="DEBUG" + ) as cm: fetcher = BulkStateFetcher(1) result = fetcher.get_many( [ @@ -471,3 +483,6 @@ def test_should_support_base_backend(self): ) assert result == {'123': ('SUCCESS', None), '456': ("PENDING", None)} + assert [ + 'DEBUG:airflow.executors.celery_executor.BulkStateFetcher:Fetched 2 state(s) for 2 task(s)' + ] == cm.output diff --git a/tests/executors/test_kubernetes_executor.py b/tests/executors/test_kubernetes_executor.py index 9abb32884310b..8d3d5b4d450e2 100644 --- a/tests/executors/test_kubernetes_executor.py +++ b/tests/executors/test_kubernetes_executor.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. # +import pathlib import random import re import string @@ -22,6 +23,7 @@ from datetime import datetime from unittest import mock +import pytest from kubernetes.client import models as k8s from urllib3 import HTTPResponse @@ -34,11 +36,12 @@ from airflow.executors.kubernetes_executor import ( AirflowKubernetesScheduler, KubernetesExecutor, + KubernetesJobWatcher, create_pod_id, get_base_pod_from_template, ) from airflow.kubernetes import pod_generator - from airflow.kubernetes.pod_generator import PodGenerator + from airflow.kubernetes.pod_generator import PodGenerator, datetime_to_label_safe_datestring from airflow.utils.state import State except ImportError: AirflowKubernetesScheduler = None # type: ignore @@ -194,6 +197,113 @@ def test_gauge_executor_metrics(self, mock_stats_gauge, mock_trigger_tasks, mock ] mock_stats_gauge.assert_has_calls(calls) + @mock.patch('airflow.executors.kubernetes_executor.KubernetesJobWatcher') + @mock.patch('airflow.executors.kubernetes_executor.get_kube_client') + def test_invalid_executor_config(self, mock_get_kube_client, mock_kubernetes_job_watcher): + executor = self.kubernetes_executor + executor.start() + + assert executor.event_buffer == {} + executor.execute_async( + key=('dag', 'task', datetime.utcnow(), 1), + queue=None, + command=['airflow', 'tasks', 'run', 'true', 'some_parameter'], + executor_config=k8s.V1Pod( + spec=k8s.V1PodSpec( + containers=[k8s.V1Container(name="base", image="myimage", image_pull_policy="Always")] + ) + ), + ) + + assert list(executor.event_buffer.values())[0][1] == "Invalid executor_config passed" + + @pytest.mark.execution_timeout(10) + @unittest.skipIf(AirflowKubernetesScheduler is None, 'kubernetes python package is not installed') + @mock.patch('airflow.kubernetes.pod_launcher.PodLauncher.run_pod_async') + @mock.patch('airflow.executors.kubernetes_executor.get_kube_client') + def test_pod_template_file_override_in_executor_config(self, mock_get_kube_client, mock_run_pod_async): + current_folder = pathlib.Path(__file__).parent.absolute() + template_file = str( + (current_folder / "kubernetes_executor_template_files" / "basic_template.yaml").absolute() + ) + + mock_kube_client = mock.patch('kubernetes.client.CoreV1Api', autospec=True) + mock_get_kube_client.return_value = mock_kube_client + + with conf_vars({('kubernetes', 'pod_template_file'): ''}): + executor = self.kubernetes_executor + executor.start() + + assert executor.event_buffer == {} + assert executor.task_queue.empty() + + execution_date = datetime.utcnow() + + executor.execute_async( + key=('dag', 'task', execution_date, 1), + queue=None, + command=['airflow', 'tasks', 'run', 'true', 'some_parameter'], + executor_config={ + "pod_template_file": template_file, + "pod_override": k8s.V1Pod( + 
metadata=k8s.V1ObjectMeta(labels={"release": "stable"}), + spec=k8s.V1PodSpec( + containers=[k8s.V1Container(name="base", image="airflow:3.6")], + ), + ), + }, + ) + + assert not executor.task_queue.empty() + task = executor.task_queue.get_nowait() + _, _, expected_executor_config, expected_pod_template_file = task + + # Test that the correct values have been put to queue + assert expected_executor_config.metadata.labels == {'release': 'stable'} + assert expected_pod_template_file == template_file + + self.kubernetes_executor.kube_scheduler.run_next(task) + mock_run_pod_async.assert_called_once_with( + k8s.V1Pod( + api_version="v1", + kind="Pod", + metadata=k8s.V1ObjectMeta( + name=mock.ANY, + namespace="default", + annotations={ + 'dag_id': 'dag', + 'execution_date': execution_date.isoformat(), + 'task_id': 'task', + 'try_number': '1', + }, + labels={ + 'airflow-worker': '5', + 'airflow_version': mock.ANY, + 'dag_id': 'dag', + 'execution_date': datetime_to_label_safe_datestring(execution_date), + 'kubernetes_executor': 'True', + 'mylabel': 'foo', + 'release': 'stable', + 'task_id': 'task', + 'try_number': '1', + }, + ), + spec=k8s.V1PodSpec( + containers=[ + k8s.V1Container( + name="base", + image="airflow:3.6", + args=['airflow', 'tasks', 'run', 'true', 'some_parameter'], + env=[k8s.V1EnvVar(name='AIRFLOW_IS_K8S_EXECUTOR_POD', value='True')], + ) + ], + image_pull_secrets=[k8s.V1LocalObjectReference(name='airflow-registry')], + scheduler_name='default-scheduler', + security_context=k8s.V1PodSecurityContext(fs_group=50000, run_as_user=50000), + ), + ) + ) + @mock.patch('airflow.executors.kubernetes_executor.KubernetesJobWatcher') @mock.patch('airflow.executors.kubernetes_executor.get_kube_client') def test_change_state_running(self, mock_get_kube_client, mock_kubernetes_job_watcher): @@ -308,3 +418,94 @@ def test_not_adopt_unassigned_task(self, mock_kube_client): executor.adopt_launched_task(mock_kube_client, pod=pod, pod_ids=pod_ids) assert not mock_kube_client.patch_namespaced_pod.called assert pod_ids == {"foobar": {}} + + +class TestKubernetesJobWatcher(unittest.TestCase): + def setUp(self): + self.watcher = KubernetesJobWatcher( + namespace="airflow", + multi_namespace_mode=False, + watcher_queue=mock.MagicMock(), + resource_version="0", + scheduler_job_id="123", + kube_config=mock.MagicMock(), + ) + self.kube_client = mock.MagicMock() + self.core_annotations = { + "dag_id": "dag", + "task_id": "task", + "execution_date": "dt", + "try_number": "1", + } + self.pod = k8s.V1Pod( + metadata=k8s.V1ObjectMeta( + name="foo", + annotations={"airflow-worker": "bar", **self.core_annotations}, + namespace="airflow", + resource_version="456", + ), + status=k8s.V1PodStatus(phase="Pending"), + ) + self.events = [] + + def _run(self): + with mock.patch('airflow.executors.kubernetes_executor.watch') as mock_watch: + mock_watch.Watch.return_value.stream.return_value = self.events + latest_resource_version = self.watcher._run( + self.kube_client, + self.watcher.resource_version, + self.watcher.scheduler_job_id, + self.watcher.kube_config, + ) + assert self.pod.metadata.resource_version == latest_resource_version + + def assert_watcher_queue_called_once_with_state(self, state): + self.watcher.watcher_queue.put.assert_called_once_with( + ( + self.pod.metadata.name, + self.watcher.namespace, + state, + self.core_annotations, + self.pod.metadata.resource_version, + ) + ) + + def test_process_status_pending(self): + self.events.append({"type": 'MODIFIED', "object": self.pod}) + + self._run() + 
self.watcher.watcher_queue.put.assert_not_called() + + def test_process_status_pending_deleted(self): + self.events.append({"type": 'DELETED', "object": self.pod}) + + self._run() + self.assert_watcher_queue_called_once_with_state(State.FAILED) + + def test_process_status_failed(self): + self.pod.status.phase = "Failed" + self.events.append({"type": 'MODIFIED', "object": self.pod}) + + self._run() + self.assert_watcher_queue_called_once_with_state(State.FAILED) + + def test_process_status_succeeded(self): + self.pod.status.phase = "Succeeded" + self.events.append({"type": 'MODIFIED', "object": self.pod}) + + self._run() + self.assert_watcher_queue_called_once_with_state(None) + + def test_process_status_running(self): + self.pod.status.phase = "Running" + self.events.append({"type": 'MODIFIED', "object": self.pod}) + + self._run() + self.watcher.watcher_queue.put.assert_not_called() + + def test_process_status_catchall(self): + self.pod.status.phase = "Unknown" + self.events.append({"type": 'MODIFIED', "object": self.pod}) + + self._run() + self.watcher.watcher_queue.put.assert_not_called() diff --git a/tests/jobs/test_backfill_job.py b/tests/jobs/test_backfill_job.py index 9826f18299707..c6f620a20a824 100644 --- a/tests/jobs/test_backfill_job.py +++ b/tests/jobs/test_backfill_job.py @@ -56,7 +56,6 @@ DEFAULT_DATE = timezone.datetime(2016, 1, 1) -@pytest.mark.heisentests class TestBackfillJob(unittest.TestCase): def _get_dummy_dag(self, dag_id, pool=Pool.DEFAULT_POOL_NAME, task_concurrency=None): dag = DAG(dag_id=dag_id, start_date=DEFAULT_DATE, schedule_interval='@daily') @@ -809,6 +808,7 @@ def test_backfill_pooled_tasks(self): ti.refresh_from_db() assert ti.state == State.SUCCESS + @pytest.mark.quarantined def test_backfill_depends_on_past(self): """ Test that backfill respects ignore_depends_on_past @@ -1517,3 +1517,20 @@ def test_job_id_is_assigned_to_dag_run(self): job.run() dr: DagRun = dag.get_last_dagrun() assert dr.creating_job_id == job.id + + def test_backfill_has_job_id(self): + """Make sure that backfill jobs are assigned job_ids.""" + dag = self.dagbag.get_dag("test_start_date_scheduling") + dag.clear() + + executor = MockExecutor(parallelism=16) + + job = BackfillJob( + executor=executor, + dag=dag, + start_date=DEFAULT_DATE, + end_date=DEFAULT_DATE + datetime.timedelta(days=1), + run_backwards=True, + ) + job.run() + assert executor.job_id is not None diff --git a/tests/jobs/test_local_task_job.py b/tests/jobs/test_local_task_job.py index 537a242af14ce..d8a0386603838 100644 --- a/tests/jobs/test_local_task_job.py +++ b/tests/jobs/test_local_task_job.py @@ -170,7 +170,7 @@ def test_heartbeat_failed_fast(self): time2 = heartbeat_records[i] # Assert that difference small enough delta = (time2 - time1).total_seconds() - assert abs(delta - job.heartrate) < 0.05 + assert abs(delta - job.heartrate) < 0.5 @pytest.mark.quarantined def test_mark_success_no_kill(self): diff --git a/tests/jobs/test_scheduler_job.py b/tests/jobs/test_scheduler_job.py index 38678333798f9..a5fd7941c0f6a 100644 --- a/tests/jobs/test_scheduler_job.py +++ b/tests/jobs/test_scheduler_job.py @@ -16,19 +16,21 @@ # specific language governing permissions and limitations # under the License. 
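A large share of the scheduler-test churn that follows is one mechanical pattern: the job under test is stored on self so that tearDown can always stop its DAG-processor agent, even when an assertion fails mid-test. A simplified sketch of that pattern in isolation (the job object here is a stand-in for the real SchedulerJob):

import unittest


class SchedulerJobTestCase(unittest.TestCase):
    """Skeleton of the cleanup pattern adopted by the tests below."""

    def setUp(self):
        # Each test creates its own job and parks it here.
        self.scheduler_job = None

    def tearDown(self):
        # Runs even when the test body raised, so a live processor
        # agent can never outlive its test and leak child processes.
        if self.scheduler_job and self.scheduler_job.processor_agent:
            self.scheduler_job.processor_agent.end()
        self.scheduler_job = None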
# - +# pylint: disable=attribute-defined-outside-init import datetime import os import shutil import unittest from datetime import timedelta from tempfile import NamedTemporaryFile, mkdtemp +from time import sleep from unittest import mock from unittest.mock import MagicMock, patch from zipfile import ZipFile import psutil import pytest +from freezegun import freeze_time from parameterized import parameterized from sqlalchemy import func @@ -116,8 +118,12 @@ def setUp(self): # Speed up some tests by not running the tasks, just look at what we # enqueue! self.null_exec = MockExecutor() + self.scheduler_job = None def tearDown(self) -> None: + if self.scheduler_job and self.scheduler_job.processor_agent: + self.scheduler_job.processor_agent.end() + self.scheduler_job = None self.clean_db() def create_test_dag(self, start_date=DEFAULT_DATE, end_date=DEFAULT_DATE + timedelta(hours=1), **kwargs): @@ -397,9 +403,9 @@ def test_dag_file_processor_process_task_instances(self, state, start_date, end_ dag = SerializedDAG.from_dict(SerializedDAG.to_dict(dag)) - scheduler = SchedulerJob(subdir=os.devnull) - scheduler.processor_agent = mock.MagicMock() - scheduler.dagbag.bag_dag(dag, root_dag=dag) + self.scheduler_job = SchedulerJob(subdir=os.devnull) + self.scheduler_job.processor_agent = mock.MagicMock() + self.scheduler_job.dagbag.bag_dag(dag, root_dag=dag) dag.clear() dr = dag.create_dagrun( run_type=DagRunType.SCHEDULED, @@ -414,7 +420,7 @@ def test_dag_file_processor_process_task_instances(self, state, start_date, end_ ti.start_date = start_date ti.end_date = end_date - count = scheduler._schedule_dag_run(dr, set(), session) + count = self.scheduler_job._schedule_dag_run(dr, set(), session) assert count == 1 session.refresh(ti) @@ -454,9 +460,9 @@ def test_dag_file_processor_process_task_instances_with_task_concurrency( dag = SerializedDAG.from_dict(SerializedDAG.to_dict(dag)) - scheduler = SchedulerJob(subdir=os.devnull) - scheduler.processor_agent = mock.MagicMock() - scheduler.dagbag.bag_dag(dag, root_dag=dag) + self.scheduler_job = SchedulerJob(subdir=os.devnull) + self.scheduler_job.processor_agent = mock.MagicMock() + self.scheduler_job.dagbag.bag_dag(dag, root_dag=dag) dag.clear() dr = dag.create_dagrun( run_type=DagRunType.SCHEDULED, @@ -471,7 +477,7 @@ def test_dag_file_processor_process_task_instances_with_task_concurrency( ti.start_date = start_date ti.end_date = end_date - count = scheduler._schedule_dag_run(dr, set(), session) + count = self.scheduler_job._schedule_dag_run(dr, set(), session) assert count == 1 session.refresh(ti) @@ -513,9 +519,9 @@ def test_dag_file_processor_process_task_instances_depends_on_past(self, state, dag = SerializedDAG.from_dict(SerializedDAG.to_dict(dag)) - scheduler = SchedulerJob(subdir=os.devnull) - scheduler.processor_agent = mock.MagicMock() - scheduler.dagbag.bag_dag(dag, root_dag=dag) + self.scheduler_job = SchedulerJob(subdir=os.devnull) + self.scheduler_job.processor_agent = mock.MagicMock() + self.scheduler_job.dagbag.bag_dag(dag, root_dag=dag) dag.clear() dr = dag.create_dagrun( run_type=DagRunType.SCHEDULED, @@ -531,7 +537,7 @@ def test_dag_file_processor_process_task_instances_depends_on_past(self, state, ti.start_date = start_date ti.end_date = end_date - count = scheduler._schedule_dag_run(dr, set(), session) + count = self.scheduler_job._schedule_dag_run(dr, set(), session) assert count == 2 session.refresh(tis[0]) @@ -546,21 +552,23 @@ def test_scheduler_job_add_new_task(self): dag = DAG(dag_id='test_scheduler_add_new_task', 
start_date=DEFAULT_DATE) BashOperator(task_id='dummy', dag=dag, owner='airflow', bash_command='echo test') - scheduler = SchedulerJob(subdir=os.devnull) - scheduler.dagbag.bag_dag(dag, root_dag=dag) + self.scheduler_job = SchedulerJob(subdir=os.devnull) + self.scheduler_job.dagbag.bag_dag(dag, root_dag=dag) # Since we don't want to store the code for the DAG defined in this file with mock.patch.object(settings, "STORE_DAG_CODE", False): - scheduler.dagbag.sync_to_db() + self.scheduler_job.dagbag.sync_to_db() session = settings.Session() orm_dag = session.query(DagModel).get(dag.dag_id) assert orm_dag is not None - scheduler = SchedulerJob(subdir=os.devnull) - scheduler.processor_agent = mock.MagicMock() - dag = scheduler.dagbag.get_dag('test_scheduler_add_new_task', session=session) - scheduler._create_dag_runs([orm_dag], session) + if self.scheduler_job.processor_agent: + self.scheduler_job.processor_agent.end() + self.scheduler_job = SchedulerJob(subdir=os.devnull) + self.scheduler_job.processor_agent = mock.MagicMock() + dag = self.scheduler_job.dagbag.get_dag('test_scheduler_add_new_task', session=session) + self.scheduler_job._create_dag_runs([orm_dag], session) drs = DagRun.find(dag_id=dag.dag_id, session=session) assert len(drs) == 1 @@ -572,7 +580,7 @@ def test_scheduler_job_add_new_task(self): BashOperator(task_id='dummy2', dag=dag, owner='airflow', bash_command='echo test') SerializedDagModel.write_dag(dag=dag) - scheduled_tis = scheduler._schedule_dag_run(dr, set(), session) + scheduled_tis = self.scheduler_job._schedule_dag_run(dr, set(), session) session.flush() assert scheduled_tis == 2 @@ -600,9 +608,9 @@ def test_runs_respected_after_clear(self): session.close() dag = SerializedDAG.from_dict(SerializedDAG.to_dict(dag)) - scheduler = SchedulerJob(subdir=os.devnull) - scheduler.processor_agent = mock.MagicMock() - scheduler.dagbag.bag_dag(dag, root_dag=dag) + self.scheduler_job = SchedulerJob(subdir=os.devnull) + self.scheduler_job.processor_agent = mock.MagicMock() + self.scheduler_job.dagbag.bag_dag(dag, root_dag=dag) dag.clear() date = DEFAULT_DATE @@ -636,11 +644,11 @@ def test_runs_respected_after_clear(self): # and schedule them in, so we can check how many # tasks are put on the task_instances_list (should be one, not 3) with create_session() as session: - num_scheduled = scheduler._schedule_dag_run(dr1, set(), session) + num_scheduled = self.scheduler_job._schedule_dag_run(dr1, set(), session) assert num_scheduled == 1 - num_scheduled = scheduler._schedule_dag_run(dr2, {dr1.execution_date}, session) + num_scheduled = self.scheduler_job._schedule_dag_run(dr2, {dr1.execution_date}, session) assert num_scheduled == 0 - num_scheduled = scheduler._schedule_dag_run(dr3, {dr1.execution_date}, session) + num_scheduled = self.scheduler_job._schedule_dag_run(dr3, {dr1.execution_date}, session) assert num_scheduled == 0 @patch.object(TaskInstance, 'handle_failure_with_callback') @@ -709,25 +717,25 @@ def test_should_mark_dummy_task_as_success(self): dagbag = DagBag(dag_folder=dag_file, include_examples=False, read_dags_from_db=False) dagbag.sync_to_db() - scheduler_job = SchedulerJob(subdir=os.devnull) - scheduler_job.processor_agent = mock.MagicMock() - dag = scheduler_job.dagbag.get_dag("test_only_dummy_tasks") + self.scheduler_job = SchedulerJob(subdir=os.devnull) + self.scheduler_job.processor_agent = mock.MagicMock() + dag = self.scheduler_job.dagbag.get_dag("test_only_dummy_tasks") # Create DagRun session = settings.Session() orm_dag = 
session.query(DagModel).get(dag.dag_id) - scheduler_job._create_dag_runs([orm_dag], session) + self.scheduler_job._create_dag_runs([orm_dag], session) drs = DagRun.find(dag_id=dag.dag_id, session=session) assert len(drs) == 1 dr = drs[0] # Schedule TaskInstances - scheduler_job._schedule_dag_run(dr, {}, session) + self.scheduler_job._schedule_dag_run(dr, {}, session) with create_session() as session: tis = session.query(TaskInstance).all() - dags = scheduler_job.dagbag.dags.values() + dags = self.scheduler_job.dagbag.dags.values() assert ['test_only_dummy_tasks'] == [dag.dag_id for dag in dags] assert 5 == len(tis) assert { @@ -749,7 +757,7 @@ def test_should_mark_dummy_task_as_success(self): assert end_date is None assert duration is None - scheduler_job._schedule_dag_run(dr, {}, session) + self.scheduler_job._schedule_dag_run(dr, {}, session) with create_session() as session: tis = session.query(TaskInstance).all() @@ -776,13 +784,20 @@ def test_should_mark_dummy_task_as_success(self): @pytest.mark.usefixtures("disable_load_example") class TestSchedulerJob(unittest.TestCase): - def setUp(self): + @staticmethod + def clean_db(): clear_db_runs() clear_db_pools() clear_db_dags() clear_db_sla_miss() clear_db_errors() + clear_db_jobs() + # DO NOT try to run clear_db_serialized_dags() here - this will break the tests + # The tests expect DAGs to be fully loaded here via setUpClass method below + def setUp(self): + self.clean_db() + self.scheduler_job = None # Speed up some tests by not running the tasks, just look at what we # enqueue! self.null_exec = MockExecutor() @@ -794,8 +809,12 @@ def setUp(self): self.patcher_dag_code.start() def tearDown(self): + if self.scheduler_job and self.scheduler_job.processor_agent: + self.scheduler_job.processor_agent.end() + self.scheduler_job = None self.patcher.stop() self.patcher_dag_code.stop() + self.clean_db() @classmethod def setUpClass(cls): @@ -805,23 +824,25 @@ def setUpClass(cls): cls.dagbag = DagBag(read_dags_from_db=True) def test_is_alive(self): - job = SchedulerJob(None, heartrate=10, state=State.RUNNING) - assert job.is_alive() + self.scheduler_job = SchedulerJob(None, heartrate=10, state=State.RUNNING) + assert self.scheduler_job.is_alive() - job.latest_heartbeat = timezone.utcnow() - datetime.timedelta(seconds=20) - assert job.is_alive() + self.scheduler_job.latest_heartbeat = timezone.utcnow() - datetime.timedelta(seconds=20) + assert self.scheduler_job.is_alive() - job.latest_heartbeat = timezone.utcnow() - datetime.timedelta(seconds=31) - assert not job.is_alive() + self.scheduler_job.latest_heartbeat = timezone.utcnow() - datetime.timedelta(seconds=31) + assert not self.scheduler_job.is_alive() # test because .seconds was used before instead of total_seconds # internal repr of datetime is (days, seconds) - job.latest_heartbeat = timezone.utcnow() - datetime.timedelta(days=1) - assert not job.is_alive() + self.scheduler_job.latest_heartbeat = timezone.utcnow() - datetime.timedelta(days=1) + assert not self.scheduler_job.is_alive() - job.state = State.SUCCESS - job.latest_heartbeat = timezone.utcnow() - datetime.timedelta(seconds=10) - assert not job.is_alive(), "Completed jobs even with recent heartbeat should not be alive" + self.scheduler_job.state = State.SUCCESS + self.scheduler_job.latest_heartbeat = timezone.utcnow() - datetime.timedelta(seconds=10) + assert ( + not self.scheduler_job.is_alive() + ), "Completed jobs even with recent heartbeat should not be alive" def run_single_scheduler_loop_with_no_dags(self,
dags_folder): """ @@ -833,19 +854,20 @@ def run_single_scheduler_loop_with_no_dags(self, dags_folder): :param dags_folder: the directory to traverse :type dags_folder: str """ - scheduler = SchedulerJob( + self.scheduler_job = SchedulerJob( executor=self.null_exec, num_times_parse_dags=1, subdir=os.path.join(dags_folder) ) - scheduler.heartrate = 0 - scheduler.run() + self.scheduler_job.heartrate = 0 + self.scheduler_job.run() - @pytest.mark.quarantined def test_no_orphan_process_will_be_left(self): empty_dir = mkdtemp() current_process = psutil.Process() old_children = current_process.children(recursive=True) - scheduler = SchedulerJob(subdir=empty_dir, num_runs=1, executor=MockExecutor(do_update=False)) - scheduler.run() + self.scheduler_job = SchedulerJob( + subdir=empty_dir, num_runs=1, executor=MockExecutor(do_update=False) + ) + self.scheduler_job.run() shutil.rmtree(empty_dir) # Remove potential noise created by previous tests. @@ -869,8 +891,8 @@ def test_process_executor_events(self, mock_stats_incr, mock_task_callback): executor = MockExecutor(do_update=False) task_callback = mock.MagicMock() mock_task_callback.return_value = task_callback - scheduler = SchedulerJob(executor=executor) - scheduler.processor_agent = mock.MagicMock() + self.scheduler_job = SchedulerJob(executor=executor) + self.scheduler_job.processor_agent = mock.MagicMock() session = settings.Session() dag.sync_to_db(session=session) @@ -883,7 +905,7 @@ def test_process_executor_events(self, mock_stats_incr, mock_task_callback): executor.event_buffer[ti1.key] = State.FAILED, None - scheduler._process_executor_events(session=session) + self.scheduler_job._process_executor_events(session=session) ti1.refresh_from_db() assert ti1.state == State.QUEUED mock_task_callback.assert_called_once_with( @@ -894,8 +916,8 @@ def test_process_executor_events(self, mock_stats_incr, mock_task_callback): 'finished (failed) although the task says its queued. 
(Info: None) ' 'Was the task killed externally?', ) - scheduler.processor_agent.send_callback_to_execute.assert_called_once_with(task_callback) - scheduler.processor_agent.reset_mock() + self.scheduler_job.processor_agent.send_callback_to_execute.assert_called_once_with(task_callback) + self.scheduler_job.processor_agent.reset_mock() # ti in success state ti1.state = State.SUCCESS @@ -903,10 +925,10 @@ def test_process_executor_events(self, mock_stats_incr, mock_task_callback): session.commit() executor.event_buffer[ti1.key] = State.SUCCESS, None - scheduler._process_executor_events(session=session) + self.scheduler_job._process_executor_events(session=session) ti1.refresh_from_db() assert ti1.state == State.SUCCESS - scheduler.processor_agent.send_callback_to_execute.assert_not_called() + self.scheduler_job.processor_agent.send_callback_to_execute.assert_not_called() mock_stats_incr.assert_called_once_with('scheduler.tasks.killed_externally') @@ -917,8 +939,8 @@ def test_process_executor_events_uses_inmemory_try_number(self): try_number = 42 executor = MagicMock() - scheduler = SchedulerJob(executor=executor) - scheduler.processor_agent = MagicMock() + self.scheduler_job = SchedulerJob(executor=executor) + self.scheduler_job.processor_agent = MagicMock() event_buffer = {TaskInstanceKey(dag_id, task_id, execution_date, try_number): (State.SUCCESS, None)} executor.get_event_buffer.return_value = event_buffer @@ -930,7 +952,7 @@ def test_process_executor_events_uses_inmemory_try_number(self): ti.state = State.SUCCESS session.merge(ti) - scheduler._process_executor_events() + self.scheduler_job._process_executor_events() # Assert that the event_buffer is empty so the task was popped using the right # task instance key assert event_buffer == {} @@ -943,7 +965,7 @@ def test_execute_task_instances_is_paused_wont_execute(self): task1 = DummyOperator(dag=dag, task_id=task_id_1) dag = SerializedDAG.from_dict(SerializedDAG.to_dict(dag)) - scheduler = SchedulerJob(subdir=os.devnull) + self.scheduler_job = SchedulerJob(subdir=os.devnull) session = settings.Session() dagmodel = DagModel( @@ -964,7 +986,7 @@ def test_execute_task_instances_is_paused_wont_execute(self): session.add(dagmodel) session.flush() - scheduler._critical_section_execute_task_instances(session) + self.scheduler_job._critical_section_execute_task_instances(session) session.flush() ti1.refresh_from_db() assert State.SCHEDULED == ti1.state @@ -981,7 +1003,7 @@ def test_execute_task_instances_no_dagrun_task_will_execute(self): task1 = DummyOperator(dag=dag, task_id=task_id_1) dag = SerializedDAG.from_dict(SerializedDAG.to_dict(dag)) - scheduler = SchedulerJob(subdir=os.devnull) + self.scheduler_job = SchedulerJob(subdir=os.devnull) session = settings.Session() dag_model = DagModel( @@ -1002,7 +1024,7 @@ def test_execute_task_instances_no_dagrun_task_will_execute(self): session.merge(ti1) session.flush() - scheduler._critical_section_execute_task_instances(session) + self.scheduler_job._critical_section_execute_task_instances(session) session.flush() ti1.refresh_from_db() assert State.QUEUED == ti1.state @@ -1019,7 +1041,7 @@ def test_execute_task_instances_backfill_tasks_wont_execute(self): task1 = DummyOperator(dag=dag, task_id=task_id_1) dag = SerializedDAG.from_dict(SerializedDAG.to_dict(dag)) - scheduler = SchedulerJob(subdir=os.devnull) + self.scheduler_job = SchedulerJob(subdir=os.devnull) session = settings.Session() dag_model = DagModel( @@ -1043,7 +1065,7 @@ def test_execute_task_instances_backfill_tasks_wont_execute(self): assert
dr1.is_backfill - scheduler._critical_section_execute_task_instances(session) + self.scheduler_job._critical_section_execute_task_instances(session) session.flush() ti1.refresh_from_db() assert State.SCHEDULED == ti1.state @@ -1056,7 +1078,7 @@ def test_find_executable_task_instances_backfill_nodagrun(self): task1 = DummyOperator(dag=dag, task_id=task_id_1) dag = SerializedDAG.from_dict(SerializedDAG.to_dict(dag)) - scheduler = SchedulerJob(subdir=os.devnull) + self.scheduler_job = SchedulerJob(subdir=os.devnull) session = settings.Session() dag_model = DagModel( @@ -1091,7 +1113,7 @@ def test_find_executable_task_instances_backfill_nodagrun(self): session.merge(ti_with_dagrun) session.flush() - res = scheduler._executable_task_instances_to_queued(max_tis=32, session=session) + res = self.scheduler_job._executable_task_instances_to_queued(max_tis=32, session=session) assert 2 == len(res) res_keys = map(lambda x: x.key, res) @@ -1108,7 +1130,7 @@ def test_find_executable_task_instances_pool(self): task2 = DummyOperator(dag=dag, task_id=task_id_2, pool='b') dag = SerializedDAG.from_dict(SerializedDAG.to_dict(dag)) - scheduler = SchedulerJob(subdir=os.devnull) + self.scheduler_job = SchedulerJob(subdir=os.devnull) session = settings.Session() dag_model = DagModel( @@ -1144,7 +1166,7 @@ def test_find_executable_task_instances_pool(self): session.add(pool2) session.flush() - res = scheduler._executable_task_instances_to_queued(max_tis=32, session=session) + res = self.scheduler_job._executable_task_instances_to_queued(max_tis=32, session=session) session.flush() assert 3 == len(res) res_keys = [] @@ -1165,7 +1187,7 @@ def test_find_executable_task_instances_in_default_pool(self): dag = SerializedDAG.from_dict(SerializedDAG.to_dict(dag)) executor = MockExecutor(do_update=True) - scheduler = SchedulerJob(executor=executor) + self.scheduler_job = SchedulerJob(executor=executor) session = settings.Session() dag_model = DagModel( dag_id=dag_id, @@ -1195,7 +1217,7 @@ def test_find_executable_task_instances_in_default_pool(self): session.flush() # Two tasks w/o pool up for execution and our default pool size is 1 - res = scheduler._executable_task_instances_to_queued(max_tis=32, session=session) + res = self.scheduler_job._executable_task_instances_to_queued(max_tis=32, session=session) assert 1 == len(res) ti2.state = State.RUNNING @@ -1203,7 +1225,7 @@ def test_find_executable_task_instances_in_default_pool(self): session.flush() # One task w/o pool up for execution and one task running - res = scheduler._executable_task_instances_to_queued(max_tis=32, session=session) + res = self.scheduler_job._executable_task_instances_to_queued(max_tis=32, session=session) assert 0 == len(res) session.rollback() @@ -1216,7 +1238,7 @@ def test_nonexistent_pool(self): task = DummyOperator(dag=dag, task_id=task_id, pool="this_pool_doesnt_exist") dag = SerializedDAG.from_dict(SerializedDAG.to_dict(dag)) - scheduler = SchedulerJob(subdir=os.devnull) + self.scheduler_job = SchedulerJob(subdir=os.devnull) session = settings.Session() dag_model = DagModel( @@ -1237,7 +1259,7 @@ def test_nonexistent_pool(self): session.merge(ti) session.commit() - res = scheduler._executable_task_instances_to_queued(max_tis=32, session=session) + res = self.scheduler_job._executable_task_instances_to_queued(max_tis=32, session=session) session.flush() assert 0 == len(res) session.rollback() @@ -1249,7 +1271,7 @@ def test_find_executable_task_instances_none(self): DummyOperator(dag=dag, task_id=task_id_1) dag =
SerializedDAG.from_dict(SerializedDAG.to_dict(dag)) - scheduler = SchedulerJob(subdir=os.devnull) + self.scheduler_job = SchedulerJob(subdir=os.devnull) session = settings.Session() dag_model = DagModel( @@ -1266,7 +1288,7 @@ def test_find_executable_task_instances_none(self): ) session.flush() - assert 0 == len(scheduler._executable_task_instances_to_queued(max_tis=32, session=session)) + assert 0 == len(self.scheduler_job._executable_task_instances_to_queued(max_tis=32, session=session)) session.rollback() def test_find_executable_task_instances_concurrency(self): @@ -1276,7 +1298,7 @@ def test_find_executable_task_instances_concurrency(self): task1 = DummyOperator(dag=dag, task_id=task_id_1) dag = SerializedDAG.from_dict(SerializedDAG.to_dict(dag)) - scheduler = SchedulerJob(subdir=os.devnull) + self.scheduler_job = SchedulerJob(subdir=os.devnull) session = settings.Session() dag_model = DagModel( @@ -1314,7 +1336,7 @@ def test_find_executable_task_instances_concurrency(self): session.flush() - res = scheduler._executable_task_instances_to_queued(max_tis=32, session=session) + res = self.scheduler_job._executable_task_instances_to_queued(max_tis=32, session=session) assert 1 == len(res) res_keys = map(lambda x: x.key, res) @@ -1324,7 +1346,7 @@ def test_find_executable_task_instances_concurrency(self): session.merge(ti2) session.flush() - res = scheduler._executable_task_instances_to_queued(max_tis=32, session=session) + res = self.scheduler_job._executable_task_instances_to_queued(max_tis=32, session=session) assert 0 == len(res) session.rollback() @@ -1337,7 +1359,7 @@ def test_find_executable_task_instances_concurrency_queued(self): task3 = DummyOperator(dag=dag, task_id='dummy3') dag = SerializedDAG.from_dict(SerializedDAG.to_dict(dag)) - scheduler = SchedulerJob(subdir=os.devnull) + self.scheduler_job = SchedulerJob(subdir=os.devnull) session = settings.Session() dag_model = DagModel( dag_id=dag_id, @@ -1365,7 +1387,7 @@ def test_find_executable_task_instances_concurrency_queued(self): session.flush() - res = scheduler._executable_task_instances_to_queued(max_tis=32, session=session) + res = self.scheduler_job._executable_task_instances_to_queued(max_tis=32, session=session) assert 1 == len(res) assert res[0].key == ti3.key @@ -1381,11 +1403,11 @@ def test_find_executable_task_instances_task_concurrency(self): # pylint: disab task2 = DummyOperator(dag=dag, task_id=task_id_2) executor = MockExecutor(do_update=True) - scheduler = SchedulerJob(executor=executor) + self.scheduler_job = SchedulerJob(executor=executor) session = settings.Session() - scheduler.dagbag.bag_dag(dag, root_dag=dag) - scheduler.dagbag.sync_to_db(session=session) + self.scheduler_job.dagbag.bag_dag(dag, root_dag=dag) + self.scheduler_job.dagbag.sync_to_db(session=session) dag = SerializedDAG.from_dict(SerializedDAG.to_dict(dag)) dr1 = dag.create_dagrun( run_type=DagRunType.SCHEDULED, @@ -1412,7 +1434,7 @@ def test_find_executable_task_instances_task_concurrency(self): # pylint: disab session.merge(ti2) session.flush() - res = scheduler._executable_task_instances_to_queued(max_tis=32, session=session) + res = self.scheduler_job._executable_task_instances_to_queued(max_tis=32, session=session) assert 2 == len(res) @@ -1425,7 +1447,7 @@ def test_find_executable_task_instances_task_concurrency(self): # pylint: disab session.merge(ti1_2) session.flush() - res = scheduler._executable_task_instances_to_queued(max_tis=32, session=session) + res = self.scheduler_job._executable_task_instances_to_queued(max_tis=32, 
session=session) assert 1 == len(res) @@ -1436,7 +1458,7 @@ def test_find_executable_task_instances_task_concurrency(self): # pylint: disab session.merge(ti1_3) session.flush() - res = scheduler._executable_task_instances_to_queued(max_tis=32, session=session) + res = self.scheduler_job._executable_task_instances_to_queued(max_tis=32, session=session) assert 0 == len(res) @@ -1448,7 +1470,7 @@ def test_find_executable_task_instances_task_concurrency(self): # pylint: disab session.merge(ti1_3) session.flush() - res = scheduler._executable_task_instances_to_queued(max_tis=32, session=session) + res = self.scheduler_job._executable_task_instances_to_queued(max_tis=32, session=session) assert 2 == len(res) @@ -1460,7 +1482,7 @@ def test_find_executable_task_instances_task_concurrency(self): # pylint: disab session.merge(ti1_3) session.flush() - res = scheduler._executable_task_instances_to_queued(max_tis=32, session=session) + res = self.scheduler_job._executable_task_instances_to_queued(max_tis=32, session=session) assert 1 == len(res) session.rollback() @@ -1472,7 +1494,7 @@ def test_change_state_for_executable_task_instances_no_tis_with_state(self): task1 = DummyOperator(dag=dag, task_id=task_id_1) dag = SerializedDAG.from_dict(SerializedDAG.to_dict(dag)) - scheduler = SchedulerJob(subdir=os.devnull) + self.scheduler_job = SchedulerJob(subdir=os.devnull) session = settings.Session() date = DEFAULT_DATE @@ -1506,7 +1528,7 @@ def test_change_state_for_executable_task_instances_no_tis_with_state(self): session.flush() - res = scheduler._executable_task_instances_to_queued(max_tis=100, session=session) + res = self.scheduler_job._executable_task_instances_to_queued(max_tis=100, session=session) assert 0 == len(res) session.rollback() @@ -1518,7 +1540,7 @@ def test_enqueue_task_instances_with_queued_state(self): task1 = DummyOperator(dag=dag, task_id=task_id_1) dag = SerializedDAG.from_dict(SerializedDAG.to_dict(dag)) - scheduler = SchedulerJob(subdir=os.devnull) + self.scheduler_job = SchedulerJob(subdir=os.devnull) session = settings.Session() dag_model = DagModel( @@ -1539,7 +1561,7 @@ def test_enqueue_task_instances_with_queued_state(self): session.flush() with patch.object(BaseExecutor, 'queue_command') as mock_queue_command: - scheduler._enqueue_task_instances_with_queued_state([ti1]) + self.scheduler_job._enqueue_task_instances_with_queued_state([ti1]) assert mock_queue_command.called session.rollback() @@ -1557,7 +1579,7 @@ def test_critical_section_execute_task_instances(self): task2 = DummyOperator(dag=dag, task_id=task_id_2) dag = SerializedDAG.from_dict(SerializedDAG.to_dict(dag)) - scheduler = SchedulerJob(subdir=os.devnull) + self.scheduler_job = SchedulerJob(subdir=os.devnull) session = settings.Session() # create first dag run with 1 running and 1 queued @@ -1606,7 +1628,7 @@ def test_critical_section_execute_task_instances(self): assert State.RUNNING == dr2.state - res = scheduler._critical_section_execute_task_instances(session) + res = self.scheduler_job._critical_section_execute_task_instances(session) # check that concurrency is respected ti1.refresh_from_db() @@ -1634,7 +1656,7 @@ def test_execute_task_instances_limit(self): task2 = DummyOperator(dag=dag, task_id=task_id_2) dag = SerializedDAG.from_dict(SerializedDAG.to_dict(dag)) - scheduler = SchedulerJob(subdir=os.devnull) + self.scheduler_job = SchedulerJob(subdir=os.devnull) session = settings.Session() dag_model = DagModel( @@ -1664,20 +1686,20 @@ def test_execute_task_instances_limit(self): session.merge(ti1) 
session.merge(ti2) session.flush() - scheduler.max_tis_per_query = 2 - res = scheduler._critical_section_execute_task_instances(session) + self.scheduler_job.max_tis_per_query = 2 + res = self.scheduler_job._critical_section_execute_task_instances(session) assert 2 == res - scheduler.max_tis_per_query = 8 + self.scheduler_job.max_tis_per_query = 8 with mock.patch.object( - type(scheduler.executor), 'slots_available', new_callable=mock.PropertyMock + type(self.scheduler_job.executor), 'slots_available', new_callable=mock.PropertyMock ) as mock_slots: mock_slots.return_value = 2 # Check that we don't "overfill" the executor assert 2 == res - res = scheduler._critical_section_execute_task_instances(session) + res = self.scheduler_job._critical_section_execute_task_instances(session) - res = scheduler._critical_section_execute_task_instances(session) + res = self.scheduler_job._critical_section_execute_task_instances(session) assert 4 == res for ti in tis: ti.refresh_from_db() @@ -1695,7 +1717,7 @@ def test_execute_task_instances_unlimited(self): task2 = DummyOperator(dag=dag, task_id=task_id_2) dag = SerializedDAG.from_dict(SerializedDAG.to_dict(dag)) - scheduler = SchedulerJob(subdir=os.devnull) + self.scheduler_job = SchedulerJob(subdir=os.devnull) session = settings.Session() dag_model = DagModel( @@ -1725,10 +1747,10 @@ def test_execute_task_instances_unlimited(self): session.merge(ti1) session.merge(ti2) session.flush() - scheduler.max_tis_per_query = 0 - scheduler.executor = MagicMock(slots_available=36) + self.scheduler_job.max_tis_per_query = 0 + self.scheduler_job.executor = MagicMock(slots_available=36) - res = scheduler._critical_section_execute_task_instances(session) + res = self.scheduler_job._critical_section_execute_task_instances(session) # 20 dag runs * 2 tasks each = 40, but limited by number of slots available self.assertEqual(36, res) session.rollback() @@ -1787,10 +1809,10 @@ def test_change_state_for_tis_without_dagrun(self): dagbag.sync_to_db(session) session.commit() - scheduler = SchedulerJob(num_runs=0) - scheduler.dagbag.collect_dags_from_db() + self.scheduler_job = SchedulerJob(num_runs=0) + self.scheduler_job.dagbag.collect_dags_from_db() - scheduler._change_state_for_tis_without_dagrun( + self.scheduler_job._change_state_for_tis_without_dagrun( old_states=[State.SCHEDULED, State.QUEUED], new_state=State.NONE, session=session ) @@ -1819,7 +1841,7 @@ def test_change_state_for_tis_without_dagrun(self): session.merge(dr1) session.commit() - scheduler._change_state_for_tis_without_dagrun( + self.scheduler_job._change_state_for_tis_without_dagrun( old_states=[State.SCHEDULED, State.QUEUED], new_state=State.NONE, session=session ) @@ -1844,12 +1866,12 @@ def test_change_state_for_tasks_failed_to_execute(self): # If there's no left over task in executor.queued_tasks, nothing happens session = settings.Session() - scheduler_job = SchedulerJob(subdir=os.devnull) + self.scheduler_job = SchedulerJob(subdir=os.devnull) mock_logger = mock.MagicMock() test_executor = MockExecutor(do_update=False) - scheduler_job.executor = test_executor - scheduler_job._logger = mock_logger - scheduler_job._change_state_for_tasks_failed_to_execute() # pylint: disable=no-value-for-parameter + self.scheduler_job.executor = test_executor + self.scheduler_job._logger = mock_logger + self.scheduler_job._change_state_for_tasks_failed_to_execute() mock_logger.info.assert_not_called() # Tasks failed to execute with QUEUED state will be set to SCHEDULED state. 
@@ -1862,7 +1884,7 @@ def test_change_state_for_tasks_failed_to_execute(self): session.merge(ti) # pylint: disable=no-value-for-parameter session.commit() - scheduler_job._change_state_for_tasks_failed_to_execute() # pylint: disable=no-value-for-parameter + self.scheduler_job._change_state_for_tasks_failed_to_execute() ti.refresh_from_db() assert State.SCHEDULED == ti.state @@ -1875,7 +1897,7 @@ def test_change_state_for_tasks_failed_to_execute(self): session.merge(ti) session.commit() - scheduler_job._change_state_for_tasks_failed_to_execute() # pylint: disable=no-value-for-parameter + self.scheduler_job._change_state_for_tasks_failed_to_execute() ti.refresh_from_db() assert State.RUNNING == ti.state @@ -1915,10 +1937,10 @@ def test_adopt_or_reset_orphaned_tasks(self): processor = mock.MagicMock() - scheduler = SchedulerJob(num_runs=0) - scheduler.processor_agent = processor + self.scheduler_job = SchedulerJob(num_runs=0) + self.scheduler_job.processor_agent = processor - scheduler.adopt_or_reset_orphaned_tasks() + self.scheduler_job.adopt_or_reset_orphaned_tasks() ti = dr.get_task_instance(task_id=op1.task_id, session=session) assert ti.state == State.NONE @@ -1965,19 +1987,19 @@ def test_scheduler_loop_should_change_state_for_tis_without_dagrun( # This poll interval is large, but the scheduler doesn't sleep that # long; instead we hit the clean_tis_without_dagrun interval - scheduler = SchedulerJob(num_runs=2, processor_poll_interval=30) - scheduler.dagbag = dagbag + self.scheduler_job = SchedulerJob(num_runs=2, processor_poll_interval=30) + self.scheduler_job.dagbag = dagbag executor = MockExecutor(do_update=False) executor.queued_tasks - scheduler.executor = executor + self.scheduler_job.executor = executor processor = mock.MagicMock() processor.done = False - scheduler.processor_agent = processor + self.scheduler_job.processor_agent = processor with mock.patch.object(settings, "USE_JOB_SCHEDULE", False), conf_vars( {('scheduler', 'clean_tis_without_dagrun_interval'): '0.001'} ): - scheduler._run_scheduler_loop() + self.scheduler_job._run_scheduler_loop() ti = dr.get_task_instance(task_id=op1.task_id, session=session) assert ti.state == expected_task_state @@ -2001,10 +2023,10 @@ def test_dagrun_timeout_verify_max_active_runs(self): DummyOperator(task_id='dummy', dag=dag, owner='airflow') - scheduler = SchedulerJob(subdir=os.devnull) - scheduler.dagbag.bag_dag(dag, root_dag=dag) + self.scheduler_job = SchedulerJob(subdir=os.devnull) + self.scheduler_job.dagbag.bag_dag(dag, root_dag=dag) - scheduler.dagbag.sync_to_db() + self.scheduler_job.dagbag.sync_to_db() session = settings.Session() orm_dag = session.query(DagModel).get(dag.dag_id) @@ -2012,8 +2034,8 @@ def test_dagrun_timeout_verify_max_active_runs(self): dag = SerializedDAG.from_dict(SerializedDAG.to_dict(dag)) - scheduler = SchedulerJob(subdir=os.devnull) - scheduler._create_dag_runs([orm_dag], session) + self.scheduler_job = SchedulerJob(subdir=os.devnull) + self.scheduler_job._create_dag_runs([orm_dag], session) drs = DagRun.find(dag_id=dag.dag_id, session=session) assert len(drs) == 1 @@ -2029,10 +2051,10 @@ def test_dagrun_timeout_verify_max_active_runs(self): session.flush() # Mock that processor_agent is started - scheduler.processor_agent = mock.Mock() - scheduler.processor_agent.send_callback_to_execute = mock.Mock() + self.scheduler_job.processor_agent = mock.Mock() + self.scheduler_job.processor_agent.send_callback_to_execute = mock.Mock() - scheduler._schedule_dag_run(dr, {}, session) +
self.scheduler_job._schedule_dag_run(dr, {}, session) session.flush() session.refresh(dr) @@ -2050,7 +2072,7 @@ def test_dagrun_timeout_verify_max_active_runs(self): ) # Verify dag failure callback request is sent to file processor - scheduler.processor_agent.send_callback_to_execute.assert_called_once_with(expected_callback) + self.scheduler_job.processor_agent.send_callback_to_execute.assert_called_once_with(expected_callback) session.rollback() session.close() @@ -2064,9 +2086,9 @@ def test_dagrun_timeout_fails_run(self): DummyOperator(task_id='dummy', dag=dag, owner='airflow') - scheduler = SchedulerJob(subdir=os.devnull) - scheduler.dagbag.bag_dag(dag, root_dag=dag) - scheduler.dagbag.sync_to_db() + self.scheduler_job = SchedulerJob(subdir=os.devnull) + self.scheduler_job.dagbag.bag_dag(dag, root_dag=dag) + self.scheduler_job.dagbag.sync_to_db() session = settings.Session() orm_dag = session.query(DagModel).get(dag.dag_id) @@ -2074,8 +2096,8 @@ def test_dagrun_timeout_fails_run(self): dag = SerializedDAG.from_dict(SerializedDAG.to_dict(dag)) - scheduler = SchedulerJob(subdir=os.devnull) - scheduler._create_dag_runs([orm_dag], session) + self.scheduler_job = SchedulerJob(subdir=os.devnull) + self.scheduler_job._create_dag_runs([orm_dag], session) drs = DagRun.find(dag_id=dag.dag_id, session=session) assert len(drs) == 1 @@ -2086,10 +2108,10 @@ def test_dagrun_timeout_fails_run(self): session.flush() # Mock that processor_agent is started - scheduler.processor_agent = mock.Mock() - scheduler.processor_agent.send_callback_to_execute = mock.Mock() + self.scheduler_job.processor_agent = mock.Mock() + self.scheduler_job.processor_agent.send_callback_to_execute = mock.Mock() - scheduler._schedule_dag_run(dr, {}, session) + self.scheduler_job._schedule_dag_run(dr, {}, session) session.flush() session.refresh(dr) @@ -2104,7 +2126,7 @@ def test_dagrun_timeout_fails_run(self): ) # Verify dag failure callback request is sent to file processor - scheduler.processor_agent.send_callback_to_execute.assert_called_once_with(expected_callback) + self.scheduler_job.processor_agent.send_callback_to_execute.assert_called_once_with(expected_callback) session.rollback() session.close() @@ -2124,22 +2146,22 @@ def test_dagrun_callbacks_are_called(self, state, expected_callback_msg): DummyOperator(task_id='dummy', dag=dag, owner='airflow') - scheduler = SchedulerJob(subdir=os.devnull) - scheduler.processor_agent = mock.Mock() - scheduler.processor_agent.send_callback_to_execute = mock.Mock() - scheduler._send_sla_callbacks_to_processor = mock.Mock() + self.scheduler_job = SchedulerJob(subdir=os.devnull) + self.scheduler_job.processor_agent = mock.Mock() + self.scheduler_job.processor_agent.send_callback_to_execute = mock.Mock() + self.scheduler_job._send_sla_callbacks_to_processor = mock.Mock() # Sync DAG into DB with mock.patch.object(settings, "STORE_DAG_CODE", False): - scheduler.dagbag.bag_dag(dag, root_dag=dag) - scheduler.dagbag.sync_to_db() + self.scheduler_job.dagbag.bag_dag(dag, root_dag=dag) + self.scheduler_job.dagbag.sync_to_db() session = settings.Session() orm_dag = session.query(DagModel).get(dag.dag_id) assert orm_dag is not None # Create DagRun - scheduler._create_dag_runs([orm_dag], session) + self.scheduler_job._create_dag_runs([orm_dag], session) drs = DagRun.find(dag_id=dag.dag_id, session=session) assert len(drs) == 1 @@ -2148,7 +2170,7 @@ def test_dagrun_callbacks_are_called(self, state, expected_callback_msg): ti = dr.get_task_instance('dummy') ti.set_state(state, session) - 
scheduler._schedule_dag_run(dr, {}, session) + self.scheduler_job._schedule_dag_run(dr, {}, session) expected_callback = DagCallbackRequest( full_filepath=dr.dag.fileloc, @@ -2159,10 +2181,10 @@ def test_dagrun_callbacks_are_called(self, state, expected_callback_msg): ) # Verify dag failure callback request is sent to file processor - scheduler.processor_agent.send_callback_to_execute.assert_called_once_with(expected_callback) + self.scheduler_job.processor_agent.send_callback_to_execute.assert_called_once_with(expected_callback) # This is already tested separately # In this test we just want to verify that this function is called - scheduler._send_sla_callbacks_to_processor.assert_called_once_with(dag) + self.scheduler_job._send_sla_callbacks_to_processor.assert_called_once_with(dag) session.rollback() session.close() @@ -2179,22 +2201,22 @@ def test_dagrun_callbacks_are_not_added_when_callbacks_are_not_defined(self, sta BashOperator(task_id='test_task', dag=dag, owner='airflow', bash_command='echo hi') - scheduler = SchedulerJob(subdir=os.devnull) - scheduler.processor_agent = mock.Mock() - scheduler.processor_agent.send_callback_to_execute = mock.Mock() - scheduler._send_dag_callbacks_to_processor = mock.Mock() + self.scheduler_job = SchedulerJob(subdir=os.devnull) + self.scheduler_job.processor_agent = mock.Mock() + self.scheduler_job.processor_agent.send_callback_to_execute = mock.Mock() + self.scheduler_job._send_dag_callbacks_to_processor = mock.Mock() # Sync DAG into DB with mock.patch.object(settings, "STORE_DAG_CODE", False): - scheduler.dagbag.bag_dag(dag, root_dag=dag) - scheduler.dagbag.sync_to_db() + self.scheduler_job.dagbag.bag_dag(dag, root_dag=dag) + self.scheduler_job.dagbag.sync_to_db() session = settings.Session() orm_dag = session.query(DagModel).get(dag.dag_id) assert orm_dag is not None # Create DagRun - scheduler._create_dag_runs([orm_dag], session) + self.scheduler_job._create_dag_runs([orm_dag], session) drs = DagRun.find(dag_id=dag.dag_id, session=session) assert len(drs) == 1 @@ -2203,10 +2225,10 @@ def test_dagrun_callbacks_are_not_added_when_callbacks_are_not_defined(self, sta ti = dr.get_task_instance('test_task') ti.set_state(state, session) - scheduler._schedule_dag_run(dr, set(), session) + self.scheduler_job._schedule_dag_run(dr, set(), session) # Verify Callback is not set (i.e is None) when no callbacks are set on DAG - scheduler._send_dag_callbacks_to_processor.assert_called_once_with(dr, None) + self.scheduler_job._send_dag_callbacks_to_processor.assert_called_once_with(dr, None) session.rollback() session.close() @@ -2232,8 +2254,8 @@ def test_do_not_schedule_removed_task(self): # Re-create the DAG, but remove the task dag = DAG(dag_id='test_scheduler_do_not_schedule_removed_task', start_date=DEFAULT_DATE) - scheduler = SchedulerJob(subdir=os.devnull) - res = scheduler._executable_task_instances_to_queued(max_tis=32, session=session) + self.scheduler_job = SchedulerJob(subdir=os.devnull) + res = self.scheduler_job._executable_task_instances_to_queued(max_tis=32, session=session) assert [] == res session.rollback() @@ -2377,8 +2399,8 @@ def test_dagrun_root_after_dagrun_unfinished(self): dag_id = 'test_dagrun_states_root_future' dag = self.dagbag.get_dag(dag_id) dag.sync_to_db() - scheduler = SchedulerJob(num_runs=1, executor=self.null_exec, subdir=dag.fileloc) - scheduler.run() + self.scheduler_job = SchedulerJob(num_runs=1, executor=self.null_exec, subdir=dag.fileloc) + self.scheduler_job.run() first_run = DagRun.find(dag_id=dag_id, 
execution_date=DEFAULT_DATE)[0] ti_ids = [(ti.task_id, ti.state) for ti in first_run.get_task_instances()] @@ -2437,8 +2459,8 @@ def test_scheduler_start_date(self): other_dag.is_paused_upon_creation = True other_dag.sync_to_db() - scheduler = SchedulerJob(executor=self.null_exec, subdir=dag.fileloc, num_runs=1) - scheduler.run() + self.scheduler_job = SchedulerJob(executor=self.null_exec, subdir=dag.fileloc, num_runs=1) + self.scheduler_job.run() # zero tasks ran assert len(session.query(TaskInstance).filter(TaskInstance.dag_id == dag_id).all()) == 0 @@ -2460,8 +2482,8 @@ def test_scheduler_start_date(self): ] == bf_exec.sorted_tasks session.commit() - scheduler = SchedulerJob(dag.fileloc, executor=self.null_exec, num_runs=1) - scheduler.run() + self.scheduler_job = SchedulerJob(dag.fileloc, executor=self.null_exec, num_runs=1) + self.scheduler_job.run() # still one task assert len(session.query(TaskInstance).filter(TaskInstance.dag_id == dag_id).all()) == 1 @@ -2487,8 +2509,8 @@ def test_scheduler_task_start_date(self): dagbag.sync_to_db() - scheduler = SchedulerJob(executor=self.null_exec, subdir=dag.fileloc, num_runs=2) - scheduler.run() + self.scheduler_job = SchedulerJob(executor=self.null_exec, subdir=dag.fileloc, num_runs=2) + self.scheduler_job.run() session = settings.Session() tiq = session.query(TaskInstance).filter(TaskInstance.dag_id == dag_id) @@ -2508,12 +2530,12 @@ def test_scheduler_multiprocessing(self): dag = self.dagbag.get_dag(dag_id) dag.clear() - scheduler = SchedulerJob( + self.scheduler_job = SchedulerJob( executor=self.null_exec, subdir=os.path.join(TEST_DAG_FOLDER, 'test_scheduler_dags.py'), num_runs=1, ) - scheduler.run() + self.scheduler_job.run() # zero tasks ran dag_id = 'test_start_date_scheduling' @@ -2531,13 +2553,13 @@ def test_scheduler_multiprocessing_with_spawn_method(self): dag = self.dagbag.get_dag(dag_id) dag.clear() - scheduler = SchedulerJob( + self.scheduler_job = SchedulerJob( executor=self.null_exec, subdir=os.path.join(TEST_DAG_FOLDER, 'test_scheduler_dags.py'), num_runs=1, ) - scheduler.run() + self.scheduler_job.run() # zero tasks ran dag_id = 'test_start_date_scheduling' @@ -2572,9 +2594,10 @@ def test_scheduler_verify_pool_full(self): session.flush() dag = SerializedDAG.from_dict(SerializedDAG.to_dict(dag)) + SerializedDagModel.write_dag(dag) - scheduler = SchedulerJob(executor=self.null_exec) - scheduler.processor_agent = mock.MagicMock() + self.scheduler_job = SchedulerJob(executor=self.null_exec) + self.scheduler_job.processor_agent = mock.MagicMock() # Create 2 dagruns, which will create 2 task instances. 
dr = dag.create_dagrun( @@ -2582,15 +2605,16 @@ def test_scheduler_verify_pool_full(self): execution_date=DEFAULT_DATE, state=State.RUNNING, ) - scheduler._schedule_dag_run(dr, {}, session) + self.scheduler_job._schedule_dag_run(dr, {}, session) dr = dag.create_dagrun( run_type=DagRunType.SCHEDULED, execution_date=dag.following_schedule(dr.execution_date), state=State.RUNNING, ) - scheduler._schedule_dag_run(dr, {}, session) - - task_instances_list = scheduler._executable_task_instances_to_queued(max_tis=32, session=session) + self.scheduler_job._schedule_dag_run(dr, {}, session) + task_instances_list = self.scheduler_job._executable_task_instances_to_queued( + max_tis=32, session=session + ) assert len(task_instances_list) == 1 @@ -2623,11 +2647,12 @@ def test_scheduler_verify_pool_full_2_slots_per_task(self): pool = Pool(pool='test_scheduler_verify_pool_full_2_slots_per_task', slots=6) session.add(pool) session.commit() + SerializedDagModel.write_dag(dag) dag = SerializedDAG.from_dict(SerializedDAG.to_dict(dag)) - scheduler = SchedulerJob(executor=self.null_exec) - scheduler.processor_agent = mock.MagicMock() + self.scheduler_job = SchedulerJob(executor=self.null_exec) + self.scheduler_job.processor_agent = mock.MagicMock() # Create 5 dagruns, which will create 5 task instances. date = DEFAULT_DATE @@ -2637,14 +2662,91 @@ execution_date=date, state=State.RUNNING, ) - scheduler._schedule_dag_run(dr, {}, session) + self.scheduler_job._schedule_dag_run(dr, {}, session) date = dag.following_schedule(date) - task_instances_list = scheduler._executable_task_instances_to_queued(max_tis=32, session=session) + task_instances_list = self.scheduler_job._executable_task_instances_to_queued( + max_tis=32, session=session + ) # As tasks require 2 slots, only 3 can fit into 6 available assert len(task_instances_list) == 3 + def test_scheduler_keeps_scheduling_pool_full(self): + """ + Test that task instances in a pool that isn't full keep getting scheduled even when another pool is full. + """ + dag_d1 = DAG(dag_id='test_scheduler_keeps_scheduling_pool_full_d1', start_date=DEFAULT_DATE) + BashOperator( + task_id='test_scheduler_keeps_scheduling_pool_full_t1', + dag=dag_d1, + owner='airflow', + pool='test_scheduler_keeps_scheduling_pool_full_p1', + bash_command='echo hi', + ) + + dag_d2 = DAG(dag_id='test_scheduler_keeps_scheduling_pool_full_d2', start_date=DEFAULT_DATE) + BashOperator( + task_id='test_scheduler_keeps_scheduling_pool_full_t2', + dag=dag_d2, + owner='airflow', + pool='test_scheduler_keeps_scheduling_pool_full_p2', + bash_command='echo hi', + ) + dagbag = DagBag( + dag_folder=os.path.join(settings.DAGS_FOLDER, "no_dags.py"), + include_examples=False, + read_dags_from_db=True, + ) + dagbag.bag_dag(dag=dag_d1, root_dag=dag_d1) + dagbag.bag_dag(dag=dag_d2, root_dag=dag_d2) + dagbag.sync_to_db() + + session = settings.Session() + pool_p1 = Pool(pool='test_scheduler_keeps_scheduling_pool_full_p1', slots=1) + pool_p2 = Pool(pool='test_scheduler_keeps_scheduling_pool_full_p2', slots=10) + session.add(pool_p1) + session.add(pool_p2) + session.commit() + + dag_d1 = SerializedDAG.from_dict(SerializedDAG.to_dict(dag_d1)) + + self.scheduler_job = SchedulerJob(executor=self.null_exec) + self.scheduler_job.processor_agent = mock.MagicMock() + + # Create 5 dagruns for each DAG. + # To increase the chances the TIs from the "full" pool will get retrieved first, we schedule all + # TIs from the first dag first.
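+ # Pool p1 has a single slot while pool p2 has ten, so once the p1 TI is queued, later + # scheduling passes can only make progress with TIs from p2.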
+ date = DEFAULT_DATE + for _ in range(5): + dr = dag_d1.create_dagrun( + run_type=DagRunType.SCHEDULED, + execution_date=date, + state=State.RUNNING, + ) + self.scheduler_job._schedule_dag_run(dr, {}, session) + date = dag_d1.following_schedule(date) + + date = DEFAULT_DATE + for _ in range(5): + dr = dag_d2.create_dagrun( + run_type=DagRunType.SCHEDULED, + execution_date=date, + state=State.RUNNING, + ) + self.scheduler_job._schedule_dag_run(dr, {}, session) + date = dag_d2.following_schedule(date) + + self.scheduler_job._executable_task_instances_to_queued(max_tis=2, session=session) + task_instances_list2 = self.scheduler_job._executable_task_instances_to_queued(max_tis=2, session=session) + + # Make sure we get TIs from a non-full pool in the 2nd list + assert len(task_instances_list2) > 0 + assert all( + task_instance.pool != 'test_scheduler_keeps_scheduling_pool_full_p1' + for task_instance in task_instances_list2 + ) + def test_scheduler_verify_priority_and_slots(self): """ Test task instances with higher priority are not queued @@ -2699,18 +2801,21 @@ def test_scheduler_verify_priority_and_slots(self): session.commit() dag = SerializedDAG.from_dict(SerializedDAG.to_dict(dag)) + SerializedDagModel.write_dag(dag) - scheduler = SchedulerJob(executor=self.null_exec) - scheduler.processor_agent = mock.MagicMock() + self.scheduler_job = SchedulerJob(executor=self.null_exec) + self.scheduler_job.processor_agent = mock.MagicMock() dr = dag.create_dagrun( run_type=DagRunType.SCHEDULED, execution_date=DEFAULT_DATE, state=State.RUNNING, ) - scheduler._schedule_dag_run(dr, {}, session) + self.scheduler_job._schedule_dag_run(dr, {}, session) - task_instances_list = scheduler._executable_task_instances_to_queued(max_tis=32, session=session) + task_instances_list = self.scheduler_job._executable_task_instances_to_queued( + max_tis=32, session=session + ) # Only second and third assert len(task_instances_list) == 2 @@ -2746,18 +2851,18 @@ def test_verify_integrity_if_dag_not_changed(self): dag = DAG(dag_id='test_verify_integrity_if_dag_not_changed', start_date=DEFAULT_DATE) BashOperator(task_id='dummy', dag=dag, owner='airflow', bash_command='echo hi') - scheduler = SchedulerJob(subdir=os.devnull) - scheduler.dagbag.bag_dag(dag, root_dag=dag) - scheduler.dagbag.sync_to_db() + self.scheduler_job = SchedulerJob(subdir=os.devnull) + self.scheduler_job.dagbag.bag_dag(dag, root_dag=dag) + self.scheduler_job.dagbag.sync_to_db() session = settings.Session() orm_dag = session.query(DagModel).get(dag.dag_id) assert orm_dag is not None - scheduler = SchedulerJob(subdir=os.devnull) - scheduler.processor_agent = mock.MagicMock() - dag = scheduler.dagbag.get_dag('test_verify_integrity_if_dag_not_changed', session=session) - scheduler._create_dag_runs([orm_dag], session) + self.scheduler_job = SchedulerJob(subdir=os.devnull) + self.scheduler_job.processor_agent = mock.MagicMock() + dag = self.scheduler_job.dagbag.get_dag('test_verify_integrity_if_dag_not_changed', session=session) + self.scheduler_job._create_dag_runs([orm_dag], session) drs = DagRun.find(dag_id=dag.dag_id, session=session) assert len(drs) == 1 @@ -2765,7 +2870,7 @@ def test_verify_integrity_if_dag_not_changed(self): # Verify that DagRun.verify_integrity is not called with mock.patch('airflow.jobs.scheduler_job.DagRun.verify_integrity') as mock_verify_integrity: - scheduled_tis = scheduler._schedule_dag_run(dr, {}, session) + scheduled_tis = self.scheduler_job._schedule_dag_run(dr, {}, session) mock_verify_integrity.assert_not_called() session.flush() @@ -2799,18 +2904,18 @@ def
test_verify_integrity_if_dag_changed(self): dag = DAG(dag_id='test_verify_integrity_if_dag_changed', start_date=DEFAULT_DATE) BashOperator(task_id='dummy', dag=dag, owner='airflow', bash_command='echo hi') - scheduler = SchedulerJob(subdir=os.devnull) - scheduler.dagbag.bag_dag(dag, root_dag=dag) - scheduler.dagbag.sync_to_db() + self.scheduler_job = SchedulerJob(subdir=os.devnull) + self.scheduler_job.dagbag.bag_dag(dag, root_dag=dag) + self.scheduler_job.dagbag.sync_to_db() session = settings.Session() orm_dag = session.query(DagModel).get(dag.dag_id) assert orm_dag is not None - scheduler = SchedulerJob(subdir=os.devnull) - scheduler.processor_agent = mock.MagicMock() - dag = scheduler.dagbag.get_dag('test_verify_integrity_if_dag_changed', session=session) - scheduler._create_dag_runs([orm_dag], session) + self.scheduler_job = SchedulerJob(subdir=os.devnull) + self.scheduler_job.processor_agent = mock.MagicMock() + dag = self.scheduler_job.dagbag.get_dag('test_verify_integrity_if_dag_changed', session=session) + self.scheduler_job._create_dag_runs([orm_dag], session) drs = DagRun.find(dag_id=dag.dag_id, session=session) assert len(drs) == 1 @@ -2818,8 +2923,8 @@ def test_verify_integrity_if_dag_changed(self): dag_version_1 = SerializedDagModel.get_latest_version_hash(dr.dag_id, session=session) assert dr.dag_hash == dag_version_1 - assert scheduler.dagbag.dags == {'test_verify_integrity_if_dag_changed': dag} - assert len(scheduler.dagbag.dags.get("test_verify_integrity_if_dag_changed").tasks) == 1 + assert self.scheduler_job.dagbag.dags == {'test_verify_integrity_if_dag_changed': dag} + assert len(self.scheduler_job.dagbag.dags.get("test_verify_integrity_if_dag_changed").tasks) == 1 # Now let's say the DAG got updated (new task got added) BashOperator(task_id='bash_task_1', dag=dag, bash_command='echo hi') @@ -2828,7 +2933,7 @@ def test_verify_integrity_if_dag_changed(self): dag_version_2 = SerializedDagModel.get_latest_version_hash(dr.dag_id, session=session) assert dag_version_2 != dag_version_1 - scheduled_tis = scheduler._schedule_dag_run(dr, {}, session) + scheduled_tis = self.scheduler_job._schedule_dag_run(dr, {}, session) session.flush() assert scheduled_tis == 2 @@ -2837,8 +2942,8 @@ def test_verify_integrity_if_dag_changed(self): assert len(drs) == 1 dr = drs[0] assert dr.dag_hash == dag_version_2 - assert scheduler.dagbag.dags == {'test_verify_integrity_if_dag_changed': dag} - assert len(scheduler.dagbag.dags.get("test_verify_integrity_if_dag_changed").tasks) == 2 + assert self.scheduler_job.dagbag.dags == {'test_verify_integrity_if_dag_changed': dag} + assert len(self.scheduler_job.dagbag.dags.get("test_verify_integrity_if_dag_changed").tasks) == 2 tis_count = ( session.query(func.count(TaskInstance.task_id)) @@ -2887,11 +2992,11 @@ def do_schedule(mock_dagbag): # Use an empty file since the above mock will return the # expected DAGs. Also specify only a single file so that it doesn't # try to schedule the above DAG repeatedly. - scheduler = SchedulerJob( + self.scheduler_job = SchedulerJob( num_runs=1, executor=executor, subdir=os.path.join(settings.DAGS_FOLDER, "no_dags.py") ) - scheduler.heartrate = 0 - scheduler.run() + self.scheduler_job.heartrate = 0 + self.scheduler_job.run() do_schedule() # pylint: disable=no-value-for-parameter with create_session() as session: @@ -2932,7 +3037,7 @@ def run_with_error(ti, ignore_ti_state=False): ti.refresh_from_db() assert ti.state == State.SUCCESS - @pytest.mark.quarantined + @pytest.mark.skip(reason="This test needs fixing.
It's very wrong now and always fails") def test_retry_handling_job(self): """ Integration test of the scheduler not accidentally resetting @@ -2942,9 +3047,9 @@ def test_retry_handling_job(self): dag_task1 = dag.get_task("test_retry_handling_op") dag.clear() - scheduler = SchedulerJob(dag_id=dag.dag_id, num_runs=1) - scheduler.heartrate = 0 - scheduler.run() + self.scheduler_job = SchedulerJob(dag_id=dag.dag_id, num_runs=1) + self.scheduler_job.heartrate = 0 + self.scheduler_job.run() session = settings.Session() ti = ( @@ -2952,7 +3057,6 @@ def test_retry_handling_job(self): .filter(TaskInstance.dag_id == dag.dag_id, TaskInstance.task_id == dag_task1.task_id) .first() ) - # make sure the counter has increased assert ti.try_number == 2 assert ti.state == State.UP_FOR_RETRY @@ -3302,9 +3406,9 @@ def test_list_py_file_paths(self): def test_adopt_or_reset_orphaned_tasks_nothing(self): """Try with nothing. """ - scheduler = SchedulerJob() + self.scheduler_job = SchedulerJob() session = settings.Session() - assert 0 == scheduler.adopt_or_reset_orphaned_tasks(session=session) + assert 0 == self.scheduler_job.adopt_or_reset_orphaned_tasks(session=session) def test_adopt_or_reset_orphaned_tasks_external_triggered_dag(self): dag_id = 'test_reset_orphaned_tasks_external_triggered_dag' @@ -3312,7 +3416,7 @@ def test_adopt_or_reset_orphaned_tasks_external_triggered_dag(self): task_id = dag_id + '_task' DummyOperator(task_id=task_id, dag=dag) - scheduler = SchedulerJob(subdir=os.devnull) + self.scheduler_job = SchedulerJob(subdir=os.devnull) session = settings.Session() dr1 = dag.create_dagrun( @@ -3329,7 +3433,7 @@ def test_adopt_or_reset_orphaned_tasks_external_triggered_dag(self): session.merge(dr1) session.commit() - num_reset_tis = scheduler.adopt_or_reset_orphaned_tasks(session=session) + num_reset_tis = self.scheduler_job.adopt_or_reset_orphaned_tasks(session=session) assert 1 == num_reset_tis def test_adopt_or_reset_orphaned_tasks_backfill_dag(self): @@ -3338,9 +3442,9 @@ def test_adopt_or_reset_orphaned_tasks_backfill_dag(self): task_id = dag_id + '_task' DummyOperator(task_id=task_id, dag=dag) - scheduler = SchedulerJob(subdir=os.devnull) + self.scheduler_job = SchedulerJob(subdir=os.devnull) session = settings.Session() - session.add(scheduler) + session.add(self.scheduler_job) session.flush() dr1 = dag.create_dagrun( @@ -3357,7 +3461,7 @@ def test_adopt_or_reset_orphaned_tasks_backfill_dag(self): session.flush() assert dr1.is_backfill - assert 0 == scheduler.adopt_or_reset_orphaned_tasks(session=session) + assert 0 == self.scheduler_job.adopt_or_reset_orphaned_tasks(session=session) session.rollback() def test_reset_orphaned_tasks_nonexistent_dagrun(self): @@ -3367,7 +3471,7 @@ def test_reset_orphaned_tasks_nonexistent_dagrun(self): task_id = dag_id + '_task' task = DummyOperator(task_id=task_id, dag=dag) - scheduler = SchedulerJob(subdir=os.devnull) + self.scheduler_job = SchedulerJob(subdir=os.devnull) session = settings.Session() ti = TaskInstance(task=task, execution_date=DEFAULT_DATE) @@ -3379,7 +3483,7 @@ def test_reset_orphaned_tasks_nonexistent_dagrun(self): session.merge(ti) session.flush() - assert 0 == scheduler.adopt_or_reset_orphaned_tasks(session=session) + assert 0 == self.scheduler_job.adopt_or_reset_orphaned_tasks(session=session) session.rollback() def test_reset_orphaned_tasks_no_orphans(self): @@ -3388,9 +3492,9 @@ def test_reset_orphaned_tasks_no_orphans(self): task_id = dag_id + '_task' DummyOperator(task_id=task_id, dag=dag) - scheduler = 
SchedulerJob(subdir=os.devnull) + self.scheduler_job = SchedulerJob(subdir=os.devnull) session = settings.Session() - session.add(scheduler) + session.add(self.scheduler_job) session.flush() dr1 = dag.create_dagrun( @@ -3402,12 +3506,12 @@ def test_reset_orphaned_tasks_no_orphans(self): ) tis = dr1.get_task_instances(session=session) tis[0].state = State.RUNNING - tis[0].queued_by_job_id = scheduler.id + tis[0].queued_by_job_id = self.scheduler_job.id session.merge(dr1) session.merge(tis[0]) session.flush() - assert 0 == scheduler.adopt_or_reset_orphaned_tasks(session=session) + assert 0 == self.scheduler_job.adopt_or_reset_orphaned_tasks(session=session) tis[0].refresh_from_db() assert State.RUNNING == tis[0].state @@ -3418,9 +3522,9 @@ def test_reset_orphaned_tasks_non_running_dagruns(self): task_id = dag_id + '_task' DummyOperator(task_id=task_id, dag=dag) - scheduler = SchedulerJob(subdir=os.devnull) + self.scheduler_job = SchedulerJob(subdir=os.devnull) session = settings.Session() - session.add(scheduler) + session.add(self.scheduler_job) session.flush() dr1 = dag.create_dagrun( @@ -3433,12 +3537,12 @@ def test_reset_orphaned_tasks_non_running_dagruns(self): tis = dr1.get_task_instances(session=session) assert 1 == len(tis) tis[0].state = State.SCHEDULED - tis[0].queued_by_job_id = scheduler.id + tis[0].queued_by_job_id = self.scheduler_job.id session.merge(dr1) session.merge(tis[0]) session.flush() - assert 0 == scheduler.adopt_or_reset_orphaned_tasks(session=session) + assert 0 == self.scheduler_job.adopt_or_reset_orphaned_tasks(session=session) session.rollback() def test_adopt_or_reset_orphaned_tasks_stale_scheduler_jobs(self): @@ -3447,11 +3551,11 @@ def test_adopt_or_reset_orphaned_tasks_stale_scheduler_jobs(self): DummyOperator(task_id='task1', dag=dag) DummyOperator(task_id='task2', dag=dag) - scheduler_job = SchedulerJob(subdir=os.devnull) + self.scheduler_job = SchedulerJob(subdir=os.devnull) session = settings.Session() - scheduler_job.state = State.RUNNING - scheduler_job.latest_heartbeat = timezone.utcnow() - session.add(scheduler_job) + self.scheduler_job.state = State.RUNNING + self.scheduler_job.latest_heartbeat = timezone.utcnow() + session.add(self.scheduler_job) old_job = SchedulerJob(subdir=os.devnull) old_job.state = State.RUNNING @@ -3475,11 +3579,11 @@ def test_adopt_or_reset_orphaned_tasks_stale_scheduler_jobs(self): session.merge(ti1) ti2.state = State.SCHEDULED - ti2.queued_by_job_id = scheduler_job.id + ti2.queued_by_job_id = self.scheduler_job.id session.merge(ti2) session.flush() - num_reset_tis = scheduler_job.adopt_or_reset_orphaned_tasks(session=session) + num_reset_tis = self.scheduler_job.adopt_or_reset_orphaned_tasks(session=session) assert 1 == num_reset_tis @@ -3488,6 +3592,8 @@ def test_adopt_or_reset_orphaned_tasks_stale_scheduler_jobs(self): session.refresh(ti2) assert State.SCHEDULED == ti2.state session.rollback() + if old_job.processor_agent: + old_job.processor_agent.end() def test_send_sla_callbacks_to_processor_sla_disabled(self): """Test SLA Callbacks are not sent when check_slas is False""" @@ -3496,13 +3602,13 @@ def test_send_sla_callbacks_to_processor_sla_disabled(self): DummyOperator(task_id='task1', dag=dag) with patch.object(settings, "CHECK_SLAS", False): - scheduler_job = SchedulerJob(subdir=os.devnull) + self.scheduler_job = SchedulerJob(subdir=os.devnull) mock_agent = mock.MagicMock() - scheduler_job.processor_agent = mock_agent + self.scheduler_job.processor_agent = mock_agent - 
scheduler_job._send_sla_callbacks_to_processor(dag) - scheduler_job.processor_agent.send_sla_callback_request_to_execute.assert_not_called() + self.scheduler_job._send_sla_callbacks_to_processor(dag) + self.scheduler_job.processor_agent.send_sla_callback_request_to_execute.assert_not_called() def test_send_sla_callbacks_to_processor_sla_no_task_slas(self): """Test SLA Callbacks are not sent when no task SLAs are defined""" @@ -3511,13 +3617,13 @@ def test_send_sla_callbacks_to_processor_sla_no_task_slas(self): DummyOperator(task_id='task1', dag=dag) with patch.object(settings, "CHECK_SLAS", True): - scheduler_job = SchedulerJob(subdir=os.devnull) + self.scheduler_job = SchedulerJob(subdir=os.devnull) mock_agent = mock.MagicMock() - scheduler_job.processor_agent = mock_agent + self.scheduler_job.processor_agent = mock_agent - scheduler_job._send_sla_callbacks_to_processor(dag) - scheduler_job.processor_agent.send_sla_callback_request_to_execute.assert_not_called() + self.scheduler_job._send_sla_callbacks_to_processor(dag) + self.scheduler_job.processor_agent.send_sla_callback_request_to_execute.assert_not_called() def test_send_sla_callbacks_to_processor_sla_with_task_slas(self): """Test SLA Callbacks are sent to the DAG Processor when SLAs are defined on tasks""" @@ -3529,18 +3635,26 @@ def test_send_sla_callbacks_to_processor_sla_with_task_slas(self): dag = SerializedDAG.from_json(SerializedDAG.to_json(dag)) with patch.object(settings, "CHECK_SLAS", True): - scheduler_job = SchedulerJob(subdir=os.devnull) + self.scheduler_job = SchedulerJob(subdir=os.devnull) mock_agent = mock.MagicMock() - scheduler_job.processor_agent = mock_agent + self.scheduler_job.processor_agent = mock_agent - scheduler_job._send_sla_callbacks_to_processor(dag) - scheduler_job.processor_agent.send_sla_callback_request_to_execute.assert_called_once_with( + self.scheduler_job._send_sla_callbacks_to_processor(dag) + self.scheduler_job.processor_agent.send_sla_callback_request_to_execute.assert_called_once_with( full_filepath=dag.fileloc, dag_id=dag_id ) - def test_scheduler_sets_job_id_on_dag_run(self): - dag = DAG(dag_id='test_scheduler_sets_job_id_on_dag_run', start_date=DEFAULT_DATE) + @freeze_time(DEFAULT_DATE + datetime.timedelta(days=1, seconds=9)) + @mock.patch('airflow.jobs.scheduler_job.Stats.timing') + def test_create_dag_runs(self, stats_timing): + """ + Test various invariants of _create_dag_runs. 
+ + - That the run created has the creating_job_id set + - That we emit the right DagRun metrics + """ + dag = DAG(dag_id='test_create_dag_runs', start_date=DEFAULT_DATE) DummyOperator( task_id='dummy', @@ -3548,7 +3662,7 @@ def test_scheduler_sets_job_id_on_dag_run(self): ) dagbag = DagBag( - dag_folder=os.path.join(settings.DAGS_FOLDER, "no_dags.py"), + dag_folder=os.devnull, include_examples=False, read_dags_from_db=True, ) @@ -3556,13 +3670,17 @@ def test_scheduler_sets_job_id_on_dag_run(self): dagbag.sync_to_db() dag_model = DagModel.get_dagmodel(dag.dag_id) - scheduler = SchedulerJob(executor=self.null_exec) - scheduler.processor_agent = mock.MagicMock() + self.scheduler_job = SchedulerJob(executor=self.null_exec) + self.scheduler_job.processor_agent = mock.MagicMock() with create_session() as session: - scheduler._create_dag_runs([dag_model], session) + self.scheduler_job._create_dag_runs([dag_model], session) + + stats_timing.assert_called_once_with( + "dagrun.schedule_delay.test_create_dag_runs", datetime.timedelta(seconds=9) + ) - assert dag.get_last_dagrun().creating_job_id == scheduler.id + assert dag.get_last_dagrun().creating_job_id == self.scheduler_job.id def test_extra_operator_links_not_loaded_in_scheduler_loop(self): """ @@ -3590,12 +3708,12 @@ def test_extra_operator_links_not_loaded_in_scheduler_loop(self): # Test that custom_task has >= 1 Operator Links (after de-serialization) assert custom_task.operator_extra_links - scheduler = SchedulerJob(executor=self.null_exec) - scheduler.processor_agent = mock.MagicMock() - scheduler._run_scheduler_loop() + self.scheduler_job = SchedulerJob(executor=self.null_exec) + self.scheduler_job.processor_agent = mock.MagicMock() + self.scheduler_job._run_scheduler_loop() # Get serialized dag - s_dag_2 = scheduler.dagbag.get_dag(dag.dag_id) + s_dag_2 = self.scheduler_job.dagbag.get_dag(dag.dag_id) custom_task = s_dag_2.task_dict['custom_task'] # Test that custom_task has no Operator Links (after de-serialization) in the Scheduling Loop assert not custom_task.operator_extra_links @@ -3622,13 +3740,13 @@ def test_scheduler_create_dag_runs_does_not_raise_error(self): DAG.bulk_write_to_db(dagbag.dags.values()) dag_model = DagModel.get_dagmodel(dag.dag_id) - scheduler = SchedulerJob(subdir=os.devnull, executor=self.null_exec) - scheduler.processor_agent = mock.MagicMock() + self.scheduler_job = SchedulerJob(subdir=os.devnull, executor=self.null_exec) + self.scheduler_job.processor_agent = mock.MagicMock() with create_session() as session, self.assertLogs( 'airflow.jobs.scheduler_job', level="ERROR" ) as log_output: - scheduler._create_dag_runs([dag_model], session) + self.scheduler_job._create_dag_runs([dag_model], session) assert ( "airflow.exceptions.SerializedDagNotFound: DAG " @@ -3666,13 +3784,13 @@ def test_bulk_write_to_db_external_trigger_dont_skip_scheduled_run(self): dag_model = session.query(DagModel).get(dag.dag_id) assert dag_model.next_dagrun == DEFAULT_DATE - job = SchedulerJob(subdir=os.devnull) - job.executor = MockExecutor(do_update=False) - job.processor_agent = mock.MagicMock(spec=DagFileProcessorAgent) + self.scheduler_job = SchedulerJob(subdir=os.devnull) + self.scheduler_job.executor = MockExecutor(do_update=False) + self.scheduler_job.processor_agent = mock.MagicMock(spec=DagFileProcessorAgent) # Verify a DagRun is created with the correct execution_date # when Scheduler._do_scheduling is run in the Scheduler Loop - job._do_scheduling(session) + self.scheduler_job._do_scheduling(session) dr1 = 
dag.get_dagrun(DEFAULT_DATE, session) assert dr1 is not None assert dr1.state == State.RUNNING @@ -3747,12 +3865,12 @@ def test_scheduler_create_dag_runs_check_existing_run(self): assert dag.get_last_dagrun(session) == dagrun - scheduler = SchedulerJob(subdir=os.devnull, executor=self.null_exec) - scheduler.dagbag = dagbag - scheduler.processor_agent = mock.MagicMock() + self.scheduler_job = SchedulerJob(subdir=os.devnull, executor=self.null_exec) + self.scheduler_job.dagbag = dagbag + self.scheduler_job.processor_agent = mock.MagicMock() # Test that this does not raise any error - scheduler._create_dag_runs([dag_model], session) + self.scheduler_job._create_dag_runs([dag_model], session) # Assert dag_model.next_dagrun is set correctly to next execution date assert dag_model.next_dagrun == DEFAULT_DATE + timedelta(days=1) @@ -3803,16 +3921,78 @@ def test_do_schedule_max_active_runs_upstream_failed(self): dag.sync_to_db(session=session) # Update the date fields - job = SchedulerJob(subdir=os.devnull) - job.executor = MockExecutor(do_update=False) - job.processor_agent = mock.MagicMock(spec=DagFileProcessorAgent) + self.scheduler_job = SchedulerJob(subdir=os.devnull) + self.scheduler_job.executor = MockExecutor(do_update=False) + self.scheduler_job.processor_agent = mock.MagicMock(spec=DagFileProcessorAgent) - num_queued = job._do_scheduling(session) + num_queued = self.scheduler_job._do_scheduling(session) assert num_queued == 1 ti = run2.get_task_instance(task1.task_id, session) assert ti.state == State.QUEUED + def test_do_schedule_max_active_runs_dag_timed_out(self): + """Test that tasks are set to a finished state when their DAG times out""" + + dag = DAG( + dag_id='test_max_active_run_with_dag_timed_out', + start_date=DEFAULT_DATE, + schedule_interval='@once', + max_active_runs=1, + catchup=True, + ) + dag.dagrun_timeout = datetime.timedelta(seconds=1) + + with dag: + task1 = BashOperator( + task_id='task1', + bash_command=' for((i=1;i<=600;i+=1)); do sleep "$i"; done', + ) + + session = settings.Session() + dagbag = DagBag( + dag_folder=os.devnull, + include_examples=False, + read_dags_from_db=True, + ) + + dagbag.bag_dag(dag=dag, root_dag=dag) + dagbag.sync_to_db(session=session) + + run1 = dag.create_dagrun( + run_type=DagRunType.SCHEDULED, + execution_date=DEFAULT_DATE, + state=State.RUNNING, + session=session, + ) + run1_ti = run1.get_task_instance(task1.task_id, session) + run1_ti.state = State.RUNNING + + sleep(1) + + run2 = dag.create_dagrun( + run_type=DagRunType.SCHEDULED, + execution_date=DEFAULT_DATE + timedelta(seconds=10), + state=State.RUNNING, + session=session, + ) + + dag.sync_to_db(session=session) + + self.scheduler_job = SchedulerJob(subdir=os.devnull) + self.scheduler_job.executor = MockExecutor() + self.scheduler_job.processor_agent = mock.MagicMock(spec=DagFileProcessorAgent) + + _ = self.scheduler_job._do_scheduling(session) + + assert run1.state == State.FAILED + assert run1_ti.state == State.SKIPPED + assert run2.state == State.RUNNING + + _ = self.scheduler_job._do_scheduling(session) + run2_ti = run2.get_task_instance(task1.task_id, session) + assert run2_ti.state == State.QUEUED + def test_do_schedule_max_active_runs_task_removed(self): """Test that tasks in removed state don't count as actively running.""" @@ -3847,11 +4027,11 @@ def test_do_schedule_max_active_runs_task_removed(self): dag.sync_to_db(session=session) # Update the date fields - job = SchedulerJob(subdir=os.devnull) - job.executor = MockExecutor(do_update=False) - job.processor_agent = 
mock.MagicMock(spec=DagFileProcessorAgent) + self.scheduler_job = SchedulerJob(subdir=os.devnull) + self.scheduler_job.executor = MockExecutor(do_update=False) + self.scheduler_job.processor_agent = mock.MagicMock(spec=DagFileProcessorAgent) - num_queued = job._do_scheduling(session) + num_queued = self.scheduler_job._do_scheduling(session) assert num_queued == 1 ti = run1.get_task_instance(task1.task_id, session) @@ -3896,11 +4076,11 @@ def test_do_schedule_max_active_runs_and_manual_trigger(self): dag.sync_to_db(session=session) # Update the date fields - job = SchedulerJob(subdir=os.devnull) - job.executor = MockExecutor(do_update=False) - job.processor_agent = mock.MagicMock(spec=DagFileProcessorAgent) + self.scheduler_job = SchedulerJob(subdir=os.devnull) + self.scheduler_job.executor = MockExecutor(do_update=False) + self.scheduler_job.processor_agent = mock.MagicMock(spec=DagFileProcessorAgent) - num_queued = job._do_scheduling(session) + num_queued = self.scheduler_job._do_scheduling(session) # Add it back in to the session so we can refresh it. (_do_scheduling does an expunge_all to reduce # memory) session.add(dag_run) @@ -3920,7 +4100,7 @@ def test_do_schedule_max_active_runs_and_manual_trigger(self): session.flush() # At this point, ti2 and ti3 of the scheduled dag run should be running - num_queued = job._do_scheduling(session) + num_queued = self.scheduler_job._do_scheduling(session) assert num_queued == 1 # Should have queued task2 @@ -3940,7 +4120,7 @@ def test_do_schedule_max_active_runs_and_manual_trigger(self): ) session.flush() - num_queued = job._do_scheduling(session) + num_queued = self.scheduler_job._do_scheduling(session) assert num_queued == 1 # Should have queued task2 again. @@ -3989,6 +4169,8 @@ def test_task_with_upstream_skip_process_task_instances(): assert tis[dummy3.task_id].state == State.SKIPPED +# TODO(potiuk): unquarantine me when we get rid of that pesky 195 -> 196 problem! +@pytest.mark.quarantined class TestSchedulerJobQueriesCount(unittest.TestCase): """ These tests are designed to detect changes in the number of queries for @@ -3996,27 +4178,36 @@ class TestSchedulerJobQueriesCount(unittest.TestCase): made that affects the performance of the SchedulerJob. """ - def setUp(self) -> None: + @staticmethod + def clean_db(): clear_db_runs() clear_db_pools() clear_db_dags() clear_db_sla_miss() clear_db_errors() + clear_db_jobs() clear_db_serialized_dags() - clear_db_dags() + + def setUp(self) -> None: + self.clean_db() + + def tearDown(self): + if self.scheduler_job and self.scheduler_job.processor_agent: + self.scheduler_job.processor_agent.end() + self.scheduler_job = None + self.clean_db() @parameterized.expand( [ # expected, dag_count, task_count # One DAG with one task per DAG file - (23, 1, 1), # noqa + (24, 1, 1), # noqa # One DAG with five tasks per DAG file - (23, 1, 5), # noqa + (28, 1, 5), # noqa # 10 DAGs with 10 tasks per DAG file - (95, 10, 10), # noqa + (195, 10, 10), # noqa ] ) - @pytest.mark.quarantined def test_execute_queries_count_with_harvested_dags(self, expected_query_count, dag_count, task_count): with mock.patch.dict( "os.environ", @@ -4031,10 +4222,13 @@ def test_execute_queries_count_with_harvested_dags(self, expected_query_count, d { ('scheduler', 'use_job_schedule'): 'True', ('core', 'load_examples'): 'False', - ('core', 'store_serialized_dags'): 'True', + # For longer running tests under heavy load, the min_serialized_dag_fetch_interval + # and min_serialized_dag_update_interval might kick in and re-retrieve the record.
+ # This will increase the count of serialized_dag.py get() calls. + # That's why we keep the values high. + ('core', 'min_serialized_dag_update_interval'): '100', + ('core', 'min_serialized_dag_fetch_interval'): '100', } - ), mock.patch.object( - settings, 'STORE_SERIALIZED_DAGS', True ): dagruns = [] dagbag = DagBag(dag_folder=ELASTIC_DAG_FILE, include_examples=False, read_dags_from_db=False) @@ -4055,53 +4249,52 @@ def test_execute_queries_count_with_harvested_dags(self, expected_query_count, d mock_agent = mock.MagicMock() - job = SchedulerJob(subdir=PERF_DAGS_FOLDER, num_runs=1) - job.executor = MockExecutor(do_update=False) - job.heartbeat = mock.MagicMock() - job.processor_agent = mock_agent + self.scheduler_job = SchedulerJob(subdir=PERF_DAGS_FOLDER, num_runs=1) + self.scheduler_job.executor = MockExecutor(do_update=False) + self.scheduler_job.heartbeat = mock.MagicMock() + self.scheduler_job.processor_agent = mock_agent with assert_queries_count(expected_query_count): with mock.patch.object(DagRun, 'next_dagruns_to_examine') as mock_dagruns: mock_dagruns.return_value = dagruns - job._run_scheduler_loop() + self.scheduler_job._run_scheduler_loop() @parameterized.expand( [ # expected, dag_count, task_count, start_ago, schedule_interval, shape # One DAG with one task per DAG file - ([8, 8, 8, 8], 1, 1, "1d", "None", "no_structure"), # noqa - ([8, 8, 8, 8], 1, 1, "1d", "None", "linear"), # noqa - ([20, 11, 11, 11], 1, 1, "1d", "@once", "no_structure"), # noqa - ([20, 11, 11, 11], 1, 1, "1d", "@once", "linear"), # noqa - ([20, 21, 23, 25], 1, 1, "1d", "30m", "no_structure"), # noqa - ([20, 21, 23, 25], 1, 1, "1d", "30m", "linear"), # noqa - ([20, 21, 23, 25], 1, 1, "1d", "30m", "binary_tree"), # noqa - ([20, 21, 23, 25], 1, 1, "1d", "30m", "star"), # noqa - ([20, 21, 23, 25], 1, 1, "1d", "30m", "grid"), # noqa + ([9, 9, 9, 9], 1, 1, "1d", "None", "no_structure"), # noqa + ([9, 9, 9, 9], 1, 1, "1d", "None", "linear"), # noqa + ([21, 12, 12, 12], 1, 1, "1d", "@once", "no_structure"), # noqa + ([21, 12, 12, 12], 1, 1, "1d", "@once", "linear"), # noqa + ([21, 22, 24, 26], 1, 1, "1d", "30m", "no_structure"), # noqa + ([21, 22, 24, 26], 1, 1, "1d", "30m", "linear"), # noqa + ([21, 22, 24, 26], 1, 1, "1d", "30m", "binary_tree"), # noqa + ([21, 22, 24, 26], 1, 1, "1d", "30m", "star"), # noqa + ([21, 22, 24, 26], 1, 1, "1d", "30m", "grid"), # noqa # One DAG with five tasks per DAG file - ([8, 8, 8, 8], 1, 5, "1d", "None", "no_structure"), # noqa - ([8, 8, 8, 8], 1, 5, "1d", "None", "linear"), # noqa - ([20, 11, 11, 11], 1, 5, "1d", "@once", "no_structure"), # noqa - ([21, 12, 12, 12], 1, 5, "1d", "@once", "linear"), # noqa - ([20, 21, 23, 25], 1, 5, "1d", "30m", "no_structure"), # noqa - ([21, 23, 26, 29], 1, 5, "1d", "30m", "linear"), # noqa - ([21, 23, 26, 29], 1, 5, "1d", "30m", "binary_tree"), # noqa - ([21, 23, 26, 29], 1, 5, "1d", "30m", "star"), # noqa - ([21, 23, 26, 29], 1, 5, "1d", "30m", "grid"), # noqa + ([9, 9, 9, 9], 1, 5, "1d", "None", "no_structure"), # noqa + ([9, 9, 9, 9], 1, 5, "1d", "None", "linear"), # noqa + ([21, 12, 12, 12], 1, 5, "1d", "@once", "no_structure"), # noqa + ([22, 13, 13, 13], 1, 5, "1d", "@once", "linear"), # noqa + ([21, 22, 24, 26], 1, 5, "1d", "30m", "no_structure"), # noqa + ([22, 24, 27, 30], 1, 5, "1d", "30m", "linear"), # noqa + ([22, 24, 27, 30], 1, 5, "1d", "30m", "binary_tree"), # noqa + ([22, 24, 27, 30], 1, 5, "1d", "30m", "star"), # noqa + ([22, 24, 27, 30], 1, 5, "1d", "30m", "grid"), # noqa # 10 DAGs with 10 tasks per DAG file - ([8, 8, 8, 8],
10, 10, "1d", "None", "no_structure"), # noqa - ([8, 8, 8, 8], 10, 10, "1d", "None", "linear"), # noqa - ([83, 26, 26, 26], 10, 10, "1d", "@once", "no_structure"), # noqa - ([93, 39, 39, 39], 10, 10, "1d", "@once", "linear"), # noqa - ([83, 87, 87, 87], 10, 10, "1d", "30m", "no_structure"), # noqa - ([93, 113, 113, 113], 10, 10, "1d", "30m", "linear"), # noqa - ([93, 107, 107, 107], 10, 10, "1d", "30m", "binary_tree"), # noqa - ([93, 107, 107, 107], 10, 10, "1d", "30m", "star"), # noqa - ([93, 107, 107, 107], 10, 10, "1d", "30m", "grid"), # noqa + ([9, 9, 9, 9], 10, 10, "1d", "None", "no_structure"), # noqa + ([9, 9, 9, 9], 10, 10, "1d", "None", "linear"), # noqa + ([84, 27, 27, 27], 10, 10, "1d", "@once", "no_structure"), # noqa + ([94, 40, 40, 40], 10, 10, "1d", "@once", "linear"), # noqa + ([84, 88, 88, 88], 10, 10, "1d", "30m", "no_structure"), # noqa + ([94, 114, 114, 114], 10, 10, "1d", "30m", "linear"), # noqa + ([94, 108, 108, 108], 10, 10, "1d", "30m", "binary_tree"), # noqa + ([94, 108, 108, 108], 10, 10, "1d", "30m", "star"), # noqa + ([94, 108, 108, 108], 10, 10, "1d", "30m", "grid"), # noqa ] ) - @pytest.mark.quarantined def test_process_dags_queries_count( self, expected_query_counts, dag_count, task_count, start_ago, schedule_interval, shape ): @@ -4118,6 +4311,12 @@ def test_process_dags_queries_count( { ('scheduler', 'use_job_schedule'): 'True', ('core', 'store_serialized_dags'): 'True', + # For longer running tests under heavy load, the min_serialized_dag_fetch_interval + # and min_serialized_dag_update_interval might kick-in and re-retrieve the record. + # This will increase the count of serliazied_dag.py.get() count. + # That's why we keep the values high + ('core', 'min_serialized_dag_update_interval'): '100', + ('core', 'min_serialized_dag_fetch_interval'): '100', } ): @@ -4126,11 +4325,11 @@ def test_process_dags_queries_count( mock_agent = mock.MagicMock() - job = SchedulerJob(subdir=PERF_DAGS_FOLDER, num_runs=1) - job.executor = MockExecutor(do_update=False) - job.heartbeat = mock.MagicMock() - job.processor_agent = mock_agent + self.scheduler_job = SchedulerJob(subdir=PERF_DAGS_FOLDER, num_runs=1) + self.scheduler_job.executor = MockExecutor(do_update=False) + self.scheduler_job.heartbeat = mock.MagicMock() + self.scheduler_job.processor_agent = mock_agent for expected_query_count in expected_query_counts: with create_session() as session: with assert_queries_count(expected_query_count): - job._do_scheduling(session) + self.scheduler_job._do_scheduling(session) diff --git a/tests/kubernetes/test_pod_generator.py b/tests/kubernetes/test_pod_generator.py index 17a942a5a1352..da2a48867f3d4 100644 --- a/tests/kubernetes/test_pod_generator.py +++ b/tests/kubernetes/test_pod_generator.py @@ -500,6 +500,9 @@ def test_ensure_max_label_length(self, mock_uuid): for _, v in result.metadata.labels.items(): assert len(v) <= 63 + assert 'a' * 512 == result.metadata.annotations['dag_id'] + assert 'a' * 512 == result.metadata.annotations['task_id'] + def test_merge_objects_empty(self): annotations = {'foo1': 'bar1'} base_obj = k8s.V1ObjectMeta(annotations=annotations) diff --git a/tests/kubernetes/test_pod_launcher.py b/tests/kubernetes/test_pod_launcher.py index 9e7cc82651d9a..6e4026462b0a1 100644 --- a/tests/kubernetes/test_pod_launcher.py +++ b/tests/kubernetes/test_pod_launcher.py @@ -21,7 +21,7 @@ from requests.exceptions import BaseHTTPError from airflow.exceptions import AirflowException -from airflow.kubernetes.pod_launcher import PodLauncher +from 
airflow.kubernetes.pod_launcher import PodLauncher, PodStatus class TestPodLauncher(unittest.TestCase): @@ -170,6 +170,22 @@ def test_read_pod_retries_successfully(self): ] ) + def test_monitor_pod_empty_logs(self): + mock.sentinel.metadata = mock.MagicMock() + running_status = mock.MagicMock() + running_status.configure_mock(**{'name': 'base', 'state.running': True}) + pod_info_running = mock.MagicMock(**{'status.container_statuses': [running_status]}) + pod_info_succeeded = mock.MagicMock(**{'status.phase': PodStatus.SUCCEEDED}) + + def pod_state_gen(): + yield pod_info_running + while True: + yield pod_info_succeeded + + self.mock_kube_client.read_namespaced_pod.side_effect = pod_state_gen() + self.mock_kube_client.read_namespaced_pod_log.return_value = iter(()) + self.pod_launcher.monitor_pod(mock.sentinel, get_logs=True) + def test_read_pod_retries_fails(self): mock.sentinel.metadata = mock.MagicMock() self.mock_kube_client.read_namespaced_pod.side_effect = [ diff --git a/tests/lineage/test_lineage.py b/tests/lineage/test_lineage.py index 350a8be29a4d5..b5ebbea1efa0e 100644 --- a/tests/lineage/test_lineage.py +++ b/tests/lineage/test_lineage.py @@ -16,16 +16,24 @@ # specific language governing permissions and limitations # under the License. import unittest +from unittest import mock -from airflow.lineage import AUTO +from airflow.lineage import AUTO, apply_lineage, get_backend, prepare_lineage +from airflow.lineage.backend import LineageBackend from airflow.lineage.entities import File from airflow.models import DAG, TaskInstance as TI from airflow.operators.dummy import DummyOperator from airflow.utils import timezone +from tests.test_utils.config import conf_vars DEFAULT_DATE = timezone.datetime(2016, 1, 1) +class CustomLineageBackend(LineageBackend): + def send_lineage(self, operator, inlets=None, outlets=None, context=None): + pass + + class TestLineage(unittest.TestCase): def test_lineage(self): dag = DAG(dag_id='test_prepare_lineage', start_date=DEFAULT_DATE) @@ -111,3 +119,42 @@ def test_lineage_render(self): op1.pre_execute(ctx1) assert op1.inlets[0].url == f1s.format(DEFAULT_DATE) assert op1.outlets[0].url == f1s.format(DEFAULT_DATE) + + @mock.patch("airflow.lineage.get_backend") + def test_lineage_is_sent_to_backend(self, mock_get_backend): + class TestBackend(LineageBackend): + def send_lineage(self, operator, inlets=None, outlets=None, context=None): + assert len(inlets) == 1 + assert len(outlets) == 1 + + func = mock.Mock() + func.__name__ = 'foo' + + mock_get_backend.return_value = TestBackend() + + dag = DAG(dag_id='test_lineage_is_sent_to_backend', start_date=DEFAULT_DATE) + + with dag: + op1 = DummyOperator(task_id='task1') + + file1 = File("/tmp/some_file") + + op1.inlets.append(file1) + op1.outlets.append(file1) + + ctx1 = {"ti": TI(task=op1, execution_date=DEFAULT_DATE), "execution_date": DEFAULT_DATE} + + prep = prepare_lineage(func) + prep(op1, ctx1) + post = apply_lineage(func) + post(op1, ctx1) + + def test_empty_lineage_backend(self): + backend = get_backend() + assert backend is None + + @conf_vars({("lineage", "backend"): "tests.lineage.test_lineage.CustomLineageBackend"}) + def test_resolve_lineage_class(self): + backend = get_backend() + assert issubclass(backend.__class__, LineageBackend) + assert isinstance(backend, CustomLineageBackend) diff --git a/tests/models/test_cleartasks.py b/tests/models/test_cleartasks.py index f54bacce339f4..1c5606e66acd8 100644 --- a/tests/models/test_cleartasks.py +++ b/tests/models/test_cleartasks.py @@ -20,8 +20,9 @@ 
import unittest from airflow import settings -from airflow.models import DAG, TaskInstance as TI, clear_task_instances +from airflow.models import DAG, TaskInstance as TI, TaskReschedule, clear_task_instances from airflow.operators.dummy import DummyOperator +from airflow.sensors.python import PythonSensor from airflow.utils.session import create_session from airflow.utils.state import State from airflow.utils.types import DagRunType @@ -138,6 +139,50 @@ def test_clear_task_instances_without_dag(self): assert ti1.try_number == 2 assert ti1.max_tries == 2 + def test_clear_task_instances_with_task_reschedule(self): + """Test that TaskReschedules are deleted correctly when TaskInstances are cleared""" + + with DAG( + 'test_clear_task_instances_with_task_reschedule', + start_date=DEFAULT_DATE, + end_date=DEFAULT_DATE + datetime.timedelta(days=10), + ) as dag: + task0 = PythonSensor(task_id='0', python_callable=lambda: False, mode="reschedule") + task1 = PythonSensor(task_id='1', python_callable=lambda: False, mode="reschedule") + + ti0 = TI(task=task0, execution_date=DEFAULT_DATE) + ti1 = TI(task=task1, execution_date=DEFAULT_DATE) + + dag.create_dagrun( + execution_date=ti0.execution_date, + state=State.RUNNING, + run_type=DagRunType.SCHEDULED, + ) + + ti0.run() + ti1.run() + + with create_session() as session: + + def count_task_reschedule(task_id): + return ( + session.query(TaskReschedule) + .filter( + TaskReschedule.dag_id == dag.dag_id, + TaskReschedule.task_id == task_id, + TaskReschedule.execution_date == DEFAULT_DATE, + TaskReschedule.try_number == 1, + ) + .count() + ) + + assert count_task_reschedule(ti0.task_id) == 1 + assert count_task_reschedule(ti1.task_id) == 1 + qry = session.query(TI).filter(TI.dag_id == dag.dag_id, TI.task_id == ti0.task_id).all() + clear_task_instances(qry, session, dag=dag) + assert count_task_reschedule(ti0.task_id) == 0 + assert count_task_reschedule(ti1.task_id) == 1 + def test_dag_clear(self): dag = DAG( 'test_dag_clear', start_date=DEFAULT_DATE, end_date=DEFAULT_DATE + datetime.timedelta(days=10) diff --git a/tests/models/test_dag.py b/tests/models/test_dag.py index 60171d8989bc4..0aae3712f4a0d 100644 --- a/tests/models/test_dag.py +++ b/tests/models/test_dag.py @@ -646,15 +646,19 @@ def test_bulk_write_to_db(self): ('dag-bulk-sync-2', 'test-dag'), ('dag-bulk-sync-3', 'test-dag'), } == set(session.query(DagTag.dag_id, DagTag.name).all()) + + for row in session.query(DagModel.last_parsed_time).all(): + assert row[0] is not None + # Re-sync should do fewer queries - with assert_queries_count(3): + with assert_queries_count(4): DAG.bulk_write_to_db(dags) - with assert_queries_count(3): + with assert_queries_count(4): DAG.bulk_write_to_db(dags) # Adding tags for dag in dags: dag.tags.append("test-dag2") - with assert_queries_count(4): + with assert_queries_count(5): DAG.bulk_write_to_db(dags) with create_session() as session: assert {'dag-bulk-sync-0', 'dag-bulk-sync-1', 'dag-bulk-sync-2', 'dag-bulk-sync-3'} == { @@ -673,7 +677,7 @@ def test_bulk_write_to_db(self): # Removing tags for dag in dags: dag.tags.remove("test-dag") - with assert_queries_count(4): + with assert_queries_count(5): DAG.bulk_write_to_db(dags) with create_session() as session: assert {'dag-bulk-sync-0', 'dag-bulk-sync-1', 'dag-bulk-sync-2', 'dag-bulk-sync-3'} == { @@ -686,6 +690,9 @@ def test_bulk_write_to_db(self): ('dag-bulk-sync-3', 'test-dag2'), } == set(session.query(DagTag.dag_id, DagTag.name).all()) + for row in session.query(DagModel.last_parsed_time).all(): + assert row[0] is 
not None + def test_bulk_write_to_db_max_active_runs(self): """ Test that DagModel.next_dagrun_create_after is set to NULL when the dag cannot be created due to max @@ -1298,6 +1305,61 @@ def test_clear_set_dagrun_state(self, dag_run_state): dagrun = dagruns[0] # type: DagRun assert dagrun.state == dag_run_state + @parameterized.expand( + [ + (State.NONE,), + (State.RUNNING,), + ] + ) + def test_clear_set_dagrun_state_for_subdag(self, dag_run_state): + dag_id = 'test_clear_set_dagrun_state_subdag' + self._clean_up(dag_id) + task_id = 't1' + dag = DAG(dag_id, start_date=DEFAULT_DATE, max_active_runs=1) + t_1 = DummyOperator(task_id=task_id, dag=dag) + subdag = DAG(dag_id + '.test', start_date=DEFAULT_DATE, max_active_runs=1) + SubDagOperator(task_id='test', subdag=subdag, dag=dag) + t_2 = DummyOperator(task_id='task', dag=subdag) + + session = settings.Session() + dagrun_1 = dag.create_dagrun( + run_type=DagRunType.BACKFILL_JOB, + state=State.FAILED, + start_date=DEFAULT_DATE, + execution_date=DEFAULT_DATE, + ) + dagrun_2 = subdag.create_dagrun( + run_type=DagRunType.BACKFILL_JOB, + state=State.FAILED, + start_date=DEFAULT_DATE, + execution_date=DEFAULT_DATE, + ) + session.merge(dagrun_1) + session.merge(dagrun_2) + task_instance_1 = TI(t_1, execution_date=DEFAULT_DATE, state=State.RUNNING) + task_instance_2 = TI(t_2, execution_date=DEFAULT_DATE, state=State.RUNNING) + session.merge(task_instance_1) + session.merge(task_instance_2) + session.commit() + + dag.clear( + start_date=DEFAULT_DATE, + end_date=DEFAULT_DATE + datetime.timedelta(days=1), + dag_run_state=dag_run_state, + include_subdags=True, + include_parentdag=False, + session=session, + ) + + dagrun = ( + session.query( + DagRun, + ) + .filter(DagRun.dag_id == subdag.dag_id) + .one() + ) + assert dagrun.state == dag_run_state + @parameterized.expand( [(state, State.NONE) for state in State.task_states if state != State.RUNNING] + [(State.RUNNING, State.SHUTDOWN)] diff --git a/tests/models/test_dagbag.py b/tests/models/test_dagbag.py index 6c6b1cb9dddcb..95d0e7936763a 100644 --- a/tests/models/test_dagbag.py +++ b/tests/models/test_dagbag.py @@ -323,27 +323,27 @@ def standard_subdag(): from airflow.operators.dummy import DummyOperator from airflow.operators.subdag import SubDagOperator - dag_name = 'master' + dag_name = 'parent' default_args = {'owner': 'owner1', 'start_date': datetime.datetime(2016, 1, 1)} dag = DAG(dag_name, default_args=default_args) - # master: + # parent: # A -> opSubDag_0 - # master.opsubdag_0: + # parent.opsubdag_0: # -> subdag_0.task # A -> opSubDag_1 - # master.opsubdag_1: + # parent.opsubdag_1: # -> subdag_1.task with dag: def subdag_0(): - subdag_0 = DAG('master.op_subdag_0', default_args=default_args) + subdag_0 = DAG('parent.op_subdag_0', default_args=default_args) DummyOperator(task_id='subdag_0.task', dag=subdag_0) return subdag_0 def subdag_1(): - subdag_1 = DAG('master.op_subdag_1', default_args=default_args) + subdag_1 = DAG('parent.op_subdag_1', default_args=default_args) DummyOperator(task_id='subdag_1.task', dag=subdag_1) return subdag_1 @@ -374,58 +374,58 @@ def nested_subdags(): from airflow.operators.dummy import DummyOperator from airflow.operators.subdag import SubDagOperator - dag_name = 'master' + dag_name = 'parent' default_args = {'owner': 'owner1', 'start_date': datetime.datetime(2016, 1, 1)} dag = DAG(dag_name, default_args=default_args) - # master: + # parent: # A -> op_subdag_0 - # master.op_subdag_0: + # parent.op_subdag_0: # -> opSubDag_A - # master.op_subdag_0.opSubdag_A: + # 
parent.op_subdag_0.opSubdag_A: # -> subdag_a.task # -> opSubdag_B - # master.op_subdag_0.opSubdag_B: + # parent.op_subdag_0.opSubdag_B: # -> subdag_b.task # A -> op_subdag_1 - # master.op_subdag_1: + # parent.op_subdag_1: # -> opSubdag_C - # master.op_subdag_1.opSubdag_C: + # parent.op_subdag_1.opSubdag_C: # -> subdag_c.task # -> opSubDag_D - # master.op_subdag_1.opSubdag_D: + # parent.op_subdag_1.opSubdag_D: # -> subdag_d.task with dag: def subdag_a(): - subdag_a = DAG('master.op_subdag_0.opSubdag_A', default_args=default_args) + subdag_a = DAG('parent.op_subdag_0.opSubdag_A', default_args=default_args) DummyOperator(task_id='subdag_a.task', dag=subdag_a) return subdag_a def subdag_b(): - subdag_b = DAG('master.op_subdag_0.opSubdag_B', default_args=default_args) + subdag_b = DAG('parent.op_subdag_0.opSubdag_B', default_args=default_args) DummyOperator(task_id='subdag_b.task', dag=subdag_b) return subdag_b def subdag_c(): - subdag_c = DAG('master.op_subdag_1.opSubdag_C', default_args=default_args) + subdag_c = DAG('parent.op_subdag_1.opSubdag_C', default_args=default_args) DummyOperator(task_id='subdag_c.task', dag=subdag_c) return subdag_c def subdag_d(): - subdag_d = DAG('master.op_subdag_1.opSubdag_D', default_args=default_args) + subdag_d = DAG('parent.op_subdag_1.opSubdag_D', default_args=default_args) DummyOperator(task_id='subdag_d.task', dag=subdag_d) return subdag_d def subdag_0(): - subdag_0 = DAG('master.op_subdag_0', default_args=default_args) + subdag_0 = DAG('parent.op_subdag_0', default_args=default_args) SubDagOperator(task_id='opSubdag_A', dag=subdag_0, subdag=subdag_a()) SubDagOperator(task_id='opSubdag_B', dag=subdag_0, subdag=subdag_b()) return subdag_0 def subdag_1(): - subdag_1 = DAG('master.op_subdag_1', default_args=default_args) + subdag_1 = DAG('parent.op_subdag_1', default_args=default_args) SubDagOperator(task_id='opSubdag_C', dag=subdag_1, subdag=subdag_c()) SubDagOperator(task_id='opSubdag_D', dag=subdag_1, subdag=subdag_d()) return subdag_1 diff --git a/tests/models/test_taskinstance.py b/tests/models/test_taskinstance.py index cd99b020d012b..43a37565510fe 100644 --- a/tests/models/test_taskinstance.py +++ b/tests/models/test_taskinstance.py @@ -1488,6 +1488,39 @@ def test_previous_start_date_success(self, _, schedule_interval, catchup) -> Non assert ti_list[3].get_previous_start_date(state=State.SUCCESS) == ti_list[1].start_date assert ti_list[3].get_previous_start_date(state=State.SUCCESS) != ti_list[2].start_date + def test_get_previous_start_date_none(self): + """ + Test that get_previous_start_date() can handle TaskInstance with no start_date. + """ + with DAG("test_get_previous_start_date_none", start_date=DEFAULT_DATE, schedule_interval=None) as dag: + task = DummyOperator(task_id="op") + + day_1 = DEFAULT_DATE + day_2 = DEFAULT_DATE + datetime.timedelta(days=1) + + # Create a DagRun for day_1 and day_2. Calling ti_2.get_previous_start_date() + # should return the start_date of ti_1 (which is None because ti_1 was not run). + # It should not raise an error. 
+ dagrun_1 = dag.create_dagrun( + execution_date=day_1, + state=State.RUNNING, + run_type=DagRunType.MANUAL, + ) + + dagrun_2 = dag.create_dagrun( + execution_date=day_2, + state=State.RUNNING, + run_type=DagRunType.MANUAL, + ) + + ti_1 = dagrun_1.get_task_instance(task.task_id) + ti_2 = dagrun_2.get_task_instance(task.task_id) + ti_1.task = task + ti_2.task = task + + assert ti_2.get_previous_start_date() == ti_1.start_date + assert ti_1.start_date is None + def test_pendulum_template_dates(self): dag = models.DAG( dag_id='test_pendulum_template_dates', @@ -1938,8 +1971,8 @@ def test_fast_follow( for upstream, downstream in dependencies.items(): dag.set_dependency(upstream, downstream) - scheduler = SchedulerJob(subdir=os.devnull) - scheduler.dagbag.bag_dag(dag, root_dag=dag) + scheduler_job = SchedulerJob(subdir=os.devnull) + scheduler_job.dagbag.bag_dag(dag, root_dag=dag) dag_run = dag.create_dagrun(run_id='test_dagrun_fast_follow', state=State.RUNNING) @@ -1966,9 +1999,11 @@ def test_fast_follow( self.validate_ti_states(dag_run, first_run_state, error_message) if second_run_state: - scheduler._critical_section_execute_task_instances(session=session) + scheduler_job._critical_section_execute_task_instances(session=session) task_instance_b.run() self.validate_ti_states(dag_run, second_run_state, error_message) + if scheduler_job.processor_agent: + scheduler_job.processor_agent.end() def test_set_state_up_for_retry(self): dag = DAG('dag', start_date=DEFAULT_DATE) diff --git a/tests/operators/test_generic_transfer.py b/tests/operators/test_generic_transfer.py index a94b56ff00427..4780b41b54bc1 100644 --- a/tests/operators/test_generic_transfer.py +++ b/tests/operators/test_generic_transfer.py @@ -17,6 +17,7 @@ # under the License. import unittest +from contextlib import closing import pytest from parameterized import parameterized @@ -43,9 +44,11 @@ def setUp(self): def tearDown(self): drop_tables = {'test_mysql_to_mysql', 'test_airflow'} - with MySqlHook().get_conn() as conn: + with closing(MySqlHook().get_conn()) as conn: for table in drop_tables: - conn.execute(f"DROP TABLE IF EXISTS {table}") + # The previous version ran execute() directly on the DBAPI connection, which only worked by accident + with closing(conn.cursor()) as cur: + cur.execute(f"DROP TABLE IF EXISTS {table}") @parameterized.expand( [ diff --git a/tests/plugins/test_plugin.py b/tests/plugins/test_plugin.py index ae725f32f07c8..ca02a39ee5878 100644 --- a/tests/plugins/test_plugin.py +++ b/tests/plugins/test_plugin.py @@ -77,12 +77,16 @@ def test(self): v_nomenu_appbuilder_package = {"view": v_appbuilder_view} -# Creating a flask appbuilder Menu Item +# Creating flask appbuilder Menu Items appbuilder_mitem = { "name": "Google", - "category": "Search", - "category_icon": "fa-th", "href": "https://www.google.com", + "category": "Search", +} +appbuilder_mitem_toplevel = { + "name": "apache", + "href": "https://www.apache.org/", + "label": "The Apache Software Foundation", } # Creating a flask blueprint to intergrate the templates and static folder @@ -105,7 +109,7 @@ class AirflowTestPlugin(AirflowPlugin): macros = [plugin_macro] flask_blueprints = [bp] appbuilder_views = [v_appbuilder_package] - appbuilder_menu_items = [appbuilder_mitem] + appbuilder_menu_items = [appbuilder_mitem, appbuilder_mitem_toplevel] global_operator_extra_links = [ AirflowLink(), GithubLink(), @@ -123,3 +127,10 @@ class MockPluginB(AirflowPlugin): class MockPluginC(AirflowPlugin): name = 'plugin-c' + + +class AirflowTestOnLoadPlugin(AirflowPlugin): +
name = 'preload' + + def on_load(self, *args, **kwargs): + self.name = 'postload' diff --git a/tests/plugins/test_plugins_manager.py b/tests/plugins/test_plugins_manager.py index d454754ca4555..7c4d86aa860b1 100644 --- a/tests/plugins/test_plugins_manager.py +++ b/tests/plugins/test_plugins_manager.py @@ -17,18 +17,33 @@ # under the License. import importlib import logging +import os import sys +import tempfile import unittest from unittest import mock +import pytest + from airflow.hooks.base import BaseHook from airflow.plugins_manager import AirflowPlugin from airflow.www import app as application +from tests.test_utils.config import conf_vars from tests.test_utils.mock_plugins import mock_plugin_manager py39 = sys.version_info >= (3, 9) importlib_metadata = 'importlib.metadata' if py39 else 'importlib_metadata' +ON_LOAD_EXCEPTION_PLUGIN = """ +from airflow.plugins_manager import AirflowPlugin + +class AirflowTestOnLoadExceptionPlugin(AirflowPlugin): + name = 'preload' + + def on_load(self, *args, **kwargs): + raise Exception("oops") +""" + class TestPluginsRBAC(unittest.TestCase): def setUp(self): @@ -77,21 +92,32 @@ class AirflowNoMenuViewsPlugin(AirflowPlugin): assert len(plugin_views) == 1 def test_flaskappbuilder_menu_links(self): - from tests.plugins.test_plugin import appbuilder_mitem + from tests.plugins.test_plugin import appbuilder_mitem, appbuilder_mitem_toplevel - # menu item should exist matching appbuilder_mitem - links = [ + # menu item (category) should exist matching appbuilder_mitem.category + categories = [ menu_item for menu_item in self.appbuilder.menu.menu if menu_item.name == appbuilder_mitem['category'] ] + assert len(categories) == 1 - assert len(links) == 1 + # menu link should be a child in the category + category = categories[0] + assert category.name == appbuilder_mitem['category'] + assert category.childs[0].name == appbuilder_mitem['name'] + assert category.childs[0].href == appbuilder_mitem['href'] - # menu link should also have a link matching the name of the package. 
- link = links[0] - assert link.name == appbuilder_mitem['category'] - assert link.childs[0].name == appbuilder_mitem['name'] + # a top level link isn't nested in a category + top_levels = [ + menu_item + for menu_item in self.appbuilder.menu.menu + if menu_item.name == appbuilder_mitem_toplevel['name'] + ] + assert len(top_levels) == 1 + link = top_levels[0] + assert link.href == appbuilder_mitem_toplevel['href'] + assert link.label == appbuilder_mitem_toplevel['label'] def test_app_blueprints(self): from tests.plugins.test_plugin import bp @@ -134,6 +160,40 @@ class TestPropertyHook(BaseHook): assert caplog.records[-1].levelname == 'DEBUG' assert caplog.records[-1].msg == 'Loading %d plugin(s) took %.2f seconds' + def test_loads_filesystem_plugins(self, caplog): + from airflow import plugins_manager + + with mock.patch('airflow.plugins_manager.plugins', []): + plugins_manager.load_plugins_from_plugin_directory() + + assert 5 == len(plugins_manager.plugins) + for plugin in plugins_manager.plugins: + if 'AirflowTestOnLoadPlugin' not in str(plugin): + continue + assert 'postload' == plugin.name + break + else: + pytest.fail("Wasn't able to find a registered `AirflowTestOnLoadPlugin`") + + assert caplog.record_tuples == [] + + def test_loads_filesystem_plugins_exception(self, caplog): + from airflow import plugins_manager + + with mock.patch('airflow.plugins_manager.plugins', []): + with tempfile.TemporaryDirectory() as tmpdir: + with open(os.path.join(tmpdir, 'testplugin.py'), "w") as f: + f.write(ON_LOAD_EXCEPTION_PLUGIN) + + with conf_vars({('core', 'plugins_folder'): tmpdir}): + plugins_manager.load_plugins_from_plugin_directory() + + assert plugins_manager.plugins == [] + + received_logs = caplog.text + assert 'Failed to import plugin' in received_logs + assert 'testplugin.py' in received_logs + def test_should_warning_about_incompatible_plugins(self, caplog): class AirflowAdminViewsPlugin(AirflowPlugin): name = "test_admin_views_plugin" diff --git a/tests/providers/airbyte/__init__.py b/tests/providers/airbyte/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/providers/airbyte/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/providers/airbyte/hooks/__init__.py b/tests/providers/airbyte/hooks/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/providers/airbyte/hooks/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/providers/airbyte/hooks/test_airbyte.py b/tests/providers/airbyte/hooks/test_airbyte.py new file mode 100644 index 0000000000000..09f10beffc255 --- /dev/null +++ b/tests/providers/airbyte/hooks/test_airbyte.py @@ -0,0 +1,126 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +import unittest +from unittest import mock + +import pytest +import requests_mock + +from airflow.exceptions import AirflowException +from airflow.models import Connection +from airflow.providers.airbyte.hooks.airbyte import AirbyteHook +from airflow.utils import db + + +class TestAirbyteHook(unittest.TestCase): + """ + Test all functions from Airbyte Hook + """ + + airbyte_conn_id = 'airbyte_conn_id_test' + connection_id = 'conn_test_sync' + job_id = 1 + sync_connection_endpoint = 'http://test-airbyte:8001/api/v1/connections/sync' + get_job_endpoint = 'http://test-airbyte:8001/api/v1/jobs/get' + _mock_sync_conn_success_response_body = {'job': {'id': 1}} + _mock_job_status_success_response_body = {'job': {'status': 'succeeded'}} + + def setUp(self): + db.merge_conn( + Connection( + conn_id='airbyte_conn_id_test', conn_type='http', host='http://test-airbyte', port=8001 + ) + ) + self.hook = AirbyteHook(airbyte_conn_id=self.airbyte_conn_id) + + def return_value_get_job(self, status): + response = mock.Mock() + response.json.return_value = {'job': {'status': status}} + return response + + @requests_mock.mock() + def test_submit_sync_connection(self, m): + m.post( + self.sync_connection_endpoint, status_code=200, json=self._mock_sync_conn_success_response_body + ) + resp = self.hook.submit_sync_connection(connection_id=self.connection_id) + assert resp.status_code == 200 + assert resp.json() == self._mock_sync_conn_success_response_body + + @requests_mock.mock() + def test_get_job_status(self, m): + m.post(self.get_job_endpoint, status_code=200, json=self._mock_job_status_success_response_body) + resp = self.hook.get_job(job_id=self.job_id) + assert resp.status_code == 200 + assert resp.json() == self._mock_job_status_success_response_body + + @mock.patch('airflow.providers.airbyte.hooks.airbyte.AirbyteHook.get_job') + def test_wait_for_job_succeeded(self, mock_get_job): + mock_get_job.side_effect 
= [self.return_value_get_job(self.hook.SUCCEEDED)] + self.hook.wait_for_job(job_id=self.job_id, wait_seconds=0) + mock_get_job.assert_called_once_with(job_id=self.job_id) + + @mock.patch('airflow.providers.airbyte.hooks.airbyte.AirbyteHook.get_job') + def test_wait_for_job_error(self, mock_get_job): + mock_get_job.side_effect = [ + self.return_value_get_job(self.hook.RUNNING), + self.return_value_get_job(self.hook.ERROR), + ] + with pytest.raises(AirflowException, match="Job failed"): + self.hook.wait_for_job(job_id=self.job_id, wait_seconds=0) + + calls = [mock.call(job_id=self.job_id), mock.call(job_id=self.job_id)] + assert mock_get_job.has_calls(calls) + + @mock.patch('airflow.providers.airbyte.hooks.airbyte.AirbyteHook.get_job') + def test_wait_for_job_timeout(self, mock_get_job): + mock_get_job.side_effect = [ + self.return_value_get_job(self.hook.PENDING), + self.return_value_get_job(self.hook.RUNNING), + self.return_value_get_job(self.hook.RUNNING), + ] + with pytest.raises(AirflowException, match="Timeout"): + self.hook.wait_for_job(job_id=self.job_id, wait_seconds=2, timeout=1) + + calls = [mock.call(job_id=self.job_id), mock.call(job_id=self.job_id), mock.call(job_id=self.job_id)] + assert mock_get_job.has_calls(calls) + + @mock.patch('airflow.providers.airbyte.hooks.airbyte.AirbyteHook.get_job') + def test_wait_for_job_state_unrecognized(self, mock_get_job): + mock_get_job.side_effect = [ + self.return_value_get_job(self.hook.RUNNING), + self.return_value_get_job("UNRECOGNIZED"), + ] + with pytest.raises(Exception, match="unexpected state"): + self.hook.wait_for_job(job_id=self.job_id, wait_seconds=0) + + calls = [mock.call(job_id=self.job_id), mock.call(job_id=self.job_id)] + assert mock_get_job.has_calls(calls) + + @mock.patch('airflow.providers.airbyte.hooks.airbyte.AirbyteHook.get_job') + def test_wait_for_job_cancelled(self, mock_get_job): + mock_get_job.side_effect = [ + self.return_value_get_job(self.hook.RUNNING), + self.return_value_get_job(self.hook.CANCELLED), + ] + with pytest.raises(AirflowException, match="Job was cancelled"): + self.hook.wait_for_job(job_id=self.job_id, wait_seconds=0) + + calls = [mock.call(job_id=self.job_id), mock.call(job_id=self.job_id)] + assert mock_get_job.has_calls(calls) diff --git a/tests/providers/airbyte/operators/__init__.py b/tests/providers/airbyte/operators/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/providers/airbyte/operators/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
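The wait_for_job tests above pin down only the hook's observable behaviour: RUNNING and PENDING keep the loop polling, SUCCEEDED ends it, and ERROR, CANCELLED, a timeout, and unrecognized states raise with the matched messages. Below is a minimal sketch of a polling loop consistent with those assertions; it is an illustration, not the provider's actual implementation, and the helper name wait_for_job_sketch plus the default wait_seconds value are assumptions.

import time

from airflow.exceptions import AirflowException


def wait_for_job_sketch(hook, job_id, wait_seconds=3, timeout=None):
    # Poll get_job() until a terminal state, giving up once `timeout` elapses.
    start = time.monotonic()
    while True:
        if timeout and time.monotonic() - start > timeout:
            raise AirflowException(f"Timeout: Airbyte job {job_id} is not ready after {timeout} seconds")
        time.sleep(wait_seconds)
        state = hook.get_job(job_id=job_id).json()["job"]["status"]
        if state in (hook.RUNNING, hook.PENDING):
            continue  # job still in flight, poll again
        if state == hook.SUCCEEDED:
            return
        if state == hook.ERROR:
            raise AirflowException(f"Job failed: job id {job_id}")
        if state == hook.CANCELLED:
            raise AirflowException(f"Job was cancelled: job id {job_id}")
        raise Exception(f"Encountered unexpected state `{state}` for job id `{job_id}`")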
diff --git a/tests/providers/airbyte/operators/test_airbyte.py b/tests/providers/airbyte/operators/test_airbyte.py new file mode 100644 index 0000000000000..bc56c5d38312b --- /dev/null +++ b/tests/providers/airbyte/operators/test_airbyte.py @@ -0,0 +1,55 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +import unittest +from unittest import mock + +from airflow.providers.airbyte.operators.airbyte import AirbyteTriggerSyncOperator + + +class TestAirbyteTriggerSyncOp(unittest.TestCase): + """ + Test execute function from Airbyte Operator + """ + + airbyte_conn_id = 'test_airbyte_conn_id' + connection_id = 'test_airbyte_connection' + job_id = 1 + wait_seconds = 0 + timeout = 360 + + @mock.patch('airflow.providers.airbyte.hooks.airbyte.AirbyteHook.submit_sync_connection') + @mock.patch('airflow.providers.airbyte.hooks.airbyte.AirbyteHook.wait_for_job', return_value=None) + def test_execute(self, mock_wait_for_job, mock_submit_sync_connection): + mock_submit_sync_connection.return_value = mock.Mock( + **{'json.return_value': {'job': {'id': self.job_id}}} + ) + + op = AirbyteTriggerSyncOperator( + task_id='test_Airbyte_op', + airbyte_conn_id=self.airbyte_conn_id, + connection_id=self.connection_id, + wait_seconds=self.wait_seconds, + timeout=self.timeout, + ) + op.execute({}) + + mock_submit_sync_connection.assert_called_once_with(connection_id=self.connection_id) + mock_wait_for_job.assert_called_once_with( + job_id=self.job_id, wait_seconds=self.wait_seconds, timeout=self.timeout + ) diff --git a/tests/providers/airbyte/sensors/__init__.py b/tests/providers/airbyte/sensors/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/providers/airbyte/sensors/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
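For orientation, here is a hedged usage sketch of the operator exercised above, wired into a DAG with only the constructor arguments the test passes; the dag_id and both connection id values are placeholders, not anything this change prescribes.

from airflow import DAG
from airflow.providers.airbyte.operators.airbyte import AirbyteTriggerSyncOperator
from airflow.utils.dates import days_ago

with DAG(dag_id="airbyte_sync_example", start_date=days_ago(1), schedule_interval=None) as dag:
    # Submits a sync for the given Airbyte connection, then waits for the
    # resulting job, polling every `wait_seconds` for up to `timeout` seconds.
    trigger_sync = AirbyteTriggerSyncOperator(
        task_id="airbyte_trigger_sync",
        airbyte_conn_id="airbyte_conn_id",  # placeholder: HTTP connection to the Airbyte API
        connection_id="test_airbyte_connection",  # placeholder: Airbyte connection to sync
        wait_seconds=3,
        timeout=3600,
    )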
diff --git a/tests/providers/airbyte/sensors/test_airbyte.py b/tests/providers/airbyte/sensors/test_airbyte.py new file mode 100644 index 0000000000000..5bd69b803ceeb --- /dev/null +++ b/tests/providers/airbyte/sensors/test_airbyte.py @@ -0,0 +1,93 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +import unittest +from unittest import mock + +import pytest + +from airflow import AirflowException +from airflow.providers.airbyte.sensors.airbyte import AirbyteJobSensor + + +class TestAirbyteJobSensor(unittest.TestCase): + + task_id = "task-id" + airbyte_conn_id = "airbyte-conn-test" + job_id = 1 + timeout = 120 + + def get_job(self, status): + response = mock.Mock() + response.json.return_value = {'job': {'status': status}} + return response + + @mock.patch('airflow.providers.airbyte.hooks.airbyte.AirbyteHook.get_job') + def test_done(self, mock_get_job): + mock_get_job.return_value = self.get_job('succeeded') + + sensor = AirbyteJobSensor( + task_id=self.task_id, + airbyte_job_id=self.job_id, + airbyte_conn_id=self.airbyte_conn_id, + ) + ret = sensor.poke(context={}) + mock_get_job.assert_called_once_with(job_id=self.job_id) + assert ret + + @mock.patch('airflow.providers.airbyte.hooks.airbyte.AirbyteHook.get_job') + def test_failed(self, mock_get_job): + mock_get_job.return_value = self.get_job('failed') + + sensor = AirbyteJobSensor( + task_id=self.task_id, + airbyte_job_id=self.job_id, + airbyte_conn_id=self.airbyte_conn_id, + ) + with pytest.raises(AirflowException, match="Job failed"): + sensor.poke(context={}) + + mock_get_job.assert_called_once_with(job_id=self.job_id) + + @mock.patch('airflow.providers.airbyte.hooks.airbyte.AirbyteHook.get_job') + def test_running(self, mock_get_job): + mock_get_job.return_value = self.get_job('running') + + sensor = AirbyteJobSensor( + task_id=self.task_id, + airbyte_job_id=self.job_id, + airbyte_conn_id=self.airbyte_conn_id, + ) + ret = sensor.poke(context={}) + + mock_get_job.assert_called_once_with(job_id=self.job_id) + + assert not ret + + @mock.patch('airflow.providers.airbyte.hooks.airbyte.AirbyteHook.get_job') + def test_cancelled(self, mock_get_job): + mock_get_job.return_value = self.get_job('cancelled') + + sensor = AirbyteJobSensor( + task_id=self.task_id, + airbyte_job_id=self.job_id, + airbyte_conn_id=self.airbyte_conn_id, + ) + with pytest.raises(AirflowException, match="Job was cancelled"): + sensor.poke(context={}) + + mock_get_job.assert_called_once_with(job_id=self.job_id) diff --git a/tests/providers/amazon/aws/.gitignore b/tests/providers/amazon/aws/.gitignore new file mode 100644 index 0000000000000..40830374235df --- /dev/null +++ b/tests/providers/amazon/aws/.gitignore @@ -0,0 +1 @@ +local diff --git a/tests/providers/amazon/aws/hooks/test_glacier.py 
b/tests/providers/amazon/aws/hooks/test_glacier.py index c1c86a5d9a7a0..c22620f3c0d84 100644 --- a/tests/providers/amazon/aws/hooks/test_glacier.py +++ b/tests/providers/amazon/aws/hooks/test_glacier.py @@ -19,8 +19,6 @@ import unittest from unittest import mock -from testfixtures import LogCapture - from airflow.providers.amazon.aws.hooks.glacier import GlacierHook CREDENTIALS = "aws_conn" @@ -52,26 +50,20 @@ def test_retrieve_inventory_should_log_mgs(self, mock_conn): # given job_id = {"jobId": "1234abcd"} # when - with LogCapture() as log: + with self.assertLogs() as log: mock_conn.return_value.initiate_job.return_value = job_id self.hook.retrieve_inventory(VAULT_NAME) # then - log.check( - ( - 'airflow.providers.amazon.aws.hooks.glacier.GlacierHook', - 'INFO', - f"Retrieving inventory for vault: {VAULT_NAME}", - ), - ( - 'airflow.providers.amazon.aws.hooks.glacier.GlacierHook', - 'INFO', - f"Initiated inventory-retrieval job for: {VAULT_NAME}", - ), - ( - 'airflow.providers.amazon.aws.hooks.glacier.GlacierHook', - 'INFO', - f"Retrieval Job ID: {job_id.get('jobId')}", - ), + self.assertEqual( + log.output, + [ + 'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:' + + f"Retrieving inventory for vault: {VAULT_NAME}", + 'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:' + + f"Initiated inventory-retrieval job for: {VAULT_NAME}", + 'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:' + + f"Retrieval Job ID: {job_id.get('jobId')}", + ], ) @mock.patch("airflow.providers.amazon.aws.hooks.glacier.GlacierHook.get_conn") @@ -86,16 +78,16 @@ def test_retrieve_inventory_results_should_return_response(self, mock_conn): @mock.patch("airflow.providers.amazon.aws.hooks.glacier.GlacierHook.get_conn") def test_retrieve_inventory_results_should_log_mgs(self, mock_conn): # when - with LogCapture() as log: + with self.assertLogs() as log: mock_conn.return_value.get_job_output.return_value = REQUEST_RESULT self.hook.retrieve_inventory_results(VAULT_NAME, JOB_ID) # then - log.check( - ( - 'airflow.providers.amazon.aws.hooks.glacier.GlacierHook', - 'INFO', - f"Retrieving the job results for vault: {VAULT_NAME}...", - ), + self.assertEqual( + log.output, + [ + 'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:' + + f"Retrieving the job results for vault: {VAULT_NAME}...", + ], ) @mock.patch("airflow.providers.amazon.aws.hooks.glacier.GlacierHook.get_conn") @@ -110,19 +102,16 @@ def test_describe_job_should_return_status_succeeded(self, mock_conn): @mock.patch("airflow.providers.amazon.aws.hooks.glacier.GlacierHook.get_conn") def test_describe_job_should_log_mgs(self, mock_conn): # when - with LogCapture() as log: + with self.assertLogs() as log: mock_conn.return_value.describe_job.return_value = JOB_STATUS self.hook.describe_job(VAULT_NAME, JOB_ID) # then - log.check( - ( - 'airflow.providers.amazon.aws.hooks.glacier.GlacierHook', - 'INFO', - f"Retrieving status for vault: {VAULT_NAME} and job {JOB_ID}", - ), - ( - 'airflow.providers.amazon.aws.hooks.glacier.GlacierHook', - 'INFO', - f"Job status: {JOB_STATUS.get('Action')}, code status: {JOB_STATUS.get('StatusCode')}", - ), + self.assertEqual( + log.output, + [ + 'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:' + + f"Retrieving status for vault: {VAULT_NAME} and job {JOB_ID}", + 'INFO:airflow.providers.amazon.aws.hooks.glacier.GlacierHook:' + + f"Job status: {JOB_STATUS.get('Action')}, code status: {JOB_STATUS.get('StatusCode')}", + ], ) diff --git a/tests/providers/amazon/aws/hooks/test_s3.py 
b/tests/providers/amazon/aws/hooks/test_s3.py index d96206813bdc8..e7f05137c70f5 100644 --- a/tests/providers/amazon/aws/hooks/test_s3.py +++ b/tests/providers/amazon/aws/hooks/test_s3.py @@ -36,6 +36,18 @@ mock_s3 = None +# This class needs to be separated out because if there are earlier mocks in the same class +# the tests will fail on teardown. +class TestAwsS3HookNoMock: + def test_check_for_bucket_raises_error_with_invalid_conn_id(self, monkeypatch): + monkeypatch.delenv('AWS_PROFILE', raising=False) + monkeypatch.delenv('AWS_ACCESS_KEY_ID', raising=False) + monkeypatch.delenv('AWS_SECRET_ACCESS_KEY', raising=False) + hook = S3Hook(aws_conn_id="does_not_exist") + with pytest.raises(NoCredentialsError): + hook.check_for_bucket("test-non-existing-bucket") + + @pytest.mark.skipif(mock_s3 is None, reason='moto package not present') class TestAwsS3Hook: @mock_s3 @@ -52,14 +64,6 @@ def test_check_for_bucket(self, s3_bucket): assert hook.check_for_bucket(s3_bucket) is True assert hook.check_for_bucket('not-a-bucket') is False - def test_check_for_bucket_raises_error_with_invalid_conn_id(self, s3_bucket, monkeypatch): - monkeypatch.delenv('AWS_PROFILE', raising=False) - monkeypatch.delenv('AWS_ACCESS_KEY_ID', raising=False) - monkeypatch.delenv('AWS_SECRET_ACCESS_KEY', raising=False) - hook = S3Hook(aws_conn_id="does_not_exist") - with pytest.raises(NoCredentialsError): - hook.check_for_bucket(s3_bucket) - @mock_s3 def test_get_bucket(self): hook = S3Hook() @@ -156,14 +160,6 @@ def test_check_for_key(self, s3_bucket): assert hook.check_for_key('b', s3_bucket) is False assert hook.check_for_key(f's3://{s3_bucket}//b') is False - def test_check_for_key_raises_error_with_invalid_conn_id(self, monkeypatch, s3_bucket): - monkeypatch.delenv('AWS_PROFILE', raising=False) - monkeypatch.delenv('AWS_ACCESS_KEY_ID', raising=False) - monkeypatch.delenv('AWS_SECRET_ACCESS_KEY', raising=False) - hook = S3Hook(aws_conn_id="does_not_exist") - with pytest.raises(NoCredentialsError): - hook.check_for_key('a', s3_bucket) - def test_get_key(self, s3_bucket): hook = S3Hook() bucket = hook.get_bucket(s3_bucket) diff --git a/tests/providers/amazon/aws/hooks/test_secrets_manager.py b/tests/providers/amazon/aws/hooks/test_secrets_manager.py index bfcd847aa7593..b41b3816f7338 100644 --- a/tests/providers/amazon/aws/hooks/test_secrets_manager.py +++ b/tests/providers/amazon/aws/hooks/test_secrets_manager.py @@ -43,12 +43,17 @@ def test_get_secret_string(self): secret_value = '{"user": "test"}' hook = SecretsManagerHook(aws_conn_id='aws_default') - param = { + create_param = { + 'Name': secret_name, + } + + put_param = { 'SecretId': secret_name, 'SecretString': secret_value, } - hook.get_conn().put_secret_value(**param) + hook.get_conn().create_secret(**create_param) + hook.get_conn().put_secret_value(**put_param) secret = hook.get_secret(secret_name) assert secret == secret_value @@ -60,12 +65,17 @@ def test_get_secret_dict(self): secret_value = '{"user": "test"}' hook = SecretsManagerHook(aws_conn_id='aws_default') - param = { + create_param = { + 'Name': secret_name, + } + + put_param = { 'SecretId': secret_name, 'SecretString': secret_value, } - hook.get_conn().put_secret_value(**param) + hook.get_conn().create_secret(**create_param) + hook.get_conn().put_secret_value(**put_param) secret = hook.get_secret_as_dict(secret_name) assert secret == json.loads(secret_value) @@ -76,13 +86,17 @@ def test_get_secret_binary(self): secret_name = "arn:aws:secretsmanager:us-east-2:999999999999:secret:db_cluster-YYYYYYY" 
secret_value_binary = base64.b64encode(b'{"username": "test"}') hook = SecretsManagerHook(aws_conn_id='aws_default') + create_param = { + 'Name': secret_name, + } - param = { + put_param = { 'SecretId': secret_name, 'SecretBinary': secret_value_binary, } - hook.get_conn().put_secret_value(**param) + hook.get_conn().create_secret(**create_param) + hook.get_conn().put_secret_value(**put_param) secret = hook.get_secret(secret_name) assert secret == base64.b64decode(secret_value_binary) diff --git a/tests/providers/amazon/aws/log/test_cloudwatch_task_handler.py b/tests/providers/amazon/aws/log/test_cloudwatch_task_handler.py index 827e69f766ab8..878041d3ee280 100644 --- a/tests/providers/amazon/aws/log/test_cloudwatch_task_handler.py +++ b/tests/providers/amazon/aws/log/test_cloudwatch_task_handler.py @@ -18,7 +18,7 @@ import unittest from unittest import mock -from unittest.mock import call +from unittest.mock import ANY, call from watchtower import CloudWatchLogHandler @@ -92,8 +92,10 @@ def test_hook_raises(self): mock_error.assert_called_once_with( 'Could not create an AwsLogsHook with connection id "%s". Please make ' - 'sure that airflow[aws] is installed and the Cloudwatch logs connection exists.', + 'sure that airflow[aws] is installed and the Cloudwatch ' + 'logs connection exists. Exception: "%s"', 'aws_default', + ANY, ) def test_handler(self): diff --git a/tests/providers/amazon/aws/log/test_s3_task_handler.py b/tests/providers/amazon/aws/log/test_s3_task_handler.py index c35339637993a..d1437ca196d80 100644 --- a/tests/providers/amazon/aws/log/test_s3_task_handler.py +++ b/tests/providers/amazon/aws/log/test_s3_task_handler.py @@ -19,6 +19,7 @@ import os import unittest from unittest import mock +from unittest.mock import ANY import pytest from botocore.exceptions import ClientError @@ -91,8 +92,9 @@ def test_hook_raises(self): mock_error.assert_called_once_with( 'Could not create an S3Hook with connection id "%s". Please make ' - 'sure that airflow[aws] is installed and the S3 connection exists.', + 'sure that airflow[aws] is installed and the S3 connection exists. 
Exception : "%s"', 'aws_default', + ANY, exc_info=True, ) diff --git a/tests/providers/amazon/aws/secrets/test_secrets_manager.py b/tests/providers/amazon/aws/secrets/test_secrets_manager.py index d45f1aa89cc73..c84dc6a3ed727 100644 --- a/tests/providers/amazon/aws/secrets/test_secrets_manager.py +++ b/tests/providers/amazon/aws/secrets/test_secrets_manager.py @@ -32,12 +32,19 @@ def test_aws_secrets_manager_get_connections(self, mock_get_uri): @mock_secretsmanager def test_get_conn_uri(self): + + secret_id = 'airflow/connections/test_postgres' + create_param = { + 'Name': secret_id, + } + param = { - 'SecretId': 'airflow/connections/test_postgres', + 'SecretId': secret_id, 'SecretString': 'postgresql://airflow:airflow@host:5432/airflow', } secrets_manager_backend = SecretsManagerBackend() + secrets_manager_backend.client.create_secret(**create_param) secrets_manager_backend.client.put_secret_value(**param) returned_uri = secrets_manager_backend.get_conn_uri(conn_id="test_postgres") @@ -50,12 +57,19 @@ def test_get_conn_uri_non_existent_key(self): SecretsManagerBackend.get_connections should return None """ conn_id = "test_mysql" + + secret_id = 'airflow/connections/test_postgres' + create_param = { + 'Name': secret_id, + } + param = { - 'SecretId': 'airflow/connections/test_postgres', + 'SecretId': secret_id, 'SecretString': 'postgresql://airflow:airflow@host:5432/airflow', } secrets_manager_backend = SecretsManagerBackend() + secrets_manager_backend.client.create_secret(**create_param) secrets_manager_backend.client.put_secret_value(**param) assert secrets_manager_backend.get_conn_uri(conn_id=conn_id) is None @@ -63,9 +77,16 @@ def test_get_conn_uri_non_existent_key(self): @mock_secretsmanager def test_get_variable(self): - param = {'SecretId': 'airflow/variables/hello', 'SecretString': 'world'} + + secret_id = 'airflow/variables/hello' + create_param = { + 'Name': secret_id, + } + + param = {'SecretId': secret_id, 'SecretString': 'world'} secrets_manager_backend = SecretsManagerBackend() + secrets_manager_backend.client.create_secret(**create_param) secrets_manager_backend.client.put_secret_value(**param) returned_uri = secrets_manager_backend.get_variable('hello') @@ -77,9 +98,14 @@ def test_get_variable_non_existent_key(self): Test that if Variable key is not present, SystemsManagerParameterStoreBackend.get_variables should return None """ - param = {'SecretId': 'airflow/variables/hello', 'SecretString': 'world'} + secret_id = 'airflow/variables/hello' + create_param = { + 'Name': secret_id, + } + param = {'SecretId': secret_id, 'SecretString': 'world'} secrets_manager_backend = SecretsManagerBackend() + secrets_manager_backend.client.create_secret(**create_param) secrets_manager_backend.client.put_secret_value(**param) assert secrets_manager_backend.get_variable("test_mysql") is None diff --git a/tests/providers/apache/beam/__init__.py b/tests/providers/apache/beam/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/providers/apache/beam/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/providers/apache/beam/hooks/__init__.py b/tests/providers/apache/beam/hooks/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/providers/apache/beam/hooks/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/providers/apache/beam/hooks/test_beam.py b/tests/providers/apache/beam/hooks/test_beam.py new file mode 100644 index 0000000000000..d0d713e1129e8 --- /dev/null +++ b/tests/providers/apache/beam/hooks/test_beam.py @@ -0,0 +1,271 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
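Aside: the Glacier hook tests earlier in this diff swap the third-party testfixtures.LogCapture helper for the standard library's unittest assertLogs, which captures records as "LEVEL:logger.name:message" strings. A minimal, self-contained sketch of the idiom (the logger name and message are illustrative, not Airflow's):

import logging
import unittest


class TestAssertLogsIdiom(unittest.TestCase):
    def test_log_output_format(self):
        logger = logging.getLogger("example.hook")
        # assertLogs() with no arguments captures INFO-and-above records
        # from the root logger, to which child loggers propagate.
        with self.assertLogs() as log:
            logger.info("Retrieving inventory for vault: %s", "vault-1")
        self.assertEqual(
            log.output,
            ["INFO:example.hook:Retrieving inventory for vault: vault-1"],
        )


if __name__ == "__main__":
    unittest.main()
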
+# + +import copy +import subprocess +import unittest +from unittest import mock +from unittest.mock import MagicMock + +from parameterized import parameterized + +from airflow.exceptions import AirflowException +from airflow.providers.apache.beam.hooks.beam import BeamCommandRunner, BeamHook, beam_options_to_args + +PY_FILE = 'apache_beam.examples.wordcount' +JAR_FILE = 'unitest.jar' +JOB_CLASS = 'com.example.UnitTest' +PY_OPTIONS = ['-m'] +TEST_JOB_ID = 'test-job-id' + +DEFAULT_RUNNER = "DirectRunner" +BEAM_STRING = 'airflow.providers.apache.beam.hooks.beam.{}' +BEAM_VARIABLES_PY = {'output': 'gs://test/output', 'labels': {'foo': 'bar'}} +BEAM_VARIABLES_JAVA = { + 'output': 'gs://test/output', + 'labels': {'foo': 'bar'}, +} + +APACHE_BEAM_V_2_14_0_JAVA_SDK_LOG = f""""\ +Dataflow SDK version: 2.14.0 +Jun 15, 2020 2:57:28 PM org.apache.beam.runners.dataflow.DataflowRunner run +INFO: To access the Dataflow monitoring console, please navigate to https://console.cloud.google.com/dataflow\ +/jobsDetail/locations/europe-west3/jobs/{TEST_JOB_ID}?project=XXX +Submitted job: {TEST_JOB_ID} +Jun 15, 2020 2:57:28 PM org.apache.beam.runners.dataflow.DataflowRunner run +INFO: To cancel the job using the 'gcloud' tool, run: +> gcloud dataflow jobs --project=XXX cancel --region=europe-west3 {TEST_JOB_ID} +""" + + +class TestBeamHook(unittest.TestCase): + @mock.patch(BEAM_STRING.format('BeamCommandRunner')) + def test_start_python_pipeline(self, mock_runner): + hook = BeamHook(runner=DEFAULT_RUNNER) + wait_for_done = mock_runner.return_value.wait_for_done + process_line_callback = MagicMock() + + hook.start_python_pipeline( # pylint: disable=no-value-for-parameter + variables=copy.deepcopy(BEAM_VARIABLES_PY), + py_file=PY_FILE, + py_options=PY_OPTIONS, + process_line_callback=process_line_callback, + ) + + expected_cmd = [ + "python3", + '-m', + PY_FILE, + f'--runner={DEFAULT_RUNNER}', + '--output=gs://test/output', + '--labels=foo=bar', + ] + mock_runner.assert_called_once_with(cmd=expected_cmd, process_line_callback=process_line_callback) + wait_for_done.assert_called_once_with() + + @parameterized.expand( + [ + ('default_to_python3', 'python3'), + ('major_version_2', 'python2'), + ('major_version_3', 'python3'), + ('minor_version', 'python3.6'), + ] + ) + @mock.patch(BEAM_STRING.format('BeamCommandRunner')) + def test_start_python_pipeline_with_custom_interpreter(self, _, py_interpreter, mock_runner): + hook = BeamHook(runner=DEFAULT_RUNNER) + wait_for_done = mock_runner.return_value.wait_for_done + process_line_callback = MagicMock() + + hook.start_python_pipeline( # pylint: disable=no-value-for-parameter + variables=copy.deepcopy(BEAM_VARIABLES_PY), + py_file=PY_FILE, + py_options=PY_OPTIONS, + py_interpreter=py_interpreter, + process_line_callback=process_line_callback, + ) + + expected_cmd = [ + py_interpreter, + '-m', + PY_FILE, + f'--runner={DEFAULT_RUNNER}', + '--output=gs://test/output', + '--labels=foo=bar', + ] + mock_runner.assert_called_once_with(cmd=expected_cmd, process_line_callback=process_line_callback) + wait_for_done.assert_called_once_with() + + @parameterized.expand( + [ + (['foo-bar'], False), + (['foo-bar'], True), + ([], True), + ] + ) + @mock.patch(BEAM_STRING.format('prepare_virtualenv')) + @mock.patch(BEAM_STRING.format('BeamCommandRunner')) + def test_start_python_pipeline_with_non_empty_py_requirements_and_without_system_packages( + self, current_py_requirements, current_py_system_site_packages, mock_runner, mock_virtualenv + ): + hook = BeamHook(runner=DEFAULT_RUNNER) + 
wait_for_done = mock_runner.return_value.wait_for_done + mock_virtualenv.return_value = '/dummy_dir/bin/python' + process_line_callback = MagicMock() + + hook.start_python_pipeline( # pylint: disable=no-value-for-parameter + variables=copy.deepcopy(BEAM_VARIABLES_PY), + py_file=PY_FILE, + py_options=PY_OPTIONS, + py_requirements=current_py_requirements, + py_system_site_packages=current_py_system_site_packages, + process_line_callback=process_line_callback, + ) + + expected_cmd = [ + '/dummy_dir/bin/python', + '-m', + PY_FILE, + f'--runner={DEFAULT_RUNNER}', + '--output=gs://test/output', + '--labels=foo=bar', + ] + mock_runner.assert_called_once_with(cmd=expected_cmd, process_line_callback=process_line_callback) + wait_for_done.assert_called_once_with() + mock_virtualenv.assert_called_once_with( + venv_directory=mock.ANY, + python_bin="python3", + system_site_packages=current_py_system_site_packages, + requirements=current_py_requirements, + ) + + @mock.patch(BEAM_STRING.format('BeamCommandRunner')) + def test_start_python_pipeline_with_empty_py_requirements_and_without_system_packages(self, mock_runner): + hook = BeamHook(runner=DEFAULT_RUNNER) + wait_for_done = mock_runner.return_value.wait_for_done + process_line_callback = MagicMock() + + with self.assertRaisesRegex(AirflowException, "Invalid method invocation."): + hook.start_python_pipeline( # pylint: disable=no-value-for-parameter + variables=copy.deepcopy(BEAM_VARIABLES_PY), + py_file=PY_FILE, + py_options=PY_OPTIONS, + py_requirements=[], + process_line_callback=process_line_callback, + ) + + mock_runner.assert_not_called() + wait_for_done.assert_not_called() + + @mock.patch(BEAM_STRING.format('BeamCommandRunner')) + def test_start_java_pipeline(self, mock_runner): + hook = BeamHook(runner=DEFAULT_RUNNER) + wait_for_done = mock_runner.return_value.wait_for_done + process_line_callback = MagicMock() + + hook.start_java_pipeline( # pylint: disable=no-value-for-parameter + jar=JAR_FILE, + variables=copy.deepcopy(BEAM_VARIABLES_JAVA), + process_line_callback=process_line_callback, + ) + + expected_cmd = [ + 'java', + '-jar', + JAR_FILE, + f'--runner={DEFAULT_RUNNER}', + '--output=gs://test/output', + '--labels={"foo":"bar"}', + ] + mock_runner.assert_called_once_with(cmd=expected_cmd, process_line_callback=process_line_callback) + wait_for_done.assert_called_once_with() + + @mock.patch(BEAM_STRING.format('BeamCommandRunner')) + def test_start_java_pipeline_with_job_class(self, mock_runner): + hook = BeamHook(runner=DEFAULT_RUNNER) + wait_for_done = mock_runner.return_value.wait_for_done + process_line_callback = MagicMock() + + hook.start_java_pipeline( # pylint: disable=no-value-for-parameter + jar=JAR_FILE, + variables=copy.deepcopy(BEAM_VARIABLES_JAVA), + job_class=JOB_CLASS, + process_line_callback=process_line_callback, + ) + + expected_cmd = [ + 'java', + '-cp', + JAR_FILE, + JOB_CLASS, + f'--runner={DEFAULT_RUNNER}', + '--output=gs://test/output', + '--labels={"foo":"bar"}', + ] + mock_runner.assert_called_once_with(cmd=expected_cmd, process_line_callback=process_line_callback) + wait_for_done.assert_called_once_with() + + +class TestBeamRunner(unittest.TestCase): + @mock.patch('airflow.providers.apache.beam.hooks.beam.BeamCommandRunner.log') + @mock.patch('subprocess.Popen') + @mock.patch('select.select') + def test_beam_wait_for_done_logging(self, mock_select, mock_popen, mock_logging): + cmd = ['test', 'cmd'] + mock_logging.info = MagicMock() + mock_logging.warning = MagicMock() + mock_proc = MagicMock() + mock_proc.stderr 
= MagicMock() + mock_proc.stderr.readlines = MagicMock(return_value=['test\n', 'error\n']) + mock_stderr_fd = MagicMock() + mock_proc.stderr.fileno = MagicMock(return_value=mock_stderr_fd) + mock_proc_poll = MagicMock() + mock_select.return_value = [[mock_stderr_fd]] + + def poll_resp_error(): + mock_proc.return_code = 1 + return True + + mock_proc_poll.side_effect = [None, poll_resp_error] + mock_proc.poll = mock_proc_poll + mock_popen.return_value = mock_proc + beam = BeamCommandRunner(cmd) + mock_logging.info.assert_called_once_with('Running command: %s', " ".join(cmd)) + mock_popen.assert_called_once_with( + cmd, + shell=False, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + close_fds=True, + ) + self.assertRaises(Exception, beam.wait_for_done) + + +class TestBeamOptionsToArgs(unittest.TestCase): + @parameterized.expand( + [ + ({"key": "val"}, ["--key=val"]), + ({"key": None}, ["--key"]), + ({"key": True}, ["--key"]), + ({"key": False}, ["--key=False"]), + ({"key": ["a", "b", "c"]}, ["--key=a", "--key=b", "--key=c"]), + ] + ) + def test_beam_options_to_args(self, options, expected_args): + args = beam_options_to_args(options) + assert args == expected_args diff --git a/tests/providers/apache/beam/operators/__init__.py b/tests/providers/apache/beam/operators/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/providers/apache/beam/operators/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/providers/apache/beam/operators/test_beam.py b/tests/providers/apache/beam/operators/test_beam.py new file mode 100644 index 0000000000000..c31ff336f1490 --- /dev/null +++ b/tests/providers/apache/beam/operators/test_beam.py @@ -0,0 +1,274 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
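Aside: the TestBeamOptionsToArgs cases above pin down how a pipeline-options mapping becomes command-line arguments. A sketch of those conversion rules, reconstructed from the parameterized expectations (an illustration, not the provider's actual beam_options_to_args implementation):

from typing import List


def options_to_args(options: dict) -> List[str]:
    args: List[str] = []
    for key, value in options.items():
        if value is None or value is True:
            # None and True become bare flags.
            args.append(f"--{key}")
        elif isinstance(value, list):
            # Lists repeat the flag once per element.
            args.extend(f"--{key}={v}" for v in value)
        else:
            # Everything else, including False, is rendered inline.
            args.append(f"--{key}={value}")
    return args


assert options_to_args({"key": "val"}) == ["--key=val"]
assert options_to_args({"key": ["a", "b", "c"]}) == ["--key=a", "--key=b", "--key=c"]
assert options_to_args({"key": False}) == ["--key=False"]
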
+# +import unittest +from unittest import mock + +from airflow.providers.apache.beam.operators.beam import ( + BeamRunJavaPipelineOperator, + BeamRunPythonPipelineOperator, +) +from airflow.providers.google.cloud.operators.dataflow import DataflowConfiguration +from airflow.version import version + +TASK_ID = 'test-beam-operator' +DEFAULT_RUNNER = "DirectRunner" +JOB_NAME = 'test-dataflow-pipeline-name' +JOB_ID = 'test-dataflow-pipeline-id' +JAR_FILE = 'gs://my-bucket/example/test.jar' +JOB_CLASS = 'com.test.NotMain' +PY_FILE = 'gs://my-bucket/my-object.py' +PY_INTERPRETER = 'python3' +PY_OPTIONS = ['-m'] +DEFAULT_OPTIONS_PYTHON = DEFAULT_OPTIONS_JAVA = { + 'project': 'test', + 'stagingLocation': 'gs://test/staging', +} +ADDITIONAL_OPTIONS = {'output': 'gs://test/output', 'labels': {'foo': 'bar'}} +TEST_VERSION = f"v{version.replace('.', '-').replace('+', '-')}" +EXPECTED_ADDITIONAL_OPTIONS = { + 'output': 'gs://test/output', + 'labels': {'foo': 'bar', 'airflow-version': TEST_VERSION}, +} + + +class TestBeamRunPythonPipelineOperator(unittest.TestCase): + def setUp(self): + self.operator = BeamRunPythonPipelineOperator( + task_id=TASK_ID, + py_file=PY_FILE, + py_options=PY_OPTIONS, + default_pipeline_options=DEFAULT_OPTIONS_PYTHON, + pipeline_options=ADDITIONAL_OPTIONS, + ) + + def test_init(self): + """Test BeamRunPythonPipelineOperator instance is properly initialized.""" + self.assertEqual(self.operator.task_id, TASK_ID) + self.assertEqual(self.operator.py_file, PY_FILE) + self.assertEqual(self.operator.runner, DEFAULT_RUNNER) + self.assertEqual(self.operator.py_options, PY_OPTIONS) + self.assertEqual(self.operator.py_interpreter, PY_INTERPRETER) + self.assertEqual(self.operator.default_pipeline_options, DEFAULT_OPTIONS_PYTHON) + self.assertEqual(self.operator.pipeline_options, EXPECTED_ADDITIONAL_OPTIONS) + + @mock.patch('airflow.providers.apache.beam.operators.beam.BeamHook') + @mock.patch('airflow.providers.apache.beam.operators.beam.GCSHook') + def test_exec_direct_runner(self, gcs_hook, beam_hook_mock): + """Test BeamHook is created and the right args are passed to + start_python_workflow. + """ + start_python_hook = beam_hook_mock.return_value.start_python_pipeline + gcs_provide_file = gcs_hook.return_value.provide_file + self.operator.execute(None) + beam_hook_mock.assert_called_once_with(runner=DEFAULT_RUNNER) + expected_options = { + 'project': 'test', + 'staging_location': 'gs://test/staging', + 'output': 'gs://test/output', + 'labels': {'foo': 'bar', 'airflow-version': TEST_VERSION}, + } + gcs_provide_file.assert_called_once_with(object_url=PY_FILE) + start_python_hook.assert_called_once_with( + variables=expected_options, + py_file=gcs_provide_file.return_value.__enter__.return_value.name, + py_options=PY_OPTIONS, + py_interpreter=PY_INTERPRETER, + py_requirements=None, + py_system_site_packages=False, + process_line_callback=None, + ) + + @mock.patch('airflow.providers.apache.beam.operators.beam.BeamHook') + @mock.patch('airflow.providers.apache.beam.operators.beam.DataflowHook') + @mock.patch('airflow.providers.apache.beam.operators.beam.GCSHook') + def test_exec_dataflow_runner(self, gcs_hook, dataflow_hook_mock, beam_hook_mock): + """Test DataflowHook is created and the right args are passed to + start_python_dataflow. 
+ """ + dataflow_config = DataflowConfiguration() + self.operator.runner = "DataflowRunner" + self.operator.dataflow_config = dataflow_config + gcs_provide_file = gcs_hook.return_value.provide_file + self.operator.execute(None) + job_name = dataflow_hook_mock.build_dataflow_job_name.return_value + dataflow_hook_mock.assert_called_once_with( + gcp_conn_id=dataflow_config.gcp_conn_id, + delegate_to=dataflow_config.delegate_to, + poll_sleep=dataflow_config.poll_sleep, + impersonation_chain=dataflow_config.impersonation_chain, + drain_pipeline=dataflow_config.drain_pipeline, + cancel_timeout=dataflow_config.cancel_timeout, + wait_until_finished=dataflow_config.wait_until_finished, + ) + expected_options = { + 'project': dataflow_hook_mock.return_value.project_id, + 'job_name': job_name, + 'staging_location': 'gs://test/staging', + 'output': 'gs://test/output', + 'labels': {'foo': 'bar', 'airflow-version': TEST_VERSION}, + 'region': 'us-central1', + } + gcs_provide_file.assert_called_once_with(object_url=PY_FILE) + beam_hook_mock.return_value.start_python_pipeline.assert_called_once_with( + variables=expected_options, + py_file=gcs_provide_file.return_value.__enter__.return_value.name, + py_options=PY_OPTIONS, + py_interpreter=PY_INTERPRETER, + py_requirements=None, + py_system_site_packages=False, + process_line_callback=mock.ANY, + ) + dataflow_hook_mock.return_value.wait_for_done.assert_called_once_with( + job_id=self.operator.dataflow_job_id, + job_name=job_name, + location='us-central1', + multiple_jobs=False, + ) + + @mock.patch('airflow.providers.apache.beam.operators.beam.BeamHook') + @mock.patch('airflow.providers.apache.beam.operators.beam.GCSHook') + @mock.patch('airflow.providers.apache.beam.operators.beam.DataflowHook') + def test_on_kill_dataflow_runner(self, dataflow_hook_mock, _, __): + self.operator.runner = "DataflowRunner" + dataflow_cancel_job = dataflow_hook_mock.return_value.cancel_job + self.operator.execute(None) + self.operator.dataflow_job_id = JOB_ID + self.operator.on_kill() + dataflow_cancel_job.assert_called_once_with( + job_id=JOB_ID, project_id=self.operator.dataflow_config.project_id + ) + + @mock.patch('airflow.providers.apache.beam.operators.beam.BeamHook') + @mock.patch('airflow.providers.apache.beam.operators.beam.DataflowHook') + @mock.patch('airflow.providers.apache.beam.operators.beam.GCSHook') + def test_on_kill_direct_runner(self, _, dataflow_mock, __): + dataflow_cancel_job = dataflow_mock.return_value.cancel_job + self.operator.execute(None) + self.operator.on_kill() + dataflow_cancel_job.assert_not_called() + + +class TestBeamRunJavaPipelineOperator(unittest.TestCase): + def setUp(self): + self.operator = BeamRunJavaPipelineOperator( + task_id=TASK_ID, + jar=JAR_FILE, + job_class=JOB_CLASS, + default_pipeline_options=DEFAULT_OPTIONS_JAVA, + pipeline_options=ADDITIONAL_OPTIONS, + ) + + def test_init(self): + """Test BeamRunJavaPipelineOperator instance is properly initialized.""" + self.assertEqual(self.operator.task_id, TASK_ID) + self.assertEqual(self.operator.runner, DEFAULT_RUNNER) + self.assertEqual(self.operator.default_pipeline_options, DEFAULT_OPTIONS_JAVA) + self.assertEqual(self.operator.job_class, JOB_CLASS) + self.assertEqual(self.operator.jar, JAR_FILE) + self.assertEqual(self.operator.pipeline_options, ADDITIONAL_OPTIONS) + + @mock.patch('airflow.providers.apache.beam.operators.beam.BeamHook') + @mock.patch('airflow.providers.apache.beam.operators.beam.GCSHook') + def test_exec_direct_runner(self, gcs_hook, beam_hook_mock): + """Test 
BeamHook is created and the right args are passed to + start_java_workflow. + """ + start_java_hook = beam_hook_mock.return_value.start_java_pipeline + gcs_provide_file = gcs_hook.return_value.provide_file + self.operator.execute(None) + + beam_hook_mock.assert_called_once_with(runner=DEFAULT_RUNNER) + gcs_provide_file.assert_called_once_with(object_url=JAR_FILE) + start_java_hook.assert_called_once_with( + variables={**DEFAULT_OPTIONS_JAVA, **ADDITIONAL_OPTIONS}, + jar=gcs_provide_file.return_value.__enter__.return_value.name, + job_class=JOB_CLASS, + process_line_callback=None, + ) + + @mock.patch('airflow.providers.apache.beam.operators.beam.BeamHook') + @mock.patch('airflow.providers.apache.beam.operators.beam.DataflowHook') + @mock.patch('airflow.providers.apache.beam.operators.beam.GCSHook') + def test_exec_dataflow_runner(self, gcs_hook, dataflow_hook_mock, beam_hook_mock): + """Test DataflowHook is created and the right args are passed to + start_java_dataflow. + """ + dataflow_config = DataflowConfiguration() + self.operator.runner = "DataflowRunner" + self.operator.dataflow_config = dataflow_config + gcs_provide_file = gcs_hook.return_value.provide_file + dataflow_hook_mock.return_value.is_job_dataflow_running.return_value = False + self.operator.execute(None) + job_name = dataflow_hook_mock.build_dataflow_job_name.return_value + self.assertEqual(job_name, self.operator._dataflow_job_name) + dataflow_hook_mock.assert_called_once_with( + gcp_conn_id=dataflow_config.gcp_conn_id, + delegate_to=dataflow_config.delegate_to, + poll_sleep=dataflow_config.poll_sleep, + impersonation_chain=dataflow_config.impersonation_chain, + drain_pipeline=dataflow_config.drain_pipeline, + cancel_timeout=dataflow_config.cancel_timeout, + wait_until_finished=dataflow_config.wait_until_finished, + ) + gcs_provide_file.assert_called_once_with(object_url=JAR_FILE) + + expected_options = { + 'project': dataflow_hook_mock.return_value.project_id, + 'jobName': job_name, + 'stagingLocation': 'gs://test/staging', + 'region': 'us-central1', + 'labels': {'foo': 'bar', 'airflow-version': TEST_VERSION}, + 'output': 'gs://test/output', + } + + beam_hook_mock.return_value.start_java_pipeline.assert_called_once_with( + variables=expected_options, + jar=gcs_provide_file.return_value.__enter__.return_value.name, + job_class=JOB_CLASS, + process_line_callback=mock.ANY, + ) + dataflow_hook_mock.return_value.wait_for_done.assert_called_once_with( + job_id=self.operator.dataflow_job_id, + job_name=job_name, + location='us-central1', + multiple_jobs=dataflow_config.multiple_jobs, + project_id=dataflow_hook_mock.return_value.project_id, + ) + + @mock.patch('airflow.providers.apache.beam.operators.beam.BeamHook') + @mock.patch('airflow.providers.apache.beam.operators.beam.GCSHook') + @mock.patch('airflow.providers.apache.beam.operators.beam.DataflowHook') + def test_on_kill_dataflow_runner(self, dataflow_hook_mock, _, __): + self.operator.runner = "DataflowRunner" + dataflow_hook_mock.return_value.is_job_dataflow_running.return_value = False + dataflow_cancel_job = dataflow_hook_mock.return_value.cancel_job + self.operator.execute(None) + self.operator.dataflow_job_id = JOB_ID + self.operator.on_kill() + dataflow_cancel_job.assert_called_once_with( + job_id=JOB_ID, project_id=self.operator.dataflow_config.project_id + ) + + @mock.patch('airflow.providers.apache.beam.operators.beam.BeamHook') + @mock.patch('airflow.providers.apache.beam.operators.beam.DataflowHook') + 
@mock.patch('airflow.providers.apache.beam.operators.beam.GCSHook') + def test_on_kill_direct_runner(self, _, dataflow_mock, __): + dataflow_cancel_job = dataflow_mock.return_value.cancel_job + self.operator.execute(None) + self.operator.on_kill() + dataflow_cancel_job.assert_not_called() diff --git a/tests/providers/apache/beam/operators/test_beam_system.py b/tests/providers/apache/beam/operators/test_beam_system.py new file mode 100644 index 0000000000000..0798f35d2e337 --- /dev/null +++ b/tests/providers/apache/beam/operators/test_beam_system.py @@ -0,0 +1,47 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +import os + +import pytest + +from tests.test_utils import AIRFLOW_MAIN_FOLDER +from tests.test_utils.system_tests_class import SystemTest + +BEAM_DAG_FOLDER = os.path.join(AIRFLOW_MAIN_FOLDER, "airflow", "providers", "apache", "beam", "example_dags") + + +@pytest.mark.system("apache.beam") +class BeamExampleDagsSystemTest(SystemTest): + def test_run_example_dag_beam_python(self): + self.run_dag('example_beam_native_python', BEAM_DAG_FOLDER) + + def test_run_example_dag_beam_python_dataflow_async(self): + self.run_dag('example_beam_native_python_dataflow_async', BEAM_DAG_FOLDER) + + def test_run_example_dag_beam_java_direct_runner(self): + self.run_dag('example_beam_native_java_direct_runner', BEAM_DAG_FOLDER) + + def test_run_example_dag_beam_java_dataflow_runner(self): + self.run_dag('example_beam_native_java_dataflow_runner', BEAM_DAG_FOLDER) + + def test_run_example_dag_beam_java_spark_runner(self): + self.run_dag('example_beam_native_java_spark_runner', BEAM_DAG_FOLDER) + + def test_run_example_dag_beam_java_flink_runner(self): + self.run_dag('example_beam_native_java_flink_runner', BEAM_DAG_FOLDER) diff --git a/tests/providers/apache/hive/transfers/test_mysql_to_hive.py b/tests/providers/apache/hive/transfers/test_mysql_to_hive.py index c6f7736e4e623..436b8e8ea9bb7 100644 --- a/tests/providers/apache/hive/transfers/test_mysql_to_hive.py +++ b/tests/providers/apache/hive/transfers/test_mysql_to_hive.py @@ -18,6 +18,7 @@ import unittest from collections import OrderedDict +from contextlib import closing from os import path from unittest import mock @@ -129,24 +130,25 @@ def setUp(self): 'AIRFLOW_CTX_DAG_EMAIL': 'test@airflow.com', } - with MySqlHook().get_conn() as cur: - cur.execute( + with closing(MySqlHook().get_conn()) as conn: + with closing(conn.cursor()) as cur: + cur.execute( + ''' + CREATE TABLE IF NOT EXISTS baby_names ( + org_year integer(4), + baby_name VARCHAR(25), + rate FLOAT(7,6), + sex VARCHAR(4) + ) ''' - CREATE TABLE IF NOT EXISTS baby_names ( - org_year integer(4), - baby_name VARCHAR(25), - rate FLOAT(7,6), - sex VARCHAR(4) - ) - ''' - ) - - for row in rows: - cur.execute("INSERT INTO baby_names VALUES(%s, %s, %s, 
%s);", row) + ) + for row in rows: + cur.execute("INSERT INTO baby_names VALUES(%s, %s, %s, %s);", row) def tearDown(self): - with MySqlHook().get_conn() as cur: - cur.execute("DROP TABLE IF EXISTS baby_names CASCADE;") + with closing(MySqlHook().get_conn()) as conn: + with closing(conn.cursor()) as cur: + cur.execute("DROP TABLE IF EXISTS baby_names CASCADE;") @mock.patch('subprocess.Popen') def test_mysql_to_hive(self, mock_popen): @@ -314,11 +316,12 @@ def test_mysql_to_hive_type_conversion(self, mock_load_file): hook = MySqlHook() try: - with hook.get_conn() as conn: - conn.execute(f"DROP TABLE IF EXISTS {mysql_table}") - conn.execute( - """ - CREATE TABLE {} ( + with closing(hook.get_conn()) as conn: + with closing(conn.cursor()) as cursor: + cursor.execute(f"DROP TABLE IF EXISTS {mysql_table}") + cursor.execute( + """ + CREATE TABLE {} ( c0 TINYINT, c1 SMALLINT, c2 MEDIUMINT, @@ -327,9 +330,9 @@ def test_mysql_to_hive_type_conversion(self, mock_load_file): c5 TIMESTAMP ) """.format( - mysql_table + mysql_table + ) ) - ) op = MySqlToHiveOperator( task_id='test_m2h', @@ -350,8 +353,9 @@ def test_mysql_to_hive_type_conversion(self, mock_load_file): ordered_dict["c5"] = "TIMESTAMP" assert mock_load_file.call_args[1]["field_dict"] == ordered_dict finally: - with hook.get_conn() as conn: - conn.execute(f"DROP TABLE IF EXISTS {mysql_table}") + with closing(hook.get_conn()) as conn: + with closing(conn.cursor()) as cursor: + cursor.execute(f"DROP TABLE IF EXISTS {mysql_table}") @mock.patch('subprocess.Popen') def test_mysql_to_hive_verify_csv_special_char(self, mock_popen): @@ -365,27 +369,28 @@ def test_mysql_to_hive_verify_csv_special_char(self, mock_popen): try: db_record = ('c0', '["true"]') - with hook.get_conn() as conn: - conn.execute(f"DROP TABLE IF EXISTS {mysql_table}") - conn.execute( - """ - CREATE TABLE {} ( + with closing(hook.get_conn()) as conn: + with closing(conn.cursor()) as cursor: + cursor.execute(f"DROP TABLE IF EXISTS {mysql_table}") + cursor.execute( + """ + CREATE TABLE {} ( c0 VARCHAR(25), c1 VARCHAR(25) ) """.format( - mysql_table - ) - ) - conn.execute( - """ - INSERT INTO {} VALUES ( - '{}', '{}' + mysql_table + ) ) - """.format( - mysql_table, *db_record + cursor.execute( + """ + INSERT INTO {} VALUES ( + '{}', '{}' + ) + """.format( + mysql_table, *db_record + ) ) - ) with mock.patch.dict('os.environ', self.env_vars): import unicodecsv as csv @@ -445,8 +450,9 @@ def test_mysql_to_hive_verify_csv_special_char(self, mock_popen): close_fds=True, ) finally: - with hook.get_conn() as conn: - conn.execute(f"DROP TABLE IF EXISTS {mysql_table}") + with closing(hook.get_conn()) as conn: + with closing(conn.cursor()) as cursor: + cursor.execute(f"DROP TABLE IF EXISTS {mysql_table}") @mock.patch('subprocess.Popen') def test_mysql_to_hive_verify_loaded_values(self, mock_popen): @@ -472,11 +478,12 @@ def test_mysql_to_hive_verify_loaded_values(self, mock_popen): -9223372036854775808, ) - with hook.get_conn() as conn: - conn.execute(f"DROP TABLE IF EXISTS {mysql_table}") - conn.execute( - """ - CREATE TABLE {} ( + with closing(hook.get_conn()) as conn: + with closing(conn.cursor()) as cursor: + cursor.execute(f"DROP TABLE IF EXISTS {mysql_table}") + cursor.execute( + """ + CREATE TABLE {} ( c0 TINYINT UNSIGNED, c1 SMALLINT UNSIGNED, c2 MEDIUMINT UNSIGNED, @@ -489,18 +496,18 @@ def test_mysql_to_hive_verify_loaded_values(self, mock_popen): c9 BIGINT ) """.format( - mysql_table + mysql_table + ) ) - ) - conn.execute( - """ - INSERT INTO {} VALUES ( - {}, {}, {}, {}, {}, {}, 
{}, {}, {}, {} + cursor.execute( + """ + INSERT INTO {} VALUES ( + {}, {}, {}, {}, {}, {}, {}, {}, {}, {} + ) + """.format( + mysql_table, *minmax + ) ) - """.format( - mysql_table, *minmax - ) - ) with mock.patch.dict('os.environ', self.env_vars): op = MySqlToHiveOperator( @@ -556,5 +563,6 @@ def test_mysql_to_hive_verify_loaded_values(self, mock_popen): ) finally: - with hook.get_conn() as conn: - conn.execute(f"DROP TABLE IF EXISTS {mysql_table}") + with closing(hook.get_conn()) as conn: + with closing(conn.cursor()) as cursor: + cursor.execute(f"DROP TABLE IF EXISTS {mysql_table}") diff --git a/tests/providers/google/cloud/hooks/test_automl.py b/tests/providers/google/cloud/hooks/test_automl.py index 898001c3b17a2..c9de712b520b3 100644 --- a/tests/providers/google/cloud/hooks/test_automl.py +++ b/tests/providers/google/cloud/hooks/test_automl.py @@ -19,7 +19,7 @@ import unittest from unittest import mock -from google.cloud.automl_v1beta1 import AutoMlClient, PredictionServiceClient +from google.cloud.automl_v1beta1 import AutoMlClient from airflow.providers.google.cloud.hooks.automl import CloudAutoMLHook from tests.providers.google.cloud.utils.base_gcp_mock import mock_base_gcp_hook_no_default_project_id @@ -38,9 +38,9 @@ "tables_model_metadata": {"train_budget_milli_node_hours": 1000}, } -LOCATION_PATH = AutoMlClient.location_path(GCP_PROJECT_ID, GCP_LOCATION) -MODEL_PATH = PredictionServiceClient.model_path(GCP_PROJECT_ID, GCP_LOCATION, MODEL_ID) -DATASET_PATH = AutoMlClient.dataset_path(GCP_PROJECT_ID, GCP_LOCATION, DATASET_ID) +LOCATION_PATH = f"projects/{GCP_PROJECT_ID}/locations/{GCP_LOCATION}" +MODEL_PATH = f"projects/{GCP_PROJECT_ID}/locations/{GCP_LOCATION}/models/{MODEL_ID}" +DATASET_PATH = f"projects/{GCP_PROJECT_ID}/locations/{GCP_LOCATION}/datasets/{DATASET_ID}" INPUT_CONFIG = {"input": "value"} OUTPUT_CONFIG = {"output": "value"} @@ -81,7 +81,7 @@ def test_create_model(self, mock_create_model): self.hook.create_model(model=MODEL, location=GCP_LOCATION, project_id=GCP_PROJECT_ID) mock_create_model.assert_called_once_with( - parent=LOCATION_PATH, model=MODEL, retry=None, timeout=None, metadata=None + request=dict(parent=LOCATION_PATH, model=MODEL), retry=None, timeout=None, metadata=() ) @mock.patch("airflow.providers.google.cloud.hooks.automl.PredictionServiceClient.batch_predict") @@ -95,13 +95,12 @@ def test_batch_predict(self, mock_batch_predict): ) mock_batch_predict.assert_called_once_with( - name=MODEL_PATH, - input_config=INPUT_CONFIG, - output_config=OUTPUT_CONFIG, - params=None, + request=dict( + name=MODEL_PATH, input_config=INPUT_CONFIG, output_config=OUTPUT_CONFIG, params=None + ), retry=None, timeout=None, - metadata=None, + metadata=(), ) @mock.patch("airflow.providers.google.cloud.hooks.automl.PredictionServiceClient.predict") @@ -114,12 +113,10 @@ def test_predict(self, mock_predict): ) mock_predict.assert_called_once_with( - name=MODEL_PATH, - payload=PAYLOAD, - params=None, + request=dict(name=MODEL_PATH, payload=PAYLOAD, params=None), retry=None, timeout=None, - metadata=None, + metadata=(), ) @mock.patch("airflow.providers.google.cloud.hooks.automl.AutoMlClient.create_dataset") @@ -127,11 +124,10 @@ def test_create_dataset(self, mock_create_dataset): self.hook.create_dataset(dataset=DATASET, location=GCP_LOCATION, project_id=GCP_PROJECT_ID) mock_create_dataset.assert_called_once_with( - parent=LOCATION_PATH, - dataset=DATASET, + request=dict(parent=LOCATION_PATH, dataset=DATASET), retry=None, timeout=None, - metadata=None, + metadata=(), ) 
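Aside on the MySQL-to-Hive changes above: the tests stop using the DB-API connection itself as a context manager (whose __enter__ semantics have differed across mysqlclient versions) and instead wrap both the connection and an explicitly created cursor in contextlib.closing, which guarantees only close(). A runnable sketch of the pattern, substituting sqlite3 for MySQL so it needs no server (sqlite3's own connection context manager commits but does not close, which illustrates the same pitfall):

import sqlite3
from contextlib import closing

with closing(sqlite3.connect(":memory:")) as conn:
    with closing(conn.cursor()) as cur:
        cur.execute("CREATE TABLE baby_names (baby_name TEXT)")
        cur.execute("INSERT INTO baby_names VALUES (?)", ("Ada",))
        cur.execute("SELECT baby_name FROM baby_names")
        assert cur.fetchall() == [("Ada",)]
# Both cursor and connection are closed here, regardless of what the
# driver's own context-manager protocol would have done.
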
@mock.patch("airflow.providers.google.cloud.hooks.automl.AutoMlClient.import_data") @@ -144,11 +140,10 @@ def test_import_dataset(self, mock_import_data): ) mock_import_data.assert_called_once_with( - name=DATASET_PATH, - input_config=INPUT_CONFIG, + request=dict(name=DATASET_PATH, input_config=INPUT_CONFIG), retry=None, timeout=None, - metadata=None, + metadata=(), ) @mock.patch("airflow.providers.google.cloud.hooks.automl.AutoMlClient.list_column_specs") @@ -169,26 +164,27 @@ def test_list_column_specs(self, mock_list_column_specs): parent = AutoMlClient.table_spec_path(GCP_PROJECT_ID, GCP_LOCATION, DATASET_ID, table_spec) mock_list_column_specs.assert_called_once_with( - parent=parent, - field_mask=MASK, - filter_=filter_, - page_size=page_size, + request=dict(parent=parent, field_mask=MASK, filter=filter_, page_size=page_size), retry=None, timeout=None, - metadata=None, + metadata=(), ) @mock.patch("airflow.providers.google.cloud.hooks.automl.AutoMlClient.get_model") def test_get_model(self, mock_get_model): self.hook.get_model(model_id=MODEL_ID, location=GCP_LOCATION, project_id=GCP_PROJECT_ID) - mock_get_model.assert_called_once_with(name=MODEL_PATH, retry=None, timeout=None, metadata=None) + mock_get_model.assert_called_once_with( + request=dict(name=MODEL_PATH), retry=None, timeout=None, metadata=() + ) @mock.patch("airflow.providers.google.cloud.hooks.automl.AutoMlClient.delete_model") def test_delete_model(self, mock_delete_model): self.hook.delete_model(model_id=MODEL_ID, location=GCP_LOCATION, project_id=GCP_PROJECT_ID) - mock_delete_model.assert_called_once_with(name=MODEL_PATH, retry=None, timeout=None, metadata=None) + mock_delete_model.assert_called_once_with( + request=dict(name=MODEL_PATH), retry=None, timeout=None, metadata=() + ) @mock.patch("airflow.providers.google.cloud.hooks.automl.AutoMlClient.update_dataset") def test_update_dataset(self, mock_update_dataset): @@ -198,7 +194,7 @@ def test_update_dataset(self, mock_update_dataset): ) mock_update_dataset.assert_called_once_with( - dataset=DATASET, update_mask=MASK, retry=None, timeout=None, metadata=None + request=dict(dataset=DATASET, update_mask=MASK), retry=None, timeout=None, metadata=() ) @mock.patch("airflow.providers.google.cloud.hooks.automl.AutoMlClient.deploy_model") @@ -213,11 +209,13 @@ def test_deploy_model(self, mock_deploy_model): ) mock_deploy_model.assert_called_once_with( - name=MODEL_PATH, + request=dict( + name=MODEL_PATH, + image_object_detection_model_deployment_metadata=image_detection_metadata, + ), retry=None, timeout=None, - metadata=None, - image_object_detection_model_deployment_metadata=image_detection_metadata, + metadata=(), ) @mock.patch("airflow.providers.google.cloud.hooks.automl.AutoMlClient.list_table_specs") @@ -234,12 +232,10 @@ def test_list_table_specs(self, mock_list_table_specs): ) mock_list_table_specs.assert_called_once_with( - parent=DATASET_PATH, - filter_=filter_, - page_size=page_size, + request=dict(parent=DATASET_PATH, filter=filter_, page_size=page_size), retry=None, timeout=None, - metadata=None, + metadata=(), ) @mock.patch("airflow.providers.google.cloud.hooks.automl.AutoMlClient.list_datasets") @@ -247,7 +243,7 @@ def test_list_datasets(self, mock_list_datasets): self.hook.list_datasets(location=GCP_LOCATION, project_id=GCP_PROJECT_ID) mock_list_datasets.assert_called_once_with( - parent=LOCATION_PATH, retry=None, timeout=None, metadata=None + request=dict(parent=LOCATION_PATH), retry=None, timeout=None, metadata=() ) 
@mock.patch("airflow.providers.google.cloud.hooks.automl.AutoMlClient.delete_dataset") @@ -255,5 +251,5 @@ def test_delete_dataset(self, mock_delete_dataset): self.hook.delete_dataset(dataset_id=DATASET_ID, location=GCP_LOCATION, project_id=GCP_PROJECT_ID) mock_delete_dataset.assert_called_once_with( - name=DATASET_PATH, retry=None, timeout=None, metadata=None + request=dict(name=DATASET_PATH), retry=None, timeout=None, metadata=() ) diff --git a/tests/providers/google/cloud/hooks/test_bigquery_dts.py b/tests/providers/google/cloud/hooks/test_bigquery_dts.py index 64ad79cad8983..b53cb7637e1e3 100644 --- a/tests/providers/google/cloud/hooks/test_bigquery_dts.py +++ b/tests/providers/google/cloud/hooks/test_bigquery_dts.py @@ -20,9 +20,7 @@ from copy import deepcopy from unittest import mock -from google.cloud.bigquery_datatransfer_v1 import DataTransferServiceClient from google.cloud.bigquery_datatransfer_v1.types import TransferConfig -from google.protobuf.json_format import ParseDict from airflow.providers.google.cloud.hooks.bigquery_dts import BiqQueryDataTransferServiceHook from airflow.version import version @@ -33,21 +31,18 @@ PARAMS = { "field_delimiter": ",", - "max_bad_records": "0", - "skip_leading_rows": "1", + "max_bad_records": 0, + "skip_leading_rows": 1, "data_path_template": "bucket", "destination_table_name_template": "name", "file_format": "CSV", } -TRANSFER_CONFIG = ParseDict( - { - "destination_dataset_id": "dataset", - "display_name": "GCS Test Config", - "data_source_id": "google_cloud_storage", - "params": PARAMS, - }, - TransferConfig(), +TRANSFER_CONFIG = TransferConfig( + destination_dataset_id="dataset", + display_name="GCS Test Config", + data_source_id="google_cloud_storage", + params=PARAMS, ) TRANSFER_CONFIG_ID = "id1234" @@ -77,14 +72,12 @@ def test_disable_auto_scheduling(self): ) def test_create_transfer_config(self, service_mock): self.hook.create_transfer_config(transfer_config=TRANSFER_CONFIG, project_id=PROJECT_ID) - parent = DataTransferServiceClient.project_path(PROJECT_ID) + parent = f"projects/{PROJECT_ID}" expected_config = deepcopy(TRANSFER_CONFIG) expected_config.schedule_options.disable_auto_scheduling = True service_mock.assert_called_once_with( - parent=parent, - transfer_config=expected_config, - authorization_code=None, - metadata=None, + request=dict(parent=parent, transfer_config=expected_config, authorization_code=None), + metadata=(), retry=None, timeout=None, ) @@ -96,8 +89,8 @@ def test_create_transfer_config(self, service_mock): def test_delete_transfer_config(self, service_mock): self.hook.delete_transfer_config(transfer_config_id=TRANSFER_CONFIG_ID, project_id=PROJECT_ID) - name = DataTransferServiceClient.project_transfer_config_path(PROJECT_ID, TRANSFER_CONFIG_ID) - service_mock.assert_called_once_with(name=name, metadata=None, retry=None, timeout=None) + name = f"projects/{PROJECT_ID}/transferConfigs/{TRANSFER_CONFIG_ID}" + service_mock.assert_called_once_with(request=dict(name=name), metadata=(), retry=None, timeout=None) @mock.patch( "airflow.providers.google.cloud.hooks.bigquery_dts." 
@@ -106,12 +99,10 @@ def test_delete_transfer_config(self, service_mock): def test_start_manual_transfer_runs(self, service_mock): self.hook.start_manual_transfer_runs(transfer_config_id=TRANSFER_CONFIG_ID, project_id=PROJECT_ID) - parent = DataTransferServiceClient.project_transfer_config_path(PROJECT_ID, TRANSFER_CONFIG_ID) + parent = f"projects/{PROJECT_ID}/transferConfigs/{TRANSFER_CONFIG_ID}" service_mock.assert_called_once_with( - parent=parent, - requested_time_range=None, - requested_run_time=None, - metadata=None, + request=dict(parent=parent, requested_time_range=None, requested_run_time=None), + metadata=(), retry=None, timeout=None, ) diff --git a/tests/providers/google/cloud/hooks/test_cloud_memorystore.py b/tests/providers/google/cloud/hooks/test_cloud_memorystore.py index 40de3b8e21cc3..9e6f442236b4a 100644 --- a/tests/providers/google/cloud/hooks/test_cloud_memorystore.py +++ b/tests/providers/google/cloud/hooks/test_cloud_memorystore.py @@ -85,7 +85,10 @@ def test_create_instance_when_exists(self, mock_get_conn, mock_project_id): metadata=TEST_METADATA, ) mock_get_conn.return_value.get_instance.assert_called_once_with( - name=TEST_NAME_DEFAULT_PROJECT_ID, retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA + request=dict(name=TEST_NAME_DEFAULT_PROJECT_ID), + retry=TEST_RETRY, + timeout=TEST_TIMEOUT, + metadata=TEST_METADATA, ) assert Instance(name=TEST_NAME) == result @@ -116,13 +119,15 @@ def test_create_instance_when_not_exists(self, mock_get_conn, mock_project_id): ] ) mock_get_conn.return_value.create_instance.assert_called_once_with( - instance=Instance( - name=TEST_NAME, - labels={"airflow-version": "v" + version.version.replace(".", "-").replace("+", "-")}, + request=dict( + parent=TEST_PARENT_DEFAULT_PROJECT_ID, + instance=Instance( + name=TEST_NAME, + labels={"airflow-version": "v" + version.version.replace(".", "-").replace("+", "-")}, + ), + instance_id=TEST_INSTANCE_ID, ), - instance_id=TEST_INSTANCE_ID, metadata=TEST_METADATA, - parent=TEST_PARENT_DEFAULT_PROJECT_ID, retry=TEST_RETRY, timeout=TEST_TIMEOUT, ) @@ -143,7 +148,10 @@ def test_delete_instance(self, mock_get_conn, mock_project_id): metadata=TEST_METADATA, ) mock_get_conn.return_value.delete_instance.assert_called_once_with( - name=TEST_NAME_DEFAULT_PROJECT_ID, retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA + request=dict(name=TEST_NAME_DEFAULT_PROJECT_ID), + retry=TEST_RETRY, + timeout=TEST_TIMEOUT, + metadata=TEST_METADATA, ) @mock.patch( @@ -161,7 +169,10 @@ def test_get_instance(self, mock_get_conn, mock_project_id): metadata=TEST_METADATA, ) mock_get_conn.return_value.get_instance.assert_called_once_with( - name=TEST_NAME_DEFAULT_PROJECT_ID, retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA + request=dict(name=TEST_NAME_DEFAULT_PROJECT_ID), + retry=TEST_RETRY, + timeout=TEST_TIMEOUT, + metadata=TEST_METADATA, ) @mock.patch( @@ -179,8 +190,7 @@ def test_list_instances(self, mock_get_conn, mock_project_id): metadata=TEST_METADATA, ) mock_get_conn.return_value.list_instances.assert_called_once_with( - parent=TEST_PARENT_DEFAULT_PROJECT_ID, - page_size=TEST_PAGE_SIZE, + request=dict(parent=TEST_PARENT_DEFAULT_PROJECT_ID, page_size=TEST_PAGE_SIZE), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -203,8 +213,7 @@ def test_update_instance(self, mock_get_conn, mock_project_id): metadata=TEST_METADATA, ) mock_get_conn.return_value.update_instance.assert_called_once_with( - update_mask=TEST_UPDATE_MASK, - 
instance=Instance(name=TEST_NAME_DEFAULT_PROJECT_ID), + request=dict(update_mask=TEST_UPDATE_MASK, instance=Instance(name=TEST_NAME_DEFAULT_PROJECT_ID)), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -234,7 +243,7 @@ def test_create_instance_when_exists(self, mock_get_conn): metadata=TEST_METADATA, ) mock_get_conn.return_value.get_instance.assert_called_once_with( - name="projects/test-project-id/locations/test-location/instances/test-instance-id", + request=dict(name="projects/test-project-id/locations/test-location/instances/test-instance-id"), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -275,13 +284,15 @@ def test_create_instance_when_not_exists(self, mock_get_conn): ) mock_get_conn.return_value.create_instance.assert_called_once_with( - instance=Instance( - name=TEST_NAME, - labels={"airflow-version": "v" + version.version.replace(".", "-").replace("+", "-")}, + request=dict( + parent=TEST_PARENT, + instance=Instance( + name=TEST_NAME, + labels={"airflow-version": "v" + version.version.replace(".", "-").replace("+", "-")}, + ), + instance_id=TEST_INSTANCE_ID, ), - instance_id=TEST_INSTANCE_ID, metadata=TEST_METADATA, - parent=TEST_PARENT, retry=TEST_RETRY, timeout=TEST_TIMEOUT, ) @@ -316,7 +327,7 @@ def test_delete_instance(self, mock_get_conn): metadata=TEST_METADATA, ) mock_get_conn.return_value.delete_instance.assert_called_once_with( - name=TEST_NAME, retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA + request=dict(name=TEST_NAME), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA ) @mock.patch( @@ -347,7 +358,7 @@ def test_get_instance(self, mock_get_conn): metadata=TEST_METADATA, ) mock_get_conn.return_value.get_instance.assert_called_once_with( - name=TEST_NAME, retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA + request=dict(name=TEST_NAME), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA ) @mock.patch( @@ -378,8 +389,7 @@ def test_list_instances(self, mock_get_conn): metadata=TEST_METADATA, ) mock_get_conn.return_value.list_instances.assert_called_once_with( - parent=TEST_PARENT, - page_size=TEST_PAGE_SIZE, + request=dict(parent=TEST_PARENT, page_size=TEST_PAGE_SIZE), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -413,8 +423,7 @@ def test_update_instance(self, mock_get_conn): project_id=TEST_PROJECT_ID, ) mock_get_conn.return_value.update_instance.assert_called_once_with( - update_mask=TEST_UPDATE_MASK, - instance=Instance(name=TEST_NAME), + request=dict(update_mask={'paths': ['memory_size_gb']}, instance=Instance(name=TEST_NAME)), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, diff --git a/tests/providers/google/cloud/hooks/test_datacatalog.py b/tests/providers/google/cloud/hooks/test_datacatalog.py index f5192c5004c1f..99d785fa61637 100644 --- a/tests/providers/google/cloud/hooks/test_datacatalog.py +++ b/tests/providers/google/cloud/hooks/test_datacatalog.py @@ -22,6 +22,7 @@ import pytest from google.api_core.retry import Retry +from google.cloud.datacatalog_v1beta1 import CreateTagRequest, CreateTagTemplateRequest from google.cloud.datacatalog_v1beta1.types import Entry, Tag, TagTemplate from airflow import AirflowException @@ -38,7 +39,7 @@ TEST_ENTRY: Dict = {} TEST_RETRY: Retry = Retry() TEST_TIMEOUT: float = 4 -TEST_METADATA: Sequence[Tuple[str, str]] = [] +TEST_METADATA: Sequence[Tuple[str, str]] = () TEST_ENTRY_GROUP_ID: str = "test-entry-group-id" TEST_ENTRY_GROUP: Dict = {} TEST_TAG: Dict = {} @@ -102,7 +103,7 @@ def 
test_lookup_entry_with_linked_resource(self, mock_get_conn, mock_get_creds_a metadata=TEST_METADATA, ) mock_get_conn.return_value.lookup_entry.assert_called_once_with( - linked_resource=TEST_LINKED_RESOURCE, + request=dict(linked_resource=TEST_LINKED_RESOURCE), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -118,7 +119,10 @@ def test_lookup_entry_with_sql_resource(self, mock_get_conn, mock_get_creds_and_ sql_resource=TEST_SQL_RESOURCE, retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA ) mock_get_conn.return_value.lookup_entry.assert_called_once_with( - sql_resource=TEST_SQL_RESOURCE, retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA + request=dict(sql_resource=TEST_SQL_RESOURCE), + retry=TEST_RETRY, + timeout=TEST_TIMEOUT, + metadata=TEST_METADATA, ) @mock.patch( @@ -148,10 +152,9 @@ def test_search_catalog(self, mock_get_conn, mock_get_creds_and_project_id) -> N metadata=TEST_METADATA, ) mock_get_conn.return_value.search_catalog.assert_called_once_with( - scope=TEST_SCOPE, - query=TEST_QUERY, - page_size=TEST_PAGE_SIZE, - order_by=TEST_ORDER_BY, + request=dict( + scope=TEST_SCOPE, query=TEST_QUERY, page_size=TEST_PAGE_SIZE, order_by=TEST_ORDER_BY + ), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -184,9 +187,11 @@ def test_create_entry(self, mock_get_conn, mock_get_creds_and_project_id) -> Non metadata=TEST_METADATA, ) mock_get_conn.return_value.create_entry.assert_called_once_with( - parent=TEST_ENTRY_GROUP_PATH.format(TEST_PROJECT_ID_1), - entry_id=TEST_ENTRY_ID, - entry=TEST_ENTRY, + request=dict( + parent=TEST_ENTRY_GROUP_PATH.format(TEST_PROJECT_ID_1), + entry_id=TEST_ENTRY_ID, + entry=TEST_ENTRY, + ), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -207,9 +212,11 @@ def test_create_entry_group(self, mock_get_conn, mock_get_creds_and_project_id) metadata=TEST_METADATA, ) mock_get_conn.return_value.create_entry_group.assert_called_once_with( - parent=TEST_LOCATION_PATH.format(TEST_PROJECT_ID_1), - entry_group_id=TEST_ENTRY_GROUP_ID, - entry_group=TEST_ENTRY_GROUP, + request=dict( + parent=TEST_LOCATION_PATH.format(TEST_PROJECT_ID_1), + entry_group_id=TEST_ENTRY_GROUP_ID, + entry_group=TEST_ENTRY_GROUP, + ), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -232,8 +239,10 @@ def test_create_tag(self, mock_get_conn, mock_get_creds_and_project_id) -> None: metadata=TEST_METADATA, ) mock_get_conn.return_value.create_tag.assert_called_once_with( - parent=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_1), - tag={"template": TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_1)}, + request=CreateTagRequest( + parent=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_1), + tag=Tag(template=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_1)), + ), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -256,8 +265,10 @@ def test_create_tag_protobuff(self, mock_get_conn, mock_get_creds_and_project_id metadata=TEST_METADATA, ) mock_get_conn.return_value.create_tag.assert_called_once_with( - parent=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_1), - tag=Tag(template=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_1)), + request=CreateTagRequest( + parent=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_1), + tag=Tag(template=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_1)), + ), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -278,9 +289,11 @@ def test_create_tag_template(self, mock_get_conn, mock_get_creds_and_project_id) metadata=TEST_METADATA, ) 
mock_get_conn.return_value.create_tag_template.assert_called_once_with( - parent=TEST_LOCATION_PATH.format(TEST_PROJECT_ID_1), - tag_template_id=TEST_TAG_TEMPLATE_ID, - tag_template=TEST_TAG_TEMPLATE, + request=CreateTagTemplateRequest( + parent=TEST_LOCATION_PATH.format(TEST_PROJECT_ID_1), + tag_template_id=TEST_TAG_TEMPLATE_ID, + tag_template=TEST_TAG_TEMPLATE, + ), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -302,9 +315,11 @@ def test_create_tag_template_field(self, mock_get_conn, mock_get_creds_and_proje metadata=TEST_METADATA, ) mock_get_conn.return_value.create_tag_template_field.assert_called_once_with( - parent=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_1), - tag_template_field_id=TEST_TAG_TEMPLATE_FIELD_ID, - tag_template_field=TEST_TAG_TEMPLATE_FIELD, + request=dict( + parent=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_1), + tag_template_field_id=TEST_TAG_TEMPLATE_FIELD_ID, + tag_template_field=TEST_TAG_TEMPLATE_FIELD, + ), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -325,7 +340,9 @@ def test_delete_entry(self, mock_get_conn, mock_get_creds_and_project_id) -> Non metadata=TEST_METADATA, ) mock_get_conn.return_value.delete_entry.assert_called_once_with( - name=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_1), + request=dict( + name=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_1), + ), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -345,7 +362,9 @@ def test_delete_entry_group(self, mock_get_conn, mock_get_creds_and_project_id) metadata=TEST_METADATA, ) mock_get_conn.return_value.delete_entry_group.assert_called_once_with( - name=TEST_ENTRY_GROUP_PATH.format(TEST_PROJECT_ID_1), + request=dict( + name=TEST_ENTRY_GROUP_PATH.format(TEST_PROJECT_ID_1), + ), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -367,7 +386,9 @@ def test_delete_tag(self, mock_get_conn, mock_get_creds_and_project_id) -> None: metadata=TEST_METADATA, ) mock_get_conn.return_value.delete_tag.assert_called_once_with( - name=TEST_TAG_PATH.format(TEST_PROJECT_ID_1), + request=dict( + name=TEST_TAG_PATH.format(TEST_PROJECT_ID_1), + ), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -388,8 +409,7 @@ def test_delete_tag_template(self, mock_get_conn, mock_get_creds_and_project_id) metadata=TEST_METADATA, ) mock_get_conn.return_value.delete_tag_template.assert_called_once_with( - name=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_1), - force=TEST_FORCE, + request=dict(name=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_1), force=TEST_FORCE), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -411,8 +431,10 @@ def test_delete_tag_template_field(self, mock_get_conn, mock_get_creds_and_proje metadata=TEST_METADATA, ) mock_get_conn.return_value.delete_tag_template_field.assert_called_once_with( - name=TEST_TAG_TEMPLATE_FIELD_PATH.format(TEST_PROJECT_ID_1), - force=TEST_FORCE, + request=dict( + name=TEST_TAG_TEMPLATE_FIELD_PATH.format(TEST_PROJECT_ID_1), + force=TEST_FORCE, + ), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -433,7 +455,9 @@ def test_get_entry(self, mock_get_conn, mock_get_creds_and_project_id) -> None: metadata=TEST_METADATA, ) mock_get_conn.return_value.get_entry.assert_called_once_with( - name=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_1), + request=dict( + name=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_1), + ), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -454,8 +478,10 @@ def test_get_entry_group(self, mock_get_conn, mock_get_creds_and_project_id) -> 
metadata=TEST_METADATA, ) mock_get_conn.return_value.get_entry_group.assert_called_once_with( - name=TEST_ENTRY_GROUP_PATH.format(TEST_PROJECT_ID_1), - read_mask=TEST_READ_MASK, + request=dict( + name=TEST_ENTRY_GROUP_PATH.format(TEST_PROJECT_ID_1), + read_mask=TEST_READ_MASK, + ), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -475,7 +501,9 @@ def test_get_tag_template(self, mock_get_conn, mock_get_creds_and_project_id) -> metadata=TEST_METADATA, ) mock_get_conn.return_value.get_tag_template.assert_called_once_with( - name=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_1), + request=dict( + name=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_1), + ), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -497,8 +525,10 @@ def test_list_tags(self, mock_get_conn, mock_get_creds_and_project_id) -> None: metadata=TEST_METADATA, ) mock_get_conn.return_value.list_tags.assert_called_once_with( - parent=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_1), - page_size=TEST_PAGE_SIZE, + request=dict( + parent=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_1), + page_size=TEST_PAGE_SIZE, + ), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -524,8 +554,10 @@ def test_get_tag_for_template_name(self, mock_get_conn, mock_get_creds_and_proje metadata=TEST_METADATA, ) mock_get_conn.return_value.list_tags.assert_called_once_with( - parent=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_1), - page_size=100, + request=dict( + parent=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_1), + page_size=100, + ), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -548,8 +580,10 @@ def test_rename_tag_template_field(self, mock_get_conn, mock_get_creds_and_proje metadata=TEST_METADATA, ) mock_get_conn.return_value.rename_tag_template_field.assert_called_once_with( - name=TEST_TAG_TEMPLATE_FIELD_PATH.format(TEST_PROJECT_ID_1), - new_tag_template_field_id=TEST_NEW_TAG_TEMPLATE_FIELD_ID, + request=dict( + name=TEST_TAG_TEMPLATE_FIELD_PATH.format(TEST_PROJECT_ID_1), + new_tag_template_field_id=TEST_NEW_TAG_TEMPLATE_FIELD_ID, + ), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -572,8 +606,10 @@ def test_update_entry(self, mock_get_conn, mock_get_creds_and_project_id) -> Non metadata=TEST_METADATA, ) mock_get_conn.return_value.update_entry.assert_called_once_with( - entry=Entry(name=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_1)), - update_mask=TEST_UPDATE_MASK, + request=dict( + entry=Entry(name=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_1)), + update_mask=TEST_UPDATE_MASK, + ), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -597,8 +633,7 @@ def test_update_tag(self, mock_get_conn, mock_get_creds_and_project_id) -> None: metadata=TEST_METADATA, ) mock_get_conn.return_value.update_tag.assert_called_once_with( - tag=Tag(name=TEST_TAG_PATH.format(TEST_PROJECT_ID_1)), - update_mask=TEST_UPDATE_MASK, + request=dict(tag=Tag(name=TEST_TAG_PATH.format(TEST_PROJECT_ID_1)), update_mask=TEST_UPDATE_MASK), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -620,8 +655,10 @@ def test_update_tag_template(self, mock_get_conn, mock_get_creds_and_project_id) metadata=TEST_METADATA, ) mock_get_conn.return_value.update_tag_template.assert_called_once_with( - tag_template=TagTemplate(name=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_1)), - update_mask=TEST_UPDATE_MASK, + request=dict( + tag_template=TagTemplate(name=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_1)), + update_mask=TEST_UPDATE_MASK, + ), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, 
@@ -644,9 +681,11 @@ def test_update_tag_template_field(self, mock_get_conn, mock_get_creds_and_proje metadata=TEST_METADATA, ) mock_get_conn.return_value.update_tag_template_field.assert_called_once_with( - name=TEST_TAG_TEMPLATE_FIELD_PATH.format(TEST_PROJECT_ID_1), - tag_template_field=TEST_TAG_TEMPLATE_FIELD, - update_mask=TEST_UPDATE_MASK, + request=dict( + name=TEST_TAG_TEMPLATE_FIELD_PATH.format(TEST_PROJECT_ID_1), + tag_template_field=TEST_TAG_TEMPLATE_FIELD, + update_mask=TEST_UPDATE_MASK, + ), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -680,9 +719,11 @@ def test_create_entry(self, mock_get_conn, mock_get_creds_and_project_id) -> Non metadata=TEST_METADATA, ) mock_get_conn.return_value.create_entry.assert_called_once_with( - parent=TEST_ENTRY_GROUP_PATH.format(TEST_PROJECT_ID_2), - entry_id=TEST_ENTRY_ID, - entry=TEST_ENTRY, + request=dict( + parent=TEST_ENTRY_GROUP_PATH.format(TEST_PROJECT_ID_2), + entry_id=TEST_ENTRY_ID, + entry=TEST_ENTRY, + ), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -704,9 +745,11 @@ def test_create_entry_group(self, mock_get_conn, mock_get_creds_and_project_id) metadata=TEST_METADATA, ) mock_get_conn.return_value.create_entry_group.assert_called_once_with( - parent=TEST_LOCATION_PATH.format(TEST_PROJECT_ID_2), - entry_group_id=TEST_ENTRY_GROUP_ID, - entry_group=TEST_ENTRY_GROUP, + request=dict( + parent=TEST_LOCATION_PATH.format(TEST_PROJECT_ID_2), + entry_group_id=TEST_ENTRY_GROUP_ID, + entry_group=TEST_ENTRY_GROUP, + ), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -730,8 +773,10 @@ def test_create_tag(self, mock_get_conn, mock_get_creds_and_project_id) -> None: metadata=TEST_METADATA, ) mock_get_conn.return_value.create_tag.assert_called_once_with( - parent=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_2), - tag={"template": TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_2)}, + request=CreateTagRequest( + parent=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_2), + tag=Tag(template=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_2)), + ), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -755,8 +800,10 @@ def test_create_tag_protobuff(self, mock_get_conn, mock_get_creds_and_project_id metadata=TEST_METADATA, ) mock_get_conn.return_value.create_tag.assert_called_once_with( - parent=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_2), - tag=Tag(template=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_2)), + request=CreateTagRequest( + parent=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_2), + tag=Tag(template=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_2)), + ), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -778,9 +825,11 @@ def test_create_tag_template(self, mock_get_conn, mock_get_creds_and_project_id) metadata=TEST_METADATA, ) mock_get_conn.return_value.create_tag_template.assert_called_once_with( - parent=TEST_LOCATION_PATH.format(TEST_PROJECT_ID_2), - tag_template_id=TEST_TAG_TEMPLATE_ID, - tag_template=TEST_TAG_TEMPLATE, + request=CreateTagTemplateRequest( + parent=TEST_LOCATION_PATH.format(TEST_PROJECT_ID_2), + tag_template_id=TEST_TAG_TEMPLATE_ID, + tag_template=TEST_TAG_TEMPLATE, + ), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -803,9 +852,11 @@ def test_create_tag_template_field(self, mock_get_conn, mock_get_creds_and_proje metadata=TEST_METADATA, ) mock_get_conn.return_value.create_tag_template_field.assert_called_once_with( - parent=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_2), - tag_template_field_id=TEST_TAG_TEMPLATE_FIELD_ID, - 
tag_template_field=TEST_TAG_TEMPLATE_FIELD, + request=dict( + parent=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_2), + tag_template_field_id=TEST_TAG_TEMPLATE_FIELD_ID, + tag_template_field=TEST_TAG_TEMPLATE_FIELD, + ), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -827,7 +878,7 @@ def test_delete_entry(self, mock_get_conn, mock_get_creds_and_project_id) -> Non metadata=TEST_METADATA, ) mock_get_conn.return_value.delete_entry.assert_called_once_with( - name=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_2), + request=dict(name=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_2)), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -848,7 +899,7 @@ def test_delete_entry_group(self, mock_get_conn, mock_get_creds_and_project_id) metadata=TEST_METADATA, ) mock_get_conn.return_value.delete_entry_group.assert_called_once_with( - name=TEST_ENTRY_GROUP_PATH.format(TEST_PROJECT_ID_2), + request=dict(name=TEST_ENTRY_GROUP_PATH.format(TEST_PROJECT_ID_2)), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -871,7 +922,7 @@ def test_delete_tag(self, mock_get_conn, mock_get_creds_and_project_id) -> None: metadata=TEST_METADATA, ) mock_get_conn.return_value.delete_tag.assert_called_once_with( - name=TEST_TAG_PATH.format(TEST_PROJECT_ID_2), + request=dict(name=TEST_TAG_PATH.format(TEST_PROJECT_ID_2)), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -893,8 +944,7 @@ def test_delete_tag_template(self, mock_get_conn, mock_get_creds_and_project_id) metadata=TEST_METADATA, ) mock_get_conn.return_value.delete_tag_template.assert_called_once_with( - name=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_2), - force=TEST_FORCE, + request=dict(name=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_2), force=TEST_FORCE), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -917,8 +967,7 @@ def test_delete_tag_template_field(self, mock_get_conn, mock_get_creds_and_proje metadata=TEST_METADATA, ) mock_get_conn.return_value.delete_tag_template_field.assert_called_once_with( - name=TEST_TAG_TEMPLATE_FIELD_PATH.format(TEST_PROJECT_ID_2), - force=TEST_FORCE, + request=dict(name=TEST_TAG_TEMPLATE_FIELD_PATH.format(TEST_PROJECT_ID_2), force=TEST_FORCE), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -940,7 +989,7 @@ def test_get_entry(self, mock_get_conn, mock_get_creds_and_project_id) -> None: metadata=TEST_METADATA, ) mock_get_conn.return_value.get_entry.assert_called_once_with( - name=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_2), + request=dict(name=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_2)), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -962,8 +1011,10 @@ def test_get_entry_group(self, mock_get_conn, mock_get_creds_and_project_id) -> metadata=TEST_METADATA, ) mock_get_conn.return_value.get_entry_group.assert_called_once_with( - name=TEST_ENTRY_GROUP_PATH.format(TEST_PROJECT_ID_2), - read_mask=TEST_READ_MASK, + request=dict( + name=TEST_ENTRY_GROUP_PATH.format(TEST_PROJECT_ID_2), + read_mask=TEST_READ_MASK, + ), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -984,7 +1035,7 @@ def test_get_tag_template(self, mock_get_conn, mock_get_creds_and_project_id) -> metadata=TEST_METADATA, ) mock_get_conn.return_value.get_tag_template.assert_called_once_with( - name=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_2), + request=dict(name=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_2)), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -1007,8 +1058,7 @@ def test_list_tags(self, mock_get_conn, 
mock_get_creds_and_project_id) -> None: metadata=TEST_METADATA, ) mock_get_conn.return_value.list_tags.assert_called_once_with( - parent=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_2), - page_size=TEST_PAGE_SIZE, + request=dict(parent=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_2), page_size=TEST_PAGE_SIZE), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -1035,8 +1085,7 @@ def test_get_tag_for_template_name(self, mock_get_conn, mock_get_creds_and_proje metadata=TEST_METADATA, ) mock_get_conn.return_value.list_tags.assert_called_once_with( - parent=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_2), - page_size=100, + request=dict(parent=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_2), page_size=100), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -1060,8 +1109,10 @@ def test_rename_tag_template_field(self, mock_get_conn, mock_get_creds_and_proje metadata=TEST_METADATA, ) mock_get_conn.return_value.rename_tag_template_field.assert_called_once_with( - name=TEST_TAG_TEMPLATE_FIELD_PATH.format(TEST_PROJECT_ID_2), - new_tag_template_field_id=TEST_NEW_TAG_TEMPLATE_FIELD_ID, + request=dict( + name=TEST_TAG_TEMPLATE_FIELD_PATH.format(TEST_PROJECT_ID_2), + new_tag_template_field_id=TEST_NEW_TAG_TEMPLATE_FIELD_ID, + ), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -1085,8 +1136,9 @@ def test_update_entry(self, mock_get_conn, mock_get_creds_and_project_id) -> Non metadata=TEST_METADATA, ) mock_get_conn.return_value.update_entry.assert_called_once_with( - entry=Entry(name=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_2)), - update_mask=TEST_UPDATE_MASK, + request=dict( + entry=Entry(name=TEST_ENTRY_PATH.format(TEST_PROJECT_ID_2)), update_mask=TEST_UPDATE_MASK + ), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -1111,8 +1163,7 @@ def test_update_tag(self, mock_get_conn, mock_get_creds_and_project_id) -> None: metadata=TEST_METADATA, ) mock_get_conn.return_value.update_tag.assert_called_once_with( - tag=Tag(name=TEST_TAG_PATH.format(TEST_PROJECT_ID_2)), - update_mask=TEST_UPDATE_MASK, + request=dict(tag=Tag(name=TEST_TAG_PATH.format(TEST_PROJECT_ID_2)), update_mask=TEST_UPDATE_MASK), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -1135,8 +1186,10 @@ def test_update_tag_template(self, mock_get_conn, mock_get_creds_and_project_id) metadata=TEST_METADATA, ) mock_get_conn.return_value.update_tag_template.assert_called_once_with( - tag_template=TagTemplate(name=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_2)), - update_mask=TEST_UPDATE_MASK, + request=dict( + tag_template=TagTemplate(name=TEST_TAG_TEMPLATE_PATH.format(TEST_PROJECT_ID_2)), + update_mask=TEST_UPDATE_MASK, + ), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -1160,9 +1213,11 @@ def test_update_tag_template_field(self, mock_get_conn, mock_get_creds_and_proje metadata=TEST_METADATA, ) mock_get_conn.return_value.update_tag_template_field.assert_called_once_with( - name=TEST_TAG_TEMPLATE_FIELD_PATH.format(TEST_PROJECT_ID_2), - tag_template_field=TEST_TAG_TEMPLATE_FIELD, - update_mask=TEST_UPDATE_MASK, + request=dict( + name=TEST_TAG_TEMPLATE_FIELD_PATH.format(TEST_PROJECT_ID_2), + tag_template_field=TEST_TAG_TEMPLATE_FIELD, + update_mask=TEST_UPDATE_MASK, + ), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, diff --git a/tests/providers/google/cloud/hooks/test_dataflow.py b/tests/providers/google/cloud/hooks/test_dataflow.py index 5297b307fb76b..c0da0305d91ce 100644 --- a/tests/providers/google/cloud/hooks/test_dataflow.py +++ 
b/tests/providers/google/cloud/hooks/test_dataflow.py @@ -30,16 +30,20 @@ from parameterized import parameterized from airflow.exceptions import AirflowException +from airflow.providers.apache.beam.hooks.beam import BeamCommandRunner, BeamHook from airflow.providers.google.cloud.hooks.dataflow import ( DEFAULT_DATAFLOW_LOCATION, DataflowHook, DataflowJobStatus, DataflowJobType, _DataflowJobsController, - _DataflowRunner, _fallback_to_project_id_from_variables, + process_line_and_extract_dataflow_job_id_callback, ) +DEFAULT_RUNNER = "DirectRunner" +BEAM_STRING = 'airflow.providers.apache.beam.hooks.beam.{}' + TASK_ID = 'test-dataflow-operator' JOB_NAME = 'test-dataflow-pipeline' MOCK_UUID = UUID('cf4a56d2-8101-4217-b027-2af6216feb48') @@ -183,6 +187,7 @@ class TestDataflowHook(unittest.TestCase): def setUp(self): with mock.patch(BASE_STRING.format('GoogleBaseHook.__init__'), new=mock_init): self.dataflow_hook = DataflowHook(gcp_conn_id='test') + self.dataflow_hook.beam_hook = MagicMock() @mock.patch("airflow.providers.google.cloud.hooks.dataflow.DataflowHook._authorize") @mock.patch("airflow.providers.google.cloud.hooks.dataflow.build") @@ -194,186 +199,229 @@ def test_dataflow_client_creation(self, mock_build, mock_authorize): assert mock_build.return_value == result @mock.patch(DATAFLOW_STRING.format('uuid.uuid4')) - @mock.patch(DATAFLOW_STRING.format('_DataflowJobsController')) - @mock.patch(DATAFLOW_STRING.format('_DataflowRunner')) - @mock.patch(DATAFLOW_STRING.format('DataflowHook.get_conn')) - def test_start_python_dataflow(self, mock_conn, mock_dataflow, mock_dataflowjob, mock_uuid): + @mock.patch(DATAFLOW_STRING.format('DataflowHook.wait_for_done')) + @mock.patch(DATAFLOW_STRING.format('process_line_and_extract_dataflow_job_id_callback')) def test_start_python_dataflow(self, mock_callback_on_job_id, mock_dataflow_wait_for_done, mock_uuid): + mock_beam_start_python_pipeline = self.dataflow_hook.beam_hook.start_python_pipeline mock_uuid.return_value = MOCK_UUID - mock_conn.return_value = None - dataflow_instance = mock_dataflow.return_value - dataflow_instance.wait_for_done.return_value = None - dataflowjob_instance = mock_dataflowjob.return_value - dataflowjob_instance.wait_for_done.return_value = None - self.dataflow_hook.start_python_dataflow( # pylint: disable=no-value-for-parameter - job_name=JOB_NAME, - variables=DATAFLOW_VARIABLES_PY, - dataflow=PY_FILE, + on_new_job_id_callback = MagicMock() + py_requirements = ["pandas", "numpy"] + job_name = f"{JOB_NAME}-{MOCK_UUID_PREFIX}" + + with self.assertWarnsRegex(DeprecationWarning, "This method is deprecated"): + self.dataflow_hook.start_python_dataflow( # pylint: disable=no-value-for-parameter + job_name=JOB_NAME, + variables=DATAFLOW_VARIABLES_PY, + dataflow=PY_FILE, + py_options=PY_OPTIONS, + py_interpreter=DEFAULT_PY_INTERPRETER, + py_requirements=py_requirements, + on_new_job_id_callback=on_new_job_id_callback, + ) + + expected_variables = copy.deepcopy(DATAFLOW_VARIABLES_PY) + expected_variables["job_name"] = job_name + expected_variables["region"] = DEFAULT_DATAFLOW_LOCATION + + mock_callback_on_job_id.assert_called_once_with(on_new_job_id_callback) + mock_beam_start_python_pipeline.assert_called_once_with( + variables=expected_variables, + py_file=PY_FILE, + py_interpreter=DEFAULT_PY_INTERPRETER, py_options=PY_OPTIONS, + py_requirements=py_requirements, + py_system_site_packages=False, + process_line_callback=mock_callback_on_job_id.return_value, + ) + + mock_dataflow_wait_for_done.assert_called_once_with(
job_id=mock.ANY, job_name=job_name, location=DEFAULT_DATAFLOW_LOCATION ) - expected_cmd = [ - "python3", - '-m', - PY_FILE, - '--region=us-central1', - '--runner=DataflowRunner', - '--project=test', - '--labels=foo=bar', - '--staging_location=gs://test/staging', - f'--job_name={JOB_NAME}-{MOCK_UUID_PREFIX}', - ] - assert sorted(mock_dataflow.call_args[1]["cmd"]) == sorted(expected_cmd) @mock.patch(DATAFLOW_STRING.format('uuid.uuid4')) - @mock.patch(DATAFLOW_STRING.format('_DataflowJobsController')) - @mock.patch(DATAFLOW_STRING.format('_DataflowRunner')) - @mock.patch(DATAFLOW_STRING.format('DataflowHook.get_conn')) + @mock.patch(DATAFLOW_STRING.format('DataflowHook.wait_for_done')) + @mock.patch(DATAFLOW_STRING.format('process_line_and_extract_dataflow_job_id_callback')) def test_start_python_dataflow_with_custom_region_as_variable( - self, mock_conn, mock_dataflow, mock_dataflowjob, mock_uuid + self, mock_callback_on_job_id, mock_dataflow_wait_for_done, mock_uuid ): + mock_beam_start_python_pipeline = self.dataflow_hook.beam_hook.start_python_pipeline mock_uuid.return_value = MOCK_UUID - mock_conn.return_value = None - dataflow_instance = mock_dataflow.return_value - dataflow_instance.wait_for_done.return_value = None - dataflowjob_instance = mock_dataflowjob.return_value - dataflowjob_instance.wait_for_done.return_value = None - variables = copy.deepcopy(DATAFLOW_VARIABLES_PY) - variables['region'] = TEST_LOCATION - self.dataflow_hook.start_python_dataflow( # pylint: disable=no-value-for-parameter - job_name=JOB_NAME, - variables=variables, - dataflow=PY_FILE, + on_new_job_id_callback = MagicMock() + py_requirements = ["pandas", "numpy"] + job_name = f"{JOB_NAME}-{MOCK_UUID_PREFIX}" + + passed_variables = copy.deepcopy(DATAFLOW_VARIABLES_PY) + passed_variables["region"] = TEST_LOCATION + + with self.assertWarnsRegex(DeprecationWarning, "This method is deprecated"): + self.dataflow_hook.start_python_dataflow( # pylint: disable=no-value-for-parameter + job_name=JOB_NAME, + variables=passed_variables, + dataflow=PY_FILE, + py_options=PY_OPTIONS, + py_interpreter=DEFAULT_PY_INTERPRETER, + py_requirements=py_requirements, + on_new_job_id_callback=on_new_job_id_callback, + ) + + expected_variables = copy.deepcopy(DATAFLOW_VARIABLES_PY) + expected_variables["job_name"] = job_name + expected_variables["region"] = TEST_LOCATION + + mock_callback_on_job_id.assert_called_once_with(on_new_job_id_callback) + mock_beam_start_python_pipeline.assert_called_once_with( + variables=expected_variables, + py_file=PY_FILE, + py_interpreter=DEFAULT_PY_INTERPRETER, py_options=PY_OPTIONS, + py_requirements=py_requirements, + py_system_site_packages=False, + process_line_callback=mock_callback_on_job_id.return_value, + ) + + mock_dataflow_wait_for_done.assert_called_once_with( + job_id=mock.ANY, job_name=job_name, location=TEST_LOCATION ) - expected_cmd = [ - "python3", - '-m', - PY_FILE, - f'--region={TEST_LOCATION}', - '--runner=DataflowRunner', - '--project=test', - '--labels=foo=bar', - '--staging_location=gs://test/staging', - f'--job_name={JOB_NAME}-{MOCK_UUID_PREFIX}', - ] - assert sorted(mock_dataflow.call_args[1]["cmd"]) == sorted(expected_cmd) @mock.patch(DATAFLOW_STRING.format('uuid.uuid4')) - @mock.patch(DATAFLOW_STRING.format('_DataflowJobsController')) - @mock.patch(DATAFLOW_STRING.format('_DataflowRunner')) - @mock.patch(DATAFLOW_STRING.format('DataflowHook.get_conn')) + @mock.patch(DATAFLOW_STRING.format('DataflowHook.wait_for_done')) +
@mock.patch(DATAFLOW_STRING.format('process_line_and_extract_dataflow_job_id_callback')) def test_start_python_dataflow_with_custom_region_as_parameter( - self, mock_conn, mock_dataflow, mock_dataflowjob, mock_uuid + self, mock_callback_on_job_id, mock_dataflow_wait_for_done, mock_uuid ): + mock_beam_start_python_pipeline = self.dataflow_hook.beam_hook.start_python_pipeline mock_uuid.return_value = MOCK_UUID - mock_conn.return_value = None - dataflow_instance = mock_dataflow.return_value - dataflow_instance.wait_for_done.return_value = None - dataflowjob_instance = mock_dataflowjob.return_value - dataflowjob_instance.wait_for_done.return_value = None - self.dataflow_hook.start_python_dataflow( # pylint: disable=no-value-for-parameter - job_name=JOB_NAME, - variables=DATAFLOW_VARIABLES_PY, - dataflow=PY_FILE, + on_new_job_id_callback = MagicMock() + py_requirements = ["pandas", "numpy"] + job_name = f"{JOB_NAME}-{MOCK_UUID_PREFIX}" + + passed_variables = copy.deepcopy(DATAFLOW_VARIABLES_PY) + + with self.assertWarnsRegex(DeprecationWarning, "This method is deprecated"): + self.dataflow_hook.start_python_dataflow( # pylint: disable=no-value-for-parameter + job_name=JOB_NAME, + variables=passed_variables, + dataflow=PY_FILE, + py_options=PY_OPTIONS, + py_interpreter=DEFAULT_PY_INTERPRETER, + py_requirements=py_requirements, + on_new_job_id_callback=on_new_job_id_callback, + location=TEST_LOCATION, + ) + + expected_variables = copy.deepcopy(DATAFLOW_VARIABLES_PY) + expected_variables["job_name"] = job_name + expected_variables["region"] = TEST_LOCATION + + mock_callback_on_job_id.assert_called_once_with(on_new_job_id_callback) + mock_beam_start_python_pipeline.assert_called_once_with( + variables=expected_variables, + py_file=PY_FILE, + py_interpreter=DEFAULT_PY_INTERPRETER, py_options=PY_OPTIONS, - location=TEST_LOCATION, + py_requirements=py_requirements, + py_system_site_packages=False, + process_line_callback=mock_callback_on_job_id.return_value, + ) + + mock_dataflow_wait_for_done.assert_called_once_with( + job_id=mock.ANY, job_name=job_name, location=TEST_LOCATION ) - expected_cmd = [ - "python3", - '-m', - PY_FILE, - f'--region={TEST_LOCATION}', - '--runner=DataflowRunner', - '--project=test', - '--labels=foo=bar', - '--staging_location=gs://test/staging', - f'--job_name={JOB_NAME}-{MOCK_UUID_PREFIX}', - ] - assert sorted(mock_dataflow.call_args[1]["cmd"]) == sorted(expected_cmd) @mock.patch(DATAFLOW_STRING.format('uuid.uuid4')) - @mock.patch(DATAFLOW_STRING.format('_DataflowJobsController')) - @mock.patch(DATAFLOW_STRING.format('_DataflowRunner')) - @mock.patch(DATAFLOW_STRING.format('DataflowHook.get_conn')) + @mock.patch(DATAFLOW_STRING.format('DataflowHook.wait_for_done')) + @mock.patch(DATAFLOW_STRING.format('process_line_and_extract_dataflow_job_id_callback')) def test_start_python_dataflow_with_multiple_extra_packages( - self, mock_conn, mock_dataflow, mock_dataflowjob, mock_uuid + self, mock_callback_on_job_id, mock_dataflow_wait_for_done, mock_uuid ): + mock_beam_start_python_pipeline = self.dataflow_hook.beam_hook.start_python_pipeline mock_uuid.return_value = MOCK_UUID - mock_conn.return_value = None - dataflow_instance = mock_dataflow.return_value - dataflow_instance.wait_for_done.return_value = None - dataflowjob_instance = mock_dataflowjob.return_value - dataflowjob_instance.wait_for_done.return_value = None - variables: Dict[str, Any] = copy.deepcopy(DATAFLOW_VARIABLES_PY) - variables['extra-package'] = ['a.whl', 'b.whl'] + on_new_job_id_callback = MagicMock() +
py_requirements = ["pands", "numpy"] + job_name = f"{JOB_NAME}-{MOCK_UUID_PREFIX}" - self.dataflow_hook.start_python_dataflow( # pylint: disable=no-value-for-parameter - job_name=JOB_NAME, - variables=variables, - dataflow=PY_FILE, + passed_variables = copy.deepcopy(DATAFLOW_VARIABLES_PY) + passed_variables['extra-package'] = ['a.whl', 'b.whl'] + + with self.assertWarnsRegex(DeprecationWarning, "This method is deprecated"): + self.dataflow_hook.start_python_dataflow( # pylint: disable=no-value-for-parameter + job_name=JOB_NAME, + variables=passed_variables, + dataflow=PY_FILE, + py_options=PY_OPTIONS, + py_interpreter=DEFAULT_PY_INTERPRETER, + py_requirements=py_requirements, + on_new_job_id_callback=on_new_job_id_callback, + ) + + expected_variables = copy.deepcopy(DATAFLOW_VARIABLES_PY) + expected_variables["job_name"] = job_name + expected_variables["region"] = DEFAULT_DATAFLOW_LOCATION + expected_variables['extra-package'] = ['a.whl', 'b.whl'] + + mock_callback_on_job_id.assert_called_once_with(on_new_job_id_callback) + mock_beam_start_python_pipeline.assert_called_once_with( + variables=expected_variables, + py_file=PY_FILE, + py_interpreter=DEFAULT_PY_INTERPRETER, py_options=PY_OPTIONS, + py_requirements=py_requirements, + py_system_site_packages=False, + process_line_callback=mock_callback_on_job_id.return_value, + ) + + mock_dataflow_wait_for_done.assert_called_once_with( + job_id=mock.ANY, job_name=job_name, location=DEFAULT_DATAFLOW_LOCATION ) - expected_cmd = [ - "python3", - '-m', - PY_FILE, - '--extra-package=a.whl', - '--extra-package=b.whl', - '--region=us-central1', - '--runner=DataflowRunner', - '--project=test', - '--labels=foo=bar', - '--staging_location=gs://test/staging', - f'--job_name={JOB_NAME}-{MOCK_UUID_PREFIX}', - ] - assert sorted(mock_dataflow.call_args[1]["cmd"]) == sorted(expected_cmd) @parameterized.expand( [ - ('default_to_python3', 'python3'), - ('major_version_2', 'python2'), - ('major_version_3', 'python3'), - ('minor_version', 'python3.6'), + ('python3',), + ('python2',), + ('python3',), + ('python3.6',), ] ) @mock.patch(DATAFLOW_STRING.format('uuid.uuid4')) - @mock.patch(DATAFLOW_STRING.format('_DataflowJobsController')) - @mock.patch(DATAFLOW_STRING.format('_DataflowRunner')) - @mock.patch(DATAFLOW_STRING.format('DataflowHook.get_conn')) + @mock.patch(DATAFLOW_STRING.format('DataflowHook.wait_for_done')) + @mock.patch(DATAFLOW_STRING.format('process_line_and_extract_dataflow_job_id_callback')) def test_start_python_dataflow_with_custom_interpreter( - self, - name, - py_interpreter, - mock_conn, - mock_dataflow, - mock_dataflowjob, - mock_uuid, + self, py_interpreter, mock_callback_on_job_id, mock_dataflow_wait_for_done, mock_uuid ): - del name # unused variable + mock_beam_start_python_pipeline = self.dataflow_hook.beam_hook.start_python_pipeline mock_uuid.return_value = MOCK_UUID - mock_conn.return_value = None - dataflow_instance = mock_dataflow.return_value - dataflow_instance.wait_for_done.return_value = None - dataflowjob_instance = mock_dataflowjob.return_value - dataflowjob_instance.wait_for_done.return_value = None - self.dataflow_hook.start_python_dataflow( # pylint: disable=no-value-for-parameter - job_name=JOB_NAME, - variables=DATAFLOW_VARIABLES_PY, - dataflow=PY_FILE, - py_options=PY_OPTIONS, + on_new_job_id_callback = MagicMock() + job_name = f"{JOB_NAME}-{MOCK_UUID_PREFIX}" + + with self.assertWarnsRegex(DeprecationWarning, "This method is deprecated"): + self.dataflow_hook.start_python_dataflow( # pylint: disable=no-value-for-parameter 
+ job_name=JOB_NAME, + variables=DATAFLOW_VARIABLES_PY, + dataflow=PY_FILE, + py_options=PY_OPTIONS, + py_interpreter=py_interpreter, + py_requirements=None, + on_new_job_id_callback=on_new_job_id_callback, + ) + + expected_variables = copy.deepcopy(DATAFLOW_VARIABLES_PY) + expected_variables["job_name"] = job_name + expected_variables["region"] = DEFAULT_DATAFLOW_LOCATION + + mock_callback_on_job_id.assert_called_once_with(on_new_job_id_callback) + mock_beam_start_python_pipeline.assert_called_once_with( + variables=expected_variables, + py_file=PY_FILE, py_interpreter=py_interpreter, + py_options=PY_OPTIONS, + py_requirements=None, + py_system_site_packages=False, + process_line_callback=mock_callback_on_job_id.return_value, + ) + + mock_dataflow_wait_for_done.assert_called_once_with( + job_id=mock.ANY, job_name=job_name, location=DEFAULT_DATAFLOW_LOCATION ) - expected_cmd = [ - py_interpreter, - '-m', - PY_FILE, - '--region=us-central1', - '--runner=DataflowRunner', - '--project=test', - '--labels=foo=bar', - '--staging_location=gs://test/staging', - f'--job_name={JOB_NAME}-{MOCK_UUID_PREFIX}', - ] - assert sorted(mock_dataflow.call_args[1]["cmd"]) == sorted(expected_cmd) @parameterized.expand( [ @@ -382,225 +430,229 @@ def test_start_python_dataflow_with_custom_interpreter( ([], True), ] ) - @mock.patch(DATAFLOW_STRING.format('prepare_virtualenv')) @mock.patch(DATAFLOW_STRING.format('uuid.uuid4')) - @mock.patch(DATAFLOW_STRING.format('_DataflowJobsController')) - @mock.patch(DATAFLOW_STRING.format('_DataflowRunner')) - @mock.patch(DATAFLOW_STRING.format('DataflowHook.get_conn')) + @mock.patch(DATAFLOW_STRING.format('DataflowHook.wait_for_done')) + @mock.patch(DATAFLOW_STRING.format('process_line_and_extract_dataflow_job_id_callback')) def test_start_python_dataflow_with_non_empty_py_requirements_and_without_system_packages( self, current_py_requirements, current_py_system_site_packages, - mock_conn, - mock_dataflow, - mock_dataflowjob, + mock_callback_on_job_id, + mock_dataflow_wait_for_done, mock_uuid, - mock_virtualenv, ): + mock_beam_start_python_pipeline = self.dataflow_hook.beam_hook.start_python_pipeline mock_uuid.return_value = MOCK_UUID - mock_conn.return_value = None - dataflow_instance = mock_dataflow.return_value - dataflow_instance.wait_for_done.return_value = None - dataflowjob_instance = mock_dataflowjob.return_value - dataflowjob_instance.wait_for_done.return_value = None - mock_virtualenv.return_value = '/dummy_dir/bin/python' - self.dataflow_hook.start_python_dataflow( # pylint: disable=no-value-for-parameter - job_name=JOB_NAME, - variables=DATAFLOW_VARIABLES_PY, - dataflow=PY_FILE, + on_new_job_id_callback = MagicMock() + job_name = f"{JOB_NAME}-{MOCK_UUID_PREFIX}" + + with self.assertWarnsRegex(DeprecationWarning, "This method is deprecated"): + self.dataflow_hook.start_python_dataflow( # pylint: disable=no-value-for-parameter + job_name=JOB_NAME, + variables=DATAFLOW_VARIABLES_PY, + dataflow=PY_FILE, + py_options=PY_OPTIONS, + py_interpreter=DEFAULT_PY_INTERPRETER, + py_requirements=current_py_requirements, + py_system_site_packages=current_py_system_site_packages, + on_new_job_id_callback=on_new_job_id_callback, + ) + + expected_variables = copy.deepcopy(DATAFLOW_VARIABLES_PY) + expected_variables["job_name"] = job_name + expected_variables["region"] = DEFAULT_DATAFLOW_LOCATION + + mock_callback_on_job_id.assert_called_once_with(on_new_job_id_callback) + mock_beam_start_python_pipeline.assert_called_once_with( + variables=expected_variables, + py_file=PY_FILE, + 
py_interpreter=DEFAULT_PY_INTERPRETER, py_options=PY_OPTIONS, py_requirements=current_py_requirements, py_system_site_packages=current_py_system_site_packages, + process_line_callback=mock_callback_on_job_id.return_value, + ) + + mock_dataflow_wait_for_done.assert_called_once_with( + job_id=mock.ANY, job_name=job_name, location=DEFAULT_DATAFLOW_LOCATION ) - expected_cmd = [ - '/dummy_dir/bin/python', - '-m', - PY_FILE, - '--region=us-central1', - '--runner=DataflowRunner', - '--project=test', - '--labels=foo=bar', - '--staging_location=gs://test/staging', - f'--job_name={JOB_NAME}-{MOCK_UUID_PREFIX}', - ] - assert sorted(mock_dataflow.call_args[1]["cmd"]) == sorted(expected_cmd) @mock.patch(DATAFLOW_STRING.format('uuid.uuid4')) - @mock.patch(DATAFLOW_STRING.format('_DataflowJobsController')) - @mock.patch(DATAFLOW_STRING.format('_DataflowRunner')) - @mock.patch(DATAFLOW_STRING.format('DataflowHook.get_conn')) + @mock.patch(DATAFLOW_STRING.format('DataflowHook.wait_for_done')) def test_start_python_dataflow_with_empty_py_requirements_and_without_system_packages( - self, mock_conn, mock_dataflow, mock_dataflowjob, mock_uuid + self, mock_dataflow_wait_for_done, mock_uuid ): + self.dataflow_hook.beam_hook = BeamHook(runner="DataflowRunner") mock_uuid.return_value = MOCK_UUID - mock_conn.return_value = None - dataflow_instance = mock_dataflow.return_value - dataflow_instance.wait_for_done.return_value = None - dataflowjob_instance = mock_dataflowjob.return_value - dataflowjob_instance.wait_for_done.return_value = None - with pytest.raises(AirflowException, match="Invalid method invocation."): + on_new_job_id_callback = MagicMock() + + with self.assertWarnsRegex(DeprecationWarning, "This method is deprecated"), self.assertRaisesRegex( + AirflowException, "Invalid method invocation." 
+ ): self.dataflow_hook.start_python_dataflow( # pylint: disable=no-value-for-parameter job_name=JOB_NAME, variables=DATAFLOW_VARIABLES_PY, dataflow=PY_FILE, py_options=PY_OPTIONS, + py_interpreter=DEFAULT_PY_INTERPRETER, py_requirements=[], + on_new_job_id_callback=on_new_job_id_callback, ) + mock_dataflow_wait_for_done.assert_not_called() + @mock.patch(DATAFLOW_STRING.format('uuid.uuid4')) - @mock.patch(DATAFLOW_STRING.format('_DataflowJobsController')) - @mock.patch(DATAFLOW_STRING.format('_DataflowRunner')) - @mock.patch(DATAFLOW_STRING.format('DataflowHook.get_conn')) - def test_start_java_dataflow(self, mock_conn, mock_dataflow, mock_dataflowjob, mock_uuid): + @mock.patch(DATAFLOW_STRING.format('DataflowHook.wait_for_done')) + @mock.patch(DATAFLOW_STRING.format('process_line_and_extract_dataflow_job_id_callback')) + def test_start_java_dataflow(self, mock_callback_on_job_id, mock_dataflow_wait_for_done, mock_uuid): + mock_beam_start_java_pipeline = self.dataflow_hook.beam_hook.start_java_pipeline mock_uuid.return_value = MOCK_UUID - mock_conn.return_value = None - dataflow_instance = mock_dataflow.return_value - dataflow_instance.wait_for_done.return_value = None - dataflowjob_instance = mock_dataflowjob.return_value - dataflowjob_instance.wait_for_done.return_value = None - self.dataflow_hook.start_java_dataflow( # pylint: disable=no-value-for-parameter - job_name=JOB_NAME, variables=DATAFLOW_VARIABLES_JAVA, jar=JAR_FILE - ) - expected_cmd = [ - 'java', - '-jar', - JAR_FILE, - '--region=us-central1', - '--runner=DataflowRunner', - '--project=test', - '--stagingLocation=gs://test/staging', - '--labels={"foo":"bar"}', - f'--jobName={JOB_NAME}-{MOCK_UUID_PREFIX}', - ] - assert sorted(expected_cmd) == sorted(mock_dataflow.call_args[1]["cmd"]) + on_new_job_id_callback = MagicMock() + job_name = f"{JOB_NAME}-{MOCK_UUID_PREFIX}" + + with self.assertWarnsRegex(DeprecationWarning, "This method is deprecated"): + self.dataflow_hook.start_java_dataflow( # pylint: disable=no-value-for-parameter + job_name=JOB_NAME, + variables=DATAFLOW_VARIABLES_JAVA, + jar=JAR_FILE, + job_class=JOB_CLASS, + on_new_job_id_callback=on_new_job_id_callback, + ) + + expected_variables = copy.deepcopy(DATAFLOW_VARIABLES_JAVA) + expected_variables["jobName"] = job_name + expected_variables["region"] = DEFAULT_DATAFLOW_LOCATION + expected_variables["labels"] = '{"foo":"bar"}' + + mock_callback_on_job_id.assert_called_once_with(on_new_job_id_callback) + mock_beam_start_java_pipeline.assert_called_once_with( + variables=expected_variables, + jar=JAR_FILE, + job_class=JOB_CLASS, + process_line_callback=mock_callback_on_job_id.return_value, + ) + + mock_dataflow_wait_for_done.assert_called_once_with( + job_id=mock.ANY, job_name=job_name, location=DEFAULT_DATAFLOW_LOCATION, multiple_jobs=False + ) @mock.patch(DATAFLOW_STRING.format('uuid.uuid4')) - @mock.patch(DATAFLOW_STRING.format('_DataflowJobsController')) - @mock.patch(DATAFLOW_STRING.format('_DataflowRunner')) - @mock.patch(DATAFLOW_STRING.format('DataflowHook.get_conn')) + @mock.patch(DATAFLOW_STRING.format('DataflowHook.wait_for_done')) + @mock.patch(DATAFLOW_STRING.format('process_line_and_extract_dataflow_job_id_callback')) def test_start_java_dataflow_with_multiple_values_in_variables( - self, mock_conn, mock_dataflow, mock_dataflowjob, mock_uuid + self, mock_callback_on_job_id, mock_dataflow_wait_for_done, mock_uuid ): + mock_beam_start_java_pipeline = self.dataflow_hook.beam_hook.start_java_pipeline mock_uuid.return_value = MOCK_UUID - mock_conn.return_value = 
None - dataflow_instance = mock_dataflow.return_value - dataflow_instance.wait_for_done.return_value = None - dataflowjob_instance = mock_dataflowjob.return_value - dataflowjob_instance.wait_for_done.return_value = None - variables: Dict[str, Any] = copy.deepcopy(DATAFLOW_VARIABLES_JAVA) - variables['mock-option'] = ['a.whl', 'b.whl'] - - self.dataflow_hook.start_java_dataflow( # pylint: disable=no-value-for-parameter - job_name=JOB_NAME, variables=variables, jar=JAR_FILE - ) - expected_cmd = [ - 'java', - '-jar', - JAR_FILE, - '--mock-option=a.whl', - '--mock-option=b.whl', - '--region=us-central1', - '--runner=DataflowRunner', - '--project=test', - '--stagingLocation=gs://test/staging', - '--labels={"foo":"bar"}', - f'--jobName={JOB_NAME}-{MOCK_UUID_PREFIX}', - ] - assert sorted(mock_dataflow.call_args[1]["cmd"]) == sorted(expected_cmd) + on_new_job_id_callback = MagicMock() + job_name = f"{JOB_NAME}-{MOCK_UUID_PREFIX}" + + passed_variables: Dict[str, Any] = copy.deepcopy(DATAFLOW_VARIABLES_JAVA) + passed_variables['mock-option'] = ['a.whl', 'b.whl'] + + with self.assertWarnsRegex(DeprecationWarning, "This method is deprecated"): + self.dataflow_hook.start_java_dataflow( # pylint: disable=no-value-for-parameter + job_name=JOB_NAME, + variables=passed_variables, + jar=JAR_FILE, + job_class=JOB_CLASS, + on_new_job_id_callback=on_new_job_id_callback, + ) + + expected_variables = copy.deepcopy(passed_variables) + expected_variables["jobName"] = job_name + expected_variables["region"] = DEFAULT_DATAFLOW_LOCATION + expected_variables["labels"] = '{"foo":"bar"}' + + mock_callback_on_job_id.assert_called_once_with(on_new_job_id_callback) + mock_beam_start_java_pipeline.assert_called_once_with( + variables=expected_variables, + jar=JAR_FILE, + job_class=JOB_CLASS, + process_line_callback=mock_callback_on_job_id.return_value, + ) + + mock_dataflow_wait_for_done.assert_called_once_with( + job_id=mock.ANY, job_name=job_name, location=DEFAULT_DATAFLOW_LOCATION, multiple_jobs=False + ) @mock.patch(DATAFLOW_STRING.format('uuid.uuid4')) - @mock.patch(DATAFLOW_STRING.format('_DataflowJobsController')) - @mock.patch(DATAFLOW_STRING.format('_DataflowRunner')) - @mock.patch(DATAFLOW_STRING.format('DataflowHook.get_conn')) + @mock.patch(DATAFLOW_STRING.format('DataflowHook.wait_for_done')) + @mock.patch(DATAFLOW_STRING.format('process_line_and_extract_dataflow_job_id_callback')) def test_start_java_dataflow_with_custom_region_as_variable( - self, mock_conn, mock_dataflow, mock_dataflowjob, mock_uuid + self, mock_callback_on_job_id, mock_dataflow_wait_for_done, mock_uuid ): + mock_beam_start_java_pipeline = self.dataflow_hook.beam_hook.start_java_pipeline mock_uuid.return_value = MOCK_UUID - mock_conn.return_value = None - dataflow_instance = mock_dataflow.return_value - dataflow_instance.wait_for_done.return_value = None - dataflowjob_instance = mock_dataflowjob.return_value - dataflowjob_instance.wait_for_done.return_value = None + on_new_job_id_callback = MagicMock() + job_name = f"{JOB_NAME}-{MOCK_UUID_PREFIX}" - variables = copy.deepcopy(DATAFLOW_VARIABLES_JAVA) - variables['region'] = TEST_LOCATION - - self.dataflow_hook.start_java_dataflow( # pylint: disable=no-value-for-parameter - job_name=JOB_NAME, variables=variables, jar=JAR_FILE - ) - expected_cmd = [ - 'java', - '-jar', - JAR_FILE, - f'--region={TEST_LOCATION}', - '--runner=DataflowRunner', - '--project=test', - '--stagingLocation=gs://test/staging', - '--labels={"foo":"bar"}', - f'--jobName={JOB_NAME}-{MOCK_UUID_PREFIX}', - ] - assert 
sorted(expected_cmd) == sorted(mock_dataflow.call_args[1]["cmd"]) + passed_variables: Dict[str, Any] = copy.deepcopy(DATAFLOW_VARIABLES_JAVA) + passed_variables['region'] = TEST_LOCATION + + with self.assertWarnsRegex(DeprecationWarning, "This method is deprecated"): + self.dataflow_hook.start_java_dataflow( # pylint: disable=no-value-for-parameter + job_name=JOB_NAME, + variables=passed_variables, + jar=JAR_FILE, + job_class=JOB_CLASS, + on_new_job_id_callback=on_new_job_id_callback, + ) + + expected_variables = copy.deepcopy(DATAFLOW_VARIABLES_JAVA) + expected_variables["jobName"] = job_name + expected_variables["region"] = TEST_LOCATION + expected_variables["labels"] = '{"foo":"bar"}' + + mock_callback_on_job_id.assert_called_once_with(on_new_job_id_callback) + mock_beam_start_java_pipeline.assert_called_once_with( + variables=expected_variables, + jar=JAR_FILE, + job_class=JOB_CLASS, + process_line_callback=mock_callback_on_job_id.return_value, + ) + + mock_dataflow_wait_for_done.assert_called_once_with( + job_id=mock.ANY, job_name=job_name, location=TEST_LOCATION, multiple_jobs=False + ) @mock.patch(DATAFLOW_STRING.format('uuid.uuid4')) - @mock.patch(DATAFLOW_STRING.format('_DataflowJobsController')) - @mock.patch(DATAFLOW_STRING.format('_DataflowRunner')) - @mock.patch(DATAFLOW_STRING.format('DataflowHook.get_conn')) + @mock.patch(DATAFLOW_STRING.format('DataflowHook.wait_for_done')) + @mock.patch(DATAFLOW_STRING.format('process_line_and_extract_dataflow_job_id_callback')) def test_start_java_dataflow_with_custom_region_as_parameter( - self, mock_conn, mock_dataflow, mock_dataflowjob, mock_uuid + self, mock_callback_on_job_id, mock_dataflow_wait_for_done, mock_uuid ): + mock_beam_start_java_pipeline = self.dataflow_hook.beam_hook.start_java_pipeline mock_uuid.return_value = MOCK_UUID - mock_conn.return_value = None - dataflow_instance = mock_dataflow.return_value - dataflow_instance.wait_for_done.return_value = None - dataflowjob_instance = mock_dataflowjob.return_value - dataflowjob_instance.wait_for_done.return_value = None + on_new_job_id_callback = MagicMock() + job_name = f"{JOB_NAME}-{MOCK_UUID_PREFIX}" - variables = copy.deepcopy(DATAFLOW_VARIABLES_JAVA) - variables['region'] = TEST_LOCATION - - self.dataflow_hook.start_java_dataflow( # pylint: disable=no-value-for-parameter - job_name=JOB_NAME, variables=variables, jar=JAR_FILE - ) - expected_cmd = [ - 'java', - '-jar', - JAR_FILE, - f'--region={TEST_LOCATION}', - '--runner=DataflowRunner', - '--project=test', - '--stagingLocation=gs://test/staging', - '--labels={"foo":"bar"}', - f'--jobName={JOB_NAME}-{MOCK_UUID_PREFIX}', - ] - assert sorted(expected_cmd) == sorted(mock_dataflow.call_args[1]["cmd"]) + with self.assertWarnsRegex(DeprecationWarning, "This method is deprecated"): + self.dataflow_hook.start_java_dataflow( # pylint: disable=no-value-for-parameter + job_name=JOB_NAME, + variables=DATAFLOW_VARIABLES_JAVA, + jar=JAR_FILE, + job_class=JOB_CLASS, + on_new_job_id_callback=on_new_job_id_callback, + location=TEST_LOCATION, + ) - @mock.patch(DATAFLOW_STRING.format('uuid.uuid4')) - @mock.patch(DATAFLOW_STRING.format('_DataflowJobsController')) - @mock.patch(DATAFLOW_STRING.format('_DataflowRunner')) - @mock.patch(DATAFLOW_STRING.format('DataflowHook.get_conn')) - def test_start_java_dataflow_with_job_class(self, mock_conn, mock_dataflow, mock_dataflowjob, mock_uuid): - mock_uuid.return_value = MOCK_UUID - mock_conn.return_value = None - dataflow_instance = mock_dataflow.return_value - 
dataflow_instance.wait_for_done.return_value = None - dataflowjob_instance = mock_dataflowjob.return_value - dataflowjob_instance.wait_for_done.return_value = None - self.dataflow_hook.start_java_dataflow( # pylint: disable=no-value-for-parameter - job_name=JOB_NAME, variables=DATAFLOW_VARIABLES_JAVA, jar=JAR_FILE, job_class=JOB_CLASS - ) - expected_cmd = [ - 'java', - '-cp', - JAR_FILE, - JOB_CLASS, - '--region=us-central1', - '--runner=DataflowRunner', - '--project=test', - '--stagingLocation=gs://test/staging', - '--labels={"foo":"bar"}', - f'--jobName={JOB_NAME}-{MOCK_UUID_PREFIX}', - ] - assert sorted(mock_dataflow.call_args[1]["cmd"]) == sorted(expected_cmd) + expected_variables = copy.deepcopy(DATAFLOW_VARIABLES_JAVA) + expected_variables["jobName"] = job_name + expected_variables["region"] = TEST_LOCATION + expected_variables["labels"] = '{"foo":"bar"}' + + mock_callback_on_job_id.assert_called_once_with(on_new_job_id_callback) + mock_beam_start_java_pipeline.assert_called_once_with( + variables=expected_variables, + jar=JAR_FILE, + job_class=JOB_CLASS, + process_line_callback=mock_callback_on_job_id.return_value, + ) + + mock_dataflow_wait_for_done.assert_called_once_with( + job_id=mock.ANY, job_name=job_name, location=TEST_LOCATION, multiple_jobs=False + ) @parameterized.expand( [ @@ -616,17 +668,20 @@ def test_start_java_dataflow_with_job_class(self, mock_conn, mock_dataflow, mock ) @mock.patch(DATAFLOW_STRING.format('uuid.uuid4'), return_value=MOCK_UUID) def test_valid_dataflow_job_name(self, expected_result, job_name, append_job_name, mock_uuid4): - job_name = self.dataflow_hook._build_dataflow_job_name( + job_name = self.dataflow_hook.build_dataflow_job_name( job_name=job_name, append_job_name=append_job_name ) - assert expected_result == job_name + self.assertEqual(expected_result, job_name) + # @parameterized.expand([("1dfjob@",), ("dfjob@",), ("df^jo",)]) def test_build_dataflow_job_name_with_invalid_value(self, job_name): - with pytest.raises(ValueError): - self.dataflow_hook._build_dataflow_job_name(job_name=job_name, append_job_name=False) + self.assertRaises( + ValueError, self.dataflow_hook.build_dataflow_job_name, job_name=job_name, append_job_name=False + ) + # @mock.patch(DATAFLOW_STRING.format('_DataflowJobsController')) @mock.patch(DATAFLOW_STRING.format('DataflowHook.get_conn')) def test_get_job(self, mock_conn, mock_dataflowjob): @@ -641,6 +696,7 @@ def test_get_job(self, mock_conn, mock_dataflowjob): ) method_fetch_job_by_id.assert_called_once_with(TEST_JOB_ID) + # @mock.patch(DATAFLOW_STRING.format('_DataflowJobsController')) @mock.patch(DATAFLOW_STRING.format('DataflowHook.get_conn')) def test_fetch_job_metrics_by_id(self, mock_conn, mock_dataflowjob): @@ -706,6 +762,34 @@ def test_fetch_job_autoscaling_events_by_id(self, mock_conn, mock_dataflowjob): ) method_fetch_job_autoscaling_events_by_id.assert_called_once_with(TEST_JOB_ID) + @mock.patch(DATAFLOW_STRING.format('_DataflowJobsController')) + @mock.patch(DATAFLOW_STRING.format('DataflowHook.get_conn')) + def test_wait_for_done(self, mock_conn, mock_dataflowjob): + method_wait_for_done = mock_dataflowjob.return_value.wait_for_done + + self.dataflow_hook.wait_for_done( + job_name="JOB_NAME", + project_id=TEST_PROJECT_ID, + job_id=TEST_JOB_ID, + location=TEST_LOCATION, + multiple_jobs=False, + ) + mock_conn.assert_called_once() + mock_dataflowjob.assert_called_once_with( + dataflow=mock_conn.return_value, + project_number=TEST_PROJECT_ID, + name="JOB_NAME", + location=TEST_LOCATION, + 
poll_sleep=self.dataflow_hook.poll_sleep, + job_id=TEST_JOB_ID, + num_retries=self.dataflow_hook.num_retries, + multiple_jobs=False, + drain_pipeline=self.dataflow_hook.drain_pipeline, + cancel_timeout=self.dataflow_hook.cancel_timeout, + wait_until_finished=self.dataflow_hook.wait_until_finished, + ) + method_wait_for_done.assert_called_once_with() + class TestDataflowTemplateHook(unittest.TestCase): def setUp(self): @@ -1691,13 +1775,32 @@ class TestDataflow(unittest.TestCase): def test_data_flow_valid_job_id(self, log): echos = ";".join([f"echo {shlex.quote(line)}" for line in log.split("\n")]) cmd = ["bash", "-c", echos] - assert _DataflowRunner(cmd).wait_for_done() == TEST_JOB_ID + found_job_id = None + + def callback(job_id): + nonlocal found_job_id + found_job_id = job_id + + BeamCommandRunner( + cmd, process_line_callback=process_line_and_extract_dataflow_job_id_callback(callback) + ).wait_for_done() + self.assertEqual(found_job_id, TEST_JOB_ID) def test_data_flow_missing_job_id(self): cmd = ['echo', 'unit testing'] - assert _DataflowRunner(cmd).wait_for_done() is None + found_job_id = None + + def callback(job_id): + nonlocal found_job_id + found_job_id = job_id + + BeamCommandRunner( + cmd, process_line_callback=process_line_and_extract_dataflow_job_id_callback(callback) + ).wait_for_done() + + self.assertEqual(found_job_id, None) - @mock.patch('airflow.providers.google.cloud.hooks.dataflow._DataflowRunner.log') + @mock.patch('airflow.providers.apache.beam.hooks.beam.BeamCommandRunner.log') @mock.patch('subprocess.Popen') @mock.patch('select.select') def test_dataflow_wait_for_done_logging(self, mock_select, mock_popen, mock_logging): @@ -1718,7 +1821,6 @@ def poll_resp_error(): mock_proc_poll.side_effect = [None, poll_resp_error] mock_proc.poll = mock_proc_poll mock_popen.return_value = mock_proc - dataflow = _DataflowRunner(['test', 'cmd']) + dataflow = BeamCommandRunner(['test', 'cmd']) mock_logging.info.assert_called_once_with('Running command: %s', 'test cmd') - with pytest.raises(Exception): - dataflow.wait_for_done() + self.assertRaises(Exception, dataflow.wait_for_done) diff --git a/tests/providers/google/cloud/hooks/test_dataproc.py b/tests/providers/google/cloud/hooks/test_dataproc.py index d09c91e13a663..6842accad1ca1 100644 --- a/tests/providers/google/cloud/hooks/test_dataproc.py +++ b/tests/providers/google/cloud/hooks/test_dataproc.py @@ -20,7 +20,7 @@ from unittest import mock import pytest -from google.cloud.dataproc_v1beta2.types import JobStatus # pylint: disable=no-name-in-module +from google.cloud.dataproc_v1beta2 import JobStatus # pylint: disable=no-name-in-module from airflow.exceptions import AirflowException from airflow.providers.google.cloud.hooks.dataproc import DataprocHook, DataProcJobBuilder @@ -43,8 +43,6 @@ "project_id": GCP_PROJECT, } -PARENT = "parent" -NAME = "name" BASE_STRING = "airflow.providers.google.common.hooks.base_google.{}" DATAPROC_STRING = "airflow.providers.google.cloud.hooks.dataproc.{}" @@ -113,11 +111,13 @@ def test_create_cluster(self, mock_client): ) mock_client.assert_called_once_with(location=GCP_LOCATION) mock_client.return_value.create_cluster.assert_called_once_with( - project_id=GCP_PROJECT, - region=GCP_LOCATION, - cluster=CLUSTER, + request=dict( + project_id=GCP_PROJECT, + region=GCP_LOCATION, + cluster=CLUSTER, + request_id=None, + ), metadata=None, - request_id=None, retry=None, timeout=None, ) @@ -127,12 +127,14 @@ def test_delete_cluster(self, mock_client): self.hook.delete_cluster(project_id=GCP_PROJECT, 
region=GCP_LOCATION, cluster_name=CLUSTER_NAME) mock_client.assert_called_once_with(location=GCP_LOCATION) mock_client.return_value.delete_cluster.assert_called_once_with( - project_id=GCP_PROJECT, - region=GCP_LOCATION, - cluster_name=CLUSTER_NAME, - cluster_uuid=None, + request=dict( + project_id=GCP_PROJECT, + region=GCP_LOCATION, + cluster_name=CLUSTER_NAME, + cluster_uuid=None, + request_id=None, + ), metadata=None, - request_id=None, retry=None, timeout=None, ) @@ -142,9 +144,11 @@ def test_diagnose_cluster(self, mock_client): self.hook.diagnose_cluster(project_id=GCP_PROJECT, region=GCP_LOCATION, cluster_name=CLUSTER_NAME) mock_client.assert_called_once_with(location=GCP_LOCATION) mock_client.return_value.diagnose_cluster.assert_called_once_with( - project_id=GCP_PROJECT, - region=GCP_LOCATION, - cluster_name=CLUSTER_NAME, + request=dict( + project_id=GCP_PROJECT, + region=GCP_LOCATION, + cluster_name=CLUSTER_NAME, + ), metadata=None, retry=None, timeout=None, @@ -156,9 +160,11 @@ def test_get_cluster(self, mock_client): self.hook.get_cluster(project_id=GCP_PROJECT, region=GCP_LOCATION, cluster_name=CLUSTER_NAME) mock_client.assert_called_once_with(location=GCP_LOCATION) mock_client.return_value.get_cluster.assert_called_once_with( - project_id=GCP_PROJECT, - region=GCP_LOCATION, - cluster_name=CLUSTER_NAME, + request=dict( + project_id=GCP_PROJECT, + region=GCP_LOCATION, + cluster_name=CLUSTER_NAME, + ), metadata=None, retry=None, timeout=None, @@ -171,10 +177,12 @@ def test_list_clusters(self, mock_client): self.hook.list_clusters(project_id=GCP_PROJECT, region=GCP_LOCATION, filter_=filter_) mock_client.assert_called_once_with(location=GCP_LOCATION) mock_client.return_value.list_clusters.assert_called_once_with( - project_id=GCP_PROJECT, - region=GCP_LOCATION, - filter_=filter_, - page_size=None, + request=dict( + project_id=GCP_PROJECT, + region=GCP_LOCATION, + filter=filter_, + page_size=None, + ), metadata=None, retry=None, timeout=None, @@ -192,14 +200,16 @@ def test_update_cluster(self, mock_client): ) mock_client.assert_called_once_with(location=GCP_LOCATION) mock_client.return_value.update_cluster.assert_called_once_with( - project_id=GCP_PROJECT, - region=GCP_LOCATION, - cluster=CLUSTER, - cluster_name=CLUSTER_NAME, - update_mask=update_mask, - graceful_decommission_timeout=None, + request=dict( + project_id=GCP_PROJECT, + region=GCP_LOCATION, + cluster=CLUSTER, + cluster_name=CLUSTER_NAME, + update_mask=update_mask, + graceful_decommission_timeout=None, + request_id=None, + ), metadata=None, - request_id=None, retry=None, timeout=None, ) @@ -207,44 +217,45 @@ def test_update_cluster(self, mock_client): @mock.patch(DATAPROC_STRING.format("DataprocHook.get_template_client")) def test_create_workflow_template(self, mock_client): template = {"test": "test"} - mock_client.return_value.region_path.return_value = PARENT + parent = f'projects/{GCP_PROJECT}/regions/{GCP_LOCATION}' self.hook.create_workflow_template(location=GCP_LOCATION, template=template, project_id=GCP_PROJECT) - mock_client.return_value.region_path.assert_called_once_with(GCP_PROJECT, GCP_LOCATION) mock_client.return_value.create_workflow_template.assert_called_once_with( - parent=PARENT, template=template, retry=None, timeout=None, metadata=None + request=dict(parent=parent, template=template), retry=None, timeout=None, metadata=() ) @mock.patch(DATAPROC_STRING.format("DataprocHook.get_template_client")) def test_instantiate_workflow_template(self, mock_client): template_name = "template_name" - 
mock_client.return_value.workflow_template_path.return_value = NAME + name = f'projects/{GCP_PROJECT}/regions/{GCP_LOCATION}/workflowTemplates/{template_name}' self.hook.instantiate_workflow_template( location=GCP_LOCATION, template_name=template_name, project_id=GCP_PROJECT ) - mock_client.return_value.workflow_template_path.assert_called_once_with( - GCP_PROJECT, GCP_LOCATION, template_name - ) mock_client.return_value.instantiate_workflow_template.assert_called_once_with( - name=NAME, version=None, parameters=None, request_id=None, retry=None, timeout=None, metadata=None + request=dict(name=name, version=None, parameters=None, request_id=None), + retry=None, + timeout=None, + metadata=(), ) @mock.patch(DATAPROC_STRING.format("DataprocHook.get_template_client")) def test_instantiate_inline_workflow_template(self, mock_client): template = {"test": "test"} - mock_client.return_value.region_path.return_value = PARENT + parent = f'projects/{GCP_PROJECT}/regions/{GCP_LOCATION}' self.hook.instantiate_inline_workflow_template( location=GCP_LOCATION, template=template, project_id=GCP_PROJECT ) - mock_client.return_value.region_path.assert_called_once_with(GCP_PROJECT, GCP_LOCATION) mock_client.return_value.instantiate_inline_workflow_template.assert_called_once_with( - parent=PARENT, template=template, request_id=None, retry=None, timeout=None, metadata=None + request=dict(parent=parent, template=template, request_id=None), + retry=None, + timeout=None, + metadata=(), ) @mock.patch(DATAPROC_STRING.format("DataprocHook.get_job")) def test_wait_for_job(self, mock_get_job): mock_get_job.side_effect = [ - mock.MagicMock(status=mock.MagicMock(state=JobStatus.RUNNING)), - mock.MagicMock(status=mock.MagicMock(state=JobStatus.ERROR)), + mock.MagicMock(status=mock.MagicMock(state=JobStatus.State.RUNNING)), + mock.MagicMock(status=mock.MagicMock(state=JobStatus.State.ERROR)), ] with pytest.raises(AirflowException): self.hook.wait_for_job(job_id=JOB_ID, location=GCP_LOCATION, project_id=GCP_PROJECT, wait_time=0) @@ -259,9 +270,11 @@ def test_get_job(self, mock_client): self.hook.get_job(location=GCP_LOCATION, job_id=JOB_ID, project_id=GCP_PROJECT) mock_client.assert_called_once_with(location=GCP_LOCATION) mock_client.return_value.get_job.assert_called_once_with( - region=GCP_LOCATION, - job_id=JOB_ID, - project_id=GCP_PROJECT, + request=dict( + region=GCP_LOCATION, + job_id=JOB_ID, + project_id=GCP_PROJECT, + ), retry=None, timeout=None, metadata=None, @@ -272,10 +285,12 @@ def test_submit_job(self, mock_client): self.hook.submit_job(location=GCP_LOCATION, job=JOB, project_id=GCP_PROJECT) mock_client.assert_called_once_with(location=GCP_LOCATION) mock_client.return_value.submit_job.assert_called_once_with( - region=GCP_LOCATION, - job=JOB, - project_id=GCP_PROJECT, - request_id=None, + request=dict( + region=GCP_LOCATION, + job=JOB, + project_id=GCP_PROJECT, + request_id=None, + ), retry=None, timeout=None, metadata=None, @@ -297,9 +312,11 @@ def test_cancel_job(self, mock_client): self.hook.cancel_job(location=GCP_LOCATION, job_id=JOB_ID, project_id=GCP_PROJECT) mock_client.assert_called_once_with(location=GCP_LOCATION) mock_client.return_value.cancel_job.assert_called_once_with( - region=GCP_LOCATION, - job_id=JOB_ID, - project_id=GCP_PROJECT, + request=dict( + region=GCP_LOCATION, + job_id=JOB_ID, + project_id=GCP_PROJECT, + ), retry=None, timeout=None, metadata=None, @@ -311,9 +328,11 @@ def test_cancel_job_deprecation_warning(self, mock_client): self.hook.cancel_job(job_id=JOB_ID, 
project_id=GCP_PROJECT) mock_client.assert_called_once_with(location='global') mock_client.return_value.cancel_job.assert_called_once_with( - region='global', - job_id=JOB_ID, - project_id=GCP_PROJECT, + request=dict( + region='global', + job_id=JOB_ID, + project_id=GCP_PROJECT, + ), retry=None, timeout=None, metadata=None, diff --git a/tests/providers/google/cloud/hooks/test_gcs.py b/tests/providers/google/cloud/hooks/test_gcs.py index dffe5adb0ef80..1ce44bb1ac9cf 100644 --- a/tests/providers/google/cloud/hooks/test_gcs.py +++ b/tests/providers/google/cloud/hooks/test_gcs.py @@ -672,7 +672,7 @@ def test_download_to_file(self, mock_service): ) self.assertEqual(response, test_file) - download_filename_method.assert_called_once_with(test_file) + download_filename_method.assert_called_once_with(test_file, timeout=60) @mock.patch(GCS_STRING.format('NamedTemporaryFile')) @mock.patch(GCS_STRING.format('GCSHook.get_conn')) @@ -697,7 +697,7 @@ def test_provide_file(self, mock_service, mock_temp_file): with self.gcs_hook.provide_file(bucket_name=test_bucket, object_name=test_object) as response: self.assertEqual(test_file, response.name) - download_filename_method.assert_called_once_with(test_file) + download_filename_method.assert_called_once_with(test_file, timeout=60) mock_temp_file.assert_has_calls( [ mock.call(suffix='test_object'), @@ -762,7 +762,7 @@ def test_upload_file(self, mock_service): self.gcs_hook.upload(test_bucket, test_object, filename=self.testfile.name) upload_method.assert_called_once_with( - filename=self.testfile.name, content_type='application/octet-stream' + filename=self.testfile.name, content_type='application/octet-stream', timeout=60 ) @mock.patch(GCS_STRING.format('GCSHook.get_conn')) @@ -782,7 +782,7 @@ def test_upload_data_str(self, mock_service): self.gcs_hook.upload(test_bucket, test_object, data=self.testdata_str) - upload_method.assert_called_once_with(self.testdata_str, content_type='text/plain') + upload_method.assert_called_once_with(self.testdata_str, content_type='text/plain', timeout=60) @mock.patch(GCS_STRING.format('GCSHook.get_conn')) def test_upload_data_bytes(self, mock_service): @@ -793,7 +793,7 @@ def test_upload_data_bytes(self, mock_service): self.gcs_hook.upload(test_bucket, test_object, data=self.testdata_bytes) - upload_method.assert_called_once_with(self.testdata_bytes, content_type='text/plain') + upload_method.assert_called_once_with(self.testdata_bytes, content_type='text/plain', timeout=60) @mock.patch(GCS_STRING.format('BytesIO')) @mock.patch(GCS_STRING.format('gz.GzipFile')) @@ -812,7 +812,7 @@ def test_upload_data_str_gzip(self, mock_service, mock_gzip, mock_bytes_io): byte_str = bytes(self.testdata_str, encoding) mock_gzip.assert_called_once_with(fileobj=mock_bytes_io.return_value, mode="w") gzip_ctx.write.assert_called_once_with(byte_str) - upload_method.assert_called_once_with(data, content_type='text/plain') + upload_method.assert_called_once_with(data, content_type='text/plain', timeout=60) @mock.patch(GCS_STRING.format('BytesIO')) @mock.patch(GCS_STRING.format('gz.GzipFile')) @@ -829,7 +829,7 @@ def test_upload_data_bytes_gzip(self, mock_service, mock_gzip, mock_bytes_io): mock_gzip.assert_called_once_with(fileobj=mock_bytes_io.return_value, mode="w") gzip_ctx.write.assert_called_once_with(self.testdata_bytes) - upload_method.assert_called_once_with(data, content_type='text/plain') + upload_method.assert_called_once_with(data, content_type='text/plain', timeout=60) @mock.patch(GCS_STRING.format('GCSHook.get_conn')) def 
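
The GCS assertions gain timeout=60 because the hook now forwards a default deadline to every blob operation; google-cloud-storage's upload and download methods accept a per-call timeout keyword. A hedged sketch of that forwarding (SimpleGCSHook and DEFAULT_TIMEOUT are illustrative names, not Airflow's API):

from google.cloud import storage

DEFAULT_TIMEOUT = 60  # seconds; matches the timeout=60 the tests now expect

class SimpleGCSHook:
    """Illustrative wrapper, not Airflow's GCSHook."""

    def __init__(self, client: storage.Client) -> None:
        self.client = client

    def _blob(self, bucket_name: str, object_name: str) -> storage.Blob:
        return self.client.bucket(bucket_name).blob(object_name)

    def upload(self, bucket_name: str, object_name: str, filename: str) -> None:
        # Every transfer gets an explicit deadline instead of relying on the
        # library default.
        self._blob(bucket_name, object_name).upload_from_filename(
            filename=filename,
            content_type="application/octet-stream",
            timeout=DEFAULT_TIMEOUT,
        )

    def download(self, bucket_name: str, object_name: str, filename: str) -> None:
        self._blob(bucket_name, object_name).download_to_filename(
            filename, timeout=DEFAULT_TIMEOUT
        )
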
test_upload_exceptions(self, mock_service): diff --git a/tests/providers/google/cloud/hooks/test_kms.py b/tests/providers/google/cloud/hooks/test_kms.py index 6b87e3c18aa8e..4de1dfbc6e91b 100644 --- a/tests/providers/google/cloud/hooks/test_kms.py +++ b/tests/providers/google/cloud/hooks/test_kms.py @@ -82,12 +82,14 @@ def test_encrypt(self, mock_get_conn): result = self.kms_hook.encrypt(TEST_KEY_ID, PLAINTEXT) mock_get_conn.assert_called_once_with() mock_get_conn.return_value.encrypt.assert_called_once_with( - name=TEST_KEY_ID, - plaintext=PLAINTEXT, - additional_authenticated_data=None, + request=dict( + name=TEST_KEY_ID, + plaintext=PLAINTEXT, + additional_authenticated_data=None, + ), retry=None, timeout=None, - metadata=None, + metadata=(), ) assert PLAINTEXT_b64 == result @@ -97,12 +99,14 @@ def test_encrypt_with_auth_data(self, mock_get_conn): result = self.kms_hook.encrypt(TEST_KEY_ID, PLAINTEXT, AUTH_DATA) mock_get_conn.assert_called_once_with() mock_get_conn.return_value.encrypt.assert_called_once_with( - name=TEST_KEY_ID, - plaintext=PLAINTEXT, - additional_authenticated_data=AUTH_DATA, + request=dict( + name=TEST_KEY_ID, + plaintext=PLAINTEXT, + additional_authenticated_data=AUTH_DATA, + ), retry=None, timeout=None, - metadata=None, + metadata=(), ) assert PLAINTEXT_b64 == result @@ -112,12 +116,14 @@ def test_decrypt(self, mock_get_conn): result = self.kms_hook.decrypt(TEST_KEY_ID, CIPHERTEXT_b64) mock_get_conn.assert_called_once_with() mock_get_conn.return_value.decrypt.assert_called_once_with( - name=TEST_KEY_ID, - ciphertext=CIPHERTEXT, - additional_authenticated_data=None, + request=dict( + name=TEST_KEY_ID, + ciphertext=CIPHERTEXT, + additional_authenticated_data=None, + ), retry=None, timeout=None, - metadata=None, + metadata=(), ) assert PLAINTEXT == result @@ -127,11 +133,13 @@ def test_decrypt_with_auth_data(self, mock_get_conn): result = self.kms_hook.decrypt(TEST_KEY_ID, CIPHERTEXT_b64, AUTH_DATA) mock_get_conn.assert_called_once_with() mock_get_conn.return_value.decrypt.assert_called_once_with( - name=TEST_KEY_ID, - ciphertext=CIPHERTEXT, - additional_authenticated_data=AUTH_DATA, + request=dict( + name=TEST_KEY_ID, + ciphertext=CIPHERTEXT, + additional_authenticated_data=AUTH_DATA, + ), retry=None, timeout=None, - metadata=None, + metadata=(), ) assert PLAINTEXT == result diff --git a/tests/providers/google/cloud/hooks/test_os_login.py b/tests/providers/google/cloud/hooks/test_os_login.py index 303f1ea9f8d14..d2b88e4c6c895 100644 --- a/tests/providers/google/cloud/hooks/test_os_login.py +++ b/tests/providers/google/cloud/hooks/test_os_login.py @@ -38,7 +38,7 @@ TEST_BODY: Dict = mock.MagicMock() TEST_RETRY: Retry = mock.MagicMock() TEST_TIMEOUT: float = 4 -TEST_METADATA: Sequence[Tuple[str, str]] = [] +TEST_METADATA: Sequence[Tuple[str, str]] = () TEST_PARENT: str = "users/test-user" @@ -67,9 +67,11 @@ def test_import_ssh_public_key(self, mock_get_conn, mock_get_creds_and_project_i metadata=TEST_METADATA, ) mock_get_conn.return_value.import_ssh_public_key.assert_called_once_with( - parent=TEST_PARENT, - ssh_public_key=TEST_BODY, - project_id=TEST_PROJECT_ID, + request=dict( + parent=TEST_PARENT, + ssh_public_key=TEST_BODY, + project_id=TEST_PROJECT_ID, + ), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -101,9 +103,11 @@ def test_import_ssh_public_key(self, mock_get_conn, mock_get_creds_and_project_i metadata=TEST_METADATA, ) mock_get_conn.return_value.import_ssh_public_key.assert_called_once_with( - parent=TEST_PARENT, - 
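
The KMS and OS Login hunks show two related conventions of the 2.x generated clients: RPC parameters move into a single request (a proto message or a plain mapping), and the metadata default becomes an empty tuple rather than None, which is why the expected value flips to (). A rough sketch of the encrypt call shape, again with a mock standing in for kms.KeyManagementServiceClient and a placeholder key name:

from unittest import mock

client = mock.MagicMock()  # stand-in for kms.KeyManagementServiceClient

ciphertext = client.encrypt(
    request=dict(
        name="projects/p/locations/global/keyRings/r/cryptoKeys/k",  # placeholder
        plaintext=b"secret",
        additional_authenticated_data=None,
    ),
    retry=None,
    timeout=None,
    metadata=(),  # the generated default is an empty tuple, hence metadata=()
)
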
ssh_public_key=TEST_BODY, - project_id=TEST_PROJECT_ID_2, + request=dict( + parent=TEST_PARENT, + ssh_public_key=TEST_BODY, + project_id=TEST_PROJECT_ID_2, + ), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, @@ -135,9 +139,7 @@ def test_import_ssh_public_key(self, mock_get_conn, mock_get_creds_and_project_i metadata=TEST_METADATA, ) mock_get_conn.return_value.import_ssh_public_key.assert_called_once_with( - parent=TEST_PARENT, - ssh_public_key=TEST_BODY, - project_id=TEST_PROJECT_ID, + request=dict(parent=TEST_PARENT, ssh_public_key=TEST_BODY, project_id=TEST_PROJECT_ID), retry=TEST_RETRY, timeout=TEST_TIMEOUT, metadata=TEST_METADATA, diff --git a/tests/providers/google/cloud/hooks/test_pubsub.py b/tests/providers/google/cloud/hooks/test_pubsub.py index 08418061a6c1c..eadb8064e560f 100644 --- a/tests/providers/google/cloud/hooks/test_pubsub.py +++ b/tests/providers/google/cloud/hooks/test_pubsub.py @@ -25,7 +25,6 @@ from google.api_core.exceptions import AlreadyExists, GoogleAPICallError from google.cloud.exceptions import NotFound from google.cloud.pubsub_v1.types import ReceivedMessage -from google.protobuf.json_format import ParseDict from googleapiclient.errors import HttpError from parameterized import parameterized @@ -67,15 +66,12 @@ def setUp(self): def _generate_messages(self, count) -> List[ReceivedMessage]: return [ - ParseDict( - { - "ack_id": str(i), - "message": { - "data": f'Message {i}'.encode('utf8'), - "attributes": {"type": "generated message"}, - }, + ReceivedMessage( + ack_id=str(i), + message={ + "data": f'Message {i}'.encode('utf8'), + "attributes": {"type": "generated message"}, }, - ReceivedMessage(), ) for i in range(1, count + 1) ] @@ -112,20 +108,19 @@ def test_create_nonexistent_topic(self, mock_service): create_method = mock_service.return_value.create_topic self.pubsub_hook.create_topic(project_id=TEST_PROJECT, topic=TEST_TOPIC) create_method.assert_called_once_with( - name=EXPANDED_TOPIC, - labels=LABELS, - message_storage_policy=None, - kms_key_name=None, + request=dict(name=EXPANDED_TOPIC, labels=LABELS, message_storage_policy=None, kms_key_name=None), retry=None, timeout=None, - metadata=None, + metadata=(), ) @mock.patch(PUBSUB_STRING.format('PubSubHook.get_conn')) def test_delete_topic(self, mock_service): delete_method = mock_service.return_value.delete_topic self.pubsub_hook.delete_topic(project_id=TEST_PROJECT, topic=TEST_TOPIC) - delete_method.assert_called_once_with(topic=EXPANDED_TOPIC, retry=None, timeout=None, metadata=None) + delete_method.assert_called_once_with( + request=dict(topic=EXPANDED_TOPIC), retry=None, timeout=None, metadata=() + ) @mock.patch(PUBSUB_STRING.format('PubSubHook.get_conn')) def test_delete_nonexisting_topic_failifnotexists(self, mock_service): @@ -177,21 +172,23 @@ def test_create_nonexistent_subscription(self, mock_service): project_id=TEST_PROJECT, topic=TEST_TOPIC, subscription=TEST_SUBSCRIPTION ) create_method.assert_called_once_with( - name=EXPANDED_SUBSCRIPTION, - topic=EXPANDED_TOPIC, - push_config=None, - ack_deadline_seconds=10, - retain_acked_messages=None, - message_retention_duration=None, - labels=LABELS, - enable_message_ordering=False, - expiration_policy=None, - filter_=None, - dead_letter_policy=None, - retry_policy=None, + request=dict( + name=EXPANDED_SUBSCRIPTION, + topic=EXPANDED_TOPIC, + push_config=None, + ack_deadline_seconds=10, + retain_acked_messages=None, + message_retention_duration=None, + labels=LABELS, + enable_message_ordering=False, + expiration_policy=None, + 
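
In the Pub/Sub tests, ParseDict over an empty ReceivedMessage gives way to constructing the proto-plus message directly; nested messages and maps can still be supplied as plain dicts and are coerced on assignment. A small self-contained example, assuming google-cloud-pubsub>=2.0:

from google.cloud.pubsub_v1.types import ReceivedMessage

msg = ReceivedMessage(
    ack_id="1",
    message={  # nested PubsubMessage supplied as a dict; proto-plus converts it
        "data": b"Message 1",
        "attributes": {"type": "generated message"},
    },
)
assert msg.ack_id == "1"
assert msg.message.attributes["type"] == "generated message"
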
filter=None, + dead_letter_policy=None, + retry_policy=None, + ), retry=None, timeout=None, - metadata=None, + metadata=(), ) assert TEST_SUBSCRIPTION == response @@ -208,21 +205,23 @@ def test_create_subscription_different_project_topic(self, mock_service): 'a-different-project', TEST_SUBSCRIPTION ) create_method.assert_called_once_with( - name=expected_subscription, - topic=EXPANDED_TOPIC, - push_config=None, - ack_deadline_seconds=10, - retain_acked_messages=None, - message_retention_duration=None, - labels=LABELS, - enable_message_ordering=False, - expiration_policy=None, - filter_=None, - dead_letter_policy=None, - retry_policy=None, + request=dict( + name=expected_subscription, + topic=EXPANDED_TOPIC, + push_config=None, + ack_deadline_seconds=10, + retain_acked_messages=None, + message_retention_duration=None, + labels=LABELS, + enable_message_ordering=False, + expiration_policy=None, + filter=None, + dead_letter_policy=None, + retry_policy=None, + ), retry=None, timeout=None, - metadata=None, + metadata=(), ) assert TEST_SUBSCRIPTION == response @@ -232,7 +231,7 @@ def test_delete_subscription(self, mock_service): self.pubsub_hook.delete_subscription(project_id=TEST_PROJECT, subscription=TEST_SUBSCRIPTION) delete_method = mock_service.delete_subscription delete_method.assert_called_once_with( - subscription=EXPANDED_SUBSCRIPTION, retry=None, timeout=None, metadata=None + request=dict(subscription=EXPANDED_SUBSCRIPTION), retry=None, timeout=None, metadata=() ) @mock.patch(PUBSUB_STRING.format('PubSubHook.subscriber_client')) @@ -266,21 +265,23 @@ def test_create_subscription_without_subscription_name( response = self.pubsub_hook.create_subscription(project_id=TEST_PROJECT, topic=TEST_TOPIC) create_method.assert_called_once_with( - name=expected_name, - topic=EXPANDED_TOPIC, - push_config=None, - ack_deadline_seconds=10, - retain_acked_messages=None, - message_retention_duration=None, - labels=LABELS, - enable_message_ordering=False, - expiration_policy=None, - filter_=None, - dead_letter_policy=None, - retry_policy=None, + request=dict( + name=expected_name, + topic=EXPANDED_TOPIC, + push_config=None, + ack_deadline_seconds=10, + retain_acked_messages=None, + message_retention_duration=None, + labels=LABELS, + enable_message_ordering=False, + expiration_policy=None, + filter=None, + dead_letter_policy=None, + retry_policy=None, + ), retry=None, timeout=None, - metadata=None, + metadata=(), ) assert 'sub-%s' % TEST_UUID == response @@ -292,21 +293,23 @@ def test_create_subscription_with_ack_deadline(self, mock_service): project_id=TEST_PROJECT, topic=TEST_TOPIC, subscription=TEST_SUBSCRIPTION, ack_deadline_secs=30 ) create_method.assert_called_once_with( - name=EXPANDED_SUBSCRIPTION, - topic=EXPANDED_TOPIC, - push_config=None, - ack_deadline_seconds=30, - retain_acked_messages=None, - message_retention_duration=None, - labels=LABELS, - enable_message_ordering=False, - expiration_policy=None, - filter_=None, - dead_letter_policy=None, - retry_policy=None, + request=dict( + name=EXPANDED_SUBSCRIPTION, + topic=EXPANDED_TOPIC, + push_config=None, + ack_deadline_seconds=30, + retain_acked_messages=None, + message_retention_duration=None, + labels=LABELS, + enable_message_ordering=False, + expiration_policy=None, + filter=None, + dead_letter_policy=None, + retry_policy=None, + ), retry=None, timeout=None, - metadata=None, + metadata=(), ) assert TEST_SUBSCRIPTION == response @@ -321,21 +324,23 @@ def test_create_subscription_with_filter(self, mock_service): 
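
One subtlety in these subscription hunks: the hook's public signature keeps the filter_ spelling (the usual convention for parameters that would shadow a Python builtin), but inside the request mapping the key is the raw proto field name filter. A hedged sketch of that translation (create_subscription_request is an illustrative helper, not Airflow code):

from typing import Optional

def create_subscription_request(
    name: str, topic: str, filter_: Optional[str] = None
) -> dict:
    """Build the request mapping, translating filter_ -> filter."""
    return dict(
        name=name,
        topic=topic,
        filter=filter_,  # dict keys are plain strings, so no underscore needed
    )

request = create_subscription_request(
    name="projects/p/subscriptions/s",
    topic="projects/p/topics/t",
    filter_='attributes.domain="com"',
)
assert request["filter"] == 'attributes.domain="com"'
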
filter_='attributes.domain="com"', ) create_method.assert_called_once_with( - name=EXPANDED_SUBSCRIPTION, - topic=EXPANDED_TOPIC, - push_config=None, - ack_deadline_seconds=10, - retain_acked_messages=None, - message_retention_duration=None, - labels=LABELS, - enable_message_ordering=False, - expiration_policy=None, - filter_='attributes.domain="com"', - dead_letter_policy=None, - retry_policy=None, + request=dict( + name=EXPANDED_SUBSCRIPTION, + topic=EXPANDED_TOPIC, + push_config=None, + ack_deadline_seconds=10, + retain_acked_messages=None, + message_retention_duration=None, + labels=LABELS, + enable_message_ordering=False, + expiration_policy=None, + filter='attributes.domain="com"', + dead_letter_policy=None, + retry_policy=None, + ), retry=None, timeout=None, - metadata=None, + metadata=(), ) assert TEST_SUBSCRIPTION == response @@ -401,12 +406,14 @@ def test_pull(self, mock_service): project_id=TEST_PROJECT, subscription=TEST_SUBSCRIPTION, max_messages=10 ) pull_method.assert_called_once_with( - subscription=EXPANDED_SUBSCRIPTION, - max_messages=10, - return_immediately=False, + request=dict( + subscription=EXPANDED_SUBSCRIPTION, + max_messages=10, + return_immediately=False, + ), retry=None, timeout=None, - metadata=None, + metadata=(), ) assert pulled_messages == response @@ -419,12 +426,14 @@ def test_pull_no_messages(self, mock_service): project_id=TEST_PROJECT, subscription=TEST_SUBSCRIPTION, max_messages=10 ) pull_method.assert_called_once_with( - subscription=EXPANDED_SUBSCRIPTION, - max_messages=10, - return_immediately=False, + request=dict( + subscription=EXPANDED_SUBSCRIPTION, + max_messages=10, + return_immediately=False, + ), retry=None, timeout=None, - metadata=None, + metadata=(), ) assert [] == response @@ -445,12 +454,14 @@ def test_pull_fails_on_exception(self, exception, mock_service): with pytest.raises(PubSubException): self.pubsub_hook.pull(project_id=TEST_PROJECT, subscription=TEST_SUBSCRIPTION, max_messages=10) pull_method.assert_called_once_with( - subscription=EXPANDED_SUBSCRIPTION, - max_messages=10, - return_immediately=False, + request=dict( + subscription=EXPANDED_SUBSCRIPTION, + max_messages=10, + return_immediately=False, + ), retry=None, timeout=None, - metadata=None, + metadata=(), ) @mock.patch(PUBSUB_STRING.format('PubSubHook.subscriber_client')) @@ -461,11 +472,13 @@ def test_acknowledge_by_ack_ids(self, mock_service): project_id=TEST_PROJECT, subscription=TEST_SUBSCRIPTION, ack_ids=['1', '2', '3'] ) ack_method.assert_called_once_with( - subscription=EXPANDED_SUBSCRIPTION, - ack_ids=['1', '2', '3'], + request=dict( + subscription=EXPANDED_SUBSCRIPTION, + ack_ids=['1', '2', '3'], + ), retry=None, timeout=None, - metadata=None, + metadata=(), ) @mock.patch(PUBSUB_STRING.format('PubSubHook.subscriber_client')) @@ -478,11 +491,13 @@ def test_acknowledge_by_message_objects(self, mock_service): messages=self._generate_messages(3), ) ack_method.assert_called_once_with( - subscription=EXPANDED_SUBSCRIPTION, - ack_ids=['1', '2', '3'], + request=dict( + subscription=EXPANDED_SUBSCRIPTION, + ack_ids=['1', '2', '3'], + ), retry=None, timeout=None, - metadata=None, + metadata=(), ) @parameterized.expand( @@ -504,11 +519,13 @@ def test_acknowledge_fails_on_exception(self, exception, mock_service): project_id=TEST_PROJECT, subscription=TEST_SUBSCRIPTION, ack_ids=['1', '2', '3'] ) ack_method.assert_called_once_with( - subscription=EXPANDED_SUBSCRIPTION, - ack_ids=['1', '2', '3'], + request=dict( + subscription=EXPANDED_SUBSCRIPTION, + ack_ids=['1', '2', '3'], + 
), retry=None, timeout=None, - metadata=None, + metadata=(), ) @parameterized.expand( diff --git a/tests/providers/google/cloud/hooks/test_stackdriver.py b/tests/providers/google/cloud/hooks/test_stackdriver.py index 6892d0552f559..10a309781fc01 100644 --- a/tests/providers/google/cloud/hooks/test_stackdriver.py +++ b/tests/providers/google/cloud/hooks/test_stackdriver.py @@ -21,8 +21,8 @@ from unittest import mock from google.api_core.gapic_v1.method import DEFAULT -from google.cloud import monitoring_v3 -from google.protobuf.json_format import ParseDict +from google.cloud.monitoring_v3 import AlertPolicy, NotificationChannel +from google.protobuf.field_mask_pb2 import FieldMask from airflow.providers.google.cloud.hooks import stackdriver @@ -32,16 +32,15 @@ TEST_ALERT_POLICY_1 = { "combiner": "OR", "name": "projects/sd-project/alertPolicies/12345", - "creationRecord": {"mutatedBy": "user123", "mutateTime": "2020-01-01T00:00:00.000000Z"}, "enabled": True, - "displayName": "test display", + "display_name": "test display", "conditions": [ { - "conditionThreshold": { + "condition_threshold": { "comparison": "COMPARISON_GT", - "aggregations": [{"alignmentPeriod": "60s", "perSeriesAligner": "ALIGN_RATE"}], + "aggregations": [{"alignment_period": {'seconds': 60}, "per_series_aligner": "ALIGN_RATE"}], }, - "displayName": "Condition display", + "display_name": "Condition display", "name": "projects/sd-project/alertPolicies/123/conditions/456", } ], @@ -50,35 +49,34 @@ TEST_ALERT_POLICY_2 = { "combiner": "OR", "name": "projects/sd-project/alertPolicies/6789", - "creationRecord": {"mutatedBy": "user123", "mutateTime": "2020-01-01T00:00:00.000000Z"}, "enabled": False, - "displayName": "test display", + "display_name": "test display", "conditions": [ { - "conditionThreshold": { + "condition_threshold": { "comparison": "COMPARISON_GT", - "aggregations": [{"alignmentPeriod": "60s", "perSeriesAligner": "ALIGN_RATE"}], + "aggregations": [{"alignment_period": {'seconds': 60}, "per_series_aligner": "ALIGN_RATE"}], }, - "displayName": "Condition display", + "display_name": "Condition display", "name": "projects/sd-project/alertPolicies/456/conditions/789", } ], } TEST_NOTIFICATION_CHANNEL_1 = { - "displayName": "sd", + "display_name": "sd", "enabled": True, "labels": {"auth_token": "top-secret", "channel_name": "#channel"}, "name": "projects/sd-project/notificationChannels/12345", - "type": "slack", + "type_": "slack", } TEST_NOTIFICATION_CHANNEL_2 = { - "displayName": "sd", + "display_name": "sd", "enabled": False, "labels": {"auth_token": "top-secret", "channel_name": "#channel"}, "name": "projects/sd-project/notificationChannels/6789", - "type": "slack", + "type_": "slack", } @@ -96,13 +94,10 @@ def test_stackdriver_list_alert_policies(self, mock_policy_client, mock_get_cred project_id=PROJECT_ID, ) method.assert_called_once_with( - name=f'projects/{PROJECT_ID}', - filter_=TEST_FILTER, + request=dict(name=f'projects/{PROJECT_ID}', filter=TEST_FILTER, order_by=None, page_size=None), retry=DEFAULT, timeout=DEFAULT, - order_by=None, - page_size=None, - metadata=None, + metadata=(), ) @mock.patch( @@ -113,8 +108,8 @@ def test_stackdriver_list_alert_policies(self, mock_policy_client, mock_get_cred def test_stackdriver_enable_alert_policy(self, mock_policy_client, mock_get_creds_and_project_id): hook = stackdriver.StackdriverHook() - alert_policy_enabled = ParseDict(TEST_ALERT_POLICY_1, monitoring_v3.types.alert_pb2.AlertPolicy()) - alert_policy_disabled = ParseDict(TEST_ALERT_POLICY_2, 
monitoring_v3.types.alert_pb2.AlertPolicy()) + alert_policy_enabled = AlertPolicy(**TEST_ALERT_POLICY_1) + alert_policy_disabled = AlertPolicy(**TEST_ALERT_POLICY_2) alert_policies = [alert_policy_enabled, alert_policy_disabled] @@ -124,23 +119,18 @@ def test_stackdriver_enable_alert_policy(self, mock_policy_client, mock_get_cred project_id=PROJECT_ID, ) mock_policy_client.return_value.list_alert_policies.assert_called_once_with( - name=f'projects/{PROJECT_ID}', - filter_=TEST_FILTER, + request=dict(name=f'projects/{PROJECT_ID}', filter=TEST_FILTER, order_by=None, page_size=None), retry=DEFAULT, timeout=DEFAULT, - order_by=None, - page_size=None, - metadata=None, + metadata=(), ) - mask = monitoring_v3.types.field_mask_pb2.FieldMask() - alert_policy_disabled.enabled.value = True # pylint: disable=no-member - mask.paths.append('enabled') # pylint: disable=no-member + mask = FieldMask(paths=["enabled"]) + alert_policy_disabled.enabled = True # pylint: disable=no-member mock_policy_client.return_value.update_alert_policy.assert_called_once_with( - alert_policy=alert_policy_disabled, - update_mask=mask, + request=dict(alert_policy=alert_policy_disabled, update_mask=mask), retry=DEFAULT, timeout=DEFAULT, - metadata=None, + metadata=(), ) @mock.patch( @@ -150,8 +140,8 @@ def test_stackdriver_enable_alert_policy(self, mock_policy_client, mock_get_cred @mock.patch('airflow.providers.google.cloud.hooks.stackdriver.StackdriverHook._get_policy_client') def test_stackdriver_disable_alert_policy(self, mock_policy_client, mock_get_creds_and_project_id): hook = stackdriver.StackdriverHook() - alert_policy_enabled = ParseDict(TEST_ALERT_POLICY_1, monitoring_v3.types.alert_pb2.AlertPolicy()) - alert_policy_disabled = ParseDict(TEST_ALERT_POLICY_2, monitoring_v3.types.alert_pb2.AlertPolicy()) + alert_policy_enabled = AlertPolicy(**TEST_ALERT_POLICY_1) + alert_policy_disabled = AlertPolicy(**TEST_ALERT_POLICY_2) mock_policy_client.return_value.list_alert_policies.return_value = [ alert_policy_enabled, @@ -162,23 +152,18 @@ def test_stackdriver_disable_alert_policy(self, mock_policy_client, mock_get_cre project_id=PROJECT_ID, ) mock_policy_client.return_value.list_alert_policies.assert_called_once_with( - name=f'projects/{PROJECT_ID}', - filter_=TEST_FILTER, + request=dict(name=f'projects/{PROJECT_ID}', filter=TEST_FILTER, order_by=None, page_size=None), retry=DEFAULT, timeout=DEFAULT, - order_by=None, - page_size=None, - metadata=None, + metadata=(), ) - mask = monitoring_v3.types.field_mask_pb2.FieldMask() - alert_policy_enabled.enabled.value = False # pylint: disable=no-member - mask.paths.append('enabled') # pylint: disable=no-member + mask = FieldMask(paths=["enabled"]) + alert_policy_enabled.enabled = False # pylint: disable=no-member mock_policy_client.return_value.update_alert_policy.assert_called_once_with( - alert_policy=alert_policy_enabled, - update_mask=mask, + request=dict(alert_policy=alert_policy_enabled, update_mask=mask), retry=DEFAULT, timeout=DEFAULT, - metadata=None, + metadata=(), ) @mock.patch( @@ -191,8 +176,8 @@ def test_stackdriver_upsert_alert_policy( self, mock_channel_client, mock_policy_client, mock_get_creds_and_project_id ): hook = stackdriver.StackdriverHook() - existing_alert_policy = ParseDict(TEST_ALERT_POLICY_1, monitoring_v3.types.alert_pb2.AlertPolicy()) - alert_policy_to_create = ParseDict(TEST_ALERT_POLICY_2, monitoring_v3.types.alert_pb2.AlertPolicy()) + existing_alert_policy = AlertPolicy(**TEST_ALERT_POLICY_1) + alert_policy_to_create = 
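
The reworked Stackdriver fixtures condense the proto-plus migration into one place: messages are built from snake_case keyword arguments instead of ParseDict over camelCase JSON, wrapper types such as BoolValue read and write as plain Python values, durations become {'seconds': ...} mappings, and field names that collide with Python builtins gain a trailing underscore (type_). A minimal sketch, assuming google-cloud-monitoring>=2.0:

from google.cloud.monitoring_v3 import AlertPolicy, NotificationChannel

policy = AlertPolicy(
    combiner="OR",
    display_name="test display",  # was "displayName" in the JSON form
    enabled=True,                 # BoolValue wrapper handled transparently
)
channel = NotificationChannel(
    display_name="sd",
    type_="slack",  # "type" shadows a builtin, so proto-plus renames it
)
assert policy.enabled is True
assert channel.type_ == "slack"
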
AlertPolicy(**TEST_ALERT_POLICY_2) mock_policy_client.return_value.list_alert_policies.return_value = [existing_alert_policy] mock_channel_client.return_value.list_notification_channels.return_value = [] @@ -202,38 +187,77 @@ def test_stackdriver_upsert_alert_policy( project_id=PROJECT_ID, ) mock_channel_client.return_value.list_notification_channels.assert_called_once_with( - name=f'projects/{PROJECT_ID}', - filter_=None, + request=dict( + name=f'projects/{PROJECT_ID}', + filter=None, + order_by=None, + page_size=None, + ), retry=DEFAULT, timeout=DEFAULT, - order_by=None, - page_size=None, - metadata=None, + metadata=(), ) mock_policy_client.return_value.list_alert_policies.assert_called_once_with( - name=f'projects/{PROJECT_ID}', - filter_=None, + request=dict(name=f'projects/{PROJECT_ID}', filter=None, order_by=None, page_size=None), retry=DEFAULT, timeout=DEFAULT, - order_by=None, - page_size=None, - metadata=None, + metadata=(), ) - alert_policy_to_create.ClearField('name') - alert_policy_to_create.ClearField('creation_record') - alert_policy_to_create.ClearField('mutation_record') - alert_policy_to_create.conditions[0].ClearField('name') # pylint: disable=no-member + alert_policy_to_create.name = None + alert_policy_to_create.creation_record = None + alert_policy_to_create.mutation_record = None + alert_policy_to_create.conditions[0].name = None mock_policy_client.return_value.create_alert_policy.assert_called_once_with( - name=f'projects/{PROJECT_ID}', - alert_policy=alert_policy_to_create, + request=dict( + name=f'projects/{PROJECT_ID}', + alert_policy=alert_policy_to_create, + ), retry=DEFAULT, timeout=DEFAULT, - metadata=None, + metadata=(), ) - existing_alert_policy.ClearField('creation_record') - existing_alert_policy.ClearField('mutation_record') + existing_alert_policy.creation_record = None + existing_alert_policy.mutation_record = None mock_policy_client.return_value.update_alert_policy.assert_called_once_with( - alert_policy=existing_alert_policy, retry=DEFAULT, timeout=DEFAULT, metadata=None + request=dict(alert_policy=existing_alert_policy), retry=DEFAULT, timeout=DEFAULT, metadata=() + ) + + @mock.patch( + 'airflow.providers.google.common.hooks.base_google.GoogleBaseHook._get_credentials_and_project_id', + return_value=(CREDENTIALS, PROJECT_ID), + ) + @mock.patch('airflow.providers.google.cloud.hooks.stackdriver.StackdriverHook._get_policy_client') + @mock.patch('airflow.providers.google.cloud.hooks.stackdriver.StackdriverHook._get_channel_client') + def test_stackdriver_upsert_alert_policy_without_channel( + self, mock_channel_client, mock_policy_client, mock_get_creds_and_project_id + ): + hook = stackdriver.StackdriverHook() + existing_alert_policy = AlertPolicy(**TEST_ALERT_POLICY_1) + + mock_policy_client.return_value.list_alert_policies.return_value = [existing_alert_policy] + mock_channel_client.return_value.list_notification_channels.return_value = [] + + hook.upsert_alert( + alerts=json.dumps({"policies": [TEST_ALERT_POLICY_1, TEST_ALERT_POLICY_2]}), + project_id=PROJECT_ID, + ) + mock_channel_client.return_value.list_notification_channels.assert_called_once_with( + request=dict(name=f'projects/{PROJECT_ID}', filter=None, order_by=None, page_size=None), + metadata=(), + retry=DEFAULT, + timeout=DEFAULT, + ) + mock_policy_client.return_value.list_alert_policies.assert_called_once_with( + request=dict(name=f'projects/{PROJECT_ID}', filter=None, order_by=None, page_size=None), + retry=DEFAULT, + timeout=DEFAULT, + metadata=(), + ) + + 
existing_alert_policy.creation_record = None + existing_alert_policy.mutation_record = None + mock_policy_client.return_value.update_alert_policy.assert_called_once_with( + request=dict(alert_policy=existing_alert_policy), retry=DEFAULT, timeout=DEFAULT, metadata=() ) @mock.patch( @@ -247,10 +271,10 @@ def test_stackdriver_delete_alert_policy(self, mock_policy_client, mock_get_cred name='test-alert', ) mock_policy_client.return_value.delete_alert_policy.assert_called_once_with( - name='test-alert', + request=dict(name='test-alert'), retry=DEFAULT, timeout=DEFAULT, - metadata=None, + metadata=(), ) @mock.patch( @@ -265,13 +289,10 @@ def test_stackdriver_list_notification_channel(self, mock_channel_client, mock_g project_id=PROJECT_ID, ) mock_channel_client.return_value.list_notification_channels.assert_called_once_with( - name=f'projects/{PROJECT_ID}', - filter_=TEST_FILTER, - order_by=None, - page_size=None, + request=dict(name=f'projects/{PROJECT_ID}', filter=TEST_FILTER, order_by=None, page_size=None), retry=DEFAULT, timeout=DEFAULT, - metadata=None, + metadata=(), ) @mock.patch( @@ -283,12 +304,9 @@ def test_stackdriver_enable_notification_channel( self, mock_channel_client, mock_get_creds_and_project_id ): hook = stackdriver.StackdriverHook() - notification_channel_enabled = ParseDict( - TEST_NOTIFICATION_CHANNEL_1, monitoring_v3.types.notification_pb2.NotificationChannel() - ) - notification_channel_disabled = ParseDict( - TEST_NOTIFICATION_CHANNEL_2, monitoring_v3.types.notification_pb2.NotificationChannel() - ) + notification_channel_enabled = NotificationChannel(**TEST_NOTIFICATION_CHANNEL_1) + notification_channel_disabled = NotificationChannel(**TEST_NOTIFICATION_CHANNEL_2) + mock_channel_client.return_value.list_notification_channels.return_value = [ notification_channel_enabled, notification_channel_disabled, @@ -299,15 +317,13 @@ def test_stackdriver_enable_notification_channel( project_id=PROJECT_ID, ) - notification_channel_disabled.enabled.value = True # pylint: disable=no-member - mask = monitoring_v3.types.field_mask_pb2.FieldMask() - mask.paths.append('enabled') # pylint: disable=no-member + notification_channel_disabled.enabled = True # pylint: disable=no-member + mask = FieldMask(paths=['enabled']) mock_channel_client.return_value.update_notification_channel.assert_called_once_with( - notification_channel=notification_channel_disabled, - update_mask=mask, + request=dict(notification_channel=notification_channel_disabled, update_mask=mask), retry=DEFAULT, timeout=DEFAULT, - metadata=None, + metadata=(), ) @mock.patch( @@ -319,12 +335,8 @@ def test_stackdriver_disable_notification_channel( self, mock_channel_client, mock_get_creds_and_project_id ): hook = stackdriver.StackdriverHook() - notification_channel_enabled = ParseDict( - TEST_NOTIFICATION_CHANNEL_1, monitoring_v3.types.notification_pb2.NotificationChannel() - ) - notification_channel_disabled = ParseDict( - TEST_NOTIFICATION_CHANNEL_2, monitoring_v3.types.notification_pb2.NotificationChannel() - ) + notification_channel_enabled = NotificationChannel(**TEST_NOTIFICATION_CHANNEL_1) + notification_channel_disabled = NotificationChannel(**TEST_NOTIFICATION_CHANNEL_2) mock_channel_client.return_value.list_notification_channels.return_value = [ notification_channel_enabled, notification_channel_disabled, @@ -335,15 +347,13 @@ def test_stackdriver_disable_notification_channel( project_id=PROJECT_ID, ) - notification_channel_enabled.enabled.value = False # pylint: disable=no-member - mask = 
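
Two more idioms surface in the enable/disable tests: the update mask is built in one expression from google.protobuf.field_mask_pb2.FieldMask, and fields are cleared by assigning None instead of calling protobuf's ClearField. Roughly, under the same version assumptions as above:

from google.cloud.monitoring_v3 import AlertPolicy
from google.protobuf.field_mask_pb2 import FieldMask

policy = AlertPolicy(name="projects/p/alertPolicies/1", enabled=False)

policy.enabled = True                # plain assignment, no .value indirection
mask = FieldMask(paths=["enabled"])  # names exactly the field being updated

policy.name = None                   # clearing replaces ClearField("name")
assert policy.name == ""             # cleared string fields read back empty
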
monitoring_v3.types.field_mask_pb2.FieldMask() - mask.paths.append('enabled') # pylint: disable=no-member + notification_channel_enabled.enabled = False # pylint: disable=no-member + mask = FieldMask(paths=['enabled']) mock_channel_client.return_value.update_notification_channel.assert_called_once_with( - notification_channel=notification_channel_enabled, - update_mask=mask, + request=dict(notification_channel=notification_channel_enabled, update_mask=mask), retry=DEFAULT, timeout=DEFAULT, - metadata=None, + metadata=(), ) @mock.patch( @@ -353,12 +363,9 @@ def test_stackdriver_disable_notification_channel( @mock.patch('airflow.providers.google.cloud.hooks.stackdriver.StackdriverHook._get_channel_client') def test_stackdriver_upsert_channel(self, mock_channel_client, mock_get_creds_and_project_id): hook = stackdriver.StackdriverHook() - existing_notification_channel = ParseDict( - TEST_NOTIFICATION_CHANNEL_1, monitoring_v3.types.notification_pb2.NotificationChannel() - ) - notification_channel_to_be_created = ParseDict( - TEST_NOTIFICATION_CHANNEL_2, monitoring_v3.types.notification_pb2.NotificationChannel() - ) + existing_notification_channel = NotificationChannel(**TEST_NOTIFICATION_CHANNEL_1) + notification_channel_to_be_created = NotificationChannel(**TEST_NOTIFICATION_CHANNEL_2) + mock_channel_client.return_value.list_notification_channels.return_value = [ existing_notification_channel ] @@ -367,24 +374,25 @@ def test_stackdriver_upsert_channel(self, mock_channel_client, mock_get_creds_an project_id=PROJECT_ID, ) mock_channel_client.return_value.list_notification_channels.assert_called_once_with( - name=f'projects/{PROJECT_ID}', - filter_=None, - order_by=None, - page_size=None, + request=dict(name=f'projects/{PROJECT_ID}', filter=None, order_by=None, page_size=None), retry=DEFAULT, timeout=DEFAULT, - metadata=None, + metadata=(), ) mock_channel_client.return_value.update_notification_channel.assert_called_once_with( - notification_channel=existing_notification_channel, retry=DEFAULT, timeout=DEFAULT, metadata=None + request=dict(notification_channel=existing_notification_channel), + retry=DEFAULT, + timeout=DEFAULT, + metadata=(), ) - notification_channel_to_be_created.ClearField('name') + notification_channel_to_be_created.name = None mock_channel_client.return_value.create_notification_channel.assert_called_once_with( - name=f'projects/{PROJECT_ID}', - notification_channel=notification_channel_to_be_created, + request=dict( + name=f'projects/{PROJECT_ID}', notification_channel=notification_channel_to_be_created + ), retry=DEFAULT, timeout=DEFAULT, - metadata=None, + metadata=(), ) @mock.patch( @@ -400,5 +408,5 @@ def test_stackdriver_delete_notification_channel( name='test-channel', ) mock_channel_client.return_value.delete_notification_channel.assert_called_once_with( - name='test-channel', retry=DEFAULT, timeout=DEFAULT, metadata=None + request=dict(name='test-channel'), retry=DEFAULT, timeout=DEFAULT, metadata=() ) diff --git a/tests/providers/google/cloud/hooks/test_tasks.py b/tests/providers/google/cloud/hooks/test_tasks.py index 8be6686cf6c1d..65045953b2b7c 100644 --- a/tests/providers/google/cloud/hooks/test_tasks.py +++ b/tests/providers/google/cloud/hooks/test_tasks.py @@ -72,11 +72,10 @@ def test_create_queue(self, get_conn): self.assertIs(result, API_RESPONSE) get_conn.return_value.create_queue.assert_called_once_with( - parent=FULL_LOCATION_PATH, - queue=Queue(name=FULL_QUEUE_PATH), + request=dict(parent=FULL_LOCATION_PATH, queue=Queue(name=FULL_QUEUE_PATH)), retry=None, 
timeout=None, - metadata=None, + metadata=(), ) @mock.patch( @@ -94,11 +93,10 @@ def test_update_queue(self, get_conn): self.assertIs(result, API_RESPONSE) get_conn.return_value.update_queue.assert_called_once_with( - queue=Queue(name=FULL_QUEUE_PATH, state=3), - update_mask=None, + request=dict(queue=Queue(name=FULL_QUEUE_PATH, state=3), update_mask=None), retry=None, timeout=None, - metadata=None, + metadata=(), ) @mock.patch( @@ -111,30 +109,28 @@ def test_get_queue(self, get_conn): self.assertIs(result, API_RESPONSE) get_conn.return_value.get_queue.assert_called_once_with( - name=FULL_QUEUE_PATH, retry=None, timeout=None, metadata=None + request=dict(name=FULL_QUEUE_PATH), retry=None, timeout=None, metadata=() ) @mock.patch( "airflow.providers.google.cloud.hooks.tasks.CloudTasksHook.get_conn", - **{"return_value.list_queues.return_value": API_RESPONSE}, # type: ignore + **{"return_value.list_queues.return_value": [Queue(name=FULL_QUEUE_PATH)]}, # type: ignore ) def test_list_queues(self, get_conn): result = self.hook.list_queues(location=LOCATION, project_id=PROJECT_ID) - self.assertEqual(result, list(API_RESPONSE)) + self.assertEqual(result, [Queue(name=FULL_QUEUE_PATH)]) get_conn.return_value.list_queues.assert_called_once_with( - parent=FULL_LOCATION_PATH, - filter_=None, - page_size=None, + request=dict(parent=FULL_LOCATION_PATH, filter=None, page_size=None), retry=None, timeout=None, - metadata=None, + metadata=(), ) @mock.patch( "airflow.providers.google.cloud.hooks.tasks.CloudTasksHook.get_conn", - **{"return_value.delete_queue.return_value": API_RESPONSE}, # type: ignore + **{"return_value.delete_queue.return_value": None}, # type: ignore ) def test_delete_queue(self, get_conn): result = self.hook.delete_queue(location=LOCATION, queue_name=QUEUE_ID, project_id=PROJECT_ID) @@ -142,51 +138,51 @@ def test_delete_queue(self, get_conn): self.assertEqual(result, None) get_conn.return_value.delete_queue.assert_called_once_with( - name=FULL_QUEUE_PATH, retry=None, timeout=None, metadata=None + request=dict(name=FULL_QUEUE_PATH), retry=None, timeout=None, metadata=() ) @mock.patch( "airflow.providers.google.cloud.hooks.tasks.CloudTasksHook.get_conn", - **{"return_value.purge_queue.return_value": API_RESPONSE}, # type: ignore + **{"return_value.purge_queue.return_value": Queue(name=FULL_QUEUE_PATH)}, # type: ignore ) def test_purge_queue(self, get_conn): result = self.hook.purge_queue(location=LOCATION, queue_name=QUEUE_ID, project_id=PROJECT_ID) - self.assertEqual(result, API_RESPONSE) + self.assertEqual(result, Queue(name=FULL_QUEUE_PATH)) get_conn.return_value.purge_queue.assert_called_once_with( - name=FULL_QUEUE_PATH, retry=None, timeout=None, metadata=None + request=dict(name=FULL_QUEUE_PATH), retry=None, timeout=None, metadata=() ) @mock.patch( "airflow.providers.google.cloud.hooks.tasks.CloudTasksHook.get_conn", - **{"return_value.pause_queue.return_value": API_RESPONSE}, # type: ignore + **{"return_value.pause_queue.return_value": Queue(name=FULL_QUEUE_PATH)}, # type: ignore ) def test_pause_queue(self, get_conn): result = self.hook.pause_queue(location=LOCATION, queue_name=QUEUE_ID, project_id=PROJECT_ID) - self.assertEqual(result, API_RESPONSE) + self.assertEqual(result, Queue(name=FULL_QUEUE_PATH)) get_conn.return_value.pause_queue.assert_called_once_with( - name=FULL_QUEUE_PATH, retry=None, timeout=None, metadata=None + request=dict(name=FULL_QUEUE_PATH), retry=None, timeout=None, metadata=() ) @mock.patch( 
"airflow.providers.google.cloud.hooks.tasks.CloudTasksHook.get_conn", - **{"return_value.resume_queue.return_value": API_RESPONSE}, # type: ignore + **{"return_value.resume_queue.return_value": Queue(name=FULL_QUEUE_PATH)}, # type: ignore ) def test_resume_queue(self, get_conn): result = self.hook.resume_queue(location=LOCATION, queue_name=QUEUE_ID, project_id=PROJECT_ID) - self.assertEqual(result, API_RESPONSE) + self.assertEqual(result, Queue(name=FULL_QUEUE_PATH)) get_conn.return_value.resume_queue.assert_called_once_with( - name=FULL_QUEUE_PATH, retry=None, timeout=None, metadata=None + request=dict(name=FULL_QUEUE_PATH), retry=None, timeout=None, metadata=() ) @mock.patch( "airflow.providers.google.cloud.hooks.tasks.CloudTasksHook.get_conn", - **{"return_value.create_task.return_value": API_RESPONSE}, # type: ignore + **{"return_value.create_task.return_value": Task(name=FULL_TASK_PATH)}, # type: ignore ) def test_create_task(self, get_conn): result = self.hook.create_task( @@ -197,20 +193,18 @@ def test_create_task(self, get_conn): task_name=TASK_NAME, ) - self.assertEqual(result, API_RESPONSE) + self.assertEqual(result, Task(name=FULL_TASK_PATH)) get_conn.return_value.create_task.assert_called_once_with( - parent=FULL_QUEUE_PATH, - task=Task(name=FULL_TASK_PATH), - response_view=None, + request=dict(parent=FULL_QUEUE_PATH, task=Task(name=FULL_TASK_PATH), response_view=None), retry=None, timeout=None, - metadata=None, + metadata=(), ) @mock.patch( "airflow.providers.google.cloud.hooks.tasks.CloudTasksHook.get_conn", - **{"return_value.get_task.return_value": API_RESPONSE}, # type: ignore + **{"return_value.get_task.return_value": Task(name=FULL_TASK_PATH)}, # type: ignore ) def test_get_task(self, get_conn): result = self.hook.get_task( @@ -220,37 +214,34 @@ def test_get_task(self, get_conn): project_id=PROJECT_ID, ) - self.assertEqual(result, API_RESPONSE) + self.assertEqual(result, Task(name=FULL_TASK_PATH)) get_conn.return_value.get_task.assert_called_once_with( - name=FULL_TASK_PATH, - response_view=None, + request=dict(name=FULL_TASK_PATH, response_view=None), retry=None, timeout=None, - metadata=None, + metadata=(), ) @mock.patch( "airflow.providers.google.cloud.hooks.tasks.CloudTasksHook.get_conn", - **{"return_value.list_tasks.return_value": API_RESPONSE}, # type: ignore + **{"return_value.list_tasks.return_value": [Task(name=FULL_TASK_PATH)]}, # type: ignore ) def test_list_tasks(self, get_conn): result = self.hook.list_tasks(location=LOCATION, queue_name=QUEUE_ID, project_id=PROJECT_ID) - self.assertEqual(result, list(API_RESPONSE)) + self.assertEqual(result, [Task(name=FULL_TASK_PATH)]) get_conn.return_value.list_tasks.assert_called_once_with( - parent=FULL_QUEUE_PATH, - response_view=None, - page_size=None, + request=dict(parent=FULL_QUEUE_PATH, response_view=None, page_size=None), retry=None, timeout=None, - metadata=None, + metadata=(), ) @mock.patch( "airflow.providers.google.cloud.hooks.tasks.CloudTasksHook.get_conn", - **{"return_value.delete_task.return_value": API_RESPONSE}, # type: ignore + **{"return_value.delete_task.return_value": None}, # type: ignore ) def test_delete_task(self, get_conn): result = self.hook.delete_task( @@ -263,12 +254,12 @@ def test_delete_task(self, get_conn): self.assertEqual(result, None) get_conn.return_value.delete_task.assert_called_once_with( - name=FULL_TASK_PATH, retry=None, timeout=None, metadata=None + request=dict(name=FULL_TASK_PATH), retry=None, timeout=None, metadata=() ) @mock.patch( 
"airflow.providers.google.cloud.hooks.tasks.CloudTasksHook.get_conn", - **{"return_value.run_task.return_value": API_RESPONSE}, # type: ignore + **{"return_value.run_task.return_value": Task(name=FULL_TASK_PATH)}, # type: ignore ) def test_run_task(self, get_conn): result = self.hook.run_task( @@ -278,12 +269,11 @@ def test_run_task(self, get_conn): project_id=PROJECT_ID, ) - self.assertEqual(result, API_RESPONSE) + self.assertEqual(result, Task(name=FULL_TASK_PATH)) get_conn.return_value.run_task.assert_called_once_with( - name=FULL_TASK_PATH, - response_view=None, + request=dict(name=FULL_TASK_PATH, response_view=None), retry=None, timeout=None, - metadata=None, + metadata=(), ) diff --git a/tests/providers/google/cloud/hooks/test_workflows.py b/tests/providers/google/cloud/hooks/test_workflows.py new file mode 100644 index 0000000000000..4f3d4d0b1202d --- /dev/null +++ b/tests/providers/google/cloud/hooks/test_workflows.py @@ -0,0 +1,256 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from unittest import mock + +from airflow.providers.google.cloud.hooks.workflows import WorkflowsHook + +BASE_PATH = "airflow.providers.google.cloud.hooks.workflows.{}" +LOCATION = "europe-west1" +WORKFLOW_ID = "workflow_id" +EXECUTION_ID = "execution_id" +WORKFLOW = {"aa": "bb"} +EXECUTION = {"ccc": "ddd"} +PROJECT_ID = "airflow-testing" +METADATA = () +TIMEOUT = None +RETRY = None +FILTER_ = "aaaa" +ORDER_BY = "bbb" +UPDATE_MASK = "aaa,bbb" + +WORKFLOW_PARENT = f"projects/{PROJECT_ID}/locations/{LOCATION}" +WORKFLOW_NAME = f"projects/{PROJECT_ID}/locations/{LOCATION}/workflows/{WORKFLOW_ID}" +EXECUTION_PARENT = f"projects/{PROJECT_ID}/locations/{LOCATION}/workflows/{WORKFLOW_ID}" +EXECUTION_NAME = ( + f"projects/{PROJECT_ID}/locations/{LOCATION}/workflows/{WORKFLOW_ID}/executions/{EXECUTION_ID}" +) + + +def mock_init(*args, **kwargs): + pass + + +class TestWorkflowsHook: + def setup_method(self, _): + with mock.patch(BASE_PATH.format("GoogleBaseHook.__init__"), new=mock_init): + self.hook = WorkflowsHook(gcp_conn_id="test") # pylint: disable=attribute-defined-outside-init + + @mock.patch(BASE_PATH.format("WorkflowsHook._get_credentials")) + @mock.patch(BASE_PATH.format("WorkflowsHook.client_info"), new_callable=mock.PropertyMock) + @mock.patch(BASE_PATH.format("WorkflowsClient")) + def test_get_workflows_client(self, mock_client, mock_client_info, mock_get_credentials): + self.hook.get_workflows_client() + mock_client.assert_called_once_with( + credentials=mock_get_credentials.return_value, + client_info=mock_client_info.return_value, + ) + + @mock.patch(BASE_PATH.format("WorkflowsHook._get_credentials")) + @mock.patch(BASE_PATH.format("WorkflowsHook.client_info"), new_callable=mock.PropertyMock) + @mock.patch(BASE_PATH.format("ExecutionsClient")) + def 
test_get_executions_client(self, mock_client, mock_client_info, mock_get_credentials): + self.hook.get_executions_client() + mock_client.assert_called_once_with( + credentials=mock_get_credentials.return_value, + client_info=mock_client_info.return_value, + ) + + @mock.patch(BASE_PATH.format("WorkflowsHook.get_workflows_client")) + def test_create_workflow(self, mock_client): + result = self.hook.create_workflow( + workflow=WORKFLOW, + workflow_id=WORKFLOW_ID, + location=LOCATION, + project_id=PROJECT_ID, + retry=RETRY, + timeout=TIMEOUT, + metadata=METADATA, + ) + + assert mock_client.return_value.create_workflow.return_value == result + mock_client.return_value.create_workflow.assert_called_once_with( + request=dict(workflow=WORKFLOW, workflow_id=WORKFLOW_ID, parent=WORKFLOW_PARENT), + retry=RETRY, + timeout=TIMEOUT, + metadata=METADATA, + ) + + @mock.patch(BASE_PATH.format("WorkflowsHook.get_workflows_client")) + def test_get_workflow(self, mock_client): + result = self.hook.get_workflow( + workflow_id=WORKFLOW_ID, + location=LOCATION, + project_id=PROJECT_ID, + retry=RETRY, + timeout=TIMEOUT, + metadata=METADATA, + ) + + assert mock_client.return_value.get_workflow.return_value == result + mock_client.return_value.get_workflow.assert_called_once_with( + request=dict(name=WORKFLOW_NAME), + retry=RETRY, + timeout=TIMEOUT, + metadata=METADATA, + ) + + @mock.patch(BASE_PATH.format("WorkflowsHook.get_workflows_client")) + def test_update_workflow(self, mock_client): + result = self.hook.update_workflow( + workflow=WORKFLOW, + update_mask=UPDATE_MASK, + retry=RETRY, + timeout=TIMEOUT, + metadata=METADATA, + ) + + assert mock_client.return_value.update_workflow.return_value == result + mock_client.return_value.update_workflow.assert_called_once_with( + request=dict( + workflow=WORKFLOW, + update_mask=UPDATE_MASK, + ), + retry=RETRY, + timeout=TIMEOUT, + metadata=METADATA, + ) + + @mock.patch(BASE_PATH.format("WorkflowsHook.get_workflows_client")) + def test_delete_workflow(self, mock_client): + result = self.hook.delete_workflow( + workflow_id=WORKFLOW_ID, + location=LOCATION, + project_id=PROJECT_ID, + retry=RETRY, + timeout=TIMEOUT, + metadata=METADATA, + ) + + assert mock_client.return_value.delete_workflow.return_value == result + mock_client.return_value.delete_workflow.assert_called_once_with( + request=dict(name=WORKFLOW_NAME), + retry=RETRY, + timeout=TIMEOUT, + metadata=METADATA, + ) + + @mock.patch(BASE_PATH.format("WorkflowsHook.get_workflows_client")) + def test_list_workflows(self, mock_client): + result = self.hook.list_workflows( + location=LOCATION, + project_id=PROJECT_ID, + filter_=FILTER_, + order_by=ORDER_BY, + retry=RETRY, + timeout=TIMEOUT, + metadata=METADATA, + ) + + assert mock_client.return_value.list_workflows.return_value == result + mock_client.return_value.list_workflows.assert_called_once_with( + request=dict( + parent=WORKFLOW_PARENT, + filter=FILTER_, + order_by=ORDER_BY, + ), + retry=RETRY, + timeout=TIMEOUT, + metadata=METADATA, + ) + + @mock.patch(BASE_PATH.format("WorkflowsHook.get_executions_client")) + def test_create_execution(self, mock_client): + result = self.hook.create_execution( + workflow_id=WORKFLOW_ID, + location=LOCATION, + project_id=PROJECT_ID, + execution=EXECUTION, + retry=RETRY, + timeout=TIMEOUT, + metadata=METADATA, + ) + + assert mock_client.return_value.create_execution.return_value == result + mock_client.return_value.create_execution.assert_called_once_with( + request=dict( + parent=EXECUTION_PARENT, + execution=EXECUTION, + ), + 
retry=RETRY, + timeout=TIMEOUT, + metadata=METADATA, + ) + + @mock.patch(BASE_PATH.format("WorkflowsHook.get_executions_client")) + def test_get_execution(self, mock_client): + result = self.hook.get_execution( + workflow_id=WORKFLOW_ID, + execution_id=EXECUTION_ID, + location=LOCATION, + project_id=PROJECT_ID, + retry=RETRY, + timeout=TIMEOUT, + metadata=METADATA, + ) + + assert mock_client.return_value.get_execution.return_value == result + mock_client.return_value.get_execution.assert_called_once_with( + request=dict(name=EXECUTION_NAME), + retry=RETRY, + timeout=TIMEOUT, + metadata=METADATA, + ) + + @mock.patch(BASE_PATH.format("WorkflowsHook.get_executions_client")) + def test_cancel_execution(self, mock_client): + result = self.hook.cancel_execution( + workflow_id=WORKFLOW_ID, + execution_id=EXECUTION_ID, + location=LOCATION, + project_id=PROJECT_ID, + retry=RETRY, + timeout=TIMEOUT, + metadata=METADATA, + ) + + assert mock_client.return_value.cancel_execution.return_value == result + mock_client.return_value.cancel_execution.assert_called_once_with( + request=dict(name=EXECUTION_NAME), + retry=RETRY, + timeout=TIMEOUT, + metadata=METADATA, + ) + + @mock.patch(BASE_PATH.format("WorkflowsHook.get_executions_client")) + def test_list_execution(self, mock_client): + result = self.hook.list_executions( + workflow_id=WORKFLOW_ID, + location=LOCATION, + project_id=PROJECT_ID, + retry=RETRY, + timeout=TIMEOUT, + metadata=METADATA, + ) + + assert mock_client.return_value.list_executions.return_value == result + mock_client.return_value.list_executions.assert_called_once_with( + request=dict(parent=EXECUTION_PARENT), + retry=RETRY, + timeout=TIMEOUT, + metadata=METADATA, + ) diff --git a/tests/providers/google/cloud/log/test_stackdriver_task_handler.py b/tests/providers/google/cloud/log/test_stackdriver_task_handler.py index 4159e9e0f4e54..b4dbf69c71d7c 100644 --- a/tests/providers/google/cloud/log/test_stackdriver_task_handler.py +++ b/tests/providers/google/cloud/log/test_stackdriver_task_handler.py @@ -21,7 +21,8 @@ from unittest import mock from urllib.parse import parse_qs, urlparse -from google.cloud.logging.resource import Resource +from google.cloud.logging import Resource +from google.cloud.logging_v2.types import ListLogEntriesRequest, ListLogEntriesResponse, LogEntry from airflow.models import TaskInstance from airflow.models.dag import DAG @@ -30,15 +31,27 @@ from airflow.utils.state import State -def _create_list_response(messages, token): - page = [mock.MagicMock(payload={"message": message}) for message in messages] - return mock.MagicMock(pages=(n for n in [page]), next_page_token=token) +def _create_list_log_entries_response_mock(messages, token): + return ListLogEntriesResponse( + entries=[LogEntry(json_payload={"message": message}) for message in messages], next_page_token=token + ) + + +def _remove_stackdriver_handlers(): + for handler_ref in reversed(logging._handlerList[:]): + handler = handler_ref() + if not isinstance(handler, StackdriverTaskHandler): + continue + logging._removeHandlerRef(handler_ref) + del handler class TestStackdriverLoggingHandlerStandalone(unittest.TestCase): @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.get_credentials_and_project_id') @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.gcp_logging.Client') def test_should_pass_message_to_client(self, mock_client, mock_get_creds_and_project_id): + self.addCleanup(_remove_stackdriver_handlers) + mock_get_creds_and_project_id.return_value = ('creds', 
'project_id') transport_type = mock.MagicMock() @@ -69,6 +82,7 @@ def setUp(self) -> None: self.ti.try_number = 1 self.ti.state = State.RUNNING self.addCleanup(self.dag.clear) + self.addCleanup(_remove_stackdriver_handlers) @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.get_credentials_and_project_id') @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.gcp_logging.Client') @@ -118,107 +132,153 @@ def test_should_append_labels(self, mock_client, mock_get_creds_and_project_id): ) @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.get_credentials_and_project_id') - @mock.patch( - 'airflow.providers.google.cloud.log.stackdriver_task_handler.gcp_logging.Client', - **{'return_value.project': 'asf-project'}, # type: ignore - ) + @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.LoggingServiceV2Client') def test_should_read_logs_for_all_try(self, mock_client, mock_get_creds_and_project_id): - mock_client.return_value.list_entries.return_value = _create_list_response(["MSG1", "MSG2"], None) + mock_client.return_value.list_log_entries.return_value.pages = iter( + [_create_list_log_entries_response_mock(["MSG1", "MSG2"], None)] + ) mock_get_creds_and_project_id.return_value = ('creds', 'project_id') logs, metadata = self.stackdriver_task_handler.read(self.ti) - mock_client.return_value.list_entries.assert_called_once_with( - filter_='resource.type="global"\n' - 'logName="projects/asf-project/logs/airflow"\n' - 'labels.task_id="task_for_testing_file_log_handler"\n' - 'labels.dag_id="dag_for_testing_file_task_handler"\n' - 'labels.execution_date="2016-01-01T00:00:00+00:00"', - page_token=None, + mock_client.return_value.list_log_entries.assert_called_once_with( + request=ListLogEntriesRequest( + resource_names=["projects/project_id"], + filter=( + 'resource.type="global"\n' + 'logName="projects/project_id/logs/airflow"\n' + 'labels.task_id="task_for_testing_file_log_handler"\n' + 'labels.dag_id="dag_for_testing_file_task_handler"\n' + 'labels.execution_date="2016-01-01T00:00:00+00:00"' + ), + order_by='timestamp asc', + page_size=1000, + page_token=None, + ) ) - assert ['MSG1\nMSG2'] == logs + assert [(('default-hostname', 'MSG1\nMSG2'),)] == logs assert [{'end_of_log': True}] == metadata @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.get_credentials_and_project_id') - @mock.patch( - 'airflow.providers.google.cloud.log.stackdriver_task_handler.gcp_logging.Client', - **{'return_value.project': 'asf-project'}, # type: ignore - ) + @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.LoggingServiceV2Client') def test_should_read_logs_for_task_with_quote(self, mock_client, mock_get_creds_and_project_id): - mock_client.return_value.list_entries.return_value = _create_list_response(["MSG1", "MSG2"], None) + mock_client.return_value.list_log_entries.return_value.pages = iter( + [_create_list_log_entries_response_mock(["MSG1", "MSG2"], None)] + ) mock_get_creds_and_project_id.return_value = ('creds', 'project_id') self.ti.task_id = "K\"OT" logs, metadata = self.stackdriver_task_handler.read(self.ti) - mock_client.return_value.list_entries.assert_called_once_with( - filter_='resource.type="global"\n' - 'logName="projects/asf-project/logs/airflow"\n' - 'labels.task_id="K\\"OT"\n' - 'labels.dag_id="dag_for_testing_file_task_handler"\n' - 'labels.execution_date="2016-01-01T00:00:00+00:00"', - page_token=None, + mock_client.return_value.list_log_entries.assert_called_once_with( 
+ request=ListLogEntriesRequest( + resource_names=["projects/project_id"], + filter=( + 'resource.type="global"\n' + 'logName="projects/project_id/logs/airflow"\n' + 'labels.task_id="K\\"OT"\n' + 'labels.dag_id="dag_for_testing_file_task_handler"\n' + 'labels.execution_date="2016-01-01T00:00:00+00:00"' + ), + order_by='timestamp asc', + page_size=1000, + page_token=None, + ) ) - assert ['MSG1\nMSG2'] == logs + assert [(('default-hostname', 'MSG1\nMSG2'),)] == logs assert [{'end_of_log': True}] == metadata @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.get_credentials_and_project_id') - @mock.patch( - 'airflow.providers.google.cloud.log.stackdriver_task_handler.gcp_logging.Client', - **{'return_value.project': 'asf-project'}, # type: ignore - ) + @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.LoggingServiceV2Client') def test_should_read_logs_for_single_try(self, mock_client, mock_get_creds_and_project_id): - mock_client.return_value.list_entries.return_value = _create_list_response(["MSG1", "MSG2"], None) + mock_client.return_value.list_log_entries.return_value.pages = iter( + [_create_list_log_entries_response_mock(["MSG1", "MSG2"], None)] + ) mock_get_creds_and_project_id.return_value = ('creds', 'project_id') logs, metadata = self.stackdriver_task_handler.read(self.ti, 3) - mock_client.return_value.list_entries.assert_called_once_with( - filter_='resource.type="global"\n' - 'logName="projects/asf-project/logs/airflow"\n' - 'labels.task_id="task_for_testing_file_log_handler"\n' - 'labels.dag_id="dag_for_testing_file_task_handler"\n' - 'labels.execution_date="2016-01-01T00:00:00+00:00"\n' - 'labels.try_number="3"', - page_token=None, + mock_client.return_value.list_log_entries.assert_called_once_with( + request=ListLogEntriesRequest( + resource_names=["projects/project_id"], + filter=( + 'resource.type="global"\n' + 'logName="projects/project_id/logs/airflow"\n' + 'labels.task_id="task_for_testing_file_log_handler"\n' + 'labels.dag_id="dag_for_testing_file_task_handler"\n' + 'labels.execution_date="2016-01-01T00:00:00+00:00"\n' + 'labels.try_number="3"' + ), + order_by='timestamp asc', + page_size=1000, + page_token=None, + ) ) - assert ['MSG1\nMSG2'] == logs + assert [(('default-hostname', 'MSG1\nMSG2'),)] == logs assert [{'end_of_log': True}] == metadata @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.get_credentials_and_project_id') - @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.gcp_logging.Client') + @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.LoggingServiceV2Client') def test_should_read_logs_with_pagination(self, mock_client, mock_get_creds_and_project_id): - mock_client.return_value.list_entries.side_effect = [ - _create_list_response(["MSG1", "MSG2"], "TOKEN1"), - _create_list_response(["MSG3", "MSG4"], None), + mock_client.return_value.list_log_entries.side_effect = [ + mock.MagicMock(pages=iter([_create_list_log_entries_response_mock(["MSG1", "MSG2"], "TOKEN1")])), + mock.MagicMock(pages=iter([_create_list_log_entries_response_mock(["MSG3", "MSG4"], None)])), ] mock_get_creds_and_project_id.return_value = ('creds', 'project_id') logs, metadata1 = self.stackdriver_task_handler.read(self.ti, 3) - mock_client.return_value.list_entries.assert_called_once_with(filter_=mock.ANY, page_token=None) - assert ['MSG1\nMSG2'] == logs + mock_client.return_value.list_log_entries.assert_called_once_with( + request=ListLogEntriesRequest( + 
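
The handler tests now drive the low-level LoggingServiceV2Client: list_log_entries takes a ListLogEntriesRequest and returns a pager whose .pages iterator yields one ListLogEntriesResponse per backend call, which is what the MagicMock(pages=iter([...])) fixtures emulate. A hedged sketch of one step of the read loop these assertions imply (fetch_page is an illustrative helper; the json_payload dict access assumes google-cloud-logging>=2.0's proto-plus marshalling of Struct fields):

from google.cloud.logging_v2.types import ListLogEntriesRequest

def fetch_page(client, project_id: str, log_filter: str, page_token=None):
    """Return one page of log messages plus the token for the next page."""
    response = client.list_log_entries(
        request=ListLogEntriesRequest(
            resource_names=[f"projects/{project_id}"],
            filter=log_filter,
            order_by="timestamp asc",  # oldest first, matching the handler
            page_size=1000,
            page_token=page_token,
        )
    )
    page = next(iter(response.pages))  # consume a single page per read() call
    messages = [entry.json_payload["message"] for entry in page.entries]
    return messages, page.next_page_token
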
resource_names=["projects/project_id"], + filter=( + '''resource.type="global" +logName="projects/project_id/logs/airflow" +labels.task_id="task_for_testing_file_log_handler" +labels.dag_id="dag_for_testing_file_task_handler" +labels.execution_date="2016-01-01T00:00:00+00:00" +labels.try_number="3"''' + ), + order_by='timestamp asc', + page_size=1000, + page_token=None, + ) + ) + assert [(('default-hostname', 'MSG1\nMSG2'),)] == logs assert [{'end_of_log': False, 'next_page_token': 'TOKEN1'}] == metadata1 - mock_client.return_value.list_entries.return_value.next_page_token = None + mock_client.return_value.list_log_entries.return_value.next_page_token = None logs, metadata2 = self.stackdriver_task_handler.read(self.ti, 3, metadata1[0]) - mock_client.return_value.list_entries.assert_called_with(filter_=mock.ANY, page_token="TOKEN1") - assert ['MSG3\nMSG4'] == logs + + mock_client.return_value.list_log_entries.assert_called_with( + request=ListLogEntriesRequest( + resource_names=["projects/project_id"], + filter=( + 'resource.type="global"\n' + 'logName="projects/project_id/logs/airflow"\n' + 'labels.task_id="task_for_testing_file_log_handler"\n' + 'labels.dag_id="dag_for_testing_file_task_handler"\n' + 'labels.execution_date="2016-01-01T00:00:00+00:00"\n' + 'labels.try_number="3"' + ), + order_by='timestamp asc', + page_size=1000, + page_token="TOKEN1", + ) + ) + assert [(('default-hostname', 'MSG3\nMSG4'),)] == logs assert [{'end_of_log': True}] == metadata2 @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.get_credentials_and_project_id') - @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.gcp_logging.Client') + @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.LoggingServiceV2Client') def test_should_read_logs_with_download(self, mock_client, mock_get_creds_and_project_id): - mock_client.return_value.list_entries.side_effect = [ - _create_list_response(["MSG1", "MSG2"], "TOKEN1"), - _create_list_response(["MSG3", "MSG4"], None), + mock_client.return_value.list_log_entries.side_effect = [ + mock.MagicMock(pages=iter([_create_list_log_entries_response_mock(["MSG1", "MSG2"], "TOKEN1")])), + mock.MagicMock(pages=iter([_create_list_log_entries_response_mock(["MSG3", "MSG4"], None)])), ] mock_get_creds_and_project_id.return_value = ('creds', 'project_id') logs, metadata1 = self.stackdriver_task_handler.read(self.ti, 3, {'download_logs': True}) - assert ['MSG1\nMSG2\nMSG3\nMSG4'] == logs + assert [(('default-hostname', 'MSG1\nMSG2\nMSG3\nMSG4'),)] == logs assert [{'end_of_log': True}] == metadata1 @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.get_credentials_and_project_id') - @mock.patch( - 'airflow.providers.google.cloud.log.stackdriver_task_handler.gcp_logging.Client', - **{'return_value.project': 'asf-project'}, # type: ignore - ) + @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.LoggingServiceV2Client') def test_should_read_logs_with_custom_resources(self, mock_client, mock_get_creds_and_project_id): mock_get_creds_and_project_id.return_value = ('creds', 'project_id') resource = Resource( @@ -226,31 +286,37 @@ def test_should_read_logs_with_custom_resources(self, mock_client, mock_get_cred labels={ "environment.name": 'test-instancce', "location": 'europpe-west-3', - "project_id": "asf-project", + "project_id": "project_id", }, ) self.stackdriver_task_handler = StackdriverTaskHandler( transport=self.transport_mock, resource=resource ) - entry = 
mock.MagicMock(payload={"message": "TEXT"}) - page = [entry, entry] - mock_client.return_value.list_entries.return_value.pages = (n for n in [page]) - mock_client.return_value.list_entries.return_value.next_page_token = None + entry = mock.MagicMock(json_payload={"message": "TEXT"}) + page = mock.MagicMock(entries=[entry, entry], next_page_token=None) + mock_client.return_value.list_log_entries.return_value.pages = (n for n in [page]) logs, metadata = self.stackdriver_task_handler.read(self.ti) - mock_client.return_value.list_entries.assert_called_once_with( - filter_='resource.type="cloud_composer_environment"\n' - 'logName="projects/asf-project/logs/airflow"\n' - 'resource.labels."environment.name"="test-instancce"\n' - 'resource.labels.location="europpe-west-3"\n' - 'resource.labels.project_id="asf-project"\n' - 'labels.task_id="task_for_testing_file_log_handler"\n' - 'labels.dag_id="dag_for_testing_file_task_handler"\n' - 'labels.execution_date="2016-01-01T00:00:00+00:00"', - page_token=None, + mock_client.return_value.list_log_entries.assert_called_once_with( + request=ListLogEntriesRequest( + resource_names=["projects/project_id"], + filter=( + 'resource.type="cloud_composer_environment"\n' + 'logName="projects/project_id/logs/airflow"\n' + 'resource.labels."environment.name"="test-instancce"\n' + 'resource.labels.location="europpe-west-3"\n' + 'resource.labels.project_id="project_id"\n' + 'labels.task_id="task_for_testing_file_log_handler"\n' + 'labels.dag_id="dag_for_testing_file_task_handler"\n' + 'labels.execution_date="2016-01-01T00:00:00+00:00"' + ), + order_by='timestamp asc', + page_size=1000, + page_token=None, + ) ) - assert ['TEXT\nTEXT'] == logs + assert [(('default-hostname', 'TEXT\nTEXT'),)] == logs assert [{'end_of_log': True}] == metadata @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.get_credentials_and_project_id') @@ -278,10 +344,9 @@ def test_should_use_credentials(self, mock_client, mock_get_creds_and_project_id assert mock_client.return_value == client @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.get_credentials_and_project_id') - @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.gcp_logging.Client') + @mock.patch('airflow.providers.google.cloud.log.stackdriver_task_handler.LoggingServiceV2Client') def test_should_return_valid_external_url(self, mock_client, mock_get_creds_and_project_id): mock_get_creds_and_project_id.return_value = ('creds', 'project_id') - mock_client.return_value.project = 'project_id' stackdriver_task_handler = StackdriverTaskHandler( gcp_key_path="KEY_PATH", diff --git a/tests/providers/google/cloud/operators/test_automl.py b/tests/providers/google/cloud/operators/test_automl.py index 903600b0fa815..4c80703d58a1c 100644 --- a/tests/providers/google/cloud/operators/test_automl.py +++ b/tests/providers/google/cloud/operators/test_automl.py @@ -20,8 +20,9 @@ import unittest from unittest import mock -from google.cloud.automl_v1beta1 import AutoMlClient, PredictionServiceClient +from google.cloud.automl_v1beta1 import BatchPredictResult, Dataset, Model, PredictResponse +from airflow.providers.google.cloud.hooks.automl import CloudAutoMLHook from airflow.providers.google.cloud.operators.automl import ( AutoMLBatchPredictOperator, AutoMLCreateDatasetOperator, @@ -43,7 +44,7 @@ GCP_PROJECT_ID = "test-project" GCP_LOCATION = "test-location" MODEL_NAME = "test_model" -MODEL_ID = "projects/198907790164/locations/us-central1/models/TBL9195602771183665152" +MODEL_ID = 
"TBL9195602771183665152" DATASET_ID = "TBL123456789" MODEL = { "display_name": MODEL_NAME, @@ -51,8 +52,9 @@ "tables_model_metadata": {"train_budget_milli_node_hours": 1000}, } -LOCATION_PATH = AutoMlClient.location_path(GCP_PROJECT_ID, GCP_LOCATION) -MODEL_PATH = PredictionServiceClient.model_path(GCP_PROJECT_ID, GCP_LOCATION, MODEL_ID) +LOCATION_PATH = f"projects/{GCP_PROJECT_ID}/locations/{GCP_LOCATION}" +MODEL_PATH = f"projects/{GCP_PROJECT_ID}/locations/{GCP_LOCATION}/models/{MODEL_ID}" +DATASET_PATH = f"projects/{GCP_PROJECT_ID}/locations/{GCP_LOCATION}/datasets/{DATASET_ID}" INPUT_CONFIG = {"input": "value"} OUTPUT_CONFIG = {"output": "value"} @@ -60,12 +62,15 @@ DATASET = {"dataset_id": "data"} MASK = {"field": "mask"} +extract_object_id = CloudAutoMLHook.extract_object_id + class TestAutoMLTrainModelOperator(unittest.TestCase): @mock.patch("airflow.providers.google.cloud.operators.automl.AutoMLTrainModelOperator.xcom_push") @mock.patch("airflow.providers.google.cloud.operators.automl.CloudAutoMLHook") def test_execute(self, mock_hook, mock_xcom): - mock_hook.return_value.extract_object_id.return_value = MODEL_ID + mock_hook.return_value.create_model.return_value.result.return_value = Model(name=MODEL_PATH) + mock_hook.return_value.extract_object_id = extract_object_id op = AutoMLTrainModelOperator( model=MODEL, location=GCP_LOCATION, @@ -87,6 +92,9 @@ def test_execute(self, mock_hook, mock_xcom): class TestAutoMLBatchPredictOperator(unittest.TestCase): @mock.patch("airflow.providers.google.cloud.operators.automl.CloudAutoMLHook") def test_execute(self, mock_hook): + mock_hook.return_value.batch_predict.return_value.result.return_value = BatchPredictResult() + mock_hook.return_value.extract_object_id = extract_object_id + op = AutoMLBatchPredictOperator( model_id=MODEL_ID, location=GCP_LOCATION, @@ -113,6 +121,8 @@ def test_execute(self, mock_hook): class TestAutoMLPredictOperator(unittest.TestCase): @mock.patch("airflow.providers.google.cloud.operators.automl.CloudAutoMLHook") def test_execute(self, mock_hook): + mock_hook.return_value.predict.return_value = PredictResponse() + op = AutoMLPredictOperator( model_id=MODEL_ID, location=GCP_LOCATION, @@ -137,7 +147,9 @@ class TestAutoMLCreateImportOperator(unittest.TestCase): @mock.patch("airflow.providers.google.cloud.operators.automl.AutoMLCreateDatasetOperator.xcom_push") @mock.patch("airflow.providers.google.cloud.operators.automl.CloudAutoMLHook") def test_execute(self, mock_hook, mock_xcom): - mock_hook.return_value.extract_object_id.return_value = DATASET_ID + mock_hook.return_value.create_dataset.return_value = Dataset(name=DATASET_PATH) + mock_hook.return_value.extract_object_id = extract_object_id + op = AutoMLCreateDatasetOperator( dataset=DATASET, location=GCP_LOCATION, @@ -191,6 +203,8 @@ def test_execute(self, mock_hook): class TestAutoMLUpdateDatasetOperator(unittest.TestCase): @mock.patch("airflow.providers.google.cloud.operators.automl.CloudAutoMLHook") def test_execute(self, mock_hook): + mock_hook.return_value.update_dataset.return_value = Dataset(name=DATASET_PATH) + dataset = copy.deepcopy(DATASET) dataset["name"] = DATASET_ID @@ -213,6 +227,9 @@ def test_execute(self, mock_hook): class TestAutoMLGetModelOperator(unittest.TestCase): @mock.patch("airflow.providers.google.cloud.operators.automl.CloudAutoMLHook") def test_execute(self, mock_hook): + mock_hook.return_value.get_model.return_value = Model(name=MODEL_PATH) + mock_hook.return_value.extract_object_id = extract_object_id + op = AutoMLGetModelOperator( 
model_id=MODEL_ID, location=GCP_LOCATION, diff --git a/tests/providers/google/cloud/operators/test_bigquery_dts.py b/tests/providers/google/cloud/operators/test_bigquery_dts.py index 4d423527acc8e..d6071fa3f3688 100644 --- a/tests/providers/google/cloud/operators/test_bigquery_dts.py +++ b/tests/providers/google/cloud/operators/test_bigquery_dts.py @@ -18,6 +18,8 @@ import unittest from unittest import mock +from google.cloud.bigquery_datatransfer_v1 import StartManualTransferRunsResponse, TransferConfig, TransferRun + from airflow.providers.google.cloud.operators.bigquery_dts import ( BigQueryCreateDataTransferOperator, BigQueryDataTransferServiceStartTransferRunsOperator, @@ -39,20 +41,23 @@ TRANSFER_CONFIG_ID = "id1234" -NAME = "projects/123abc/locations/321cba/transferConfig/1a2b3c" +TRANSFER_CONFIG_NAME = "projects/123abc/locations/321cba/transferConfig/1a2b3c" +RUN_NAME = "projects/123abc/locations/321cba/transferConfig/1a2b3c/runs/123" class BigQueryCreateDataTransferOperatorTestCase(unittest.TestCase): - @mock.patch("airflow.providers.google.cloud.operators.bigquery_dts.BiqQueryDataTransferServiceHook") - @mock.patch("airflow.providers.google.cloud.operators.bigquery_dts.get_object_id") - def test_execute(self, mock_name, mock_hook): - mock_name.return_value = TRANSFER_CONFIG_ID - mock_xcom = mock.MagicMock() + @mock.patch( + "airflow.providers.google.cloud.operators.bigquery_dts.BiqQueryDataTransferServiceHook", + **{'return_value.create_transfer_config.return_value': TransferConfig(name=TRANSFER_CONFIG_NAME)}, + ) + def test_execute(self, mock_hook): op = BigQueryCreateDataTransferOperator( transfer_config=TRANSFER_CONFIG, project_id=PROJECT_ID, task_id="id" ) - op.xcom_push = mock_xcom - op.execute(None) + ti = mock.MagicMock() + + op.execute({'ti': ti}) + mock_hook.return_value.create_transfer_config.assert_called_once_with( authorization_code=None, metadata=None, @@ -61,6 +66,7 @@ def test_execute(self, mock_name, mock_hook): retry=None, timeout=None, ) + ti.xcom_push.assert_called_once_with(execution_date=None, key='transfer_config_id', value='1a2b3c') class BigQueryDeleteDataTransferConfigOperatorTestCase(unittest.TestCase): @@ -80,12 +86,22 @@ def test_execute(self, mock_hook): class BigQueryDataTransferServiceStartTransferRunsOperatorTestCase(unittest.TestCase): - @mock.patch("airflow.providers.google.cloud.operators.bigquery_dts.BiqQueryDataTransferServiceHook") + @mock.patch( + "airflow.providers.google.cloud.operators.bigquery_dts.BiqQueryDataTransferServiceHook", + **{ + 'return_value.start_manual_transfer_runs.return_value': StartManualTransferRunsResponse( + runs=[TransferRun(name=RUN_NAME)] + ) + }, + ) def test_execute(self, mock_hook): op = BigQueryDataTransferServiceStartTransferRunsOperator( transfer_config_id=TRANSFER_CONFIG_ID, task_id="id", project_id=PROJECT_ID ) - op.execute(None) + ti = mock.MagicMock() + + op.execute({'ti': ti}) + mock_hook.return_value.start_manual_transfer_runs.assert_called_once_with( transfer_config_id=TRANSFER_CONFIG_ID, project_id=PROJECT_ID, @@ -95,3 +111,4 @@ def test_execute(self, mock_hook): retry=None, timeout=None, ) + ti.xcom_push.assert_called_once_with(execution_date=None, key='run_id', value='123') diff --git a/tests/providers/google/cloud/operators/test_cloud_memorystore.py b/tests/providers/google/cloud/operators/test_cloud_memorystore.py index 8ef60bd9b62b1..6db8a3a719dae 100644 --- a/tests/providers/google/cloud/operators/test_cloud_memorystore.py +++ b/tests/providers/google/cloud/operators/test_cloud_memorystore.py 
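A note on the two `xcom_push` assertions in the BigQuery DTS tests above: both expect only the trailing segment of the resource name returned by the API (`'1a2b3c'` from the transfer-config name, `'123'` from the run name). The provider module exposes a `get_object_id` helper for this (it is the symbol the old test patched); the sketch below is an illustrative stand-in, not the provider's actual implementation.

```python
# Illustrative sketch only: the real helper is
# airflow.providers.google.cloud.operators.bigquery_dts.get_object_id;
# this stand-in just shows the id derivation the tests assert on.
TRANSFER_CONFIG_NAME = "projects/123abc/locations/321cba/transferConfig/1a2b3c"
RUN_NAME = "projects/123abc/locations/321cba/transferConfig/1a2b3c/runs/123"


def get_object_id(name: str) -> str:
    """Return the last segment of a fully-qualified resource name."""
    return name.rpartition("/")[-1]


assert get_object_id(TRANSFER_CONFIG_NAME) == "1a2b3c"
assert get_object_id(RUN_NAME) == "123"
```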
@@ -20,7 +20,7 @@ from google.api_core.retry import Retry from google.cloud.memcache_v1beta2.types import cloud_memcache -from google.cloud.redis_v1.gapic.enums import FailoverInstanceRequest +from google.cloud.redis_v1 import FailoverInstanceRequest from google.cloud.redis_v1.types import Instance from airflow.providers.google.cloud.operators.cloud_memorystore import ( @@ -78,6 +78,7 @@ def test_assert_valid_hook_call(self, mock_hook): gcp_conn_id=TEST_GCP_CONN_ID, impersonation_chain=TEST_IMPERSONATION_CHAIN, ) + mock_hook.return_value.create_instance.return_value = Instance(name=TEST_NAME) task.execute(mock.MagicMock()) mock_hook.assert_called_once_with( gcp_conn_id=TEST_GCP_CONN_ID, @@ -199,6 +200,7 @@ def test_assert_valid_hook_call(self, mock_hook): gcp_conn_id=TEST_GCP_CONN_ID, impersonation_chain=TEST_IMPERSONATION_CHAIN, ) + mock_hook.return_value.get_instance.return_value = Instance(name=TEST_NAME) task.execute(mock.MagicMock()) mock_hook.assert_called_once_with( gcp_conn_id=TEST_GCP_CONN_ID, diff --git a/tests/providers/google/cloud/operators/test_datacatalog.py b/tests/providers/google/cloud/operators/test_datacatalog.py index b575dd47a5a95..517b35c71edc1 100644 --- a/tests/providers/google/cloud/operators/test_datacatalog.py +++ b/tests/providers/google/cloud/operators/test_datacatalog.py @@ -87,15 +87,25 @@ ) TEST_ENTRY: Entry = Entry(name=TEST_ENTRY_PATH) -TEST_ENTRY_DICT: Dict = dict(name=TEST_ENTRY_PATH) +TEST_ENTRY_DICT: Dict = { + 'description': '', + 'display_name': '', + 'linked_resource': '', + 'name': TEST_ENTRY_PATH, +} TEST_ENTRY_GROUP: EntryGroup = EntryGroup(name=TEST_ENTRY_GROUP_PATH) -TEST_ENTRY_GROUP_DICT: Dict = dict(name=TEST_ENTRY_GROUP_PATH) -TEST_TAG: EntryGroup = Tag(name=TEST_TAG_PATH) -TEST_TAG_DICT: Dict = dict(name=TEST_TAG_PATH) +TEST_ENTRY_GROUP_DICT: Dict = {'description': '', 'display_name': '', 'name': TEST_ENTRY_GROUP_PATH} +TEST_TAG: Tag = Tag(name=TEST_TAG_PATH) +TEST_TAG_DICT: Dict = {'fields': {}, 'name': TEST_TAG_PATH, 'template': '', 'template_display_name': ''} TEST_TAG_TEMPLATE: TagTemplate = TagTemplate(name=TEST_TAG_TEMPLATE_PATH) -TEST_TAG_TEMPLATE_DICT: Dict = dict(name=TEST_TAG_TEMPLATE_PATH) -TEST_TAG_TEMPLATE_FIELD: Dict = TagTemplateField(name=TEST_TAG_TEMPLATE_FIELD_ID) -TEST_TAG_TEMPLATE_FIELD_DICT: Dict = dict(name=TEST_TAG_TEMPLATE_FIELD_ID) +TEST_TAG_TEMPLATE_DICT: Dict = {'display_name': '', 'fields': {}, 'name': TEST_TAG_TEMPLATE_PATH} +TEST_TAG_TEMPLATE_FIELD: TagTemplateField = TagTemplateField(name=TEST_TAG_TEMPLATE_FIELD_ID) +TEST_TAG_TEMPLATE_FIELD_DICT: Dict = { + 'display_name': '', + 'is_required': False, + 'name': TEST_TAG_TEMPLATE_FIELD_ID, + 'order': 0, +} class TestCloudDataCatalogCreateEntryOperator(TestCase): @@ -498,7 +508,10 @@ def test_assert_valid_hook_call(self, mock_hook) -> None: class TestCloudDataCatalogGetEntryOperator(TestCase): - @mock.patch("airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogHook") + @mock.patch( + "airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogHook", + **{"return_value.get_entry.return_value": TEST_ENTRY}, # type: ignore + ) def test_assert_valid_hook_call(self, mock_hook) -> None: task = CloudDataCatalogGetEntryOperator( task_id="task_id", @@ -529,7 +542,10 @@ def test_assert_valid_hook_call(self, mock_hook) -> None: class TestCloudDataCatalogGetEntryGroupOperator(TestCase): - @mock.patch("airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogHook") + @mock.patch( + 
"airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogHook", + **{"return_value.get_entry_group.return_value": TEST_ENTRY_GROUP}, # type: ignore + ) def test_assert_valid_hook_call(self, mock_hook) -> None: task = CloudDataCatalogGetEntryGroupOperator( task_id="task_id", @@ -560,7 +576,10 @@ def test_assert_valid_hook_call(self, mock_hook) -> None: class TestCloudDataCatalogGetTagTemplateOperator(TestCase): - @mock.patch("airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogHook") + @mock.patch( + "airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogHook", + **{"return_value.get_tag_template.return_value": TEST_TAG_TEMPLATE}, # type: ignore + ) def test_assert_valid_hook_call(self, mock_hook) -> None: task = CloudDataCatalogGetTagTemplateOperator( task_id="task_id", @@ -589,7 +608,10 @@ def test_assert_valid_hook_call(self, mock_hook) -> None: class TestCloudDataCatalogListTagsOperator(TestCase): - @mock.patch("airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogHook") + @mock.patch( + "airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogHook", + **{"return_value.list_tags.return_value": [TEST_TAG]}, # type: ignore + ) def test_assert_valid_hook_call(self, mock_hook) -> None: task = CloudDataCatalogListTagsOperator( task_id="task_id", @@ -622,7 +644,10 @@ def test_assert_valid_hook_call(self, mock_hook) -> None: class TestCloudDataCatalogLookupEntryOperator(TestCase): - @mock.patch("airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogHook") + @mock.patch( + "airflow.providers.google.cloud.operators.datacatalog.CloudDataCatalogHook", + **{"return_value.lookup_entry.return_value": TEST_ENTRY}, # type: ignore + ) def test_assert_valid_hook_call(self, mock_hook) -> None: task = CloudDataCatalogLookupEntryOperator( task_id="task_id", diff --git a/tests/providers/google/cloud/operators/test_dataflow.py b/tests/providers/google/cloud/operators/test_dataflow.py index 7e290d7f05ca7..301805266711b 100644 --- a/tests/providers/google/cloud/operators/test_dataflow.py +++ b/tests/providers/google/cloud/operators/test_dataflow.py @@ -16,7 +16,7 @@ # specific language governing permissions and limitations # under the License. # - +import copy import unittest from copy import deepcopy from unittest import mock @@ -115,35 +115,56 @@ def test_init(self): assert self.dataflow.dataflow_default_options == DEFAULT_OPTIONS_PYTHON assert self.dataflow.options == EXPECTED_ADDITIONAL_OPTIONS + @mock.patch( + 'airflow.providers.google.cloud.operators.dataflow.process_line_and_extract_dataflow_job_id_callback' + ) + @mock.patch('airflow.providers.google.cloud.operators.dataflow.BeamHook') @mock.patch('airflow.providers.google.cloud.operators.dataflow.DataflowHook') @mock.patch('airflow.providers.google.cloud.operators.dataflow.GCSHook') - def test_exec(self, gcs_hook, dataflow_mock): + def test_exec(self, gcs_hook, dataflow_hook_mock, beam_hook_mock, mock_callback_on_job_id): """Test DataflowHook is created and the right args are passed to start_python_workflow. 
""" - start_python_hook = dataflow_mock.return_value.start_python_dataflow + start_python_mock = beam_hook_mock.return_value.start_python_pipeline gcs_provide_file = gcs_hook.return_value.provide_file + job_name = dataflow_hook_mock.return_value.build_dataflow_job_name.return_value self.dataflow.execute(None) - assert dataflow_mock.called + beam_hook_mock.assert_called_once_with(runner="DataflowRunner") + self.assertTrue(self.dataflow.py_file.startswith('/tmp/dataflow')) + gcs_provide_file.assert_called_once_with(object_url=PY_FILE) + mock_callback_on_job_id.assert_called_once_with(on_new_job_id_callback=mock.ANY) + dataflow_hook_mock.assert_called_once_with( + gcp_conn_id="google_cloud_default", + delegate_to=mock.ANY, + poll_sleep=POLL_SLEEP, + impersonation_chain=None, + drain_pipeline=False, + cancel_timeout=mock.ANY, + wait_until_finished=None, + ) expected_options = { - 'project': 'test', - 'staging_location': 'gs://test/staging', + "project": dataflow_hook_mock.return_value.project_id, + "staging_location": 'gs://test/staging', + "job_name": job_name, + "region": TEST_LOCATION, 'output': 'gs://test/output', - 'labels': {'foo': 'bar', 'airflow-version': TEST_VERSION}, + 'labels': {'foo': 'bar', 'airflow-version': 'v2-1-0-dev0'}, } - gcs_provide_file.assert_called_once_with(object_url=PY_FILE) - start_python_hook.assert_called_once_with( - job_name=JOB_NAME, + start_python_mock.assert_called_once_with( variables=expected_options, - dataflow=mock.ANY, + py_file=gcs_provide_file.return_value.__enter__.return_value.name, py_options=PY_OPTIONS, py_interpreter=PY_INTERPRETER, py_requirements=None, py_system_site_packages=False, - on_new_job_id_callback=mock.ANY, - project_id=None, + process_line_callback=mock_callback_on_job_id.return_value, + ) + dataflow_hook_mock.return_value.wait_for_done.assert_called_once_with( + job_id=mock.ANY, + job_name=job_name, location=TEST_LOCATION, + multiple_jobs=False, ) assert self.dataflow.py_file.startswith('/tmp/dataflow') @@ -172,110 +193,182 @@ def test_init(self): assert self.dataflow.options == EXPECTED_ADDITIONAL_OPTIONS assert self.dataflow.check_if_running == CheckJobRunning.WaitForRun + @mock.patch( + 'airflow.providers.google.cloud.operators.dataflow.process_line_and_extract_dataflow_job_id_callback' + ) + @mock.patch('airflow.providers.google.cloud.operators.dataflow.BeamHook') @mock.patch('airflow.providers.google.cloud.operators.dataflow.DataflowHook') @mock.patch('airflow.providers.google.cloud.operators.dataflow.GCSHook') - def test_exec(self, gcs_hook, dataflow_mock): + def test_exec(self, gcs_hook, dataflow_hook_mock, beam_hook_mock, mock_callback_on_job_id): """Test DataflowHook is created and the right args are passed to start_java_workflow. 
""" - start_java_hook = dataflow_mock.return_value.start_java_dataflow + start_java_mock = beam_hook_mock.return_value.start_java_pipeline gcs_provide_file = gcs_hook.return_value.provide_file + job_name = dataflow_hook_mock.return_value.build_dataflow_job_name.return_value self.dataflow.check_if_running = CheckJobRunning.IgnoreJob + self.dataflow.execute(None) - assert dataflow_mock.called + + mock_callback_on_job_id.assert_called_once_with(on_new_job_id_callback=mock.ANY) gcs_provide_file.assert_called_once_with(object_url=JAR_FILE) - start_java_hook.assert_called_once_with( - job_name=JOB_NAME, - variables=mock.ANY, - jar=mock.ANY, + expected_variables = { + 'project': dataflow_hook_mock.return_value.project_id, + 'stagingLocation': 'gs://test/staging', + 'jobName': job_name, + 'region': TEST_LOCATION, + 'output': 'gs://test/output', + 'labels': {'foo': 'bar', 'airflow-version': 'v2-1-0-dev0'}, + } + + start_java_mock.assert_called_once_with( + variables=expected_variables, + jar=gcs_provide_file.return_value.__enter__.return_value.name, job_class=JOB_CLASS, - append_job_name=True, - multiple_jobs=None, - on_new_job_id_callback=mock.ANY, - project_id=None, + process_line_callback=mock_callback_on_job_id.return_value, + ) + dataflow_hook_mock.return_value.wait_for_done.assert_called_once_with( + job_id=mock.ANY, + job_name=job_name, location=TEST_LOCATION, + multiple_jobs=None, ) + @mock.patch('airflow.providers.google.cloud.operators.dataflow.BeamHook') @mock.patch('airflow.providers.google.cloud.operators.dataflow.DataflowHook') @mock.patch('airflow.providers.google.cloud.operators.dataflow.GCSHook') - def test_check_job_running_exec(self, gcs_hook, dataflow_mock): + def test_check_job_running_exec(self, gcs_hook, dataflow_mock, beam_hook_mock): """Test DataflowHook is created and the right args are passed to start_java_workflow. 
""" dataflow_running = dataflow_mock.return_value.is_job_dataflow_running dataflow_running.return_value = True - start_java_hook = dataflow_mock.return_value.start_java_dataflow + start_java_hook = beam_hook_mock.return_value.start_java_pipeline gcs_provide_file = gcs_hook.return_value.provide_file self.dataflow.check_if_running = True + self.dataflow.execute(None) - assert dataflow_mock.called - gcs_provide_file.assert_not_called() + + self.assertTrue(dataflow_mock.called) start_java_hook.assert_not_called() - dataflow_running.assert_called_once_with( - name=JOB_NAME, variables=mock.ANY, project_id=None, location=TEST_LOCATION - ) + gcs_provide_file.assert_called_once() + variables = { + 'project': dataflow_mock.return_value.project_id, + 'stagingLocation': 'gs://test/staging', + 'jobName': JOB_NAME, + 'region': TEST_LOCATION, + 'output': 'gs://test/output', + 'labels': {'foo': 'bar', 'airflow-version': 'v2-1-0-dev0'}, + } + dataflow_running.assert_called_once_with(name=JOB_NAME, variables=variables) + @mock.patch( + 'airflow.providers.google.cloud.operators.dataflow.process_line_and_extract_dataflow_job_id_callback' + ) + @mock.patch('airflow.providers.google.cloud.operators.dataflow.BeamHook') @mock.patch('airflow.providers.google.cloud.operators.dataflow.DataflowHook') @mock.patch('airflow.providers.google.cloud.operators.dataflow.GCSHook') - def test_check_job_not_running_exec(self, gcs_hook, dataflow_mock): + def test_check_job_not_running_exec( + self, gcs_hook, dataflow_hook_mock, beam_hook_mock, mock_callback_on_job_id + ): """Test DataflowHook is created and the right args are passed to start_java_workflow with option to check if job is running - """ - dataflow_running = dataflow_mock.return_value.is_job_dataflow_running + is_job_dataflow_running_variables = None + + def set_is_job_dataflow_running_variables(*args, **kwargs): + nonlocal is_job_dataflow_running_variables + is_job_dataflow_running_variables = copy.deepcopy(kwargs.get("variables")) + + dataflow_running = dataflow_hook_mock.return_value.is_job_dataflow_running + dataflow_running.side_effect = set_is_job_dataflow_running_variables dataflow_running.return_value = False - start_java_hook = dataflow_mock.return_value.start_java_dataflow + start_java_mock = beam_hook_mock.return_value.start_java_pipeline gcs_provide_file = gcs_hook.return_value.provide_file self.dataflow.check_if_running = True + self.dataflow.execute(None) - assert dataflow_mock.called + + mock_callback_on_job_id.assert_called_once_with(on_new_job_id_callback=mock.ANY) gcs_provide_file.assert_called_once_with(object_url=JAR_FILE) - start_java_hook.assert_called_once_with( - job_name=JOB_NAME, - variables=mock.ANY, - jar=mock.ANY, + expected_variables = { + 'project': dataflow_hook_mock.return_value.project_id, + 'stagingLocation': 'gs://test/staging', + 'jobName': JOB_NAME, + 'region': TEST_LOCATION, + 'output': 'gs://test/output', + 'labels': {'foo': 'bar', 'airflow-version': 'v2-1-0-dev0'}, + } + self.assertEqual(expected_variables, is_job_dataflow_running_variables) + job_name = dataflow_hook_mock.return_value.build_dataflow_job_name.return_value + expected_variables["jobName"] = job_name + start_java_mock.assert_called_once_with( + variables=expected_variables, + jar=gcs_provide_file.return_value.__enter__.return_value.name, job_class=JOB_CLASS, - append_job_name=True, - multiple_jobs=None, - on_new_job_id_callback=mock.ANY, - project_id=None, - location=TEST_LOCATION, + process_line_callback=mock_callback_on_job_id.return_value, ) - 
dataflow_running.assert_called_once_with( - name=JOB_NAME, variables=mock.ANY, project_id=None, location=TEST_LOCATION + dataflow_hook_mock.return_value.wait_for_done.assert_called_once_with( + job_id=mock.ANY, + job_name=job_name, + location=TEST_LOCATION, + multiple_jobs=None, ) + @mock.patch( + 'airflow.providers.google.cloud.operators.dataflow.process_line_and_extract_dataflow_job_id_callback' + ) + @mock.patch('airflow.providers.google.cloud.operators.dataflow.BeamHook') @mock.patch('airflow.providers.google.cloud.operators.dataflow.DataflowHook') @mock.patch('airflow.providers.google.cloud.operators.dataflow.GCSHook') - def test_check_multiple_job_exec(self, gcs_hook, dataflow_mock): + def test_check_multiple_job_exec( + self, gcs_hook, dataflow_hook_mock, beam_hook_mock, mock_callback_on_job_id + ): """Test DataflowHook is created and the right args are passed to - start_java_workflow with option to check multiple jobs - + start_java_workflow with option to check multiple jobs """ - dataflow_running = dataflow_mock.return_value.is_job_dataflow_running + is_job_dataflow_running_variables = None + + def set_is_job_dataflow_running_variables(*args, **kwargs): + nonlocal is_job_dataflow_running_variables + is_job_dataflow_running_variables = copy.deepcopy(kwargs.get("variables")) + + dataflow_running = dataflow_hook_mock.return_value.is_job_dataflow_running + dataflow_running.side_effect = set_is_job_dataflow_running_variables dataflow_running.return_value = False - start_java_hook = dataflow_mock.return_value.start_java_dataflow + start_java_mock = beam_hook_mock.return_value.start_java_pipeline gcs_provide_file = gcs_hook.return_value.provide_file - self.dataflow.multiple_jobs = True self.dataflow.check_if_running = True + self.dataflow.multiple_jobs = True + self.dataflow.execute(None) - assert dataflow_mock.called + + mock_callback_on_job_id.assert_called_once_with(on_new_job_id_callback=mock.ANY) gcs_provide_file.assert_called_once_with(object_url=JAR_FILE) - start_java_hook.assert_called_once_with( - job_name=JOB_NAME, - variables=mock.ANY, - jar=mock.ANY, + expected_variables = { + 'project': dataflow_hook_mock.return_value.project_id, + 'stagingLocation': 'gs://test/staging', + 'jobName': JOB_NAME, + 'region': TEST_LOCATION, + 'output': 'gs://test/output', + 'labels': {'foo': 'bar', 'airflow-version': 'v2-1-0-dev0'}, + } + self.assertEqual(expected_variables, is_job_dataflow_running_variables) + job_name = dataflow_hook_mock.return_value.build_dataflow_job_name.return_value + expected_variables["jobName"] = job_name + start_java_mock.assert_called_once_with( + variables=expected_variables, + jar=gcs_provide_file.return_value.__enter__.return_value.name, job_class=JOB_CLASS, - append_job_name=True, - multiple_jobs=True, - on_new_job_id_callback=mock.ANY, - project_id=None, - location=TEST_LOCATION, + process_line_callback=mock_callback_on_job_id.return_value, ) - dataflow_running.assert_called_once_with( - name=JOB_NAME, variables=mock.ANY, project_id=None, location=TEST_LOCATION + dataflow_hook_mock.return_value.wait_for_done.assert_called_once_with( + job_id=mock.ANY, + job_name=job_name, + location=TEST_LOCATION, + multiple_jobs=True, ) diff --git a/tests/providers/google/cloud/operators/test_dataproc.py b/tests/providers/google/cloud/operators/test_dataproc.py index ca8f706f4bc15..791e8ea398b2a 100644 --- a/tests/providers/google/cloud/operators/test_dataproc.py +++ b/tests/providers/google/cloud/operators/test_dataproc.py @@ -204,8 +204,9 @@ def
test_deprecation_warning(self): assert_warning("Default region value", warning) self.assertEqual(op_default_region.region, 'global') + @mock.patch(DATAPROC_PATH.format("Cluster.to_dict")) @mock.patch(DATAPROC_PATH.format("DataprocHook")) - def test_execute(self, mock_hook): + def test_execute(self, mock_hook, to_dict_mock): op = DataprocCreateClusterOperator( task_id=TASK_ID, region=GCP_LOCATION, @@ -233,9 +234,11 @@ def test_execute(self, mock_hook): timeout=TIMEOUT, metadata=METADATA, ) + to_dict_mock.assert_called_once_with(mock_hook().create_cluster().result()) + @mock.patch(DATAPROC_PATH.format("Cluster.to_dict")) @mock.patch(DATAPROC_PATH.format("DataprocHook")) - def test_execute_if_cluster_exists(self, mock_hook): + def test_execute_if_cluster_exists(self, mock_hook, to_dict_mock): mock_hook.return_value.create_cluster.side_effect = [AlreadyExists("test")] mock_hook.return_value.get_cluster.return_value.status.state = 0 op = DataprocCreateClusterOperator( @@ -273,6 +276,7 @@ def test_execute_if_cluster_exists(self, mock_hook): timeout=TIMEOUT, metadata=METADATA, ) + to_dict_mock.assert_called_once_with(mock_hook.return_value.get_cluster.return_value) @mock.patch(DATAPROC_PATH.format("DataprocHook")) def test_execute_if_cluster_exists_do_not_use(self, mock_hook): @@ -300,7 +304,7 @@ def test_execute_if_cluster_exists_in_error_state(self, mock_hook): mock_hook.return_value.create_cluster.side_effect = [AlreadyExists("test")] cluster_status = mock_hook.return_value.get_cluster.return_value.status cluster_status.state = 0 - cluster_status.ERROR = 0 + cluster_status.State.ERROR = 0 op = DataprocCreateClusterOperator( task_id=TASK_ID, @@ -335,11 +339,11 @@ def test_execute_if_cluster_exists_in_deleting_state( ): cluster = mock.MagicMock() cluster.status.state = 0 - cluster.status.DELETING = 0 + cluster.status.State.DELETING = 0 # pylint: disable=no-member cluster2 = mock.MagicMock() cluster2.status.state = 0 - cluster2.status.ERROR = 0 + cluster2.status.State.ERROR = 0 # pylint: disable=no-member mock_create_cluster.side_effect = [AlreadyExists("test"), cluster2] mock_generator.return_value = [0] diff --git a/tests/providers/google/cloud/operators/test_mlengine_utils.py b/tests/providers/google/cloud/operators/test_mlengine_utils.py index 539ee608dec0f..c46fa62b46431 100644 --- a/tests/providers/google/cloud/operators/test_mlengine_utils.py +++ b/tests/providers/google/cloud/operators/test_mlengine_utils.py @@ -106,9 +106,14 @@ def test_successful_run(self): ) assert success_message['predictionOutput'] == result - with patch('airflow.providers.google.cloud.operators.dataflow.DataflowHook') as mock_dataflow_hook: - hook_instance = mock_dataflow_hook.return_value - hook_instance.start_python_dataflow.return_value = None + with patch( + 'airflow.providers.google.cloud.operators.dataflow.DataflowHook' + ) as mock_dataflow_hook, patch( + 'airflow.providers.google.cloud.operators.dataflow.BeamHook' + ) as mock_beam_hook: + dataflow_hook_instance = mock_dataflow_hook.return_value + dataflow_hook_instance.start_python_dataflow.return_value = None + beam_hook_instance = mock_beam_hook.return_value summary.execute(None) mock_dataflow_hook.assert_called_once_with( gcp_conn_id='google_cloud_default', @@ -117,23 +122,28 @@ def test_successful_run(self): drain_pipeline=False, cancel_timeout=600, wait_until_finished=None, + impersonation_chain=None, ) - hook_instance.start_python_dataflow.assert_called_once_with( - job_name='{{task.task_id}}', + 
mock_beam_hook.assert_called_once_with(runner="DataflowRunner") + beam_hook_instance.start_python_pipeline.assert_called_once_with( variables={ 'prediction_path': 'gs://legal-bucket/fake-output-path', 'labels': {'airflow-version': TEST_VERSION}, 'metric_keys': 'err', 'metric_fn_encoded': self.metric_fn_encoded, + 'project': 'test-project', + 'region': 'us-central1', + 'job_name': mock.ANY, }, - dataflow=mock.ANY, + py_file=mock.ANY, py_options=[], - py_requirements=['apache-beam[gcp]>=2.14.0'], py_interpreter='python3', + py_requirements=['apache-beam[gcp]>=2.14.0'], py_system_site_packages=False, - on_new_job_id_callback=ANY, - project_id='test-project', - location='us-central1', + process_line_callback=mock.ANY, + ) + dataflow_hook_instance.wait_for_done.assert_called_once_with( + job_name=mock.ANY, location='us-central1', job_id=mock.ANY, multiple_jobs=False ) with patch('airflow.providers.google.cloud.utils.mlengine_operator_utils.GCSHook') as mock_gcs_hook: diff --git a/tests/providers/google/cloud/operators/test_pubsub.py b/tests/providers/google/cloud/operators/test_pubsub.py index 9ff71e6e7f8e5..6abfffa0e0c84 100644 --- a/tests/providers/google/cloud/operators/test_pubsub.py +++ b/tests/providers/google/cloud/operators/test_pubsub.py @@ -21,7 +21,6 @@ from unittest import mock from google.cloud.pubsub_v1.types import ReceivedMessage -from google.protobuf.json_format import MessageToDict, ParseDict from airflow.providers.google.cloud.operators.pubsub import ( PubSubCreateSubscriptionOperator, @@ -230,21 +229,18 @@ def test_publish(self, mock_hook): class TestPubSubPullOperator(unittest.TestCase): def _generate_messages(self, count): return [ - ParseDict( - { - "ack_id": "%s" % i, - "message": { - "data": f'Message {i}'.encode('utf8'), - "attributes": {"type": "generated message"}, - }, + ReceivedMessage( + ack_id="%s" % i, + message={ + "data": f'Message {i}'.encode('utf8'), + "attributes": {"type": "generated message"}, }, - ReceivedMessage(), ) for i in range(1, count + 1) ] def _generate_dicts(self, count): - return [MessageToDict(m) for m in self._generate_messages(count)] + return [ReceivedMessage.to_dict(m) for m in self._generate_messages(count)] @mock.patch('airflow.providers.google.cloud.operators.pubsub.PubSubHook') def test_execute_no_messages(self, mock_hook): diff --git a/tests/providers/google/cloud/operators/test_stackdriver.py b/tests/providers/google/cloud/operators/test_stackdriver.py index 28901b4b32416..6063c9299a14d 100644 --- a/tests/providers/google/cloud/operators/test_stackdriver.py +++ b/tests/providers/google/cloud/operators/test_stackdriver.py @@ -21,6 +21,7 @@ from unittest import mock from google.api_core.gapic_v1.method import DEFAULT +from google.cloud.monitoring_v3 import AlertPolicy, NotificationChannel from airflow.providers.google.cloud.operators.stackdriver import ( StackdriverDeleteAlertOperator, @@ -40,16 +41,15 @@ TEST_ALERT_POLICY_1 = { "combiner": "OR", "name": "projects/sd-project/alertPolicies/12345", - "creationRecord": {"mutatedBy": "user123", "mutateTime": "2020-01-01T00:00:00.000000Z"}, "enabled": True, - "displayName": "test display", + "display_name": "test display", "conditions": [ { - "conditionThreshold": { + "condition_threshold": { "comparison": "COMPARISON_GT", - "aggregations": [{"alignmentPeriod": "60s", "perSeriesAligner": "ALIGN_RATE"}], + "aggregations": [{"alignment_period": {'seconds': 60}, "per_series_aligner": "ALIGN_RATE"}], }, - "displayName": "Condition display", + "display_name": "Condition display", "name":
"projects/sd-project/alertPolicies/123/conditions/456", } ], @@ -58,16 +58,15 @@ TEST_ALERT_POLICY_2 = { "combiner": "OR", "name": "projects/sd-project/alertPolicies/6789", - "creationRecord": {"mutatedBy": "user123", "mutateTime": "2020-01-01T00:00:00.000000Z"}, "enabled": False, - "displayName": "test display", + "display_name": "test display", "conditions": [ { - "conditionThreshold": { + "condition_threshold": { "comparison": "COMPARISON_GT", - "aggregations": [{"alignmentPeriod": "60s", "perSeriesAligner": "ALIGN_RATE"}], + "aggregations": [{"alignment_period": {'seconds': 60}, "per_series_aligner": "ALIGN_RATE"}], }, - "displayName": "Condition display", + "display_name": "Condition display", "name": "projects/sd-project/alertPolicies/456/conditions/789", } ], @@ -94,7 +93,8 @@ class TestStackdriverListAlertPoliciesOperator(unittest.TestCase): @mock.patch('airflow.providers.google.cloud.operators.stackdriver.StackdriverHook') def test_execute(self, mock_hook): operator = StackdriverListAlertPoliciesOperator(task_id=TEST_TASK_ID, filter_=TEST_FILTER) - operator.execute(None) + mock_hook.return_value.list_alert_policies.return_value = [AlertPolicy(name="test-name")] + result = operator.execute(None) mock_hook.return_value.list_alert_policies.assert_called_once_with( project_id=None, filter_=TEST_FILTER, @@ -105,6 +105,16 @@ def test_execute(self, mock_hook): timeout=DEFAULT, metadata=None, ) + assert [ + { + 'combiner': 0, + 'conditions': [], + 'display_name': '', + 'name': 'test-name', + 'notification_channels': [], + 'user_labels': {}, + } + ] == result class TestStackdriverEnableAlertPoliciesOperator(unittest.TestCase): @@ -160,7 +170,11 @@ class TestStackdriverListNotificationChannelsOperator(unittest.TestCase): @mock.patch('airflow.providers.google.cloud.operators.stackdriver.StackdriverHook') def test_execute(self, mock_hook): operator = StackdriverListNotificationChannelsOperator(task_id=TEST_TASK_ID, filter_=TEST_FILTER) - operator.execute(None) + mock_hook.return_value.list_notification_channels.return_value = [ + NotificationChannel(name="test-123") + ] + + result = operator.execute(None) mock_hook.return_value.list_notification_channels.assert_called_once_with( project_id=None, filter_=TEST_FILTER, @@ -171,6 +185,33 @@ def test_execute(self, mock_hook): timeout=DEFAULT, metadata=None, ) + # Depending on the version of google-apitools installed we might receive the response either with or + # without mutation_records. 
+ assert result in [ + [ + { + 'description': '', + 'display_name': '', + 'labels': {}, + 'name': 'test-123', + 'type_': '', + 'user_labels': {}, + 'verification_status': 0, + } + ], + [ + { + 'description': '', + 'display_name': '', + 'labels': {}, + 'mutation_records': [], + 'name': 'test-123', + 'type_': '', + 'user_labels': {}, + 'verification_status': 0, + } + ], + ] class TestStackdriverEnableNotificationChannelsOperator(unittest.TestCase): diff --git a/tests/providers/google/cloud/operators/test_tasks.py b/tests/providers/google/cloud/operators/test_tasks.py index cac1441f67698..ed7691169fe4c 100644 --- a/tests/providers/google/cloud/operators/test_tasks.py +++ b/tests/providers/google/cloud/operators/test_tasks.py @@ -45,21 +45,26 @@ FULL_QUEUE_PATH = "projects/test-project/locations/asia-east2/queues/test-queue" TASK_NAME = "test-task" FULL_TASK_PATH = "projects/test-project/locations/asia-east2/queues/test-queue/tasks/test-task" +TEST_QUEUE = Queue(name=FULL_QUEUE_PATH) +TEST_TASK = Task(app_engine_http_request={}) class TestCloudTasksQueueCreate(unittest.TestCase): @mock.patch("airflow.providers.google.cloud.operators.tasks.CloudTasksHook") def test_create_queue(self, mock_hook): - mock_hook.return_value.create_queue.return_value = mock.MagicMock() - operator = CloudTasksQueueCreateOperator(location=LOCATION, task_queue=Queue(), task_id="id") - operator.execute(context=None) + mock_hook.return_value.create_queue.return_value = TEST_QUEUE + operator = CloudTasksQueueCreateOperator(location=LOCATION, task_queue=TEST_QUEUE, task_id="id") + + result = operator.execute(context=None) + + self.assertEqual({'name': FULL_QUEUE_PATH, 'state': 0}, result) mock_hook.assert_called_once_with( gcp_conn_id=GCP_CONN_ID, impersonation_chain=None, ) mock_hook.return_value.create_queue.assert_called_once_with( location=LOCATION, - task_queue=Queue(), + task_queue=TEST_QUEUE, project_id=None, queue_name=None, retry=None, @@ -71,9 +76,12 @@ def test_create_queue(self, mock_hook): class TestCloudTasksQueueUpdate(unittest.TestCase): @mock.patch("airflow.providers.google.cloud.operators.tasks.CloudTasksHook") def test_update_queue(self, mock_hook): - mock_hook.return_value.update_queue.return_value = mock.MagicMock() + mock_hook.return_value.update_queue.return_value = TEST_QUEUE operator = CloudTasksQueueUpdateOperator(task_queue=Queue(name=FULL_QUEUE_PATH), task_id="id") - operator.execute(context=None) + + result = operator.execute(context=None) + + self.assertEqual({'name': FULL_QUEUE_PATH, 'state': 0}, result) mock_hook.assert_called_once_with( gcp_conn_id=GCP_CONN_ID, impersonation_chain=None, @@ -93,9 +101,12 @@ def test_update_queue(self, mock_hook): class TestCloudTasksQueueGet(unittest.TestCase): @mock.patch("airflow.providers.google.cloud.operators.tasks.CloudTasksHook") def test_get_queue(self, mock_hook): - mock_hook.return_value.get_queue.return_value = mock.MagicMock() + mock_hook.return_value.get_queue.return_value = TEST_QUEUE operator = CloudTasksQueueGetOperator(location=LOCATION, queue_name=QUEUE_ID, task_id="id") - operator.execute(context=None) + + result = operator.execute(context=None) + + self.assertEqual({'name': FULL_QUEUE_PATH, 'state': 0}, result) mock_hook.assert_called_once_with( gcp_conn_id=GCP_CONN_ID, impersonation_chain=None, @@ -113,9 +124,12 @@ def test_get_queue(self, mock_hook): class TestCloudTasksQueuesList(unittest.TestCase): @mock.patch("airflow.providers.google.cloud.operators.tasks.CloudTasksHook") def test_list_queues(self, mock_hook): - 
mock_hook.return_value.list_queues.return_value = mock.MagicMock() + mock_hook.return_value.list_queues.return_value = [TEST_QUEUE] operator = CloudTasksQueuesListOperator(location=LOCATION, task_id="id") - operator.execute(context=None) + + result = operator.execute(context=None) + + self.assertEqual([{'name': FULL_QUEUE_PATH, 'state': 0}], result) mock_hook.assert_called_once_with( gcp_conn_id=GCP_CONN_ID, impersonation_chain=None, @@ -134,9 +148,12 @@ def test_list_queues(self, mock_hook): class TestCloudTasksQueueDelete(unittest.TestCase): @mock.patch("airflow.providers.google.cloud.operators.tasks.CloudTasksHook") def test_delete_queue(self, mock_hook): - mock_hook.return_value.delete_queue.return_value = mock.MagicMock() + mock_hook.return_value.delete_queue.return_value = None operator = CloudTasksQueueDeleteOperator(location=LOCATION, queue_name=QUEUE_ID, task_id="id") - operator.execute(context=None) + + result = operator.execute(context=None) + + self.assertEqual(None, result) mock_hook.assert_called_once_with( gcp_conn_id=GCP_CONN_ID, impersonation_chain=None, @@ -154,9 +171,12 @@ def test_delete_queue(self, mock_hook): class TestCloudTasksQueuePurge(unittest.TestCase): @mock.patch("airflow.providers.google.cloud.operators.tasks.CloudTasksHook") def test_delete_queue(self, mock_hook): - mock_hook.return_value.purge_queue.return_value = mock.MagicMock() + mock_hook.return_value.purge_queue.return_value = TEST_QUEUE operator = CloudTasksQueuePurgeOperator(location=LOCATION, queue_name=QUEUE_ID, task_id="id") - operator.execute(context=None) + + result = operator.execute(context=None) + + self.assertEqual({'name': FULL_QUEUE_PATH, 'state': 0}, result) mock_hook.assert_called_once_with( gcp_conn_id=GCP_CONN_ID, impersonation_chain=None, @@ -174,9 +194,12 @@ def test_delete_queue(self, mock_hook): class TestCloudTasksQueuePause(unittest.TestCase): @mock.patch("airflow.providers.google.cloud.operators.tasks.CloudTasksHook") def test_pause_queue(self, mock_hook): - mock_hook.return_value.pause_queue.return_value = mock.MagicMock() + mock_hook.return_value.pause_queue.return_value = TEST_QUEUE operator = CloudTasksQueuePauseOperator(location=LOCATION, queue_name=QUEUE_ID, task_id="id") - operator.execute(context=None) + + result = operator.execute(context=None) + + self.assertEqual({'name': FULL_QUEUE_PATH, 'state': 0}, result) mock_hook.assert_called_once_with( gcp_conn_id=GCP_CONN_ID, impersonation_chain=None, @@ -194,9 +217,12 @@ def test_pause_queue(self, mock_hook): class TestCloudTasksQueueResume(unittest.TestCase): @mock.patch("airflow.providers.google.cloud.operators.tasks.CloudTasksHook") def test_resume_queue(self, mock_hook): - mock_hook.return_value.resume_queue.return_value = mock.MagicMock() + mock_hook.return_value.resume_queue.return_value = TEST_QUEUE operator = CloudTasksQueueResumeOperator(location=LOCATION, queue_name=QUEUE_ID, task_id="id") - operator.execute(context=None) + + result = operator.execute(context=None) + + self.assertEqual({'name': FULL_QUEUE_PATH, 'state': 0}, result) mock_hook.assert_called_once_with( gcp_conn_id=GCP_CONN_ID, impersonation_chain=None, @@ -214,11 +240,23 @@ def test_resume_queue(self, mock_hook): class TestCloudTasksTaskCreate(unittest.TestCase): @mock.patch("airflow.providers.google.cloud.operators.tasks.CloudTasksHook") def test_create_task(self, mock_hook): - mock_hook.return_value.create_task.return_value = mock.MagicMock() + mock_hook.return_value.create_task.return_value = TEST_TASK operator = CloudTasksTaskCreateOperator( 
location=LOCATION, queue_name=QUEUE_ID, task=Task(), task_id="id" ) - operator.execute(context=None) + + result = operator.execute(context=None) + + self.assertEqual( + { + 'app_engine_http_request': {'body': '', 'headers': {}, 'http_method': 0, 'relative_uri': ''}, + 'dispatch_count': 0, + 'name': '', + 'response_count': 0, + 'view': 0, + }, + result, + ) mock_hook.assert_called_once_with( gcp_conn_id=GCP_CONN_ID, impersonation_chain=None, @@ -239,11 +277,23 @@ def test_create_task(self, mock_hook): class TestCloudTasksTaskGet(unittest.TestCase): @mock.patch("airflow.providers.google.cloud.operators.tasks.CloudTasksHook") def test_get_task(self, mock_hook): - mock_hook.return_value.get_task.return_value = mock.MagicMock() + mock_hook.return_value.get_task.return_value = TEST_TASK operator = CloudTasksTaskGetOperator( location=LOCATION, queue_name=QUEUE_ID, task_name=TASK_NAME, task_id="id" ) - operator.execute(context=None) + + result = operator.execute(context=None) + + self.assertEqual( + { + 'app_engine_http_request': {'body': '', 'headers': {}, 'http_method': 0, 'relative_uri': ''}, + 'dispatch_count': 0, + 'name': '', + 'response_count': 0, + 'view': 0, + }, + result, + ) mock_hook.assert_called_once_with( gcp_conn_id=GCP_CONN_ID, impersonation_chain=None, @@ -263,9 +313,28 @@ def test_get_task(self, mock_hook): class TestCloudTasksTasksList(unittest.TestCase): @mock.patch("airflow.providers.google.cloud.operators.tasks.CloudTasksHook") def test_list_tasks(self, mock_hook): - mock_hook.return_value.list_tasks.return_value = mock.MagicMock() + mock_hook.return_value.list_tasks.return_value = [TEST_TASK] operator = CloudTasksTasksListOperator(location=LOCATION, queue_name=QUEUE_ID, task_id="id") - operator.execute(context=None) + + result = operator.execute(context=None) + + self.assertEqual( + [ + { + 'app_engine_http_request': { + 'body': '', + 'headers': {}, + 'http_method': 0, + 'relative_uri': '', + }, + 'dispatch_count': 0, + 'name': '', + 'response_count': 0, + 'view': 0, + } + ], + result, + ) mock_hook.assert_called_once_with( gcp_conn_id=GCP_CONN_ID, impersonation_chain=None, @@ -285,11 +354,14 @@ def test_list_tasks(self, mock_hook): class TestCloudTasksTaskDelete(unittest.TestCase): @mock.patch("airflow.providers.google.cloud.operators.tasks.CloudTasksHook") def test_delete_task(self, mock_hook): - mock_hook.return_value.delete_task.return_value = mock.MagicMock() + mock_hook.return_value.delete_task.return_value = None operator = CloudTasksTaskDeleteOperator( location=LOCATION, queue_name=QUEUE_ID, task_name=TASK_NAME, task_id="id" ) - operator.execute(context=None) + + result = operator.execute(context=None) + + self.assertEqual(None, result) mock_hook.assert_called_once_with( gcp_conn_id=GCP_CONN_ID, impersonation_chain=None, @@ -308,11 +380,23 @@ def test_delete_task(self, mock_hook): class TestCloudTasksTaskRun(unittest.TestCase): @mock.patch("airflow.providers.google.cloud.operators.tasks.CloudTasksHook") def test_run_task(self, mock_hook): - mock_hook.return_value.run_task.return_value = mock.MagicMock() + mock_hook.return_value.run_task.return_value = TEST_TASK operator = CloudTasksTaskRunOperator( location=LOCATION, queue_name=QUEUE_ID, task_name=TASK_NAME, task_id="id" ) - operator.execute(context=None) + + result = operator.execute(context=None) + + self.assertEqual( + { + 'app_engine_http_request': {'body': '', 'headers': {}, 'http_method': 0, 'relative_uri': ''}, + 'dispatch_count': 0, + 'name': '', + 'response_count': 0, + 'view': 0, + }, + result, + ) 
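The fully-expanded dictionary in the assertion above is also why these tests now stub the hook with real proto objects (`TEST_QUEUE`, `TEST_TASK`) rather than `mock.MagicMock()`: the operator converts the hook's return value with `to_dict`, which only works on a genuine proto-plus message and emits every field with its default value. A small sketch of the pattern (the `tasks_v2` import path is assumed to match what this test module already uses):

```python
from unittest import mock

from google.cloud.tasks_v2.types import Task  # assumed import, as in these tests

# A real proto object survives to_dict(); a bare MagicMock would not.
TEST_TASK = Task(app_engine_http_request={})

hook = mock.MagicMock()
hook.run_task.return_value = TEST_TASK

result = Task.to_dict(hook.run_task.return_value)
assert result["dispatch_count"] == 0  # proto defaults are materialized
assert result["name"] == ""
```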
mock_hook.assert_called_once_with( gcp_conn_id=GCP_CONN_ID, impersonation_chain=None, diff --git a/tests/providers/google/cloud/operators/test_workflows.py b/tests/providers/google/cloud/operators/test_workflows.py new file mode 100644 index 0000000000000..5578548ffb40e --- /dev/null +++ b/tests/providers/google/cloud/operators/test_workflows.py @@ -0,0 +1,383 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import datetime +from unittest import mock + +import pytz + +from airflow.providers.google.cloud.operators.workflows import ( + WorkflowsCancelExecutionOperator, + WorkflowsCreateExecutionOperator, + WorkflowsCreateWorkflowOperator, + WorkflowsDeleteWorkflowOperator, + WorkflowsGetExecutionOperator, + WorkflowsGetWorkflowOperator, + WorkflowsListExecutionsOperator, + WorkflowsListWorkflowsOperator, + WorkflowsUpdateWorkflowOperator, +) + +BASE_PATH = "airflow.providers.google.cloud.operators.workflows.{}" +LOCATION = "europe-west1" +WORKFLOW_ID = "workflow_id" +EXECUTION_ID = "execution_id" +WORKFLOW = {"aa": "bb"} +EXECUTION = {"ccc": "ddd"} +PROJECT_ID = "airflow-testing" +METADATA = None +TIMEOUT = None +RETRY = None +FILTER_ = "aaaa" +ORDER_BY = "bbb" +UPDATE_MASK = "aaa,bbb" +GCP_CONN_ID = "test-conn" +IMPERSONATION_CHAIN = None + + +class TestWorkflowsCreateWorkflowOperator: + @mock.patch(BASE_PATH.format("Workflow")) + @mock.patch(BASE_PATH.format("WorkflowsHook")) + def test_execute(self, mock_hook, mock_object): + op = WorkflowsCreateWorkflowOperator( + task_id="test_task", + workflow=WORKFLOW, + workflow_id=WORKFLOW_ID, + location=LOCATION, + project_id=PROJECT_ID, + retry=RETRY, + timeout=TIMEOUT, + metadata=METADATA, + gcp_conn_id=GCP_CONN_ID, + impersonation_chain=IMPERSONATION_CHAIN, + ) + result = op.execute({}) + + mock_hook.assert_called_once_with( + gcp_conn_id=GCP_CONN_ID, + impersonation_chain=IMPERSONATION_CHAIN, + ) + + mock_hook.return_value.create_workflow.assert_called_once_with( + workflow=WORKFLOW, + workflow_id=WORKFLOW_ID, + location=LOCATION, + project_id=PROJECT_ID, + retry=RETRY, + timeout=TIMEOUT, + metadata=METADATA, + ) + + assert result == mock_object.to_dict.return_value + + +class TestWorkflowsUpdateWorkflowOperator: + @mock.patch(BASE_PATH.format("Workflow")) + @mock.patch(BASE_PATH.format("WorkflowsHook")) + def test_execute(self, mock_hook, mock_object): + op = WorkflowsUpdateWorkflowOperator( + task_id="test_task", + workflow_id=WORKFLOW_ID, + location=LOCATION, + project_id=PROJECT_ID, + update_mask=UPDATE_MASK, + retry=RETRY, + timeout=TIMEOUT, + metadata=METADATA, + gcp_conn_id=GCP_CONN_ID, + impersonation_chain=IMPERSONATION_CHAIN, + ) + result = op.execute({}) + + mock_hook.assert_called_once_with( + gcp_conn_id=GCP_CONN_ID, + impersonation_chain=IMPERSONATION_CHAIN, + ) + + 
mock_hook.return_value.get_workflow.assert_called_once_with( + workflow_id=WORKFLOW_ID, + location=LOCATION, + project_id=PROJECT_ID, + retry=RETRY, + timeout=TIMEOUT, + metadata=METADATA, + ) + + mock_hook.return_value.update_workflow.assert_called_once_with( + workflow=mock_hook.return_value.get_workflow.return_value, + update_mask=UPDATE_MASK, + retry=RETRY, + timeout=TIMEOUT, + metadata=METADATA, + ) + + assert result == mock_object.to_dict.return_value + + +class TestWorkflowsDeleteWorkflowOperator: + @mock.patch(BASE_PATH.format("WorkflowsHook")) + def test_execute( + self, + mock_hook, + ): + op = WorkflowsDeleteWorkflowOperator( + task_id="test_task", + workflow_id=WORKFLOW_ID, + location=LOCATION, + project_id=PROJECT_ID, + retry=RETRY, + timeout=TIMEOUT, + metadata=METADATA, + gcp_conn_id=GCP_CONN_ID, + impersonation_chain=IMPERSONATION_CHAIN, + ) + op.execute({}) + + mock_hook.assert_called_once_with( + gcp_conn_id=GCP_CONN_ID, + impersonation_chain=IMPERSONATION_CHAIN, + ) + + mock_hook.return_value.delete_workflow.assert_called_once_with( + workflow_id=WORKFLOW_ID, + location=LOCATION, + project_id=PROJECT_ID, + retry=RETRY, + timeout=TIMEOUT, + metadata=METADATA, + ) + + +class TestWorkflowsListWorkflowsOperator: + @mock.patch(BASE_PATH.format("Workflow")) + @mock.patch(BASE_PATH.format("WorkflowsHook")) + def test_execute(self, mock_hook, mock_object): + workflow_mock = mock.MagicMock() + workflow_mock.start_time = datetime.datetime.now(tz=pytz.UTC) + datetime.timedelta(minutes=5) + mock_hook.return_value.list_workflows.return_value = [workflow_mock] + + op = WorkflowsListWorkflowsOperator( + task_id="test_task", + location=LOCATION, + project_id=PROJECT_ID, + filter_=FILTER_, + order_by=ORDER_BY, + retry=RETRY, + timeout=TIMEOUT, + metadata=METADATA, + gcp_conn_id=GCP_CONN_ID, + impersonation_chain=IMPERSONATION_CHAIN, + ) + result = op.execute({}) + + mock_hook.assert_called_once_with( + gcp_conn_id=GCP_CONN_ID, + impersonation_chain=IMPERSONATION_CHAIN, + ) + + mock_hook.return_value.list_workflows.assert_called_once_with( + location=LOCATION, + project_id=PROJECT_ID, + filter_=FILTER_, + order_by=ORDER_BY, + retry=RETRY, + timeout=TIMEOUT, + metadata=METADATA, + ) + + assert result == [mock_object.to_dict.return_value] + + +class TestWorkflowsGetWorkflowOperator: + @mock.patch(BASE_PATH.format("Workflow")) + @mock.patch(BASE_PATH.format("WorkflowsHook")) + def test_execute(self, mock_hook, mock_object): + op = WorkflowsGetWorkflowOperator( + task_id="test_task", + workflow_id=WORKFLOW_ID, + location=LOCATION, + project_id=PROJECT_ID, + retry=RETRY, + timeout=TIMEOUT, + metadata=METADATA, + gcp_conn_id=GCP_CONN_ID, + impersonation_chain=IMPERSONATION_CHAIN, + ) + result = op.execute({}) + + mock_hook.assert_called_once_with( + gcp_conn_id=GCP_CONN_ID, + impersonation_chain=IMPERSONATION_CHAIN, + ) + + mock_hook.return_value.get_workflow.assert_called_once_with( + workflow_id=WORKFLOW_ID, + location=LOCATION, + project_id=PROJECT_ID, + retry=RETRY, + timeout=TIMEOUT, + metadata=METADATA, + ) + + assert result == mock_object.to_dict.return_value + + +class TestWorkflowExecutionsCreateExecutionOperator: + @mock.patch(BASE_PATH.format("Execution")) + @mock.patch(BASE_PATH.format("WorkflowsHook")) + @mock.patch(BASE_PATH.format("WorkflowsCreateExecutionOperator.xcom_push")) + def test_execute(self, mock_xcom, mock_hook, mock_object): + mock_hook.return_value.create_execution.return_value.name = "name/execution_id" + op = WorkflowsCreateExecutionOperator( + task_id="test_task", 
+ workflow_id=WORKFLOW_ID, + execution=EXECUTION, + location=LOCATION, + project_id=PROJECT_ID, + retry=RETRY, + timeout=TIMEOUT, + metadata=METADATA, + gcp_conn_id=GCP_CONN_ID, + impersonation_chain=IMPERSONATION_CHAIN, + ) + result = op.execute({}) + + mock_hook.assert_called_once_with( + gcp_conn_id=GCP_CONN_ID, + impersonation_chain=IMPERSONATION_CHAIN, + ) + + mock_hook.return_value.create_execution.assert_called_once_with( + workflow_id=WORKFLOW_ID, + execution=EXECUTION, + location=LOCATION, + project_id=PROJECT_ID, + retry=RETRY, + timeout=TIMEOUT, + metadata=METADATA, + ) + mock_xcom.assert_called_once_with({}, key="execution_id", value="execution_id") + assert result == mock_object.to_dict.return_value + + +class TestWorkflowExecutionsCancelExecutionOperator: + @mock.patch(BASE_PATH.format("Execution")) + @mock.patch(BASE_PATH.format("WorkflowsHook")) + def test_execute(self, mock_hook, mock_object): + op = WorkflowsCancelExecutionOperator( + task_id="test_task", + workflow_id=WORKFLOW_ID, + execution_id=EXECUTION_ID, + location=LOCATION, + project_id=PROJECT_ID, + retry=RETRY, + timeout=TIMEOUT, + metadata=METADATA, + gcp_conn_id=GCP_CONN_ID, + impersonation_chain=IMPERSONATION_CHAIN, + ) + result = op.execute({}) + + mock_hook.assert_called_once_with( + gcp_conn_id=GCP_CONN_ID, + impersonation_chain=IMPERSONATION_CHAIN, + ) + + mock_hook.return_value.cancel_execution.assert_called_once_with( + workflow_id=WORKFLOW_ID, + execution_id=EXECUTION_ID, + location=LOCATION, + project_id=PROJECT_ID, + retry=RETRY, + timeout=TIMEOUT, + metadata=METADATA, + ) + + assert result == mock_object.to_dict.return_value + + +class TestWorkflowExecutionsListExecutionsOperator: + @mock.patch(BASE_PATH.format("Execution")) + @mock.patch(BASE_PATH.format("WorkflowsHook")) + def test_execute(self, mock_hook, mock_object): + execution_mock = mock.MagicMock() + execution_mock.start_time = datetime.datetime.now(tz=pytz.UTC) + datetime.timedelta(minutes=5) + mock_hook.return_value.list_executions.return_value = [execution_mock] + + op = WorkflowsListExecutionsOperator( + task_id="test_task", + workflow_id=WORKFLOW_ID, + location=LOCATION, + project_id=PROJECT_ID, + retry=RETRY, + timeout=TIMEOUT, + metadata=METADATA, + gcp_conn_id=GCP_CONN_ID, + impersonation_chain=IMPERSONATION_CHAIN, + ) + result = op.execute({}) + + mock_hook.assert_called_once_with( + gcp_conn_id=GCP_CONN_ID, + impersonation_chain=IMPERSONATION_CHAIN, + ) + + mock_hook.return_value.list_executions.assert_called_once_with( + workflow_id=WORKFLOW_ID, + location=LOCATION, + project_id=PROJECT_ID, + retry=RETRY, + timeout=TIMEOUT, + metadata=METADATA, + ) + + assert result == [mock_object.to_dict.return_value] + + +class TestWorkflowExecutionsGetExecutionOperator: + @mock.patch(BASE_PATH.format("Execution")) + @mock.patch(BASE_PATH.format("WorkflowsHook")) + def test_execute(self, mock_hook, mock_object): + op = WorkflowsGetExecutionOperator( + task_id="test_task", + workflow_id=WORKFLOW_ID, + execution_id=EXECUTION_ID, + location=LOCATION, + project_id=PROJECT_ID, + retry=RETRY, + timeout=TIMEOUT, + metadata=METADATA, + gcp_conn_id=GCP_CONN_ID, + impersonation_chain=IMPERSONATION_CHAIN, + ) + result = op.execute({}) + + mock_hook.assert_called_once_with( + gcp_conn_id=GCP_CONN_ID, + impersonation_chain=IMPERSONATION_CHAIN, + ) + + mock_hook.return_value.get_execution.assert_called_once_with( + workflow_id=WORKFLOW_ID, + execution_id=EXECUTION_ID, + location=LOCATION, + project_id=PROJECT_ID, + retry=RETRY, + timeout=TIMEOUT, + 
metadata=METADATA, + ) + + assert result == mock_object.to_dict.return_value diff --git a/tests/providers/google/cloud/operators/test_workflows_system.py b/tests/providers/google/cloud/operators/test_workflows_system.py new file mode 100644 index 0000000000000..0a768edecbbd8 --- /dev/null +++ b/tests/providers/google/cloud/operators/test_workflows_system.py @@ -0,0 +1,29 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import pytest + +from tests.providers.google.cloud.utils.gcp_authenticator import GCP_WORKFLOWS_KEY +from tests.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, GoogleSystemTest, provide_gcp_context + + +@pytest.mark.system("google.cloud") +@pytest.mark.credential_file(GCP_WORKFLOWS_KEY) +class CloudWorkflowsExampleDagsSystemTest(GoogleSystemTest): + @provide_gcp_context(GCP_WORKFLOWS_KEY) + def test_run_example_workflow_dag(self): + self.run_dag('example_cloud_workflows', CLOUD_DAG_FOLDER) diff --git a/tests/providers/google/cloud/sensors/test_bigquery_dts.py b/tests/providers/google/cloud/sensors/test_bigquery_dts.py index 92a116ef8e3e4..c8a05483f1ec6 100644 --- a/tests/providers/google/cloud/sensors/test_bigquery_dts.py +++ b/tests/providers/google/cloud/sensors/test_bigquery_dts.py @@ -19,6 +19,8 @@ import unittest from unittest import mock +from google.cloud.bigquery_datatransfer_v1 import TransferState + from airflow.providers.google.cloud.sensors.bigquery_dts import BigQueryDataTransferServiceTransferRunSensor TRANSFER_CONFIG_ID = "config_id" @@ -27,20 +29,45 @@ class TestBigQueryDataTransferServiceTransferRunSensor(unittest.TestCase): - @mock.patch("airflow.providers.google.cloud.sensors.bigquery_dts.BiqQueryDataTransferServiceHook") @mock.patch( - "airflow.providers.google.cloud.sensors.bigquery_dts.MessageToDict", - return_value={"state": "success"}, + "airflow.providers.google.cloud.sensors.bigquery_dts.BiqQueryDataTransferServiceHook", + **{'return_value.get_transfer_run.return_value.state': TransferState.FAILED}, + ) + def test_poke_returns_false(self, mock_hook): + op = BigQueryDataTransferServiceTransferRunSensor( + transfer_config_id=TRANSFER_CONFIG_ID, + run_id=RUN_ID, + task_id="id", + project_id=PROJECT_ID, + expected_statuses={"SUCCEEDED"}, + ) + result = op.poke({}) + + self.assertEqual(result, False) + mock_hook.return_value.get_transfer_run.assert_called_once_with( + transfer_config_id=TRANSFER_CONFIG_ID, + run_id=RUN_ID, + project_id=PROJECT_ID, + metadata=None, + retry=None, + timeout=None, + ) + + @mock.patch( + "airflow.providers.google.cloud.sensors.bigquery_dts.BiqQueryDataTransferServiceHook", + **{'return_value.get_transfer_run.return_value.state': TransferState.SUCCEEDED}, ) - def test_poke(self, mock_msg_to_dict, mock_hook): + def test_poke_returns_true(self, mock_hook): + op =
BigQueryDataTransferServiceTransferRunSensor( transfer_config_id=TRANSFER_CONFIG_ID, run_id=RUN_ID, task_id="id", project_id=PROJECT_ID, - expected_statuses={"success"}, + expected_statuses={"SUCCEEDED"}, ) - op.poke(None) + result = op.poke({}) + + self.assertEqual(result, True) mock_hook.return_value.get_transfer_run.assert_called_once_with( transfer_config_id=TRANSFER_CONFIG_ID, run_id=RUN_ID, diff --git a/tests/providers/google/cloud/sensors/test_dataproc.py b/tests/providers/google/cloud/sensors/test_dataproc.py index 1ce8eea3fcb75..6f2991adf40a7 100644 --- a/tests/providers/google/cloud/sensors/test_dataproc.py +++ b/tests/providers/google/cloud/sensors/test_dataproc.py @@ -45,7 +45,7 @@ def create_job(self, state: int): @mock.patch(DATAPROC_PATH.format("DataprocHook")) def test_done(self, mock_hook): - job = self.create_job(JobStatus.DONE) + job = self.create_job(JobStatus.State.DONE) job_id = "job_id" mock_hook.return_value.get_job.return_value = job @@ -66,7 +66,7 @@ def test_done(self, mock_hook): @mock.patch(DATAPROC_PATH.format("DataprocHook")) def test_error(self, mock_hook): - job = self.create_job(JobStatus.ERROR) + job = self.create_job(JobStatus.State.ERROR) job_id = "job_id" mock_hook.return_value.get_job.return_value = job @@ -88,7 +88,7 @@ def test_error(self, mock_hook): @mock.patch(DATAPROC_PATH.format("DataprocHook")) def test_wait(self, mock_hook): - job = self.create_job(JobStatus.RUNNING) + job = self.create_job(JobStatus.State.RUNNING) job_id = "job_id" mock_hook.return_value.get_job.return_value = job @@ -109,7 +109,7 @@ def test_wait(self, mock_hook): @mock.patch(DATAPROC_PATH.format("DataprocHook")) def test_cancelled(self, mock_hook): - job = self.create_job(JobStatus.CANCELLED) + job = self.create_job(JobStatus.State.CANCELLED) job_id = "job_id" mock_hook.return_value.get_job.return_value = job diff --git a/tests/providers/google/cloud/sensors/test_pubsub.py b/tests/providers/google/cloud/sensors/test_pubsub.py index ba1aee983f1b7..795860b3d353c 100644 --- a/tests/providers/google/cloud/sensors/test_pubsub.py +++ b/tests/providers/google/cloud/sensors/test_pubsub.py @@ -22,7 +22,6 @@ import pytest from google.cloud.pubsub_v1.types import ReceivedMessage -from google.protobuf.json_format import MessageToDict, ParseDict from airflow.exceptions import AirflowSensorTimeout from airflow.providers.google.cloud.sensors.pubsub import PubSubPullSensor @@ -35,21 +34,18 @@ class TestPubSubPullSensor(unittest.TestCase): def _generate_messages(self, count): return [ - ParseDict( - { - "ack_id": "%s" % i, - "message": { - "data": f'Message {i}'.encode('utf8'), - "attributes": {"type": "generated message"}, - }, + ReceivedMessage( + ack_id="%s" % i, + message={ + "data": f'Message {i}'.encode('utf8'), + "attributes": {"type": "generated message"}, }, - ReceivedMessage(), ) for i in range(1, count + 1) ] def _generate_dicts(self, count): - return [MessageToDict(m) for m in self._generate_messages(count)] + return [ReceivedMessage.to_dict(m) for m in self._generate_messages(count)] @mock.patch('airflow.providers.google.cloud.sensors.pubsub.PubSubHook') def test_poke_no_messages(self, mock_hook): diff --git a/tests/providers/google/cloud/sensors/test_workflows.py b/tests/providers/google/cloud/sensors/test_workflows.py new file mode 100644 index 0000000000000..56ad958b2d979 --- /dev/null +++ b/tests/providers/google/cloud/sensors/test_workflows.py @@ -0,0 +1,108 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +from unittest import mock + +import pytest +from google.cloud.workflows.executions_v1beta import Execution + +from airflow.exceptions import AirflowException +from airflow.providers.google.cloud.sensors.workflows import WorkflowExecutionSensor + +BASE_PATH = "airflow.providers.google.cloud.sensors.workflows.{}" +LOCATION = "europe-west1" +WORKFLOW_ID = "workflow_id" +EXECUTION_ID = "execution_id" +PROJECT_ID = "airflow-testing" +METADATA = None +TIMEOUT = None +RETRY = None +GCP_CONN_ID = "test-conn" +IMPERSONATION_CHAIN = None + + +class TestWorkflowExecutionSensor: + @mock.patch(BASE_PATH.format("WorkflowsHook")) + def test_poke_success(self, mock_hook): + mock_hook.return_value.get_execution.return_value = mock.MagicMock(state=Execution.State.SUCCEEDED) + op = WorkflowExecutionSensor( + task_id="test_task", + workflow_id=WORKFLOW_ID, + execution_id=EXECUTION_ID, + location=LOCATION, + project_id=PROJECT_ID, + retry=RETRY, + request_timeout=TIMEOUT, + metadata=METADATA, + gcp_conn_id=GCP_CONN_ID, + impersonation_chain=IMPERSONATION_CHAIN, + ) + result = op.poke({}) + + mock_hook.assert_called_once_with( + gcp_conn_id=GCP_CONN_ID, + impersonation_chain=IMPERSONATION_CHAIN, + ) + + mock_hook.return_value.get_execution.assert_called_once_with( + workflow_id=WORKFLOW_ID, + execution_id=EXECUTION_ID, + location=LOCATION, + project_id=PROJECT_ID, + retry=RETRY, + timeout=TIMEOUT, + metadata=METADATA, + ) + + assert result is True + + @mock.patch(BASE_PATH.format("WorkflowsHook")) + def test_poke_wait(self, mock_hook): + mock_hook.return_value.get_execution.return_value = mock.MagicMock(state=Execution.State.ACTIVE) + op = WorkflowExecutionSensor( + task_id="test_task", + workflow_id=WORKFLOW_ID, + execution_id=EXECUTION_ID, + location=LOCATION, + project_id=PROJECT_ID, + retry=RETRY, + request_timeout=TIMEOUT, + metadata=METADATA, + gcp_conn_id=GCP_CONN_ID, + impersonation_chain=IMPERSONATION_CHAIN, + ) + result = op.poke({}) + + assert result is False + + @mock.patch(BASE_PATH.format("WorkflowsHook")) + def test_poke_failure(self, mock_hook): + mock_hook.return_value.get_execution.return_value = mock.MagicMock(state=Execution.State.FAILED) + op = WorkflowExecutionSensor( + task_id="test_task", + workflow_id=WORKFLOW_ID, + execution_id=EXECUTION_ID, + location=LOCATION, + project_id=PROJECT_ID, + retry=RETRY, + request_timeout=TIMEOUT, + metadata=METADATA, + gcp_conn_id=GCP_CONN_ID, + impersonation_chain=IMPERSONATION_CHAIN, + ) + with pytest.raises(AirflowException): + op.poke({}) diff --git a/tests/providers/google/cloud/transfers/test_mysql_to_gcs.py b/tests/providers/google/cloud/transfers/test_mysql_to_gcs.py index 1e18c201f112e..a76bfb5d904bd 100644 --- a/tests/providers/google/cloud/transfers/test_mysql_to_gcs.py +++ b/tests/providers/google/cloud/transfers/test_mysql_to_gcs.py @@ -22,7 +22,7 @@ from 
unittest import mock import pytest -from _mysql_exceptions import ProgrammingError +from MySQLdb import ProgrammingError # pylint: disable=no-name-in-module from parameterized import parameterized from airflow.providers.google.cloud.transfers.mysql_to_gcs import MySQLToGCSOperator diff --git a/tests/providers/google/cloud/transfers/test_trino_to_gcs.py b/tests/providers/google/cloud/transfers/test_trino_to_gcs.py new file mode 100644 index 0000000000000..7cb6539a3d846 --- /dev/null +++ b/tests/providers/google/cloud/transfers/test_trino_to_gcs.py @@ -0,0 +1,331 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import unittest +from unittest.mock import patch + +import pytest + +from airflow.providers.google.cloud.transfers.trino_to_gcs import TrinoToGCSOperator + +TASK_ID = "test-trino-to-gcs" +TRINO_CONN_ID = "my-trino-conn" +GCP_CONN_ID = "my-gcp-conn" +IMPERSONATION_CHAIN = ["ACCOUNT_1", "ACCOUNT_2", "ACCOUNT_3"] +SQL = "SELECT * FROM memory.default.test_multiple_types" +BUCKET = "gs://test" +FILENAME = "test_{}.ndjson" + +NDJSON_LINES = [ + b'{"some_num": 42, "some_str": "mock_row_content_1"}\n', + b'{"some_num": 43, "some_str": "mock_row_content_2"}\n', + b'{"some_num": 44, "some_str": "mock_row_content_3"}\n', +] +CSV_LINES = [ + b"some_num,some_str\r\n", + b"42,mock_row_content_1\r\n", + b"43,mock_row_content_2\r\n", + b"44,mock_row_content_3\r\n", +] +SCHEMA_FILENAME = "schema_test.json" +SCHEMA_JSON = b'[{"name": "some_num", "type": "INT64"}, {"name": "some_str", "type": "STRING"}]' + + +@pytest.mark.integration("trino") +class TestTrinoToGCSOperator(unittest.TestCase): + def test_init(self): + """Test TrinoToGCSOperator instance is properly initialized.""" + op = TrinoToGCSOperator( + task_id=TASK_ID, + sql=SQL, + bucket=BUCKET, + filename=FILENAME, + impersonation_chain=IMPERSONATION_CHAIN, + ) + assert op.task_id == TASK_ID + assert op.sql == SQL + assert op.bucket == BUCKET + assert op.filename == FILENAME + assert op.impersonation_chain == IMPERSONATION_CHAIN + + @patch("airflow.providers.google.cloud.transfers.trino_to_gcs.TrinoHook") + @patch("airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook") + def test_save_as_json(self, mock_gcs_hook, mock_trino_hook): + def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip): + assert BUCKET == bucket + assert FILENAME.format(0) == obj + assert "application/json" == mime_type + assert not gzip + with open(tmp_filename, "rb") as file: + assert b"".join(NDJSON_LINES) == file.read() + + mock_gcs_hook.return_value.upload.side_effect = _assert_upload + + mock_cursor = mock_trino_hook.return_value.get_conn.return_value.cursor + + mock_cursor.return_value.description = [ + ("some_num", "INTEGER", None, None, None, None, None), + ("some_str", "VARCHAR", None, None, None, None, None), + ] + 
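+ # The 7-item tuples above follow the DB-API 2.0 cursor.description shape + # (name, type_code, display_size, internal_size, precision, scale, null_ok); + # presumably only the name and type code matter for schema inference here. + # The side_effect list below makes each successive fetchone() call return one + # row, with the trailing None marking the result set as exhausted.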
+ mock_cursor.return_value.fetchone.side_effect = [ + [42, "mock_row_content_1"], + [43, "mock_row_content_2"], + [44, "mock_row_content_3"], + None, + ] + + op = TrinoToGCSOperator( + task_id=TASK_ID, + sql=SQL, + bucket=BUCKET, + filename=FILENAME, + trino_conn_id=TRINO_CONN_ID, + gcp_conn_id=GCP_CONN_ID, + impersonation_chain=IMPERSONATION_CHAIN, + ) + + op.execute(None) + + mock_trino_hook.assert_called_once_with(trino_conn_id=TRINO_CONN_ID) + mock_gcs_hook.assert_called_once_with( + delegate_to=None, + gcp_conn_id=GCP_CONN_ID, + impersonation_chain=IMPERSONATION_CHAIN, + ) + + mock_gcs_hook.return_value.upload.assert_called() + + @patch("airflow.providers.google.cloud.transfers.trino_to_gcs.TrinoHook") + @patch("airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook") + def test_save_as_json_with_file_splitting(self, mock_gcs_hook, mock_trino_hook): + """Test that ndjson is split by approx_max_file_size_bytes param.""" + + expected_upload = { + FILENAME.format(0): b"".join(NDJSON_LINES[:2]), + FILENAME.format(1): NDJSON_LINES[2], + } + + def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip): + assert BUCKET == bucket + assert "application/json" == mime_type + assert not gzip + with open(tmp_filename, "rb") as file: + assert expected_upload[obj] == file.read() + + mock_gcs_hook.return_value.upload.side_effect = _assert_upload + + mock_cursor = mock_trino_hook.return_value.get_conn.return_value.cursor + + mock_cursor.return_value.description = [ + ("some_num", "INTEGER", None, None, None, None, None), + ("some_str", "VARCHAR(20)", None, None, None, None, None), + ] + + mock_cursor.return_value.fetchone.side_effect = [ + [42, "mock_row_content_1"], + [43, "mock_row_content_2"], + [44, "mock_row_content_3"], + None, + ] + + op = TrinoToGCSOperator( + task_id=TASK_ID, + sql=SQL, + bucket=BUCKET, + filename=FILENAME, + approx_max_file_size_bytes=len(expected_upload[FILENAME.format(0)]), + ) + + op.execute(None) + + mock_gcs_hook.return_value.upload.assert_called() + + @patch("airflow.providers.google.cloud.transfers.trino_to_gcs.TrinoHook") + @patch("airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook") + def test_save_as_json_with_schema_file(self, mock_gcs_hook, mock_trino_hook): + """Test writing schema files.""" + + def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip): # pylint: disable=unused-argument + if obj == SCHEMA_FILENAME: + with open(tmp_filename, "rb") as file: + assert SCHEMA_JSON == file.read() + + mock_gcs_hook.return_value.upload.side_effect = _assert_upload + + mock_cursor = mock_trino_hook.return_value.get_conn.return_value.cursor + + mock_cursor.return_value.description = [ + ("some_num", "INTEGER", None, None, None, None, None), + ("some_str", "VARCHAR", None, None, None, None, None), + ] + + mock_cursor.return_value.fetchone.side_effect = [ + [42, "mock_row_content_1"], + [43, "mock_row_content_2"], + [44, "mock_row_content_3"], + None, + ] + + op = TrinoToGCSOperator( + task_id=TASK_ID, + sql=SQL, + bucket=BUCKET, + filename=FILENAME, + schema_filename=SCHEMA_FILENAME, + export_format="csv", + trino_conn_id=TRINO_CONN_ID, + gcp_conn_id=GCP_CONN_ID, + ) + op.execute(None) + + # once for the file and once for the schema + assert 2 == mock_gcs_hook.return_value.upload.call_count + + @patch("airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook") + @patch("airflow.providers.google.cloud.transfers.trino_to_gcs.TrinoHook") + def test_save_as_csv(self, mock_trino_hook, mock_gcs_hook): + def _assert_upload(bucket, obj, tmp_filename, 
mime_type, gzip): + assert BUCKET == bucket + assert FILENAME.format(0) == obj + assert "text/csv" == mime_type + assert not gzip + with open(tmp_filename, "rb") as file: + assert b"".join(CSV_LINES) == file.read() + + mock_gcs_hook.return_value.upload.side_effect = _assert_upload + + mock_cursor = mock_trino_hook.return_value.get_conn.return_value.cursor + + mock_cursor.return_value.description = [ + ("some_num", "INTEGER", None, None, None, None, None), + ("some_str", "VARCHAR", None, None, None, None, None), + ] + + mock_cursor.return_value.fetchone.side_effect = [ + [42, "mock_row_content_1"], + [43, "mock_row_content_2"], + [44, "mock_row_content_3"], + None, + ] + + op = TrinoToGCSOperator( + task_id=TASK_ID, + sql=SQL, + bucket=BUCKET, + filename=FILENAME, + export_format="csv", + trino_conn_id=TRINO_CONN_ID, + gcp_conn_id=GCP_CONN_ID, + impersonation_chain=IMPERSONATION_CHAIN, + ) + + op.execute(None) + + mock_gcs_hook.return_value.upload.assert_called() + + mock_trino_hook.assert_called_once_with(trino_conn_id=TRINO_CONN_ID) + mock_gcs_hook.assert_called_once_with( + delegate_to=None, + gcp_conn_id=GCP_CONN_ID, + impersonation_chain=IMPERSONATION_CHAIN, + ) + + @patch("airflow.providers.google.cloud.transfers.trino_to_gcs.TrinoHook") + @patch("airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook") + def test_save_as_csv_with_file_splitting(self, mock_gcs_hook, mock_trino_hook): + """Test that csv is split by approx_max_file_size_bytes param.""" + + expected_upload = { + FILENAME.format(0): b"".join(CSV_LINES[:3]), + FILENAME.format(1): b"".join([CSV_LINES[0], CSV_LINES[3]]), + } + + def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip): + assert BUCKET == bucket + assert "text/csv" == mime_type + assert not gzip + with open(tmp_filename, "rb") as file: + assert expected_upload[obj] == file.read() + + mock_gcs_hook.return_value.upload.side_effect = _assert_upload + + mock_cursor = mock_trino_hook.return_value.get_conn.return_value.cursor + + mock_cursor.return_value.description = [ + ("some_num", "INTEGER", None, None, None, None, None), + ("some_str", "VARCHAR(20)", None, None, None, None, None), + ] + + mock_cursor.return_value.fetchone.side_effect = [ + [42, "mock_row_content_1"], + [43, "mock_row_content_2"], + [44, "mock_row_content_3"], + None, + ] + + op = TrinoToGCSOperator( + task_id=TASK_ID, + sql=SQL, + bucket=BUCKET, + filename=FILENAME, + approx_max_file_size_bytes=len(expected_upload[FILENAME.format(0)]), + export_format="csv", + ) + + op.execute(None) + + mock_gcs_hook.return_value.upload.assert_called() + + @patch("airflow.providers.google.cloud.transfers.trino_to_gcs.TrinoHook") + @patch("airflow.providers.google.cloud.transfers.sql_to_gcs.GCSHook") + def test_save_as_csv_with_schema_file(self, mock_gcs_hook, mock_trino_hook): + """Test writing schema files.""" + + def _assert_upload(bucket, obj, tmp_filename, mime_type, gzip): # pylint: disable=unused-argument + if obj == SCHEMA_FILENAME: + with open(tmp_filename, "rb") as file: + assert SCHEMA_JSON == file.read() + + mock_gcs_hook.return_value.upload.side_effect = _assert_upload + + mock_cursor = mock_trino_hook.return_value.get_conn.return_value.cursor + + mock_cursor.return_value.description = [ + ("some_num", "INTEGER", None, None, None, None, None), + ("some_str", "VARCHAR", None, None, None, None, None), + ] + + mock_cursor.return_value.fetchone.side_effect = [ + [42, "mock_row_content_1"], + [43, "mock_row_content_2"], + [44, "mock_row_content_3"], + None, + ] + + op = TrinoToGCSOperator( + 
task_id=TASK_ID, + sql=SQL, + bucket=BUCKET, + filename=FILENAME, + schema_filename=SCHEMA_FILENAME, + export_format="csv", + ) + op.execute(None) + + # once for the file and once for the schema + assert 2 == mock_gcs_hook.return_value.upload.call_count diff --git a/tests/providers/google/cloud/transfers/test_trino_to_gcs_system.py b/tests/providers/google/cloud/transfers/test_trino_to_gcs_system.py new file mode 100644 index 0000000000000..00d5716556183 --- /dev/null +++ b/tests/providers/google/cloud/transfers/test_trino_to_gcs_system.py @@ -0,0 +1,169 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +import os +from contextlib import closing, suppress + +import pytest + +from airflow.models import Connection +from airflow.providers.trino.hooks.trino import TrinoHook +from tests.providers.google.cloud.utils.gcp_authenticator import GCP_BIGQUERY_KEY, GCP_GCS_KEY +from tests.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, GoogleSystemTest, provide_gcp_context + +try: + from airflow.utils.session import create_session +except ImportError: + # This is a hack to import create_session from old destination and + # fool the pre-commit check that looks for old imports... 
+ # TODO remove this once we don't need to test this on 1.10 + import importlib + + db_module = importlib.import_module("airflow.utils.db") + create_session = getattr(db_module, "create_session") + + +GCS_BUCKET = os.environ.get("GCP_TRINO_TO_GCS_BUCKET_NAME", "test-trino-to-gcs-bucket") +DATASET_NAME = os.environ.get("GCP_TRINO_TO_GCS_DATASET_NAME", "test_trino_to_gcs_dataset") + +CREATE_QUERY = """ +CREATE TABLE memory.default.test_multiple_types ( + -- Boolean + z_boolean BOOLEAN, + -- Integers + z_tinyint TINYINT, + z_smallint SMALLINT, + z_integer INTEGER, + z_bigint BIGINT, + -- Floating-Point + z_real REAL, + z_double DOUBLE, + -- Fixed-Point + z_decimal DECIMAL(10,2), + -- String + z_varchar VARCHAR(20), + z_char CHAR(20), + z_varbinary VARBINARY, + z_json JSON, + -- Date and Time + z_date DATE, + z_time TIME, + z_time_with_time_zone TIME WITH TIME ZONE, + z_timestamp TIMESTAMP, + z_timestamp_with_time_zone TIMESTAMP WITH TIME ZONE, + -- Network Address + z_ipaddress_v4 IPADDRESS, + z_ipaddress_v6 IPADDRESS, + -- UUID + z_uuid UUID +) +""" + +LOAD_QUERY = """ +INSERT INTO memory.default.test_multiple_types VALUES( + -- Boolean + true, -- z_boolean BOOLEAN, + -- Integers + CAST(POW(2, 7 ) - 42 AS TINYINT), -- z_tinyint TINYINT, + CAST(POW(2, 15) - 42 AS SMALLINT), -- z_smallint SMALLINT, + CAST(POW(2, 31) - 42 AS INTEGER), -- z_integer INTEGER, + CAST(POW(2, 32) - 42 AS BIGINT) * 2, -- z_bigint BIGINT, + -- Floating-Point + REAL '42', -- z_real REAL, + DOUBLE '1.03e42', -- z_double DOUBLE, + -- Fixed-Point + DECIMAL '1.1', -- z_decimal DECIMAL(10, 2), + -- String + U&'Hello winter \2603 !', -- z_varchar VARCHAR(20), + 'cat', -- z_char CHAR(20), + X'65683F', -- z_varbinary VARBINARY, + CAST('["A", 1, true]' AS JSON), -- z_json JSON, + -- Date and Time + DATE '2001-08-22', -- z_date DATE, + TIME '01:02:03.456', -- z_time TIME, + TIME '01:02:03.456 America/Los_Angeles', -- z_time_with_time_zone TIME WITH TIME ZONE, + TIMESTAMP '2001-08-22 03:04:05.321', -- z_timestamp TIMESTAMP, + TIMESTAMP '2001-08-22 03:04:05.321 America/Los_Angeles', -- z_timestamp_with_time_zone TIMESTAMP WITH TIME + -- ZONE, + -- Network Address + IPADDRESS '10.0.0.1', -- z_ipaddress_v4 IPADDRESS, + IPADDRESS '2001:db8::1', -- z_ipaddress_v6 IPADDRESS, + -- UUID + UUID '12151fd2-7586-11e9-8f9e-2a86e4085a59' -- z_uuid UUID +) +""" +DELETE_QUERY = "DROP TABLE memory.default.test_multiple_types" + + +@pytest.mark.integration("trino") +class TrinoToGCSSystemTest(GoogleSystemTest): + @staticmethod + def init_connection(): + with create_session() as session: + session.query(Connection).filter(Connection.conn_id == "trino_default").delete() + session.merge( + Connection( + conn_id="trino_default", conn_type="conn_type", host="trino", port=8080, login="airflow" + ) + ) + + @staticmethod + def init_db(): + hook = TrinoHook() + with hook.get_conn() as conn: + with closing(conn.cursor()) as cur: + cur.execute(CREATE_QUERY) + # Trino does not execute queries until the result is fetched. :-( + cur.fetchone() + cur.execute(LOAD_QUERY) + cur.fetchone() + + @staticmethod + def drop_db(): + hook = TrinoHook() + with hook.get_conn() as conn: + with closing(conn.cursor()) as cur: + cur.execute(DELETE_QUERY) + # Trino does not execute queries until the result is fetched.
:-( + cur.fetchone() + + @provide_gcp_context(GCP_GCS_KEY) + def setUp(self): + super().setUp() + self.init_connection() + self.create_gcs_bucket(GCS_BUCKET) + with suppress(Exception): + self.drop_db() + self.init_db() + self.execute_with_ctx( + ["bq", "rm", "--recursive", "--force", f"{self._project_id()}:{DATASET_NAME}"], + key=GCP_BIGQUERY_KEY, + ) + + @provide_gcp_context(GCP_BIGQUERY_KEY) + def test_run_example_dag(self): + self.run_dag("example_trino_to_gcs", CLOUD_DAG_FOLDER) + + @provide_gcp_context(GCP_GCS_KEY) + def tearDown(self): + self.delete_gcs_bucket(GCS_BUCKET) + self.drop_db() + self.execute_with_ctx( + ["bq", "rm", "--recursive", "--force", f"{self._project_id()}:{DATASET_NAME}"], + key=GCP_BIGQUERY_KEY, + ) + super().tearDown() diff --git a/tests/providers/google/cloud/utils/gcp_authenticator.py b/tests/providers/google/cloud/utils/gcp_authenticator.py index bf36eadc2202b..2fad48c9a8288 100644 --- a/tests/providers/google/cloud/utils/gcp_authenticator.py +++ b/tests/providers/google/cloud/utils/gcp_authenticator.py @@ -54,6 +54,7 @@ GCP_SPANNER_KEY = 'gcp_spanner.json' GCP_STACKDDRIVER = 'gcp_stackdriver.json' GCP_TASKS_KEY = 'gcp_tasks.json' +GCP_WORKFLOWS_KEY = "gcp_workflows.json" GMP_KEY = 'gmp.json' G_FIREBASE_KEY = 'g_firebase.json' GCP_AWS_KEY = 'gcp_aws.json' diff --git a/tests/providers/microsoft/azure/hooks/test_azure_data_factory.py b/tests/providers/microsoft/azure/hooks/test_azure_data_factory.py new file mode 100644 index 0000000000000..ea445ec7f8c88 --- /dev/null +++ b/tests/providers/microsoft/azure/hooks/test_azure_data_factory.py @@ -0,0 +1,439 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
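+# A note on the layout below (a reading of these fixtures, not of the ADF docs): +# every hook method is exercised twice through the custom parametrize helper, +# once as "explicit factory", passing resource_group_name/factory_name with the +# call, and once as "implicit factory", falling back to the resourceGroup and +# factory defaults stored in the connection's extra JSON. For example, +# test_get_factory runs with user_args=(RESOURCE_GROUP, FACTORY) and again with +# user_args=(), asserting the corresponding factories.get(...) invocation each time.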
+ +# pylint: disable=redefined-outer-name,unused-argument + +import json +from unittest.mock import MagicMock, Mock + +import pytest +from pytest import fixture + +from airflow.exceptions import AirflowException +from airflow.models.connection import Connection +from airflow.providers.microsoft.azure.hooks.azure_data_factory import ( + AzureDataFactoryHook, + provide_targeted_factory, +) +from airflow.utils import db + +DEFAULT_RESOURCE_GROUP = "defaultResourceGroup" +RESOURCE_GROUP = "testResourceGroup" + +DEFAULT_FACTORY = "defaultFactory" +FACTORY = "testFactory" + +MODEL = object() +NAME = "testName" +ID = "testId" + + +def setup_module(): + connection = Connection( + conn_id="azure_data_factory_test", + conn_type="azure_data_factory", + login="clientId", + password="clientSecret", + extra=json.dumps( + { + "tenantId": "tenantId", + "subscriptionId": "subscriptionId", + "resourceGroup": DEFAULT_RESOURCE_GROUP, + "factory": DEFAULT_FACTORY, + } + ), + ) + + db.merge_conn(connection) + + +@fixture +def hook(): + client = AzureDataFactoryHook(conn_id="azure_data_factory_test") + client._conn = MagicMock( + spec=[ + "factories", + "linked_services", + "datasets", + "pipelines", + "pipeline_runs", + "triggers", + "trigger_runs", + ] + ) + + return client + + +def parametrize(explicit_factory, implicit_factory): + def wrapper(func): + return pytest.mark.parametrize( + ("user_args", "sdk_args"), + (explicit_factory, implicit_factory), + ids=("explicit factory", "implicit factory"), + )(func) + + return wrapper + + +def test_provide_targeted_factory(): + def echo(_, resource_group_name=None, factory_name=None): + return resource_group_name, factory_name + + conn = MagicMock() + hook = MagicMock() + hook.get_connection.return_value = conn + + conn.extra_dejson = {} + assert provide_targeted_factory(echo)(hook, RESOURCE_GROUP, FACTORY) == (RESOURCE_GROUP, FACTORY) + + conn.extra_dejson = {"resourceGroup": DEFAULT_RESOURCE_GROUP, "factory": DEFAULT_FACTORY} + assert provide_targeted_factory(echo)(hook) == (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY) + + with pytest.raises(AirflowException): + conn.extra_dejson = {} + provide_targeted_factory(echo)(hook) + + +@parametrize( + explicit_factory=((RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY)), + implicit_factory=((), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY)), +) +def test_get_factory(hook: AzureDataFactoryHook, user_args, sdk_args): + hook.get_factory(*user_args) + + hook._conn.factories.get.assert_called_with(*sdk_args) + + +@parametrize( + explicit_factory=((MODEL, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, MODEL)), + implicit_factory=((MODEL,), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, MODEL)), +) +def test_create_factory(hook: AzureDataFactoryHook, user_args, sdk_args): + hook.create_factory(*user_args) + + hook._conn.factories.create_or_update.assert_called_with(*sdk_args) + + +@parametrize( + explicit_factory=((MODEL, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, MODEL)), + implicit_factory=((MODEL,), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, MODEL)), +) +def test_update_factory(hook: AzureDataFactoryHook, user_args, sdk_args): + hook._factory_exists = Mock(return_value=True) + hook.update_factory(*user_args) + + hook._conn.factories.create_or_update.assert_called_with(*sdk_args) + + +@parametrize( + explicit_factory=((MODEL, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, MODEL)), + implicit_factory=((MODEL,), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, MODEL)), +) +def test_update_factory_non_existent(hook: 
AzureDataFactoryHook, user_args, sdk_args): + hook._factory_exists = Mock(return_value=False) + + with pytest.raises(AirflowException, match=r"Factory .+ does not exist"): + hook.update_factory(*user_args) + + +@parametrize( + explicit_factory=((RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY)), + implicit_factory=((), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY)), +) +def test_delete_factory(hook: AzureDataFactoryHook, user_args, sdk_args): + hook.delete_factory(*user_args) + + hook._conn.factories.delete.assert_called_with(*sdk_args) + + +@parametrize( + explicit_factory=((NAME, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME)), + implicit_factory=((NAME,), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME)), +) +def test_get_linked_service(hook: AzureDataFactoryHook, user_args, sdk_args): + hook.get_linked_service(*user_args) + + hook._conn.linked_services.get.assert_called_with(*sdk_args) + + +@parametrize( + explicit_factory=((NAME, MODEL, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME, MODEL)), + implicit_factory=((NAME, MODEL), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME, MODEL)), +) +def test_create_linked_service(hook: AzureDataFactoryHook, user_args, sdk_args): + hook.create_linked_service(*user_args) + + hook._conn.linked_services.create_or_update.assert_called_with(*sdk_args) + + +@parametrize( + explicit_factory=((NAME, MODEL, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME, MODEL)), + implicit_factory=((NAME, MODEL), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME, MODEL)), +) +def test_update_linked_service(hook: AzureDataFactoryHook, user_args, sdk_args): + hook._linked_service_exists = Mock(return_value=True) + hook.update_linked_service(*user_args) + + hook._conn.linked_services.create_or_update.assert_called_with(*sdk_args) + + +@parametrize( + explicit_factory=((NAME, MODEL, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME, MODEL)), + implicit_factory=((NAME, MODEL), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME, MODEL)), +) +def test_update_linked_service_non_existent(hook: AzureDataFactoryHook, user_args, sdk_args): + hook._linked_service_exists = Mock(return_value=False) + + with pytest.raises(AirflowException, match=r"Linked service .+ does not exist"): + hook.update_linked_service(*user_args) + + +@parametrize( + explicit_factory=((NAME, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME)), + implicit_factory=((NAME,), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME)), +) +def test_delete_linked_service(hook: AzureDataFactoryHook, user_args, sdk_args): + hook.delete_linked_service(*user_args) + + hook._conn.linked_services.delete.assert_called_with(*sdk_args) + + +@parametrize( + explicit_factory=((NAME, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME)), + implicit_factory=((NAME,), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME)), +) +def test_get_dataset(hook: AzureDataFactoryHook, user_args, sdk_args): + hook.get_dataset(*user_args) + + hook._conn.datasets.get.assert_called_with(*sdk_args) + + +@parametrize( + explicit_factory=((NAME, MODEL, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME, MODEL)), + implicit_factory=((NAME, MODEL), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME, MODEL)), +) +def test_create_dataset(hook: AzureDataFactoryHook, user_args, sdk_args): + hook.create_dataset(*user_args) + + hook._conn.datasets.create_or_update.assert_called_with(*sdk_args) + + +@parametrize( + explicit_factory=((NAME, MODEL, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME, MODEL)), + implicit_factory=((NAME, MODEL),
(DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME, MODEL)), +) +def test_update_dataset(hook: AzureDataFactoryHook, user_args, sdk_args): + hook._dataset_exists = Mock(return_value=True) + hook.update_dataset(*user_args) + + hook._conn.datasets.create_or_update.assert_called_with(*sdk_args) + + +@parametrize( + explicit_factory=((NAME, MODEL, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME, MODEL)), + implicit_factory=((NAME, MODEL), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME, MODEL)), +) +def test_update_dataset_non_existent(hook: AzureDataFactoryHook, user_args, sdk_args): + hook._dataset_exists = Mock(return_value=False) + + with pytest.raises(AirflowException, match=r"Dataset .+ does not exist"): + hook.update_dataset(*user_args) + + +@parametrize( + explicit_factory=((NAME, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME)), + implicit_factory=((NAME,), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME)), +) +def test_delete_dataset(hook: AzureDataFactoryHook, user_args, sdk_args): + hook.delete_dataset(*user_args) + + hook._conn.datasets.delete.assert_called_with(*sdk_args) + + +@parametrize( + explicit_factory=((NAME, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME)), + implicit_factory=((NAME,), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME)), +) +def test_get_pipeline(hook: AzureDataFactoryHook, user_args, sdk_args): + hook.get_pipeline(*user_args) + + hook._conn.pipelines.get.assert_called_with(*sdk_args) + + +@parametrize( + explicit_factory=((NAME, MODEL, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME, MODEL)), + implicit_factory=((NAME, MODEL), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME, MODEL)), +) +def test_create_pipeline(hook: AzureDataFactoryHook, user_args, sdk_args): + hook.create_pipeline(*user_args) + + hook._conn.pipelines.create_or_update.assert_called_with(*sdk_args) + + +@parametrize( + explicit_factory=((NAME, MODEL, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME, MODEL)), + implicit_factory=((NAME, MODEL), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME, MODEL)), +) +def test_update_pipeline(hook: AzureDataFactoryHook, user_args, sdk_args): + hook._pipeline_exists = Mock(return_value=True) + hook.update_pipeline(*user_args) + + hook._conn.pipelines.create_or_update.assert_called_with(*sdk_args) + + +@parametrize( + explicit_factory=((NAME, MODEL, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME, MODEL)), + implicit_factory=((NAME, MODEL), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME, MODEL)), +) +def test_update_pipeline_non_existent(hook: AzureDataFactoryHook, user_args, sdk_args): + hook._pipeline_exists = Mock(return_value=False) + + with pytest.raises(AirflowException, match=r"Pipeline .+ does not exist"): + hook.update_pipeline(*user_args) + + +@parametrize( + explicit_factory=((NAME, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME)), + implicit_factory=((NAME,), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME)), +) +def test_delete_pipeline(hook: AzureDataFactoryHook, user_args, sdk_args): + hook.delete_pipeline(*user_args) + + hook._conn.pipelines.delete.assert_called_with(*sdk_args) + + +@parametrize( + explicit_factory=((NAME, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME)), + implicit_factory=((NAME,), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME)), +) +def test_run_pipeline(hook: AzureDataFactoryHook, user_args, sdk_args): + hook.run_pipeline(*user_args) + + hook._conn.pipelines.create_run.assert_called_with(*sdk_args) + + +@parametrize( + explicit_factory=((ID, 
RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, ID)), + implicit_factory=((ID,), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, ID)), +) +def test_get_pipeline_run(hook: AzureDataFactoryHook, user_args, sdk_args): + hook.get_pipeline_run(*user_args) + + hook._conn.pipeline_runs.get.assert_called_with(*sdk_args) + + +@parametrize( + explicit_factory=((ID, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, ID)), + implicit_factory=((ID,), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, ID)), +) +def test_cancel_pipeline_run(hook: AzureDataFactoryHook, user_args, sdk_args): + hook.cancel_pipeline_run(*user_args) + + hook._conn.pipeline_runs.cancel.assert_called_with(*sdk_args) + + +@parametrize( + explicit_factory=((NAME, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME)), + implicit_factory=((NAME,), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME)), +) +def test_get_trigger(hook: AzureDataFactoryHook, user_args, sdk_args): + hook.get_trigger(*user_args) + + hook._conn.triggers.get.assert_called_with(*sdk_args) + + +@parametrize( + explicit_factory=((NAME, MODEL, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME, MODEL)), + implicit_factory=((NAME, MODEL), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME, MODEL)), +) +def test_create_trigger(hook: AzureDataFactoryHook, user_args, sdk_args): + hook.create_trigger(*user_args) + + hook._conn.triggers.create_or_update.assert_called_with(*sdk_args) + + +@parametrize( + explicit_factory=((NAME, MODEL, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME, MODEL)), + implicit_factory=((NAME, MODEL), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME, MODEL)), +) +def test_update_trigger(hook: AzureDataFactoryHook, user_args, sdk_args): + hook._trigger_exists = Mock(return_value=True) + hook.update_trigger(*user_args) + + hook._conn.triggers.create_or_update.assert_called_with(*sdk_args) + + +@parametrize( + explicit_factory=((NAME, MODEL, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME, MODEL)), + implicit_factory=((NAME, MODEL), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME, MODEL)), +) +def test_update_trigger_non_existent(hook: AzureDataFactoryHook, user_args, sdk_args): + hook._trigger_exists = Mock(return_value=False) + + with pytest.raises(AirflowException, match=r"Trigger .+ does not exist"): + hook.update_trigger(*user_args) + + +@parametrize( + explicit_factory=((NAME, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME)), + implicit_factory=((NAME,), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME)), +) +def test_delete_trigger(hook: AzureDataFactoryHook, user_args, sdk_args): + hook.delete_trigger(*user_args) + + hook._conn.triggers.delete.assert_called_with(*sdk_args) + + +@parametrize( + explicit_factory=((NAME, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME)), + implicit_factory=((NAME,), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME)), +) +def test_start_trigger(hook: AzureDataFactoryHook, user_args, sdk_args): + hook.start_trigger(*user_args) + + hook._conn.triggers.start.assert_called_with(*sdk_args) + + +@parametrize( + explicit_factory=((NAME, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME)), + implicit_factory=((NAME,), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME)), +) +def test_stop_trigger(hook: AzureDataFactoryHook, user_args, sdk_args): + hook.stop_trigger(*user_args) + + hook._conn.triggers.stop.assert_called_with(*sdk_args) + + +@parametrize( + explicit_factory=((NAME, ID, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME, ID)), + implicit_factory=((NAME, ID), 
(DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME, ID)), +) +def test_rerun_trigger(hook: AzureDataFactoryHook, user_args, sdk_args): + hook.rerun_trigger(*user_args) + + hook._conn.trigger_runs.rerun.assert_called_with(*sdk_args) + + +@parametrize( + explicit_factory=((NAME, ID, RESOURCE_GROUP, FACTORY), (RESOURCE_GROUP, FACTORY, NAME, ID)), + implicit_factory=((NAME, ID), (DEFAULT_RESOURCE_GROUP, DEFAULT_FACTORY, NAME, ID)), +) +def test_cancel_trigger(hook: AzureDataFactoryHook, user_args, sdk_args): + hook.cancel_trigger(*user_args) + + hook._conn.trigger_runs.cancel.assert_called_with(*sdk_args) diff --git a/tests/providers/microsoft/azure/log/test_wasb_task_handler.py b/tests/providers/microsoft/azure/log/test_wasb_task_handler.py index 8e517e0e31b0e..8362333737136 100644 --- a/tests/providers/microsoft/azure/log/test_wasb_task_handler.py +++ b/tests/providers/microsoft/azure/log/test_wasb_task_handler.py @@ -18,6 +18,7 @@ import unittest from datetime import datetime from unittest import mock +from unittest.mock import ANY from azure.common import AzureHttpError @@ -72,8 +73,9 @@ def test_hook_raises(self): mock_error.assert_called_once_with( 'Could not create an WasbHook with connection id "%s". ' 'Please make sure that airflow[azure] is installed and ' - 'the Wasb connection exists.', + 'the Wasb connection exists. Exception "%s"', "wasb_default", + ANY, ) def test_set_context_raw(self): @@ -120,9 +122,12 @@ def test_wasb_read_raises(self): mock_hook.return_value.read_file.side_effect = AzureHttpError("failed to connect", 404) handler.wasb_read(self.remote_log_location, return_error=True) - mock_error.assert_called_once_with( - 'Could not read logs from remote/log/location/1.log', exc_info=True + mock_error.assert_called_once_with( + "Message: '%s', exception '%s'", + 'Could not read logs from remote/log/location/1.log', + ANY, + exc_info=True, ) @mock.patch("airflow.providers.microsoft.azure.hooks.wasb.WasbHook") diff --git a/tests/providers/mysql/hooks/test_mysql.py b/tests/providers/mysql/hooks/test_mysql.py index 538381f61bbe5..9e1155dd84cde 100644 --- a/tests/providers/mysql/hooks/test_mysql.py +++ b/tests/providers/mysql/hooks/test_mysql.py @@ -21,6 +21,7 @@ import os import unittest import uuid +from contextlib import closing from unittest import mock import MySQLdb.cursors @@ -348,9 +349,10 @@ def setUp(self): def tearDown(self): drop_tables = {'test_mysql_to_mysql', 'test_airflow'} - with MySqlHook().get_conn() as conn: - for table in drop_tables: - conn.execute(f"DROP TABLE IF EXISTS {table}") + with closing(MySqlHook().get_conn()) as conn: + with closing(conn.cursor()) as cursor: + for table in drop_tables: + cursor.execute(f"DROP TABLE IF EXISTS {table}") @parameterized.expand( [ @@ -375,19 +377,20 @@ def test_mysql_hook_test_bulk_load(self, client): f.flush() hook = MySqlHook('airflow_db') - with hook.get_conn() as conn: - conn.execute( + with closing(hook.get_conn()) as conn: + with closing(conn.cursor()) as cursor: + cursor.execute( + """ + CREATE TABLE IF NOT EXISTS test_airflow ( + dummy VARCHAR(50) + ) """ - CREATE TABLE IF NOT EXISTS test_airflow ( - dummy VARCHAR(50) ) - """ - ) - conn.execute("TRUNCATE TABLE test_airflow") - hook.bulk_load("test_airflow", f.name) - conn.execute("SELECT dummy FROM test_airflow") - results = tuple(result[0] for result in conn.fetchall()) - assert sorted(results) == sorted(records) + cursor.execute("TRUNCATE TABLE test_airflow") + hook.bulk_load("test_airflow", f.name) + cursor.execute("SELECT dummy FROM test_airflow") + results = tuple(result[0] for result in
cursor.fetchall()) + assert sorted(results) == sorted(records) @parameterized.expand( [ diff --git a/tests/providers/mysql/operators/test_mysql.py b/tests/providers/mysql/operators/test_mysql.py index af5cc25e122d2..c8d31284eb057 100644 --- a/tests/providers/mysql/operators/test_mysql.py +++ b/tests/providers/mysql/operators/test_mysql.py @@ -16,6 +16,7 @@ # specific language governing permissions and limitations # under the License. import unittest +from contextlib import closing import pytest from parameterized import parameterized @@ -41,9 +42,10 @@ def setUp(self): def tearDown(self): drop_tables = {'test_mysql_to_mysql', 'test_airflow'} - with MySqlHook().get_conn() as conn: - for table in drop_tables: - conn.execute(f"DROP TABLE IF EXISTS {table}") + with closing(MySqlHook().get_conn()) as conn: + with closing(conn.cursor()) as cursor: + for table in drop_tables: + cursor.execute(f"DROP TABLE IF EXISTS {table}") @parameterized.expand( [ @@ -100,7 +102,7 @@ def test_overwrite_schema(self, client): database="foobar", ) - from _mysql_exceptions import OperationalError + from MySQLdb import OperationalError # pylint: disable=no-name-in-module try: op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True) diff --git a/tests/providers/mysql/transfers/test_trino_to_mysql.py b/tests/providers/mysql/transfers/test_trino_to_mysql.py new file mode 100644 index 0000000000000..2e23169cc5f29 --- /dev/null +++ b/tests/providers/mysql/transfers/test_trino_to_mysql.py @@ -0,0 +1,73 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
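+# These tests stub both hooks and assert only the hand-off: rows returned by +# TrinoHook.get_records(sql) are passed straight to MySqlHook.insert_rows(), +# with an optional mysql_preoperator statement run first (as the mock +# assertions below suggest).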
+import os +import unittest +from unittest.mock import patch + +from airflow.providers.mysql.transfers.trino_to_mysql import TrinoToMySqlOperator +from tests.providers.apache.hive import DEFAULT_DATE, TestHiveEnvironment + + +class TestTrinoToMySqlTransfer(TestHiveEnvironment): + def setUp(self): + self.kwargs = dict( + sql='sql', + mysql_table='mysql_table', + task_id='test_trino_to_mysql_transfer', + ) + super().setUp() + + @patch('airflow.providers.mysql.transfers.trino_to_mysql.MySqlHook') + @patch('airflow.providers.mysql.transfers.trino_to_mysql.TrinoHook') + def test_execute(self, mock_trino_hook, mock_mysql_hook): + TrinoToMySqlOperator(**self.kwargs).execute(context={}) + + mock_trino_hook.return_value.get_records.assert_called_once_with(self.kwargs['sql']) + mock_mysql_hook.return_value.insert_rows.assert_called_once_with( + table=self.kwargs['mysql_table'], rows=mock_trino_hook.return_value.get_records.return_value + ) + + @patch('airflow.providers.mysql.transfers.trino_to_mysql.MySqlHook') + @patch('airflow.providers.mysql.transfers.trino_to_mysql.TrinoHook') + def test_execute_with_mysql_preoperator(self, mock_trino_hook, mock_mysql_hook): + self.kwargs.update(dict(mysql_preoperator='mysql_preoperator')) + + TrinoToMySqlOperator(**self.kwargs).execute(context={}) + + mock_trino_hook.return_value.get_records.assert_called_once_with(self.kwargs['sql']) + mock_mysql_hook.return_value.run.assert_called_once_with(self.kwargs['mysql_preoperator']) + mock_mysql_hook.return_value.insert_rows.assert_called_once_with( + table=self.kwargs['mysql_table'], rows=mock_trino_hook.return_value.get_records.return_value + ) + + @unittest.skipIf( + 'AIRFLOW_RUNALL_TESTS' not in os.environ, "Skipped because AIRFLOW_RUNALL_TESTS is not set" + ) + def test_trino_to_mysql(self): + op = TrinoToMySqlOperator( + task_id='trino_to_mysql_check', + sql=""" + SELECT name, count(*) as ccount + FROM airflow.static_babynames + GROUP BY name + """, + mysql_table='test_static_babynames', + mysql_preoperator='TRUNCATE TABLE test_static_babynames;', + dag=self.dag, + ) + op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True) diff --git a/tests/providers/neo4j/__init__.py b/tests/providers/neo4j/__init__.py new file mode 100644 index 0000000000000..217e5db960782 --- /dev/null +++ b/tests/providers/neo4j/__init__.py @@ -0,0 +1,17 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/providers/neo4j/hooks/__init__.py b/tests/providers/neo4j/hooks/__init__.py new file mode 100644 index 0000000000000..217e5db960782 --- /dev/null +++ b/tests/providers/neo4j/hooks/__init__.py @@ -0,0 +1,17 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/providers/neo4j/hooks/test_neo4j.py b/tests/providers/neo4j/hooks/test_neo4j.py new file mode 100644 index 0000000000000..7f64fc4efbec0 --- /dev/null +++ b/tests/providers/neo4j/hooks/test_neo4j.py @@ -0,0 +1,65 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +import json +import unittest +from unittest import mock + +from airflow.models import Connection +from airflow.providers.neo4j.hooks.neo4j import Neo4jHook + + +class TestNeo4jHookConn(unittest.TestCase): + def setUp(self): + super().setUp() + self.neo4j_hook = Neo4jHook() + self.connection = Connection( + conn_type='neo4j', login='login', password='password', host='host', schema='schema' + ) + + def test_get_uri_neo4j_scheme(self): + + self.neo4j_hook.get_connection = mock.Mock() + self.neo4j_hook.get_connection.return_value = self.connection + uri = self.neo4j_hook.get_uri(self.connection) + + self.assertEqual(uri, "bolt://host:7687") + + def test_get_uri_bolt_scheme(self): + + self.connection.extra = json.dumps({"bolt_scheme": True}) + self.neo4j_hook.get_connection = mock.Mock() + self.neo4j_hook.get_connection.return_value = self.connection + uri = self.neo4j_hook.get_uri(self.connection) + + self.assertEqual(uri, "bolt://host:7687") + + def test_get_uri_bolt_ssc_scheme(self): + self.connection.extra = json.dumps({"certs_self_signed": True, "bolt_scheme": True}) + self.neo4j_hook.get_connection = mock.Mock() + self.neo4j_hook.get_connection.return_value = self.connection + uri = self.neo4j_hook.get_uri(self.connection) + + self.assertEqual(uri, "bolt+ssc://host:7687") + + def test_get_uri_bolt_trusted_ca_scheme(self): + self.connection.extra = json.dumps({"certs_trusted_ca": True, "bolt_scheme": True}) + self.neo4j_hook.get_connection = mock.Mock() + self.neo4j_hook.get_connection.return_value = self.connection + uri = self.neo4j_hook.get_uri(self.connection) + + self.assertEqual(uri, "bolt+s://host:7687") diff --git a/tests/providers/neo4j/operators/__init__.py b/tests/providers/neo4j/operators/__init__.py new file mode 100644 index 0000000000000..217e5db960782 --- /dev/null +++ 
b/tests/providers/neo4j/operators/__init__.py @@ -0,0 +1,17 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/providers/neo4j/operators/test_neo4j.py b/tests/providers/neo4j/operators/test_neo4j.py new file mode 100644 index 0000000000000..39c8d697231f9 --- /dev/null +++ b/tests/providers/neo4j/operators/test_neo4j.py @@ -0,0 +1,43 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License.
+import unittest +from unittest import mock + +from airflow.models.dag import DAG +from airflow.providers.neo4j.operators.neo4j import Neo4jOperator +from airflow.utils import timezone + +DEFAULT_DATE = timezone.datetime(2015, 1, 1) +DEFAULT_DATE_ISO = DEFAULT_DATE.isoformat() +DEFAULT_DATE_DS = DEFAULT_DATE_ISO[:10] +TEST_DAG_ID = 'unit_test_dag' + + +class TestNeo4jOperator(unittest.TestCase): + def setUp(self): + args = {'owner': 'airflow', 'start_date': DEFAULT_DATE} + dag = DAG(TEST_DAG_ID, default_args=args) + self.dag = dag + + @mock.patch('airflow.providers.neo4j.operators.neo4j.Neo4jOperator.get_hook') + def test_neo4j_operator_test(self, mock_hook): + + sql = """ + MATCH (tom {name: "Tom Hanks"}) RETURN tom + """ + op = Neo4jOperator(task_id='basic_neo4j', sql=sql, dag=self.dag) + op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True) diff --git a/tests/providers/presto/hooks/test_presto.py b/tests/providers/presto/hooks/test_presto.py index f9e85875fd8de..e6ebb737c2c09 100644 --- a/tests/providers/presto/hooks/test_presto.py +++ b/tests/providers/presto/hooks/test_presto.py @@ -206,28 +206,3 @@ def test_get_pandas_df(self): assert result_sets[1][0] == df.values.tolist()[1][0] self.cur.execute.assert_called_once_with(statement, None) - - -class TestPrestoHookIntegration(unittest.TestCase): - @pytest.mark.integration("presto") - @mock.patch.dict('os.environ', AIRFLOW_CONN_PRESTO_DEFAULT="presto://airflow@presto:8080/") - def test_should_record_records(self): - hook = PrestoHook() - sql = "SELECT name FROM tpch.sf1.customer ORDER BY custkey ASC LIMIT 3" - records = hook.get_records(sql) - assert [['Customer#000000001'], ['Customer#000000002'], ['Customer#000000003']] == records - - @pytest.mark.integration("presto") - @pytest.mark.integration("kerberos") - def test_should_record_records_with_kerberos_auth(self): - conn_url = ( - 'presto://airflow@presto:7778/?' - 'auth=kerberos&kerberos__service_name=HTTP&' - 'verify=False&' - 'protocol=https' - ) - with mock.patch.dict('os.environ', AIRFLOW_CONN_PRESTO_DEFAULT=conn_url): - hook = PrestoHook() - sql = "SELECT name FROM tpch.sf1.customer ORDER BY custkey ASC LIMIT 3" - records = hook.get_records(sql) - assert [['Customer#000000001'], ['Customer#000000002'], ['Customer#000000003']] == records diff --git a/tests/providers/sftp/hooks/test_sftp.py b/tests/providers/sftp/hooks/test_sftp.py index 45097e6c47665..9211c30abc254 100644 --- a/tests/providers/sftp/hooks/test_sftp.py +++ b/tests/providers/sftp/hooks/test_sftp.py @@ -15,12 +15,14 @@ # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. 
- +import json import os import shutil import unittest +from io import StringIO from unittest import mock +import paramiko import pysftp from parameterized import parameterized @@ -28,6 +30,15 @@ from airflow.providers.sftp.hooks.sftp import SFTPHook from airflow.utils.session import provide_session + +def generate_host_key(pkey: paramiko.PKey): + key_fh = StringIO() + pkey.write_private_key(key_fh) + key_fh.seek(0) + key_obj = paramiko.RSAKey(file_obj=key_fh) + return key_obj.get_base64() + + TMP_PATH = '/tmp' TMP_DIR_FOR_TESTS = 'tests_sftp_hook_dir' SUB_DIR = "sub_dir" @@ -35,6 +46,9 @@ SFTP_CONNECTION_USER = "root" +TEST_PKEY = paramiko.RSAKey.generate(4096) +TEST_HOST_KEY = generate_host_key(pkey=TEST_PKEY) + class TestSFTPHook(unittest.TestCase): @provide_session @@ -178,6 +192,31 @@ def test_no_host_key_check_no_ignore(self, get_connection): hook = SFTPHook() self.assertEqual(hook.no_host_key_check, False) + @mock.patch('airflow.providers.sftp.hooks.sftp.SFTPHook.get_connection') + def test_host_key_default(self, get_connection): + connection = Connection(login='login', host='host') + get_connection.return_value = connection + hook = SFTPHook() + self.assertEqual(hook.host_key, None) + + @mock.patch('airflow.providers.sftp.hooks.sftp.SFTPHook.get_connection') + def test_host_key(self, get_connection): + connection = Connection( + login='login', + host='host', + extra=json.dumps({"host_key": TEST_HOST_KEY, "no_host_key_check": False}), + ) + get_connection.return_value = connection + hook = SFTPHook() + self.assertEqual(hook.host_key.get_base64(), TEST_HOST_KEY) + + @mock.patch('airflow.providers.sftp.hooks.sftp.SFTPHook.get_connection') + def test_host_key_with_no_host_key_check(self, get_connection): + connection = Connection(login='login', host='host', extra=json.dumps({"host_key": TEST_HOST_KEY})) + get_connection.return_value = connection + hook = SFTPHook() + self.assertEqual(hook.host_key, None) + @parameterized.expand( [ (os.path.join(TMP_PATH, TMP_DIR_FOR_TESTS), True), diff --git a/tests/providers/slack/hooks/test_slack.py b/tests/providers/slack/hooks/test_slack.py index cbe3d26654aca..5fef4095cf0f7 100644 --- a/tests/providers/slack/hooks/test_slack.py +++ b/tests/providers/slack/hooks/test_slack.py @@ -20,7 +20,7 @@ from unittest import mock import pytest -from slack.errors import SlackApiError +from slack_sdk.errors import SlackApiError from airflow.exceptions import AirflowException from airflow.providers.slack.hooks.slack import SlackHook diff --git a/tests/providers/ssh/hooks/test_ssh.py b/tests/providers/ssh/hooks/test_ssh.py index 027de40c639a2..fea52bc5e01b6 100644 --- a/tests/providers/ssh/hooks/test_ssh.py +++ b/tests/providers/ssh/hooks/test_ssh.py @@ -51,8 +51,17 @@ def generate_key_string(pkey: paramiko.PKey, passphrase: Optional[str] = None): return key_str +def generate_host_key(pkey: paramiko.PKey): + key_fh = StringIO() + pkey.write_private_key(key_fh) + key_fh.seek(0) + key_obj = paramiko.RSAKey(file_obj=key_fh) + return key_obj.get_base64() + + TEST_PKEY = paramiko.RSAKey.generate(4096) TEST_PRIVATE_KEY = generate_key_string(pkey=TEST_PKEY) +TEST_HOST_KEY = generate_host_key(pkey=TEST_PKEY) PASSPHRASE = ''.join(random.choice(string.ascii_letters) for i in range(10)) TEST_ENCRYPTED_PRIVATE_KEY = generate_key_string(pkey=TEST_PKEY, passphrase=PASSPHRASE) @@ -63,6 +72,10 @@ class TestSSHHook(unittest.TestCase): CONN_SSH_WITH_PRIVATE_KEY_PASSPHRASE_EXTRA = 'ssh_with_private_key_passphrase_extra' CONN_SSH_WITH_EXTRA = 'ssh_with_extra' 
CONN_SSH_WITH_EXTRA_FALSE_LOOK_FOR_KEYS = 'ssh_with_extra_false_look_for_keys' + CONN_SSH_WITH_HOST_KEY_EXTRA = 'ssh_with_host_key_extra' + CONN_SSH_WITH_HOST_KEY_AND_NO_HOST_KEY_CHECK_FALSE = 'ssh_with_host_key_and_no_host_key_check_false' + CONN_SSH_WITH_HOST_KEY_AND_NO_HOST_KEY_CHECK_TRUE = 'ssh_with_host_key_and_no_host_key_check_true' + CONN_SSH_WITH_NO_HOST_KEY_AND_NO_HOST_KEY_CHECK_FALSE = 'ssh_with_no_host_key_and_no_host_key_check_false' @classmethod def tearDownClass(cls) -> None: @@ -70,6 +83,11 @@ def tearDownClass(cls) -> None: conns_to_reset = [ cls.CONN_SSH_WITH_PRIVATE_KEY_EXTRA, cls.CONN_SSH_WITH_PRIVATE_KEY_PASSPHRASE_EXTRA, + cls.CONN_SSH_WITH_EXTRA, + cls.CONN_SSH_WITH_HOST_KEY_EXTRA, + cls.CONN_SSH_WITH_HOST_KEY_AND_NO_HOST_KEY_CHECK_FALSE, + cls.CONN_SSH_WITH_HOST_KEY_AND_NO_HOST_KEY_CHECK_TRUE, + cls.CONN_SSH_WITH_NO_HOST_KEY_AND_NO_HOST_KEY_CHECK_FALSE, ] connections = session.query(Connection).filter(Connection.conn_id.in_(conns_to_reset)) connections.delete(synchronize_session=False) @@ -116,6 +134,42 @@ def setUpClass(cls) -> None: ), ) ) + db.merge_conn( + Connection( + conn_id=cls.CONN_SSH_WITH_HOST_KEY_EXTRA, + host='localhost', + conn_type='ssh', + extra=json.dumps({"private_key": TEST_PRIVATE_KEY, "host_key": TEST_HOST_KEY}), + ) + ) + db.merge_conn( + Connection( + conn_id=cls.CONN_SSH_WITH_HOST_KEY_AND_NO_HOST_KEY_CHECK_FALSE, + host='remote_host', + conn_type='ssh', + extra=json.dumps( + {"private_key": TEST_PRIVATE_KEY, "host_key": TEST_HOST_KEY, "no_host_key_check": False} + ), + ) + ) + db.merge_conn( + Connection( + conn_id=cls.CONN_SSH_WITH_HOST_KEY_AND_NO_HOST_KEY_CHECK_TRUE, + host='remote_host', + conn_type='ssh', + extra=json.dumps( + {"private_key": TEST_PRIVATE_KEY, "host_key": TEST_HOST_KEY, "no_host_key_check": True} + ), + ) + ) + db.merge_conn( + Connection( + conn_id=cls.CONN_SSH_WITH_NO_HOST_KEY_AND_NO_HOST_KEY_CHECK_FALSE, + host='remote_host', + conn_type='ssh', + extra=json.dumps({"private_key": TEST_PRIVATE_KEY, "no_host_key_check": False}), + ) + ) @mock.patch('airflow.providers.ssh.hooks.ssh.paramiko.SSHClient') def test_ssh_connection_with_password(self, ssh_mock): @@ -344,3 +398,42 @@ def test_ssh_connection_with_private_key_passphrase_extra(self, ssh_mock): sock=None, look_for_keys=True, ) + + @mock.patch('airflow.providers.ssh.hooks.ssh.paramiko.SSHClient') + def test_ssh_connection_with_host_key_extra(self, ssh_client): + hook = SSHHook(ssh_conn_id=self.CONN_SSH_WITH_HOST_KEY_EXTRA) + assert hook.host_key is None # Since default no_host_key_check = True unless explicit override + with hook.get_conn(): + assert ssh_client.return_value.connect.called is True + assert ssh_client.return_value.get_host_keys.return_value.add.called is False + + @mock.patch('airflow.providers.ssh.hooks.ssh.paramiko.SSHClient') + def test_ssh_connection_with_host_key_where_no_host_key_check_is_true(self, ssh_client): + hook = SSHHook(ssh_conn_id=self.CONN_SSH_WITH_HOST_KEY_AND_NO_HOST_KEY_CHECK_TRUE) + assert hook.host_key is None + with hook.get_conn(): + assert ssh_client.return_value.connect.called is True + assert ssh_client.return_value.get_host_keys.return_value.add.called is False + + @mock.patch('airflow.providers.ssh.hooks.ssh.paramiko.SSHClient') + def test_ssh_connection_with_host_key_where_no_host_key_check_is_false(self, ssh_client): + hook = SSHHook(ssh_conn_id=self.CONN_SSH_WITH_HOST_KEY_AND_NO_HOST_KEY_CHECK_FALSE) + assert hook.host_key.get_base64() == TEST_HOST_KEY + with hook.get_conn(): + assert 
ssh_client.return_value.connect.called is True + assert ssh_client.return_value.get_host_keys.return_value.add.called is True + assert ssh_client.return_value.get_host_keys.return_value.add.call_args == mock.call( + hook.remote_host, 'ssh-rsa', hook.host_key + ) + + @mock.patch('airflow.providers.ssh.hooks.ssh.paramiko.SSHClient') + def test_ssh_connection_with_no_host_key_where_no_host_key_check_is_false(self, ssh_client): + hook = SSHHook(ssh_conn_id=self.CONN_SSH_WITH_NO_HOST_KEY_AND_NO_HOST_KEY_CHECK_FALSE) + assert hook.host_key is None + with hook.get_conn(): + assert ssh_client.return_value.connect.called is True + assert ssh_client.return_value.get_host_keys.return_value.add.called is False + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/providers/tableau/hooks/__init__.py b/tests/providers/tableau/hooks/__init__.py new file mode 100644 index 0000000000000..217e5db960782 --- /dev/null +++ b/tests/providers/tableau/hooks/__init__.py @@ -0,0 +1,17 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
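The SSH and SFTP host-key tests above all pin down one subtle rule: a 'host_key' supplied in the connection extras only takes effect when host-key checking is explicitly enabled, because 'no_host_key_check' defaults to True. The following is a minimal standalone sketch of that decision logic, written only from the behaviour these tests assert; 'resolve_host_key' is a hypothetical helper for illustration, not part of Airflow.

import json
from typing import Optional


def resolve_host_key(extra: Optional[str]) -> Optional[str]:
    """Return the host key to pin, or None when host-key checking is off.

    Mirrors the tested behaviour: 'no_host_key_check' defaults to True,
    and a supplied 'host_key' is ignored unless checking is explicitly
    enabled with 'no_host_key_check': false in the connection extras.
    """
    extras = json.loads(extra) if extra else {}
    if extras.get("no_host_key_check", True):
        return None  # host keys are not verified, so a pinned key is ignored
    return extras.get("host_key")  # may be None: no key pinned


# Default (no 'no_host_key_check' given): the key is ignored.
assert resolve_host_key(json.dumps({"host_key": "AAAA..."})) is None
# Explicit 'no_host_key_check': False: the key is pinned.
assert resolve_host_key(json.dumps({"host_key": "AAAA...", "no_host_key_check": False})) == "AAAA..."
# Explicit 'no_host_key_check': True: checking stays off, key ignored.
assert resolve_host_key(json.dumps({"host_key": "AAAA...", "no_host_key_check": True})) is None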
diff --git a/tests/providers/salesforce/hooks/test_tableau.py b/tests/providers/tableau/hooks/test_tableau.py similarity index 81% rename from tests/providers/salesforce/hooks/test_tableau.py rename to tests/providers/tableau/hooks/test_tableau.py index 130746d43b268..66ecdf747dd57 100644 --- a/tests/providers/salesforce/hooks/test_tableau.py +++ b/tests/providers/tableau/hooks/test_tableau.py @@ -19,12 +19,19 @@ from unittest.mock import patch from airflow import configuration, models -from airflow.providers.salesforce.hooks.tableau import TableauHook +from airflow.providers.tableau.hooks.tableau import TableauHook from airflow.utils import db class TestTableauHook(unittest.TestCase): + """ + Test class for TableauHook + """ + def setUp(self): + """ + setup + """ configuration.conf.load_test_config() db.merge_conn( @@ -46,9 +53,12 @@ def setUp(self): ) ) - @patch('airflow.providers.salesforce.hooks.tableau.TableauAuth') - @patch('airflow.providers.salesforce.hooks.tableau.Server') + @patch('airflow.providers.tableau.hooks.tableau.TableauAuth') + @patch('airflow.providers.tableau.hooks.tableau.Server') def test_get_conn_auth_via_password_and_site_in_connection(self, mock_server, mock_tableau_auth): + """ + Test get conn auth via password + """ with TableauHook(tableau_conn_id='tableau_test_password') as tableau_hook: mock_server.assert_called_once_with(tableau_hook.conn.host, use_server_version=True) mock_tableau_auth.assert_called_once_with( @@ -59,9 +69,12 @@ def test_get_conn_auth_via_password_and_site_in_connection(self, mock_server, mo mock_server.return_value.auth.sign_in.assert_called_once_with(mock_tableau_auth.return_value) mock_server.return_value.auth.sign_out.assert_called_once_with() - @patch('airflow.providers.salesforce.hooks.tableau.PersonalAccessTokenAuth') - @patch('airflow.providers.salesforce.hooks.tableau.Server') + @patch('airflow.providers.tableau.hooks.tableau.PersonalAccessTokenAuth') + @patch('airflow.providers.tableau.hooks.tableau.Server') def test_get_conn_auth_via_token_and_site_in_init(self, mock_server, mock_tableau_auth): + """ + Test get conn auth via token + """ with TableauHook(site_id='test', tableau_conn_id='tableau_test_token') as tableau_hook: mock_server.assert_called_once_with(tableau_hook.conn.host, use_server_version=True) mock_tableau_auth.assert_called_once_with( @@ -74,10 +87,13 @@ def test_get_conn_auth_via_token_and_site_in_init(self, mock_server, mock_tablea ) mock_server.return_value.auth.sign_out.assert_called_once_with() - @patch('airflow.providers.salesforce.hooks.tableau.TableauAuth') - @patch('airflow.providers.salesforce.hooks.tableau.Server') - @patch('airflow.providers.salesforce.hooks.tableau.Pager', return_value=[1, 2, 3]) + @patch('airflow.providers.tableau.hooks.tableau.TableauAuth') + @patch('airflow.providers.tableau.hooks.tableau.Server') + @patch('airflow.providers.tableau.hooks.tableau.Pager', return_value=[1, 2, 3]) def test_get_all(self, mock_pager, mock_server, mock_tableau_auth): # pylint: disable=unused-argument + """ + Test get all + """ with TableauHook(tableau_conn_id='tableau_test_password') as tableau_hook: jobs = tableau_hook.get_all(resource_name='jobs') assert jobs == mock_pager.return_value diff --git a/tests/providers/tableau/operators/__init__.py b/tests/providers/tableau/operators/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/providers/tableau/operators/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more 
contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/providers/salesforce/operators/test_tableau_refresh_workbook.py b/tests/providers/tableau/operators/test_tableau_refresh_workbook.py similarity index 80% rename from tests/providers/salesforce/operators/test_tableau_refresh_workbook.py rename to tests/providers/tableau/operators/test_tableau_refresh_workbook.py index 77139c19773bc..72377a549ba21 100644 --- a/tests/providers/salesforce/operators/test_tableau_refresh_workbook.py +++ b/tests/providers/tableau/operators/test_tableau_refresh_workbook.py @@ -21,11 +21,18 @@ import pytest from airflow.exceptions import AirflowException -from airflow.providers.salesforce.operators.tableau_refresh_workbook import TableauRefreshWorkbookOperator +from airflow.providers.tableau.operators.tableau_refresh_workbook import TableauRefreshWorkbookOperator class TestTableauRefreshWorkbookOperator(unittest.TestCase): + """ + Test class for TableauRefreshWorkbookOperator + """ + def setUp(self): + """ + setup + """ self.mocked_workbooks = [] for i in range(3): mock_workbook = Mock() @@ -34,8 +41,11 @@ def setUp(self): self.mocked_workbooks.append(mock_workbook) self.kwargs = {'site_id': 'test_site', 'task_id': 'task', 'dag': None} - @patch('airflow.providers.salesforce.operators.tableau_refresh_workbook.TableauHook') + @patch('airflow.providers.tableau.operators.tableau_refresh_workbook.TableauHook') def test_execute(self, mock_tableau_hook): + """ + Test Execute + """ mock_tableau_hook.get_all = Mock(return_value=self.mocked_workbooks) mock_tableau_hook.return_value.__enter__ = Mock(return_value=mock_tableau_hook) operator = TableauRefreshWorkbookOperator(blocking=False, workbook_name='wb_2', **self.kwargs) @@ -45,9 +55,12 @@ def test_execute(self, mock_tableau_hook): mock_tableau_hook.server.workbooks.refresh.assert_called_once_with(2) assert mock_tableau_hook.server.workbooks.refresh.return_value.id == job_id - @patch('airflow.providers.salesforce.sensors.tableau_job_status.TableauJobStatusSensor') - @patch('airflow.providers.salesforce.operators.tableau_refresh_workbook.TableauHook') + @patch('airflow.providers.tableau.sensors.tableau_job_status.TableauJobStatusSensor') + @patch('airflow.providers.tableau.operators.tableau_refresh_workbook.TableauHook') def test_execute_blocking(self, mock_tableau_hook, mock_tableau_job_status_sensor): + """ + Test execute blocking + """ mock_tableau_hook.get_all = Mock(return_value=self.mocked_workbooks) mock_tableau_hook.return_value.__enter__ = Mock(return_value=mock_tableau_hook) operator = TableauRefreshWorkbookOperator(workbook_name='wb_2', **self.kwargs) @@ -64,8 +77,11 @@ def test_execute_blocking(self, mock_tableau_hook, mock_tableau_job_status_senso dag=None, ) - @patch('airflow.providers.salesforce.operators.tableau_refresh_workbook.TableauHook') + 
@patch('airflow.providers.tableau.operators.tableau_refresh_workbook.TableauHook') def test_execute_missing_workbook(self, mock_tableau_hook): + """ + Test execute missing workbook + """ mock_tableau_hook.get_all = Mock(return_value=self.mocked_workbooks) mock_tableau_hook.return_value.__enter__ = Mock(return_value=mock_tableau_hook) operator = TableauRefreshWorkbookOperator(workbook_name='test', **self.kwargs) diff --git a/tests/providers/tableau/sensors/__init__.py b/tests/providers/tableau/sensors/__init__.py new file mode 100644 index 0000000000000..13a83393a9124 --- /dev/null +++ b/tests/providers/tableau/sensors/__init__.py @@ -0,0 +1,16 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/providers/salesforce/sensors/test_tableau_job_status.py b/tests/providers/tableau/sensors/test_tableau_job_status.py similarity index 84% rename from tests/providers/salesforce/sensors/test_tableau_job_status.py rename to tests/providers/tableau/sensors/test_tableau_job_status.py index 7f01011befc91..ea6eeb2e9668a 100644 --- a/tests/providers/salesforce/sensors/test_tableau_job_status.py +++ b/tests/providers/tableau/sensors/test_tableau_job_status.py @@ -21,18 +21,25 @@ import pytest from parameterized import parameterized -from airflow.providers.salesforce.sensors.tableau_job_status import ( +from airflow.providers.tableau.sensors.tableau_job_status import ( TableauJobFailedException, TableauJobStatusSensor, ) class TestTableauJobStatusSensor(unittest.TestCase): + """ + Test Class for JobStatusSensor + """ + def setUp(self): self.kwargs = {'job_id': 'job_2', 'site_id': 'test_site', 'task_id': 'task', 'dag': None} - @patch('airflow.providers.salesforce.sensors.tableau_job_status.TableauHook') + @patch('airflow.providers.tableau.sensors.tableau_job_status.TableauHook') def test_poke(self, mock_tableau_hook): + """ + Test poke + """ mock_tableau_hook.return_value.__enter__ = Mock(return_value=mock_tableau_hook) mock_get = mock_tableau_hook.server.jobs.get_by_id mock_get.return_value.finish_code = '0' @@ -44,8 +51,11 @@ def test_poke(self, mock_tableau_hook): mock_get.assert_called_once_with(sensor.job_id) @parameterized.expand([('1',), ('2',)]) - @patch('airflow.providers.salesforce.sensors.tableau_job_status.TableauHook') + @patch('airflow.providers.tableau.sensors.tableau_job_status.TableauHook') def test_poke_failed(self, finish_code, mock_tableau_hook): + """ + Test poke failed + """ mock_tableau_hook.return_value.__enter__ = Mock(return_value=mock_tableau_hook) mock_get = mock_tableau_hook.server.jobs.get_by_id mock_get.return_value.finish_code = finish_code diff --git a/tests/providers/trino/__init__.py b/tests/providers/trino/__init__.py new file mode 100644 index 0000000000000..217e5db960782 --- /dev/null +++ 
b/tests/providers/trino/__init__.py @@ -0,0 +1,17 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/providers/trino/hooks/__init__.py b/tests/providers/trino/hooks/__init__.py new file mode 100644 index 0000000000000..217e5db960782 --- /dev/null +++ b/tests/providers/trino/hooks/__init__.py @@ -0,0 +1,17 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. diff --git a/tests/providers/trino/hooks/test_trino.py b/tests/providers/trino/hooks/test_trino.py new file mode 100644 index 0000000000000..e649d2bece789 --- /dev/null +++ b/tests/providers/trino/hooks/test_trino.py @@ -0,0 +1,233 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# +import json +import re +import unittest +from unittest import mock +from unittest.mock import patch + +import pytest +from parameterized import parameterized +from trino.transaction import IsolationLevel + +from airflow import AirflowException +from airflow.models import Connection +from airflow.providers.trino.hooks.trino import TrinoHook + + +class TestTrinoHookConn(unittest.TestCase): + @patch('airflow.providers.trino.hooks.trino.trino.auth.BasicAuthentication') + @patch('airflow.providers.trino.hooks.trino.trino.dbapi.connect') + @patch('airflow.providers.trino.hooks.trino.TrinoHook.get_connection') + def test_get_conn_basic_auth(self, mock_get_connection, mock_connect, mock_basic_auth): + mock_get_connection.return_value = Connection( + login='login', password='password', host='host', schema='hive' + ) + + conn = TrinoHook().get_conn() + mock_connect.assert_called_once_with( + catalog='hive', + host='host', + port=None, + http_scheme='http', + schema='hive', + source='airflow', + user='login', + isolation_level=0, + auth=mock_basic_auth.return_value, + ) + mock_basic_auth.assert_called_once_with('login', 'password') + assert mock_connect.return_value == conn + + @patch('airflow.providers.trino.hooks.trino.TrinoHook.get_connection') + def test_get_conn_invalid_auth(self, mock_get_connection): + mock_get_connection.return_value = Connection( + login='login', + password='password', + host='host', + schema='hive', + extra=json.dumps({'auth': 'kerberos'}), + ) + with pytest.raises( + AirflowException, match=re.escape("Kerberos authorization doesn't support password.") + ): + TrinoHook().get_conn() + + @patch('airflow.providers.trino.hooks.trino.trino.auth.KerberosAuthentication') + @patch('airflow.providers.trino.hooks.trino.trino.dbapi.connect') + @patch('airflow.providers.trino.hooks.trino.TrinoHook.get_connection') + def test_get_conn_kerberos_auth(self, mock_get_connection, mock_connect, mock_auth): + mock_get_connection.return_value = Connection( + login='login', + host='host', + schema='hive', + extra=json.dumps( + { + 'auth': 'kerberos', + 'kerberos__config': 'TEST_KERBEROS_CONFIG', + 'kerberos__service_name': 'TEST_SERVICE_NAME', + 'kerberos__mutual_authentication': 'TEST_MUTUAL_AUTHENTICATION', + 'kerberos__force_preemptive': True, + 'kerberos__hostname_override': 'TEST_HOSTNAME_OVERRIDE', + 'kerberos__sanitize_mutual_error_response': True, + 'kerberos__principal': 'TEST_PRINCIPAL', + 'kerberos__delegate': 'TEST_DELEGATE', + 'kerberos__ca_bundle': 'TEST_CA_BUNDLE', + } + ), + ) + + conn = TrinoHook().get_conn() + mock_connect.assert_called_once_with( + catalog='hive', + host='host', + port=None, + http_scheme='http', + schema='hive', + source='airflow', + user='login', + isolation_level=0, + auth=mock_auth.return_value, + ) + mock_auth.assert_called_once_with( + ca_bundle='TEST_CA_BUNDLE', + config='TEST_KERBEROS_CONFIG', + delegate='TEST_DELEGATE', + force_preemptive=True, + hostname_override='TEST_HOSTNAME_OVERRIDE', + mutual_authentication='TEST_MUTUAL_AUTHENTICATION', + principal='TEST_PRINCIPAL', + sanitize_mutual_error_response=True, + service_name='TEST_SERVICE_NAME', + ) + assert mock_connect.return_value == conn + + @parameterized.expand( + [ + ('False', False), + ('false', False), + ('true', True), + ('True', True), + ('/tmp/cert.crt', '/tmp/cert.crt'), + ] + ) + def test_get_conn_verify(self, current_verify, expected_verify): + patcher_connect = patch('airflow.providers.trino.hooks.trino.trino.dbapi.connect') + patcher_get_connections =
patch('airflow.providers.trino.hooks.trino.TrinoHook.get_connection') + + with patcher_connect as mock_connect, patcher_get_connections as mock_get_connection: + mock_get_connection.return_value = Connection( + login='login', host='host', schema='hive', extra=json.dumps({'verify': current_verify}) + ) + mock_verify = mock.PropertyMock() + type(mock_connect.return_value._http_session).verify = mock_verify + + conn = TrinoHook().get_conn() + mock_verify.assert_called_once_with(expected_verify) + assert mock_connect.return_value == conn + + +class TestTrinoHook(unittest.TestCase): + def setUp(self): + super().setUp() + + self.cur = mock.MagicMock() + self.conn = mock.MagicMock() + self.conn.cursor.return_value = self.cur + conn = self.conn + + class UnitTestTrinoHook(TrinoHook): + conn_name_attr = 'test_conn_id' + + def get_conn(self): + return conn + + def get_isolation_level(self): + return IsolationLevel.READ_COMMITTED + + self.db_hook = UnitTestTrinoHook() + + @patch('airflow.hooks.dbapi.DbApiHook.insert_rows') + def test_insert_rows(self, mock_insert_rows): + table = "table" + rows = [("hello",), ("world",)] + target_fields = None + commit_every = 10 + self.db_hook.insert_rows(table, rows, target_fields, commit_every) + mock_insert_rows.assert_called_once_with(table, rows, None, 10) + + def test_get_first_record(self): + statement = 'SQL' + result_sets = [('row1',), ('row2',)] + self.cur.fetchone.return_value = result_sets[0] + + assert result_sets[0] == self.db_hook.get_first(statement) + self.conn.close.assert_called_once_with() + self.cur.close.assert_called_once_with() + self.cur.execute.assert_called_once_with(statement) + + def test_get_records(self): + statement = 'SQL' + result_sets = [('row1',), ('row2',)] + self.cur.fetchall.return_value = result_sets + + assert result_sets == self.db_hook.get_records(statement) + self.conn.close.assert_called_once_with() + self.cur.close.assert_called_once_with() + self.cur.execute.assert_called_once_with(statement) + + def test_get_pandas_df(self): + statement = 'SQL' + column = 'col' + result_sets = [('row1',), ('row2',)] + self.cur.description = [(column,)] + self.cur.fetchall.return_value = result_sets + df = self.db_hook.get_pandas_df(statement) + + assert column == df.columns[0] + + assert result_sets[0][0] == df.values.tolist()[0][0] + assert result_sets[1][0] == df.values.tolist()[1][0] + + self.cur.execute.assert_called_once_with(statement, None) + + +class TestTrinoHookIntegration(unittest.TestCase): + @pytest.mark.integration("trino") + @mock.patch.dict('os.environ', AIRFLOW_CONN_TRINO_DEFAULT="trino://airflow@trino:8080/") + def test_should_record_records(self): + hook = TrinoHook() + sql = "SELECT name FROM tpch.sf1.customer ORDER BY custkey ASC LIMIT 3" + records = hook.get_records(sql) + assert [['Customer#000000001'], ['Customer#000000002'], ['Customer#000000003']] == records + + @pytest.mark.integration("trino") + @pytest.mark.integration("kerberos") + def test_should_record_records_with_kerberos_auth(self): + conn_url = ( + 'trino://airflow@trino.example.com:7778/?' 
+ 'auth=kerberos&kerberos__service_name=HTTP&' + 'verify=False&' + 'protocol=https' + ) + with mock.patch.dict('os.environ', AIRFLOW_CONN_TRINO_DEFAULT=conn_url): + hook = TrinoHook() + sql = "SELECT name FROM tpch.sf1.customer ORDER BY custkey ASC LIMIT 3" + records = hook.get_records(sql) + assert [['Customer#000000001'], ['Customer#000000002'], ['Customer#000000003']] == records diff --git a/tests/serialization/test_dag_serialization.py b/tests/serialization/test_dag_serialization.py index 2046e226d953e..895f2cfb36404 100644 --- a/tests/serialization/test_dag_serialization.py +++ b/tests/serialization/test_dag_serialization.py @@ -60,6 +60,7 @@ "depends_on_past": False, "retries": 1, "retry_delay": {"__type": "timedelta", "__var": 300.0}, + "max_retry_delay": {"__type": "timedelta", "__var": 600.0}, "sla": {"__type": "timedelta", "__var": 100.0}, }, }, @@ -78,6 +79,7 @@ }, "is_paused_upon_creation": False, "_dag_id": "simple_dag", + "doc_md": "### DAG Tutorial Documentation", "fileloc": None, "tasks": [ { @@ -85,6 +87,7 @@ "owner": "airflow", "retries": 1, "retry_delay": 300.0, + "max_retry_delay": 600.0, "sla": 100.0, "_downstream_task_ids": [], "_inlets": [], @@ -108,11 +111,13 @@ } }, }, + "doc_md": "### Task Tutorial Documentation", }, { "task_id": "custom_task", "retries": 1, "retry_delay": 300.0, + "max_retry_delay": 600.0, "sla": 100.0, "_downstream_task_ids": [], "_inlets": [], @@ -160,12 +165,14 @@ def make_simple_dag(): default_args={ "retries": 1, "retry_delay": timedelta(minutes=5), + "max_retry_delay": timedelta(minutes=10), "depends_on_past": False, "sla": timedelta(seconds=100), }, start_date=datetime(2019, 8, 1), is_paused_upon_creation=False, access_control={"test_role": {permissions.ACTION_CAN_READ, permissions.ACTION_CAN_EDIT}}, + doc_md="### DAG Tutorial Documentation", ) as dag: CustomOperator(task_id='custom_task') BashOperator( @@ -173,6 +180,7 @@ def make_simple_dag(): bash_command='echo {{ task.task_id }}', owner='airflow', executor_config={"pod_override": executor_config_pod}, + doc_md="### Task Tutorial Documentation", ) return {'simple_dag': dag} @@ -848,6 +856,11 @@ def test_no_new_fields_added_to_base_operator(self): '_upstream_task_ids': set(), 'depends_on_past': False, 'do_xcom_push': True, + 'doc': None, + 'doc_json': None, + 'doc_md': None, + 'doc_rst': None, + 'doc_yaml': None, 'email': None, 'email_on_failure': True, 'email_on_retry': True, @@ -1023,6 +1036,56 @@ def test_dag_on_failure_callback_roundtrip(self, passed_failure_callback, expect assert deserialized_dag.has_on_failure_callback is expected_value + @parameterized.expand( + [ + ( + ['task_1', 'task_5', 'task_2', 'task_4'], + ['task_1', 'task_5', 'task_2', 'task_4'], + ), + ( + {'task_1', 'task_5', 'task_2', 'task_4'}, + ['task_1', 'task_2', 'task_4', 'task_5'], + ), + ( + ('task_1', 'task_5', 'task_2', 'task_4'), + ['task_1', 'task_5', 'task_2', 'task_4'], + ), + ( + { + "staging_schema": [ + {"key:": "foo", "value": "bar"}, + {"key:": "this", "value": "that"}, + "test_conf", + ] + }, + { + "staging_schema": [ + {"__type": "dict", "__var": {"key:": "foo", "value": "bar"}}, + { + "__type": "dict", + "__var": {"key:": "this", "value": "that"}, + }, + "test_conf", + ] + }, + ), + ( + {"task3": "test3", "task2": "test2", "task1": "test1"}, + {"task1": "test1", "task2": "test2", "task3": "test3"}, + ), + ( + ('task_1', 'task_5', 'task_2', 3, ["x", "y"]), + ['task_1', 'task_5', 'task_2', 3, ["x", "y"]], + ), + ] + ) + def test_serialized_objects_are_sorted(self, object_to_serialized, 
expected_output): + """Test Serialized Sets are sorted while list and tuple preserve order""" + serialized_obj = SerializedDAG._serialize(object_to_serialized) + if isinstance(serialized_obj, dict) and "__type" in serialized_obj: + serialized_obj = serialized_obj["__var"] + assert serialized_obj == expected_output + def test_kubernetes_optional(): """Serialisation / deserialisation continues to work without kubernetes installed""" diff --git a/tests/task/task_runner/test_standard_task_runner.py b/tests/task/task_runner/test_standard_task_runner.py index fcd4948477289..6a3ab5d280cb4 100644 --- a/tests/task/task_runner/test_standard_task_runner.py +++ b/tests/task/task_runner/test_standard_task_runner.py @@ -19,11 +19,12 @@ +import getpass import logging import os import time -import unittest from logging.config import dictConfig from unittest import mock import psutil +import pytest from airflow import models, settings from airflow.jobs.local_task_job import LocalTaskJob @@ -48,22 +49,24 @@ 'class': 'logging.StreamHandler', 'formatter': 'airflow.task', 'stream': 'ext://sys.stdout', - } + }, }, - 'loggers': {'airflow': {'handlers': ['console'], 'level': 'INFO', 'propagate': False}}, + 'loggers': {'airflow': {'handlers': ['console'], 'level': 'INFO', 'propagate': True}}, } -class TestStandardTaskRunner(unittest.TestCase): - @classmethod - def setUpClass(cls): +class TestStandardTaskRunner: + @pytest.fixture(autouse=True, scope="class") + def logging_and_db(self): + """ + This fixture sets up logging to have a different setup on the way in + (as the test environment does not have enough context for the normal + way to run) and ensures they reset back to normal on the way out. + """ dictConfig(LOGGING_CONFIG) - - @classmethod - def tearDownClass(cls): + yield airflow_logger = logging.getLogger('airflow') airflow_logger.handlers = [] - airflow_logger.propagate = True try: clear_db_runs() except Exception: # noqa pylint: disable=broad-except @@ -131,6 +134,43 @@ def test_start_and_terminate_run_as_user(self): assert runner.return_code() is not None + def test_early_reap_exit(self, caplog): + """ + Tests that when a child process running a task is killed externally + (e.g. by an OOM error, which we fake here), then we get return code + -9 and a log message. + """ + # Set up mock task + local_task_job = mock.Mock() + local_task_job.task_instance = mock.MagicMock() + local_task_job.task_instance.run_as_user = getpass.getuser() + local_task_job.task_instance.command_as_list.return_value = [ + 'airflow', + 'tasks', + 'test', + 'test_on_kill', + 'task1', + '2016-01-01', + ] + + # Kick off the runner + runner = StandardTaskRunner(local_task_job) + runner.start() + time.sleep(0.2) + + # Kill the child process externally from the runner + # Note that we have to do this from ANOTHER process, as if we just + # call os.kill here we're doing it from the parent process and it + # won't be the same as an external kill in terms of OS tracking.
+ pgid = os.getpgid(runner.process.pid) + os.system(f"kill -s KILL {pgid}") + time.sleep(0.2) + + runner.terminate() + + assert runner.return_code() == -9 + assert "running out of memory" in caplog.text + def test_on_kill(self): """ Test that ensures that clearing in the UI SIGTERMS diff --git a/tests/test_utils/asserts.py b/tests/test_utils/asserts.py index dccaad10e2894..fe8e7dbb0cfd6 100644 --- a/tests/test_utils/asserts.py +++ b/tests/test_utils/asserts.py @@ -63,9 +63,8 @@ def after_cursor_execute(self, *args, **kwargs): and __file__ != f.filename and ('session.py' not in f.filename and f.name != 'wrapper') ] - stack_info = ">".join([f"{f.filename.rpartition('/')[-1]}:{f.name}" for f in stack][-3:]) - lineno = stack[-1].lineno - self.result[f"{stack_info}:{lineno}"] += 1 + stack_info = ">".join([f"{f.filename.rpartition('/')[-1]}:{f.name}:{f.lineno}" for f in stack][-3:]) + self.result[f"{stack_info}"] += 1 count_queries = CountQueries # pylint: disable=invalid-name diff --git a/tests/test_utils/config.py b/tests/test_utils/config.py index c55a2b5033c05..7bffb475a22aa 100644 --- a/tests/test_utils/config.py +++ b/tests/test_utils/config.py @@ -38,6 +38,8 @@ def conf_vars(overrides): else: original[(section, key)] = None if value is not None: + if not conf.has_section(section): + conf.add_section(section) conf.set(section, key, value) else: conf.remove_option(section, key) diff --git a/tests/test_utils/perf/dags/elastic_dag.py b/tests/test_utils/perf/dags/elastic_dag.py index 9aa0a4d621c8c..a57a3287951cb 100644 --- a/tests/test_utils/perf/dags/elastic_dag.py +++ b/tests/test_utils/perf/dags/elastic_dag.py @@ -40,12 +40,10 @@ def parse_time_delta(time_str: str): """ parts = RE_TIME_DELTA.match(time_str) - # pylint: disable=do-not-use-asserts assert parts is not None, ( f"Could not parse any time information from '{time_str}'. " f"Examples of valid strings: '8h', '2d8h5m20s', '2m4s'" ) - # pylint: enable=do-not-use-asserts time_params = {name: float(param) for name, param in parts.groupdict().items() if param} return timedelta(**time_params) # type: ignore diff --git a/tests/utils/test_cli_util.py b/tests/utils/test_cli_util.py index c567f44d1eab6..6d88f662598cf 100644 --- a/tests/utils/test_cli_util.py +++ b/tests/utils/test_cli_util.py @@ -112,9 +112,19 @@ def test_get_dags(self): "airflow connections add dsfs --conn-login asd --conn-password test --conn-type google", "airflow connections add dsfs --conn-login asd --conn-password ******** --conn-type google", ), + ( + "airflow scheduler -p", + "airflow scheduler -p", + ), + ( + "airflow celery flower -p 8888", + "airflow celery flower -p 8888", + ), ] ) def test_cli_create_user_supplied_password_is_masked(self, given_command, expected_masked_command): + # '-p' value which is not password, like 'airflow scheduler -p' + # or 'airflow celery flower -p 8888', should not be masked args = given_command.split() expected_command = expected_masked_command.split() diff --git a/tests/utils/test_dag_processing.py b/tests/utils/test_dag_processing.py index dc082104a9165..913711d27073a 100644 --- a/tests/utils/test_dag_processing.py +++ b/tests/utils/test_dag_processing.py @@ -16,9 +16,13 @@ # specific language governing permissions and limitations # under the License. 
+import logging import multiprocessing import os +import pathlib +import socket import sys +import threading import unittest from datetime import datetime, timedelta from tempfile import TemporaryDirectory @@ -34,7 +38,7 @@ from airflow.models.serialized_dag import SerializedDagModel from airflow.models.taskinstance import SimpleTaskInstance from airflow.utils import timezone -from airflow.utils.callback_requests import TaskCallbackRequest +from airflow.utils.callback_requests import CallbackRequest, TaskCallbackRequest from airflow.utils.dag_processing import ( DagFileProcessorAgent, DagFileProcessorManager, @@ -49,7 +53,7 @@ from tests.test_utils.config import conf_vars from tests.test_utils.db import clear_db_dags, clear_db_runs, clear_db_serialized_dags -TEST_DAG_FOLDER = os.path.join(os.path.dirname(os.path.realpath(__file__)), os.pardir, 'dags') +TEST_DAG_FOLDER = pathlib.Path(__file__).parent.parent / 'dags' DEFAULT_DATE = timezone.datetime(2016, 1, 1) @@ -276,7 +280,7 @@ def test_handle_failure_callback_with_zombies_are_correctly_passed_to_dag_file_p Check that the same set of failure callback with zombies are passed to the dag file processors until the next zombie detection logic is invoked. """ - test_dag_path = os.path.join(TEST_DAG_FOLDER, 'test_example_bash_operator.py') + test_dag_path = TEST_DAG_FOLDER / 'test_example_bash_operator.py' with conf_vars({('scheduler', 'parsing_processes'): '1', ('core', 'load_examples'): 'False'}): dagbag = DagBag(test_dag_path, read_dags_from_db=False) with create_session() as session: @@ -305,7 +309,7 @@ def test_handle_failure_callback_with_zombies_are_correctly_passed_to_dag_file_p ) ] - test_dag_path = os.path.join(TEST_DAG_FOLDER, 'test_example_bash_operator.py') + test_dag_path = TEST_DAG_FOLDER / 'test_example_bash_operator.py' child_pipe, parent_pipe = multiprocessing.Pipe() async_mode = 'sqlite' not in conf.get('core', 'sql_alchemy_conn') @@ -334,12 +338,12 @@ def fake_processor_factory(*args, **kwargs): if async_mode: # Once for initial parse, and then again for the add_callback_to_queue assert len(fake_processors) == 2 - assert fake_processors[0]._file_path == test_dag_path + assert fake_processors[0]._file_path == str(test_dag_path) assert fake_processors[0]._callback_requests == [] else: assert len(fake_processors) == 1 - assert fake_processors[-1]._file_path == test_dag_path + assert fake_processors[-1]._file_path == str(test_dag_path) callback_requests = fake_processors[-1]._callback_requests assert {zombie.simple_task_instance.key for zombie in expected_failure_callback_requests} == { result.simple_task_instance.key for result in callback_requests @@ -403,7 +407,7 @@ def test_dag_with_system_exit(self): from airflow.jobs.scheduler_job import SchedulerJob dag_id = 'exit_test_dag' - dag_directory = os.path.normpath(os.path.join(TEST_DAG_FOLDER, os.pardir, "dags_with_system_exit")) + dag_directory = TEST_DAG_FOLDER.parent / 'dags_with_system_exit' # Delete the one valid DAG/SerializedDAG, and check that it gets re-created clear_db_dags() @@ -424,17 +428,90 @@ def test_dag_with_system_exit(self): manager._run_parsing_loop() + result = None while parent_pipe.poll(timeout=None): result = parent_pipe.recv() if isinstance(result, DagParsingStat) and result.done: break # Three files in folder should be processed - assert len(result.file_paths) == 3 + assert sum(stat.run_count for stat in manager._file_stats.values()) == 3 with create_session() as session: assert session.query(DagModel).get(dag_id) is not None + @conf_vars({('core', 
'load_examples'): 'False'}) + @pytest.mark.backend("mysql", "postgres") + @pytest.mark.execution_timeout(30) + def test_pipe_full_deadlock(self): + dag_filepath = TEST_DAG_FOLDER / "test_scheduler_dags.py" + + child_pipe, parent_pipe = multiprocessing.Pipe() + + # Shrink the buffers to exacerbate the problem! + for fd in (parent_pipe.fileno(),): + sock = socket.socket(fileno=fd) + sock.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 1024) + sock.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 1024) + sock.detach() + + exit_event = threading.Event() + + # To test this behaviour we need something that continually fills the + # parent pipe's buffer (and keeps it full). + def keep_pipe_full(pipe, exit_event): + n = 0 + while True: + if exit_event.is_set(): + break + + req = CallbackRequest(str(dag_filepath)) + try: + logging.debug("Sending CallbackRequests %d", n + 1) + pipe.send(req) + except TypeError: + # This is actually the error you get when the parent pipe + # is closed! Nicely handled, eh? + break + except OSError: + break + n += 1 + logging.debug(" Sent %d CallbackRequests", n) + + thread = threading.Thread(target=keep_pipe_full, args=(parent_pipe, exit_event)) + + fake_processors = [] + + def fake_processor_factory(*args, **kwargs): + nonlocal fake_processors + processor = FakeDagFileProcessorRunner._fake_dag_processor_factory(*args, **kwargs) + fake_processors.append(processor) + return processor + + manager = DagFileProcessorManager( + dag_directory=dag_filepath, + dag_ids=[], + # A reasonable large number to ensure that we trigger the deadlock + max_runs=100, + processor_factory=fake_processor_factory, + processor_timeout=timedelta(seconds=5), + signal_conn=child_pipe, + pickle_dags=False, + async_mode=True, + ) + + try: + thread.start() + + # If this completes without hanging, then the test is good! + manager._run_parsing_loop() + exit_event.set() + finally: + logging.info("Closing pipes") + parent_pipe.close() + child_pipe.close() + thread.join(timeout=1.0) + class TestDagFileProcessorAgent(unittest.TestCase): def setUp(self): @@ -465,7 +542,7 @@ class path, thus when reloading logging module the airflow.processor_manager with settings_context(SETTINGS_FILE_VALID): # Launch a process through DagFileProcessorAgent, which will try # reload the logging module. 
- test_dag_path = os.path.join(TEST_DAG_FOLDER, 'test_scheduler_dags.py') + test_dag_path = TEST_DAG_FOLDER / 'test_scheduler_dags.py' async_mode = 'sqlite' not in conf.get('core', 'sql_alchemy_conn') log_file_loc = conf.get('logging', 'DAG_PROCESSOR_MANAGER_LOG_LOCATION') @@ -493,7 +570,7 @@ def test_parse_once(self): clear_db_serialized_dags() clear_db_dags() - test_dag_path = os.path.join(TEST_DAG_FOLDER, 'test_scheduler_dags.py') + test_dag_path = TEST_DAG_FOLDER / 'test_scheduler_dags.py' async_mode = 'sqlite' not in conf.get('core', 'sql_alchemy_conn') processor_agent = DagFileProcessorAgent( test_dag_path, 1, type(self)._processor_factory, timedelta.max, [], False, async_mode @@ -517,7 +594,7 @@ def test_parse_once(self): assert dag_ids == [('test_start_date_scheduling',), ('test_task_start_date_scheduling',)] def test_launch_process(self): - test_dag_path = os.path.join(TEST_DAG_FOLDER, 'test_scheduler_dags.py') + test_dag_path = TEST_DAG_FOLDER / 'test_scheduler_dags.py' async_mode = 'sqlite' not in conf.get('core', 'sql_alchemy_conn') log_file_loc = conf.get('logging', 'DAG_PROCESSOR_MANAGER_LOG_LOCATION') diff --git a/tests/utils/test_dot_renderer.py b/tests/utils/test_dot_renderer.py index b0306233fc9dc..ca3ea01794135 100644 --- a/tests/utils/test_dot_renderer.py +++ b/tests/utils/test_dot_renderer.py @@ -23,9 +23,11 @@ from airflow.models import TaskInstance from airflow.models.dag import DAG from airflow.operators.bash import BashOperator +from airflow.operators.dummy import DummyOperator from airflow.operators.python import PythonOperator from airflow.utils import dot_renderer from airflow.utils.state import State +from airflow.utils.task_group import TaskGroup START_DATE = datetime.datetime.now() @@ -72,9 +74,16 @@ def test_should_render_dag_with_task_instances(self): source = dot.source # Should render DAG title assert "label=DAG_ID" in source - assert 'first [color=black fillcolor=tan shape=rectangle style="filled,rounded"]' in source - assert 'second [color=white fillcolor=green shape=rectangle style="filled,rounded"]' in source - assert 'third [color=black fillcolor=lime shape=rectangle style="filled,rounded"]' in source + assert ( + 'first [color=black fillcolor=tan label=first shape=rectangle style="filled,rounded"]' in source + ) + assert ( + 'second [color=white fillcolor=green label=second shape=rectangle style="filled,rounded"]' + in source + ) + assert ( + 'third [color=black fillcolor=lime label=third shape=rectangle style="filled,rounded"]' in source + ) def test_should_render_dag_orientation(self): orientation = "TB" @@ -105,3 +114,89 @@ def test_should_render_dag_orientation(self): # Should render DAG title with orientation assert "label=DAG_ID" in source assert f'label=DAG_ID labelloc=t rankdir={orientation}' in source + + def test_render_task_group(self): + with DAG(dag_id="example_task_group", start_date=START_DATE) as dag: + start = DummyOperator(task_id="start") + + with TaskGroup("section_1", tooltip="Tasks for section_1") as section_1: + task_1 = DummyOperator(task_id="task_1") + task_2 = BashOperator(task_id="task_2", bash_command='echo 1') + task_3 = DummyOperator(task_id="task_3") + + task_1 >> [task_2, task_3] + + with TaskGroup("section_2", tooltip="Tasks for section_2") as section_2: + task_1 = DummyOperator(task_id="task_1") + + with TaskGroup("inner_section_2", tooltip="Tasks for inner_section2"): + task_2 = BashOperator(task_id="task_2", bash_command='echo 1') + task_3 = DummyOperator(task_id="task_3") + task_4 = 
DummyOperator(task_id="task_4") + + [task_2, task_3] >> task_4 + + end = DummyOperator(task_id='end') + + start >> section_1 >> section_2 >> end + + dot = dot_renderer.render_dag(dag) + + assert dot.source == '\n'.join( + [ + 'digraph example_task_group {', + '\tgraph [label=example_task_group labelloc=t rankdir=LR]', + '\tend [color="#000000" fillcolor="#e8f7e4" label=end shape=rectangle ' + 'style="filled,rounded"]', + '\tsubgraph cluster_section_1 {', + '\t\tcolor="#000000" fillcolor="#6495ed7f" label=section_1 shape=rectangle style=filled', + '\t\t"section_1.upstream_join_id" [color="#000000" fillcolor=CornflowerBlue height=0.2 ' + 'label="" shape=circle style="filled,rounded" width=0.2]', + '\t\t"section_1.downstream_join_id" [color="#000000" fillcolor=CornflowerBlue height=0.2 ' + 'label="" shape=circle style="filled,rounded" width=0.2]', + '\t\t"section_1.task_1" [color="#000000" fillcolor="#e8f7e4" label=task_1 shape=rectangle ' + 'style="filled,rounded"]', + '\t\t"section_1.task_2" [color="#000000" fillcolor="#f0ede4" label=task_2 shape=rectangle ' + 'style="filled,rounded"]', + '\t\t"section_1.task_3" [color="#000000" fillcolor="#e8f7e4" label=task_3 shape=rectangle ' + 'style="filled,rounded"]', + '\t}', + '\tsubgraph cluster_section_2 {', + '\t\tcolor="#000000" fillcolor="#6495ed7f" label=section_2 shape=rectangle style=filled', + '\t\t"section_2.upstream_join_id" [color="#000000" fillcolor=CornflowerBlue height=0.2 ' + 'label="" shape=circle style="filled,rounded" width=0.2]', + '\t\t"section_2.downstream_join_id" [color="#000000" fillcolor=CornflowerBlue height=0.2 ' + 'label="" shape=circle style="filled,rounded" width=0.2]', + '\t\tsubgraph "cluster_section_2.inner_section_2" {', + '\t\t\tcolor="#000000" fillcolor="#6495ed7f" label=inner_section_2 shape=rectangle ' + 'style=filled', + '\t\t\t"section_2.inner_section_2.task_2" [color="#000000" fillcolor="#f0ede4" label=task_2 ' + 'shape=rectangle style="filled,rounded"]', + '\t\t\t"section_2.inner_section_2.task_3" [color="#000000" fillcolor="#e8f7e4" label=task_3 ' + 'shape=rectangle style="filled,rounded"]', + '\t\t\t"section_2.inner_section_2.task_4" [color="#000000" fillcolor="#e8f7e4" label=task_4 ' + 'shape=rectangle style="filled,rounded"]', + '\t\t}', + '\t\t"section_2.task_1" [color="#000000" fillcolor="#e8f7e4" label=task_1 shape=rectangle ' + 'style="filled,rounded"]', + '\t}', + '\tstart [color="#000000" fillcolor="#e8f7e4" label=start shape=rectangle ' + 'style="filled,rounded"]', + '\t"section_1.downstream_join_id" -> "section_2.upstream_join_id"', + '\t"section_1.task_1" -> "section_1.task_2"', + '\t"section_1.task_1" -> "section_1.task_3"', + '\t"section_1.task_2" -> "section_1.downstream_join_id"', + '\t"section_1.task_3" -> "section_1.downstream_join_id"', + '\t"section_1.upstream_join_id" -> "section_1.task_1"', + '\t"section_2.downstream_join_id" -> end', + '\t"section_2.inner_section_2.task_2" -> "section_2.inner_section_2.task_4"', + '\t"section_2.inner_section_2.task_3" -> "section_2.inner_section_2.task_4"', + '\t"section_2.inner_section_2.task_4" -> "section_2.downstream_join_id"', + '\t"section_2.task_1" -> "section_2.downstream_join_id"', + '\t"section_2.upstream_join_id" -> "section_2.inner_section_2.task_2"', + '\t"section_2.upstream_join_id" -> "section_2.inner_section_2.task_3"', + '\t"section_2.upstream_join_id" -> "section_2.task_1"', + '\tstart -> "section_1.upstream_join_id"', + '}', + ] + ) diff --git a/tests/utils/test_helpers.py b/tests/utils/test_helpers.py index 
diff --git a/tests/utils/test_helpers.py b/tests/utils/test_helpers.py index fffa2d44a8e10..bb7b4533aea3b 100644 --- a/tests/utils/test_helpers.py +++ b/tests/utils/test_helpers.py @@ -142,10 +142,17 @@ def test_merge_dicts_recursive_right_only(self): @conf_vars( { - ("webserver", "dag_default_view"): "custom", + ("webserver", "dag_default_view"): "graph", } ) def test_build_airflow_url_with_query(self): + """ + Test query generated with dag_id and params + """ query = {"dag_id": "test_dag", "param": "key/to.encode"} - url = build_airflow_url_with_query(query) - assert url == "/custom?dag_id=test_dag&param=key%2Fto.encode" + expected_url = "/graph?dag_id=test_dag&param=key%2Fto.encode" + + from airflow.www.app import cached_app + + with cached_app(testing=True).test_request_context(): + assert build_airflow_url_with_query(query) == expected_url
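The rewritten test also wraps the call in a Flask request context: `build_airflow_url_with_query` resolves the target view with Flask's `url_for`, which raises a `RuntimeError` outside an active application or request context. A hedged sketch of the pattern (query values are illustrative):

```python
from airflow.utils.helpers import build_airflow_url_with_query
from airflow.www.app import cached_app

query = {"dag_id": "example_dag", "param": "value"}

# Without test_request_context(), url_for would fail outside the webserver.
with cached_app(testing=True).test_request_context():
    print(build_airflow_url_with_query(query))  # e.g. /graph?dag_id=example_dag&param=value
```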
diff --git a/tests/utils/test_process_utils.py b/tests/utils/test_process_utils.py index 2c14ae42a47a5..21d6cdd98b6e4 100644 --- a/tests/utils/test_process_utils.py +++ b/tests/utils/test_process_utils.py @@ -136,23 +136,21 @@ def test_should_kill_process(self): num_process = subprocess.check_output(["ps", "-ax", "-o", "pid="]).decode().count("\n") assert before_num_process == num_process - @pytest.mark.quarantined def test_should_force_kill_process(self): - before_num_process = subprocess.check_output(["ps", "-ax", "-o", "pid="]).decode().count("\n") process = multiprocessing.Process(target=my_sleep_subprocess_with_signals, args=()) process.start() sleep(0) - num_process = subprocess.check_output(["ps", "-ax", "-o", "pid="]).decode().count("\n") - assert before_num_process + 1 == num_process + all_processes = subprocess.check_output(["ps", "-ax", "-o", "pid="]).decode().splitlines() + assert str(process.pid) in map(lambda x: x.strip(), all_processes) with self.assertLogs(process_utils.log) as cm: process_utils.kill_child_processes_by_pids([process.pid], timeout=0) assert any("Killing child PID" in line for line in cm.output) - - num_process = subprocess.check_output(["ps", "-ax", "-o", "pid="]).decode().count("\n") - assert before_num_process == num_process + sleep(0) + all_processes = subprocess.check_output(["ps", "-ax", "-o", "pid="]).decode().splitlines() + assert str(process.pid) not in map(lambda x: x.strip(), all_processes) class TestPatchEnviron(unittest.TestCase): diff --git a/tests/utils/test_session.py b/tests/utils/test_session.py new file mode 100644 index 0000000000000..08f317f42e2a3 --- /dev/null +++ b/tests/utils/test_session.py @@ -0,0 +1,52 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +import pytest + +from airflow.utils.session import provide_session + + +class TestSession: + def dummy_session(self, session=None): + return session + + def test_raised_provide_session(self): + with pytest.raises(ValueError, match="Function .*dummy has no `session` argument"): + + @provide_session + def dummy(): + pass + + def test_provide_session_without_args_and_kwargs(self): + assert self.dummy_session() is None + + wrapper = provide_session(self.dummy_session) + + assert wrapper() is not None + + def test_provide_session_with_args(self): + wrapper = provide_session(self.dummy_session) + + session = object() + assert wrapper(session) is session + + def test_provide_session_with_kwargs(self): + wrapper = provide_session(self.dummy_session) + + session = object() + assert wrapper(session=session) is session
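The new test module pins down the `provide_session` contract: decorating a function that lacks a `session` parameter raises `ValueError`; if the caller omits `session`, the decorator injects one and cleans it up; a session passed positionally or by keyword is used as-is. A small sketch under those assumptions (it presumes an initialized Airflow metadata database; names are illustrative):

```python
from airflow.models import DagRun
from airflow.utils.session import provide_session


@provide_session
def count_dag_runs(dag_id, session=None):
    # `session` is injected by the decorator when the caller omits it.
    return session.query(DagRun).filter(DagRun.dag_id == dag_id).count()


print(count_dag_runs("example_dag"))  # decorator opens, commits, and closes the session
```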
diff --git a/tests/www/test_app.py b/tests/www/test_app.py index b731db57551a4..dddfb71d1af1d 100644 --- a/tests/www/test_app.py +++ b/tests/www/test_app.py @@ -233,6 +233,12 @@ def test_should_set_permanent_session_timeout(self): app = application.cached_app(testing=True) assert app.config['PERMANENT_SESSION_LIFETIME'] == timedelta(minutes=3600) + @conf_vars({('webserver', 'cookie_samesite'): ''}) + @mock.patch("airflow.www.app.app", None) + def test_correct_default_is_set_for_cookie_samesite(self): + app = application.cached_app(testing=True) + assert app.config['SESSION_COOKIE_SAMESITE'] == 'Lax' + class TestFlaskCli(unittest.TestCase): def test_flask_cli_should_display_routes(self): diff --git a/tests/www/test_security.py b/tests/www/test_security.py index 9179be29a1240..46a1d6ba76601 100644 --- a/tests/www/test_security.py +++ b/tests/www/test_security.py @@ -33,6 +33,7 @@ from airflow.www import app as application from airflow.www.utils import CustomSQLAInterface from tests.test_utils import fab_utils +from tests.test_utils.asserts import assert_queries_count from tests.test_utils.db import clear_db_dags, clear_db_runs from tests.test_utils.mock_security_manager import MockSecurityManager @@ -102,7 +103,7 @@ def delete_roles(cls): fab_utils.delete_role(cls.app, role_name) def expect_user_is_in_role(self, user, rolename): - self.security_manager.init_role(rolename, []) + self.security_manager.bulk_sync_roles([{'role': rolename, 'perms': []}]) role = self.security_manager.find_role(rolename) if not role: self.security_manager.add_role(rolename) @@ -140,14 +141,28 @@ def tearDown(self): log.debug("Complete teardown!") def test_init_role_baseview(self): + role_name = 'MyRole7' + role_perms = [('can_some_other_action', 'AnotherBaseView')] + with pytest.warns( + DeprecationWarning, + match="`init_role` has been deprecated\\. Please use `bulk_sync_roles` instead\\.", + ): + self.security_manager.init_role(role_name, role_perms) + + role = self.appbuilder.sm.find_role(role_name) + assert role is not None + assert len(role_perms) == len(role.permissions) + + def test_bulk_sync_roles_baseview(self): role_name = 'MyRole3' role_perms = [('can_some_action', 'SomeBaseView')] - self.security_manager.init_role(role_name, perms=role_perms) + self.security_manager.bulk_sync_roles([{'role': role_name, 'perms': role_perms}]) + role = self.appbuilder.sm.find_role(role_name) assert role is not None assert len(role_perms) == len(role.permissions) - def test_init_role_modelview(self): + def test_bulk_sync_roles_modelview(self): role_name = 'MyRole2' role_perms = [ ('can_list', 'SomeModelView'), @@ -156,24 +171,33 @@ def test_init_role_modelview(self): (permissions.ACTION_CAN_EDIT, 'SomeModelView'), (permissions.ACTION_CAN_DELETE, 'SomeModelView'), ] - self.security_manager.init_role(role_name, role_perms) + mock_roles = [{'role': role_name, 'perms': role_perms}] + self.security_manager.bulk_sync_roles(mock_roles) + role = self.appbuilder.sm.find_role(role_name) assert role is not None assert len(role_perms) == len(role.permissions) + # Check short circuit works + with assert_queries_count(2): # One for permissionview, one for roles + self.security_manager.bulk_sync_roles(mock_roles) + def test_update_and_verify_permission_role(self): role_name = 'Test_Role' - self.security_manager.init_role(role_name, []) + role_perms = [] + mock_roles = [{'role': role_name, 'perms': role_perms}] + self.security_manager.bulk_sync_roles(mock_roles) role = self.security_manager.find_role(role_name) perm = self.security_manager.find_permission_view_menu(permissions.ACTION_CAN_EDIT, 'RoleModelView') self.security_manager.add_permission_role(role, perm) role_perms_len = len(role.permissions) - self.security_manager.init_role(role_name, []) + self.security_manager.bulk_sync_roles(mock_roles) new_role_perms_len = len(role.permissions) assert role_perms_len == new_role_perms_len + assert new_role_perms_len == 1 def test_verify_public_role_has_no_permissions(self): public = self.appbuilder.sm.find_role("Public") @@ -542,3 +566,57 @@ def test_correct_roles_have_perms_to_read_config(self): f"{role.name} should not have {permissions.ACTION_CAN_READ} " f"on {permissions.RESOURCE_CONFIG}" ) + + def test_create_dag_specific_permissions(self): + dag_id = 'some_dag_id' + dag_permission_name = self.security_manager.prefixed_dag_id(dag_id) + assert ('can_read', dag_permission_name) not in self.security_manager.get_all_permissions() + + dag_model = DagModel( + dag_id=dag_id, fileloc='/tmp/dag_.py', schedule_interval='2 2 * * *', is_paused=True + ) + self.session.add(dag_model) + self.session.commit() + + self.security_manager.create_dag_specific_permissions() + self.session.commit() + + assert ('can_read', dag_permission_name) in self.security_manager.get_all_permissions() + + # Make sure we short circuit when the perms already exist + with assert_queries_count(2): # One query to get DagModels, one query to get all perms + self.security_manager.create_dag_specific_permissions() + + def test_get_all_permissions(self): + with assert_queries_count(1): + perms = self.security_manager.get_all_permissions() + + assert isinstance(perms, set) + for perm in perms: + assert isinstance(perm, tuple) + assert len(perm) == 2 + + assert ('can_read', 'Connections') in perms + + def test_get_all_non_dag_permissionviews(self): + with assert_queries_count(1): + pvs = 
self.security_manager._get_all_non_dag_permissionviews() + + assert isinstance(pvs, dict) + for (perm_name, viewmodel_name), perm_view in pvs.items(): + assert isinstance(perm_name, str) + assert isinstance(viewmodel_name, str) + assert isinstance(perm_view, self.security_manager.permissionview_model) + + assert ('can_read', 'Connections') in pvs + + def test_get_all_roles_with_permissions(self): + with assert_queries_count(1): + roles = self.security_manager._get_all_roles_with_permissions() + + assert isinstance(roles, dict) + for role_name, role in roles.items(): + assert isinstance(role_name, str) + assert isinstance(role, self.security_manager.role_model) + + assert 'Admin' in roles
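Taken together, the changes above document the new role API: `init_role` now only emits a `DeprecationWarning` and delegates, while `bulk_sync_roles` accepts a list of `{'role': ..., 'perms': ...}` mappings and short-circuits to two read-only lookups when nothing needs to change. A hedged sketch, assuming `security_manager` is the `appbuilder.sm` of an initialized webserver app as in the tests (role and permission names are illustrative):

```python
from tests.test_utils.asserts import assert_queries_count

roles = [{'role': 'ConnectionsViewer', 'perms': [('can_read', 'Connections')]}]
security_manager.bulk_sync_roles(roles)  # creates the role and attaches the permission

# Re-syncing identical data should short-circuit: one query for the
# permission views and one for the roles, with no writes.
with assert_queries_count(2):
    security_manager.bulk_sync_roles(roles)
```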
diff --git a/tests/www/test_utils.py b/tests/www/test_utils.py index f6e53e4dbfbe1..f4e50d9480c8a 100644 --- a/tests/www/test_utils.py +++ b/tests/www/test_utils.py @@ -240,8 +240,19 @@ def test_wrapped_markdown_with_table(self): ) assert ( - '<table>\n<thead>\n<tr>\n<th>Job</th>\n<th>Duration</th>\n</tr>\n</thead>\n<tbody>\n<tr>\n' - '<td>ETL</td>\n<td>14m</td>\n</tr>\n</tbody>\n</table>' + '<div class="rich_doc" ><table>\n<thead>\n<tr>\n<th>Job</th>\n<th>Duration</th>\n</tr>\n' + '</thead>\n<tbody>\n<tr>\n<td>ETL</td>\n<td>14m</td>\n</tr>\n</tbody>\n</table></div>' ) == rendered + + def test_wrapped_markdown_with_indented_lines(self): + rendered = wrapped_markdown( + """ + # header + 1st line + 2nd line + """ + ) + + assert '<div class="rich_doc" ><h1>header</h1>\n<p>1st line\n2nd line</p></div>' == rendered
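The expectations above capture two behaviours of `wrapped_markdown`: the rendered HTML is wrapped in a `<div class="rich_doc" >` container, and uniformly indented Markdown (typical of triple-quoted strings in DAG docs) is dedented before rendering, so `# header` still becomes a heading. A quick sketch:

```python
from airflow.www.utils import wrapped_markdown

html = wrapped_markdown(
    """
    # header
    1st line
    2nd line
    """
)
print(html)
# Per the test above:
# <div class="rich_doc" ><h1>header</h1>
# <p>1st line
# 2nd line</p></div>
```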
diff --git a/tests/www/test_views.py b/tests/www/test_views.py index 5011547278c7e..b9ac21ef904a0 100644 --- a/tests/www/test_views.py +++ b/tests/www/test_views.py @@ -43,7 +43,7 @@ from airflow import models, settings, version from airflow.config_templates.airflow_local_settings import DEFAULT_LOGGING_CONFIG -from airflow.configuration import conf +from airflow.configuration import conf, initialize_config from airflow.executors.celery_executor import CeleryExecutor from airflow.jobs.base_job import BaseJob from airflow.models import DAG, Connection, DagRun, TaskInstance @@ -62,7 +62,7 @@ from airflow.utils.timezone import datetime from airflow.utils.types import DagRunType from airflow.www import app as application -from airflow.www.views import ConnectionModelView, get_safe_url +from airflow.www.views import ConnectionModelView, get_safe_url, truncate_task_duration from tests.test_utils import fab_utils from tests.test_utils.asserts import assert_queries_count from tests.test_utils.config import conf_vars @@ -361,6 +361,12 @@ def test_should_list_entrypoint_plugins_on_page_with_details(self): self.check_content_in_response("source", resp) self.check_content_in_response("test-entrypoint-testpluginview==1.0.0: