diff --git a/.coverage2.7rc b/.coverage2.7rc deleted file mode 100644 index bf159f595..000000000 --- a/.coverage2.7rc +++ /dev/null @@ -1,2 +0,0 @@ -[xml] -output=python_connector_2.7_coverage.xml diff --git a/.coverage3.4rc b/.coverage3.4rc deleted file mode 100644 index 4dc1c2ce8..000000000 --- a/.coverage3.4rc +++ /dev/null @@ -1,2 +0,0 @@ -[xml] -output=python_connector_3.4_coverage.xml diff --git a/.coverage3.5rc b/.coverage3.5rc deleted file mode 100644 index 236b04fee..000000000 --- a/.coverage3.5rc +++ /dev/null @@ -1,2 +0,0 @@ -[xml] -output=python_connector_3.5_coverage.xml diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 000000000..0c66ead66 --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1,15 @@ +# Since version 2.23 (released in August 2019), git-blame has a feature +# to ignore or bypass certain commits. +# +# This file contains a list of commits that are not likely what you +# are looking for in a blame, such as mass reformatting or renaming. +# You can set this file as a default ignore file for blame by running +# the following command. +# +# $ git config blame.ignoreRevsFile .git-blame-ignore-revs + +# Format of files with psf/black +998940692da07a0c2984f1963ace71731dcc11bc + +# License header update(s) +d695d7d159ea94d6211199b7ff40cbc66f5a1dde diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 000000000..7dcdf9e5f --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1 @@ +* @sfc-gh-mkeller @sfc-gh-kwagner diff --git a/.github/ISSUE_TEMPLATE.txt b/.github/ISSUE_TEMPLATE.txt deleted file mode 100644 index 6ef774120..000000000 --- a/.github/ISSUE_TEMPLATE.txt +++ /dev/null @@ -1,30 +0,0 @@ -Please answer these questions before submitting your issue. Thanks! - -1. What version of Python are you using (`python --version`)? - -2. What operating system and processor architecture are you using (`python -c 'import platform; print(platform.platform())'`)? - -3. What are the component versions in the environment (`pip list`)? - -4. What did you do? -If possible, provide a recipe for reproducing the error. -A complete runnable program is good. - -5. What did you expect to see? - -6. What did you see instead? - -7. Can you set logging to DEBUG and collect the logs? - -``` -import logging -import os - -for logger_name in ['snowflake.sqlalchemy', 'snowflake.connector', 'botocore']: - logger = logging.getLogger(logger_name) - logger.setLevel(logging.DEBUG) - ch = logging.StreamHandler() - ch.setLevel(logging.DEBUG) - ch.setFormatter(logging.Formatter('%(asctime)s - %(threadName)s %(filename)s:%(lineno)d - %(funcName)s() - %(levelname)s - %(message)s')) - logger.addHandler(ch) -``` diff --git a/.github/ISSUE_TEMPLATE/BUG_REPORT.md b/.github/ISSUE_TEMPLATE/BUG_REPORT.md new file mode 100644 index 000000000..2f81b99b6 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/BUG_REPORT.md @@ -0,0 +1,47 @@ +--- +name: Bug Report 🐞 +about: Something isn't working as expected? Here is the right place to report. +labels: bug, needs triage +--- + +Please answer these questions before submitting your issue. Thanks! + +1. What version of Python are you using? + + Replace with the output of `python --version --version` + +2. What operating system and processor architecture are you using? + + Replace with the output of `python -c 'import platform; print(platform.platform())'` + +3. What are the component versions in the environment (`pip freeze`)? + + Replace with the output of `python -m pip freeze` + +4. What did you do? 
+ + If possible, provide a recipe for reproducing the error. + A complete runnable program is good. + +5. What did you expect to see? + + What should have happened and what happened instead? + +6. Can you set logging to DEBUG and collect the logs? + + ``` + import logging + import os + + for logger_name in ('snowflake.connector',): + logger = logging.getLogger(logger_name) + logger.setLevel(logging.DEBUG) + ch = logging.StreamHandler() + ch.setLevel(logging.DEBUG) + ch.setFormatter(logging.Formatter('%(asctime)s - %(threadName)s %(filename)s:%(lineno)d - %(funcName)s() - %(levelname)s - %(message)s')) + logger.addHandler(ch) + ``` + + diff --git a/.github/ISSUE_TEMPLATE/FEATURE_REQUEST.md b/.github/ISSUE_TEMPLATE/FEATURE_REQUEST.md new file mode 100644 index 000000000..24038a005 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/FEATURE_REQUEST.md @@ -0,0 +1,13 @@ +--- +name: Feature Request 💡 +about: Suggest a new idea for the project. +labels: feature +--- + +## What is the current behavior? + +## What is the desired behavior? + +## How would this improve `snowflake-connector-python`? + +## References, Other Background diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 000000000..283993b5d --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,8 @@ +version: 2 +updates: + - package-ecosystem: "pip" + directory: "/" + schedule: + interval: "daily" + assignees: + - "sfc-gh-mkeller" diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 000000000..c212decb1 --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,18 @@ +Please answer these questions before submitting your pull requests. Thanks! + +1. What GitHub issue is this PR addressing? Make sure that there is an accompanying issue to your PR. + + Fixes #NNNN + +2. Fill out the following pre-review checklist: + + - [ ] I am adding a new automated test(s) to verify correctness of my new code + - [ ] I am adding new logging messages + - [ ] I am modifying authorization mechanisms + - [ ] I am adding new credentials + - [ ] I am modifying OCSP code + - [ ] I am adding a new dependency + +3. Please describe how your code solves the related issue. + + Please write a short description of how your code change solves the related issue. 
diff --git a/.github/repo_meta.yaml b/.github/repo_meta.yaml new file mode 100644 index 000000000..381401cfa --- /dev/null +++ b/.github/repo_meta.yaml @@ -0,0 +1,4 @@ +point_of_contact: @snowflakedb/client +production: true +code_owners_file_present: true +jira_area: Snowpark: Application Development Ecosystem diff --git a/.github/workflows/build_test.yml b/.github/workflows/build_test.yml new file mode 100644 index 000000000..35f42fef1 --- /dev/null +++ b/.github/workflows/build_test.yml @@ -0,0 +1,308 @@ +name: Build and Test + +on: + push: + branches: + - master + - main + tags: + - v* + pull_request: + branches: + - master + - main + - prep-** + workflow_dispatch: + inputs: + logLevel: + default: warning + description: "Log level" + required: true + tags: + description: "Test scenario tags" + +jobs: + lint: + name: Check linting + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: '3.7' + - name: Display Python version + run: python -c "import sys; import os; print(\"\n\".join(os.environ[\"PATH\"].split(os.pathsep))); print(sys.version); print(sys.executable);" + - name: Upgrade setuptools, pip and wheel + run: python -m pip install -U setuptools pip wheel + - name: Install tox + run: python -m pip install tox + - name: Set PY + run: echo "PY=$(python -VV | sha256sum | cut -d' ' -f1)" >> $GITHUB_ENV + - uses: actions/cache@v1 + with: + path: ~/.cache/pre-commit + key: pre-commit|${{ env.PY }}|${{ hashFiles('.pre-commit-config.yaml') }} + - name: Run fix_lint + run: python -m tox -e fix_lint + + whitesource: + name: Whitesource linux-3.7 + needs: lint + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: 3.7 + - name: Display Python version + run: python -c "import sys; print(sys.version)" + - name: Upgrade setuptools, pip, wheel and build + run: python -m pip install -U setuptools pip wheel build + - name: Build Python connector + run: python -m build --wheel . 
+ - name: Show wheels generated + run: ls -lh dist/ + - name: Run WhiteSource script + run: ./ci/wss.sh + env: + WHITESOURCE_API_KEY: ${{ secrets.WHITESOURCE_API_KEY }} + + build: + needs: lint + strategy: + matrix: + os: + - image: ubuntu-20.04 + id: manylinux_x86_64 + - image: ubuntu-20.04 + id: manylinux_aarch64 + - image: windows-2019 + id: win_amd64 + - image: macos-10.15 + id: macosx_x86_64 + - image: macos-10.15 + id: macosx_arm64 + python-version: ["3.7", "3.8", "3.9", "3.10"] + exclude: + - os: + id: macosx_arm64 + python-version: 3.7 + name: Build ${{ matrix.os.id }}-py${{ matrix.python-version }} + runs-on: ${{ matrix.os.image }} + steps: + - name: Set shortver + run: echo "shortver=${longver//./}" >> $GITHUB_ENV + env: + longver: ${{ matrix.python-version }} + shell: bash + - name: Set up QEMU + if: ${{ matrix.os.id == 'manylinux_aarch64' }} + uses: docker/setup-qemu-action@v1 + with: + platforms: all + - uses: actions/checkout@v2 + - name: Building wheel + uses: pypa/cibuildwheel@v2.3.1 + env: + CIBW_BUILD: cp${{ env.shortver }}-${{ matrix.os.id }} + MACOSX_DEPLOYMENT_TARGET: 10.14 # Should be kept in sync with ci/build_darwin.sh + with: + output-dir: dist + - name: Show wheels generated + run: ls -lh dist + shell: bash + - uses: actions/upload-artifact@v1 + with: + name: ${{ matrix.os.id }}_py${{ matrix.python-version }} + path: dist/ + + test: + name: Test ${{ matrix.os.download_name }}-${{ matrix.python-version }}-${{ matrix.cloud-provider }} + needs: build + runs-on: ${{ matrix.os.image_name }} + strategy: + fail-fast: false + matrix: + os: + - image_name: ubuntu-latest + download_name: manylinux_x86_64 + - image_name: macos-latest + download_name: macosx_x86_64 + - image_name: windows-2019 + download_name: win_amd64 + python-version: ["3.7", "3.8", "3.9", "3.10"] + cloud-provider: [aws, azure, gcp] + steps: + - uses: actions/checkout@v2 + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - name: Display Python version + run: python -c "import sys; print(sys.version)" + - name: Setup parameters file + shell: bash + env: + PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }} + run: | + gpg --quiet --batch --yes --decrypt --passphrase="$PARAMETERS_SECRET" \ + .github/workflows/parameters/public/parameters_${{ matrix.cloud-provider }}.py.gpg > test/parameters.py + - name: Download wheel(s) + uses: actions/download-artifact@v2 + with: + name: ${{ matrix.os.download_name }}_py${{ matrix.python-version }} + path: dist + - name: Show wheels downloaded + run: ls -lh dist + shell: bash + - name: Upgrade setuptools, pip and wheel + run: python -m pip install -U setuptools pip wheel + - name: Install tox + run: python -m pip install tox tox-external-wheels + - name: Run tests + run: python -m tox -e "py${PYTHON_VERSION/\./}-{unit,integ,pandas,sso}-ci" + env: + PYTHON_VERSION: ${{ matrix.python-version }} + cloud_provider: ${{ matrix.cloud-provider }} + PYTEST_ADDOPTS: --color=yes --tb=short + TOX_PARALLEL_NO_SPINNER: 1 + shell: bash + - name: Combine coverages + run: python -m tox -e coverage --skip-missing-interpreters false + shell: bash + - uses: actions/upload-artifact@v2 + with: + name: coverage_${{ matrix.os.download_name }}-${{ matrix.python-version }}-${{ matrix.cloud-provider }} + path: | + .tox/.coverage + .tox/coverage.xml + + test-olddriver: + name: Old Driver Test ${{ matrix.os.download_name }}-${{ matrix.python-version }}-${{ matrix.cloud-provider }} + needs: lint + runs-on: ${{ matrix.os.image_name }} + 
strategy: + fail-fast: false + matrix: + os: + - image_name: ubuntu-latest + download_name: linux + python-version: [3.7] + cloud-provider: [aws] + steps: + - uses: actions/checkout@v2 + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - name: Display Python version + run: python -c "import sys; print(sys.version)" + - name: Setup parameters file + shell: bash + env: + PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }} + run: | + gpg --quiet --batch --yes --decrypt --passphrase="$PARAMETERS_SECRET" \ + .github/workflows/parameters/public/parameters_${{ matrix.cloud-provider }}.py.gpg > test/parameters.py + - name: Upgrade setuptools, pip and wheel + run: python -m pip install -U setuptools pip wheel + - name: Install tox + run: python -m pip install tox + - name: Run tests + run: python -m tox -e olddriver + env: + PYTHON_VERSION: ${{ matrix.python-version }} + cloud_provider: ${{ matrix.cloud-provider }} + PYTEST_ADDOPTS: --color=yes --tb=short + shell: bash + + test-fips: + name: Test FIPS linux-3.7-${{ matrix.cloud-provider }} + needs: build + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + cloud-provider: [aws] + steps: + - uses: actions/checkout@v2 + - name: Setup parameters file + shell: bash + env: + PARAMETERS_SECRET: ${{ secrets.PARAMETERS_SECRET }} + run: | + gpg --quiet --batch --yes --decrypt --passphrase="$PARAMETERS_SECRET" \ + .github/workflows/parameters/public/parameters_${{ matrix.cloud-provider }}.py.gpg > test/parameters.py + - name: Download wheel(s) + uses: actions/download-artifact@v2 + with: + name: manylinux_x86_64_py3.7 + path: dist + - name: Show wheels downloaded + run: ls -lh dist + shell: bash + - name: Run tests + run: ./ci/test_fips_docker.sh + env: + PYTHON_VERSION: 3.7 + cloud_provider: ${{ matrix.cloud-provider }} + PYTEST_ADDOPTS: --color=yes --tb=short + TOX_PARALLEL_NO_SPINNER: 1 + shell: bash + - uses: actions/upload-artifact@v2 + with: + name: coverage_linux-fips-3.7-${{ matrix.cloud-provider }} + path: | + .coverage + coverage.xml + + combine-coverage: + if: ${{ success() || failure() }} + name: Combine coverage + needs: [test, test-fips] + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - uses: actions/download-artifact@v2 + with: + path: artifacts + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: '3.7' + - name: Display Python version + run: python -c "import sys; print(sys.version)" + - name: Upgrade setuptools and pip + run: python -m pip install -U setuptools pip wheel + - name: Install tox + run: python -m pip install tox + - name: Collect all coverages to one dir + run: | + python -c ' + from pathlib import Path + import shutil + + src_dir = Path("artifacts") + dst_dir = Path(".") / ".tox" + dst_dir.mkdir() + for src_file in src_dir.glob("*/.coverage"): + dst_file = dst_dir / ".coverage.{}".format(src_file.parent.name[9:]) + print("{} copy to {}".format(src_file, dst_file)) + shutil.copy(str(src_file), str(dst_file))' + - name: Combine coverages + run: python -m tox -e coverage + - name: Publish html coverage + uses: actions/upload-artifact@v2 + with: + name: overall_cov_html + path: .tox/htmlcov + - name: Publish xml coverage + uses: actions/upload-artifact@v2 + with: + name: overall_cov_xml + path: .tox/coverage.xml + - uses: codecov/codecov-action@v1 + with: + file: .tox/coverage.xml diff --git a/.github/workflows/cla_bot.yml b/.github/workflows/cla_bot.yml new file mode 100644 index 000000000..367e18738 --- 
/dev/null +++ b/.github/workflows/cla_bot.yml @@ -0,0 +1,24 @@ +name: "CLA Assistant" +on: + issue_comment: + types: [created] + pull_request_target: + types: [opened,closed,synchronize] + +jobs: + CLAssistant: + runs-on: ubuntu-latest + steps: + - name: "CLA Assistant" + if: (github.event.comment.body == 'recheck' || github.event.comment.body == 'I have read the CLA Document and I hereby sign the CLA') || github.event_name == 'pull_request_target' + uses: contributor-assistant/github-action/@master + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + PERSONAL_ACCESS_TOKEN : ${{ secrets.CLA_BOT_TOKEN }} + with: + path-to-signatures: 'signatures/version1.json' + path-to-document: 'https://github.com/Snowflake-Labs/CLA/blob/main/README.md' + branch: 'main' + allowlist: 'dependabot[bot],github-actions' + remote-organization-name: 'snowflakedb' + remote-repository-name: 'cla-db' diff --git a/.github/workflows/create_req_files.yml b/.github/workflows/create_req_files.yml new file mode 100644 index 000000000..e3f1e7c96 --- /dev/null +++ b/.github/workflows/create_req_files.yml @@ -0,0 +1,63 @@ +name: Create Requirements Files + +on: + workflow_dispatch: + +jobs: + create-req-files: + name: Create requirements files + runs-on: ubuntu-latest + strategy: + matrix: + python-version: ["3.7", "3.8", "3.9", "3.10"] + steps: + - uses: actions/checkout@v2 + - name: Set up Python + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - name: Display Python version + run: python -c "import sys; print(sys.version)" + - name: Upgrade setuptools, pip and wheel + run: python -m pip install -U setuptools pip wheel + - name: Install Python Connector + shell: bash + run: python -m pip install . + - name: Generate reqs file name + shell: bash + run: echo "requirements_file=temp_requirement/requirements_$(python -c 'from sys import version_info;print(str(version_info.major)+str(version_info.minor))').reqs" >> $GITHUB_ENV + - name: Create reqs file + shell: bash + run: | + mkdir temp_requirement + echo "# Generated on: $(python --version)" >${{ env.requirements_file }} + python -m pip freeze | grep -v snowflake-connector-python 1>>${{ env.requirements_file }} 2>/dev/null + echo "snowflake-connector-python==$(python -m pip show snowflake-connector-python | grep ^Version | cut -d' ' -f2-)" >>${{ env.requirements_file }} + id: create-reqs-file + - name: Show created req file + shell: bash + run: cat ${{ env.requirements_file }} + - uses: actions/upload-artifact@v2 + with: + path: temp_requirement + + push-files: + needs: create-req-files + name: Commit and push files + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + with: + token: ${{ secrets.PAT }} + - name: Download requirement files + uses: actions/download-artifact@v2 + with: + name: artifact + path: tested_requirements + - name: Commit and push new requirements files + run: | + git config user.name github-actions + git config user.email github-actions@github.com + git add tested_requirements + git commit -m "Update requirements files" -a + git push diff --git a/.github/workflows/jira_close.yml b/.github/workflows/jira_close.yml new file mode 100644 index 000000000..dfcb8bc73 --- /dev/null +++ b/.github/workflows/jira_close.yml @@ -0,0 +1,35 @@ +name: Jira closure + +on: + issues: + types: [closed, deleted] + +jobs: + close-issue: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + with: + repository: snowflakedb/gh-actions + ref: jira_v1 + token: ${{ secrets.SNOWFLAKE_GITHUB_TOKEN }} 
# stored in GitHub secrets + path: . + - name: Jira login + uses: atlassian/gajira-login@master + env: + JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }} + JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }} + JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }} + - name: Extract issue from title + id: extract + env: + TITLE: "${{ github.event.issue.title }}" + run: | + jira=$(echo -n $TITLE | awk '{print $1}' | sed -e 's/://') + echo ::set-output name=jira::$jira + - name: Close issue + uses: ./jira/gajira-close + if: startsWith(steps.extract.outputs.jira, 'SNOW-') + with: + issue: "${{ steps.extract.outputs.jira }}" diff --git a/.github/workflows/jira_comment.yml b/.github/workflows/jira_comment.yml new file mode 100644 index 000000000..954929fa6 --- /dev/null +++ b/.github/workflows/jira_comment.yml @@ -0,0 +1,29 @@ +name: Jira comment + +on: + issue_comment: + types: [created] + +jobs: + comment-issue: + runs-on: ubuntu-latest + steps: + - name: Jira login + uses: atlassian/gajira-login@master + env: + JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }} + JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }} + JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }} + - name: Extract issue from title + id: extract + env: + TITLE: "${{ github.event.issue.title }}" + run: | + jira=$(echo -n $TITLE | awk '{print $1}' | sed -e 's/://') + echo ::set-output name=jira::$jira + - name: Comment on issue + uses: atlassian/gajira-comment@master + if: startsWith(steps.extract.outputs.jira, 'SNOW-') + with: + issue: "${{ steps.extract.outputs.jira }}" + comment: "${{ github.event.comment.user.login }} commented:\n\n${{ github.event.comment.body }}\n\n${{ github.event.comment.html_url }}" diff --git a/.github/workflows/jira_issue.yml b/.github/workflows/jira_issue.yml new file mode 100644 index 000000000..c3da1061e --- /dev/null +++ b/.github/workflows/jira_issue.yml @@ -0,0 +1,48 @@ +name: Jira creation + +on: + issues: + types: [opened] + issue_comment: + types: [created] + +jobs: + create-issue: + runs-on: ubuntu-latest + if: ((github.event_name == 'issue_comment' && github.event.comment.body == 'recreate jira' && github.event.comment.user.login == 'sfc-gh-mkeller') || (github.event_name == 'issues' && github.event.pull_request.user.login != 'whitesource-for-github-com[bot]')) + steps: + - name: Checkout + uses: actions/checkout@v2 + with: + repository: snowflakedb/gh-actions + ref: jira_v1 + token: ${{ secrets.SNOWFLAKE_GITHUB_TOKEN }} # stored in GitHub secrets + path: . 
+ + - name: Login + uses: atlassian/gajira-login@v2.0.0 + env: + JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }} + JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }} + JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }} + + - name: Create JIRA Ticket + id: create + uses: atlassian/gajira-create@v2.0.1 + with: + project: SNOW + issuetype: Bug + summary: '${{ github.event.issue.title }}' + description: | + ${{ github.event.issue.body }} \\ \\ _Created from GitHub Action_ for ${{ github.event.issue.html_url }} + fields: '{"customfield_11401":{"id":"13474"},"assignee":{"id":"61027a237ab143006ecfb9a2"},"components":[{"id":"16413"}]}' + + - name: Update GitHub Issue + uses: ./jira/gajira-issue-update + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + issue_number: "{{ event.issue.id }}" + owner: "{{ event.repository.owner.login }}" + name: "{{ event.repository.name }}" + jira: "${{ steps.create.outputs.issue }}" diff --git a/.github/workflows/parameters/private/jenkins_test_parameters.py.gpg b/.github/workflows/parameters/private/jenkins_test_parameters.py.gpg new file mode 100644 index 000000000..286e54a54 Binary files /dev/null and b/.github/workflows/parameters/private/jenkins_test_parameters.py.gpg differ diff --git a/.github/workflows/parameters/private/parameters_aws.py.gpg b/.github/workflows/parameters/private/parameters_aws.py.gpg new file mode 100644 index 000000000..42c9ec487 Binary files /dev/null and b/.github/workflows/parameters/private/parameters_aws.py.gpg differ diff --git a/.github/workflows/parameters/private/parameters_aws_jenkins.py.gpg b/.github/workflows/parameters/private/parameters_aws_jenkins.py.gpg new file mode 100644 index 000000000..929c8507f Binary files /dev/null and b/.github/workflows/parameters/private/parameters_aws_jenkins.py.gpg differ diff --git a/.github/workflows/parameters/private/parameters_azure.py.gpg b/.github/workflows/parameters/private/parameters_azure.py.gpg new file mode 100644 index 000000000..55f90e5c9 Binary files /dev/null and b/.github/workflows/parameters/private/parameters_azure.py.gpg differ diff --git a/.github/workflows/parameters/private/parameters_azure_jenkins.py.gpg b/.github/workflows/parameters/private/parameters_azure_jenkins.py.gpg new file mode 100644 index 000000000..6ab7ed0d7 Binary files /dev/null and b/.github/workflows/parameters/private/parameters_azure_jenkins.py.gpg differ diff --git a/.github/workflows/parameters/private/parameters_gcp.py.gpg b/.github/workflows/parameters/private/parameters_gcp.py.gpg new file mode 100644 index 000000000..79ececd19 Binary files /dev/null and b/.github/workflows/parameters/private/parameters_gcp.py.gpg differ diff --git a/.github/workflows/parameters/private/parameters_gcp_jenkins.py.gpg b/.github/workflows/parameters/private/parameters_gcp_jenkins.py.gpg new file mode 100644 index 000000000..ad350ee74 Binary files /dev/null and b/.github/workflows/parameters/private/parameters_gcp_jenkins.py.gpg differ diff --git a/.github/workflows/parameters/public/parameters_aws.py.gpg b/.github/workflows/parameters/public/parameters_aws.py.gpg new file mode 100644 index 000000000..3d79d64d7 Binary files /dev/null and b/.github/workflows/parameters/public/parameters_aws.py.gpg differ diff --git a/.github/workflows/parameters/public/parameters_azure.py.gpg
b/.github/workflows/parameters/public/parameters_azure.py.gpg new file mode 100644 index 000000000..cca1fa4e5 Binary files /dev/null and b/.github/workflows/parameters/public/parameters_azure.py.gpg differ diff --git a/.github/workflows/parameters/public/parameters_gcp.py.gpg b/.github/workflows/parameters/public/parameters_gcp.py.gpg new file mode 100644 index 000000000..c56f89bff Binary files /dev/null and b/.github/workflows/parameters/public/parameters_gcp.py.gpg differ diff --git a/.gitignore b/.gitignore index 616e6e0cd..dfd7b2a35 100644 --- a/.gitignore +++ b/.gitignore @@ -1,10 +1,18 @@ +test/parameters*.py + # Byte-compiled / optimized / DLL files -__pycache__/ +.mypy_cache +__pycache__ *.py[cod] *$py.class # C extensions +*.a +*.dll +*.exe +*.o *.so +*.dylib # Distribution / packaging .Python @@ -79,7 +87,7 @@ celerybeat-schedule .env # virtualenv -venv/ +venv*/ ENV/ # Spyder project settings @@ -94,5 +102,25 @@ ENV/ # others generated_version.py *coverage.xml -.idea/ .pytest_cache/ +snowflake_connector_python.egg-info/ +.tox/ + +# WhiteSource Scan +wss*.config +wss-unified-agent.jar +whitesource/ + +# core dumps +core.* + +# Debug specific +.gdb_history + +# Editor specific +.idea/ +.vscode/ +*.code-workspace + +# Compiled Cython +src/snowflake/connector/arrow_iterator.cpp diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 000000000..47da1f074 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,69 @@ +exclude: '^(.*egg.info.*|.*/parameters.py|src/snowflake/connector/vendored/.*/).*$' +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.1.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + exclude: license_header.txt + - id: check-yaml + exclude: .github/repo_meta.yaml + - id: debug-statements + - id: check-ast +- repo: https://github.com/Lucas-C/pre-commit-hooks.git + rev: v1.1.13 + hooks: + - id: insert-license + name: insert-py-license + files: > + (?x)^( + src/snowflake/connector/.*\.pyx?| + test/.*\.py| + )$ + exclude: > + (?x)^( + src/snowflake/connector/version.py| + src/snowflake/connector/cpp| + )$ + args: + - --license-filepath + - license_header.txt + - id: insert-license + name: insert-cpp-license + files: src/snowflake/connector/cpp/.*\.(cpp|hpp)$ + exclude: version.py$ + args: + - --comment-style + - // + - --license-filepath + - license_header.txt +- repo: https://github.com/asottile/yesqa + rev: v1.3.0 + hooks: + - id: yesqa +- repo: https://github.com/mgedmin/check-manifest + rev: "0.48" + hooks: + - id: check-manifest +- repo: https://github.com/PyCQA/isort + rev: 5.10.1 + hooks: + - id: isort +- repo: https://github.com/asottile/pyupgrade + rev: v2.31.1 + hooks: + - id: pyupgrade + args: [--py37-plus] +- repo: https://gitlab.com/pycqa/flake8 + rev: 3.9.2 + hooks: + - id: flake8 + additional_dependencies: + - flake8-bugbear == 20.11.1 +- repo: https://github.com/psf/black + rev: 22.3.0 + hooks: + - id: black + args: + - --safe + language_version: python3 diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 155aa8e01..000000000 --- a/.travis.yml +++ /dev/null @@ -1,29 +0,0 @@ -sudo: required -dist: xenial -language: python -matrix: - include: - - os: linux - python: '2.7' - - os: linux - python: '3.5' - - os: linux - python: '3.6' - - os: linux - python: '3.7' - - os: osx - language: generic - env: PYTHON_VERSION=3.6.7 - - os: linux - python: '3.7' - env: SNOWFLAKE_AZURE=true -install: - - "./scripts/install.sh" -script: - - "./scripts/run_travis.sh" -after_success: - - 
bash <(curl -s https://codecov.io/bash) -env: - global: - secure: dFpjLRaP1GcqHB449/6tca+63dZk8iAyD1afM5GK74HiO4dNAwGlONFY0KNnFVIkVDO7XJu0DpbNL/gXZy+EZ2WKis/vcwb2FhkYoiSJ1oZZ6fr/QbwyVZMH1y5L/gxipb5+MlBZyOQGX+9OZNQaGaSRd8kygOX8maBGm3IZFsNhjq6RwbBTLP3Zr8cJ+xrkrj7r413jKy/0xVKVr234gl4VU5IN2k6ovT4KBTyRMhwI5ImWL4ZCiJ7cZbfuLCaOuCdtXTzxU32O0gjbDV1D6ryezVfjtKzdUSLzb+71nT444xXq1OFDWxvODWdE0MBYQEdqHsbhg/Bj/Gp+i1smH4s5wgZ1r7TQMX/oUnsKSkCx0NphS57FKTiYdMGl5o78JuFNf02o2CYux6Zv0LEky/EhsP1o1vclm8LKZiHERZF6fALrOVDTibPw8wKSjHK8NkcglR2d/9X3DOvddxwfN7oJjt/cjIkkDTtYE4WdOZ06EUVzSgUVjaj8JqdlOaWqYBsJRzb0b2uWfU8L5yJ9DJemmWxhhsilEhD7SGPOphuGjHVXa2YivXxl7lO7pJgWWWG37T+XYhbGxIusSP+VwSOcP2LykQbg0S964344dpk2T/CKX0JpeBPYhRoRFvOLBt7HsItT2d9kbSYVJ07xuRkMUMxN7jm4dl+Pxsy3gxs= - secure: iFgCTUE6izlHLYSJuDFpGwj+wI6yY7zjkGXLME7W0RHCQk1xjwN9o0EwsEhE0+/WPlxn4SrXI7gKp+2+z94ImtZY7MiM1qOzUaS0FrrwoMPpvIb3e8csGfI5mKbiKOoZNCMTD3wdxc+sohZ5GHc/qeUvNw6WLJL15V8m+w/qPeaGTj6mC/Oys18d91uz2WD9erIeVx0eQx653lL+pd+/D4dPk43/Jy19vLmliie3sragjcbS9lRdRvlp8WDJ+y41D5EPRai+F1Y5CWP0rKDV3vrmdy50DYrzu8X9kTCcXq1CjS90o/Wb+E640awhlvjHw4ocDcNAfZv5BQoZ2YcqZWhdSIICGiei70NMOdCdWyTHmLgFu+7TqGQdc3mtaPWcC1BQ28urWcWsivjj83aCxJkshTdfpaTV6xfnYaJFTm6+oa2Yq2wDadhjzA4OAxtP9FIKurfhE6zaSGJUiFtXyCAEKUwQKGkfe6Dw3JmfusxPlDYaYu0mD9g+OJn1jcoVeuwwVf3HLoW/dNp+FWDsnroImMAc1D0eiwgMTovuSmpPFD+zi19yNpCNlJcFIfH85JxUj8q44+yiEFrdTD1tj5SgXBp7fOGW+wH8l5u+rGKEGiL1j8c5uYEahRW/QhmrjHZfgcZiKz4VKX5DmRysCbD0qWYaUZBZQkFZwKCZUc0= diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 000000000..94bebad51 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,35 @@ +# Contributing to snowflake-connector-python + +Hi, thank you for taking the time to improve Snowflake's Python connector! + +## I have a feature request or a bug report to submit + +Many questions can be answered by checking our [docs](https://docs.snowflake.com/) or looking for already existing bug reports and enhancement requests on our [issue tracker](https://github.com/snowflakedb/snowflake-connector-python/issues). + +Please start by checking these first! + +## Nobody else had my idea/issue + +In that case we'd love to hear from you! +Please [open a new issue](https://github.com/snowflakedb/snowflake-connector-python/issues/new/choose) to get in touch with us. + +## I'd like to contribute the bug fix or feature myself + +We encourage everyone to first open an issue to discuss any feature work or bug fixes with one of the maintainers. +This should help guide contributors through potential pitfalls. + +### Set up a development environment + +What is a development environment? It's a [virtualenv](https://virtualenv.pypa.io) that has all of the necessary +dependencies installed, with `snowflake-connector-python` installed as an editable package. + +Setting up a development environment is super easy with this [one simple tox command](https://tox.wiki/en/latest/example/devenv.html). + +```shell +tox --devenv venv37 -e py37 +. venv37/bin/activate +``` + +Note: we suggest using the lowest supported Python version for development. + +To run tests, please see our [testing README](test/README.md).
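As a quick sanity check of the editable install described above (a hypothetical session run inside the activated `venv37` environment from the tox command; not part of the official docs):

```python
# Run inside the activated development virtualenv.
import snowflake.connector

# An editable install should import cleanly and report its version.
print(snowflake.connector.__version__)
```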
diff --git a/DESCRIPTION.rst b/DESCRIPTION.md similarity index 55% rename from DESCRIPTION.rst rename to DESCRIPTION.md index c944ff939..c96d0e7be 100644 --- a/DESCRIPTION.rst +++ b/DESCRIPTION.md @@ -2,14 +2,396 @@ This package includes the Snowflake Connector for Python, which conforms to the https://www.python.org/dev/peps/pep-0249/ Snowflake Documentation is available at: -https://docs.snowflake.net/ +https://docs.snowflake.com/ Source code is also available at: https://github.com/snowflakedb/snowflake-connector-python -Release Notes ------------------------------------------------------------------------------- +# Release Notes -- v1.9.0(August 26,2019) + + +- v2.7.7(April 30,2022) + + - Bumped supported pandas version to < 1.5.0 + - Fixed a bug where the partner name (from the SF_PARTNER environment variable) was set after the connection was established + - Added a new _no_retry option to executing queries + - Fixed a bug where extreme timestamps lost precision + + +- v2.7.6(March 17,2022) + + - Fixed missing python_requires tag in setup.cfg + +- v2.7.5(March 17,2022) + + - Added an option for partners to inject their name through an environment variable (SF_PARTNER) + - Fixed a bug where we would not wait for input if a browser window couldn't be opened for SSO login + - Deprecate support for Python 3.6 + - Exported a type definition for SnowflakeConnection + - Fixed a bug where the final Arrow table would contain duplicate index numbers when using fetch_pandas_all + +- v2.7.4(February 05,2022) + + - Add Geography Types + - Removing automated incident reporting code + - Fixed a bug where a circular reference would prevent garbage collection on some objects + - Fixed a bug where `DatabaseError` was thrown when executing against a closed cursor instead of `InterfaceError` + - Fixed a bug where calling `executemany` would crash if an iterator was supplied as args + - Fixed a bug where violating a `NOT NULL` constraint raised `DatabaseError` instead of `IntegrityError` + +- v2.7.3(January 22,2022) + + - Fixed a bug where the timezone was missing from retrieved Timestamp_TZ columns + - Fixed a bug where a long running PUT/GET command could hit a Storage Credential Error while renewing credentials + - Fixed a bug where py.typed was not being included in our release wheels + - Fixed a bug where negative numbers were mangled when fetched with the connection parameter arrow_number_to_decimal + - Improved the error message that is encountered when running GET for a non-existing file + - Fixed rendering of our long description for PyPI + - Fixed a bug where DUO authentication ran into errors if SMS authentication was disabled for the user + - Add the ability to auto-create a table when writing a pandas DataFrame to a Snowflake table + - Bumped the maximum dependency version of numpy from <1.22.0 to <1.23.0 + +- v2.7.2(December 17,2021) + + - Added support for Python version 3.10. + - Fixed a bug where _get_query_status failed if there was a network error. + - Added the interpolate_empty_sequences connection parameter to control interpolating empty sequences into queries. + - Fixed an issue where BLOCKED was considered to be an error by is_an_error. + - Added source field to Telemetry. + - Increased the cryptography dependency version. + - Increased the pyopenssl dependency version. + - Fixed an issue where dbapi.Binary returned a string instead of bytes. + - Increased the required version of numpy. + - Increased the required version of keyring.
+ - Fixed issue so that fetch functions now return typed DataFrames and pyarrow Tables for empty results. + - Added py.typed + - Improved error messages for PUT/GET. + - Added Cursor.query attribute for accessing last query. + - Increased the required version of pyarrow. + + +- v2.7.1(November 19,2021) + + - Fixed a bug where uploading a streaming file with multiple parts did not work. + - JWT tokens are now regenerated when a request is retried. + - Updated URL escaping when uploading to AWS S3 to match how S3 escapes URLs. + - Removed the unused s3_connection_pool_size connection parameter. + - Blocked queries are now considered to be still running. + - Snowflake specific exceptions are now set using Exception arguments. + - Fixed an issue where use_s3_regional_url was not set correctly by the connector. + + +- v2.7.0(October 25,2021) + + - Removing cloud SDKs. snowflake-connector-python will not install them anymore. Recreate your virtualenv to get rid of unnecessary dependencies. + - Include Standard C++ headers. + - Update minimum dependency version pin of cryptography. + - Fixed a bug where the error number would not be added to Exception messages. + - Fixed a bug where the client_prefetch_threads parameter was not respected when pre-fetching results. + - Update signature of SnowflakeCursor.execute's params argument. + + +- v2.6.2(September 27,2021) + + - Updated vendored urllib3 and requests versions. + - Fixed a bug where GET commands would fail to download files from sub directories from stages. + - Added a feature where the connector will print the URL it tried to open when it is unable to open it for external browser authentication. + + +- v2.6.1(September 16,2021) + + - Bump pandas version from <1.3 to <1.4 + - Fixing Python deprecation warnings. + - Added more type-hints. + - Marked HeartBeatTimer threads as daemon threads. + - Force cast a column into integer in write_pandas to avoid a rare behavior that would lead to crashing. + - Implement AWS signature V4 to new SDKless PUT and GET. + - Removed a deprecated setuptools option from setup.py. + - Fixed a bug where error logs would be printed for query executions that produce no results. + - Fixed a bug where the temporary stage for bulk array inserts exists. + + +- v2.6.0(August 29,2021) + + - Internal change to the implementation of result fetching. + - Upgraded Pyarrow version from 3.0 to 5.0. + - Internal change to the implementation for PUT and GET. A new connection parameter use_new_put_get was added to toggle between implementations. + - Fixed a bug where executemany did not detect the type of data it was inserting. + - Updated the minimum Mac OSX build target from 10.13 to 10.14. + + +- v2.5.1(July 31,2021) + + - Fixes a Python Connector bug that prevents the connector from using AWS S3 Regional URL. The driver currently overrides the regional URL information with the default S3 URL, causing failure in PUT. + + +- v2.5.0(July 22,2021) + + - Fixed a bug in write_pandas where, when quote_identifiers was set to True, the function would not actually quote column names. + - Bumping idna dependency pin from <3,>=2.5 to >=2.5,<4 + - Fix describe method when running `insert into ...` commands + + +- v2.4.6(June 25,2021) + + - Fixed a potential memory leak. + - Removed upper certifi version pin. + - Updated vendored libraries: urllib3 (1.26.5) and requests (2.25.1). + - Replace pointers with UniqueRefs. + - Changed default value of client_session_keep_alive to None.
+ - Added the ability to retrieve metadata/schema without executing the query (describe method). + +- v2.4.5(June 15,2021) + + - Fix for incorrect JWT token invalidity when an account alias with a dash in it is used for a regionless account URL. + +- v2.4.4(May 30,2021) + + - Fixed a segfault issue when using DictCursor and arrow result format with out of range dates. + - Adds new make_pd_writer helper function + + +- v2.4.3(April 29,2021) + + - Uses s3 regional URL in private links when a param is set. + - New Arrow NUMBER to Decimal converter option. + - Update pyopenssl requirement from <20.0.0,>=16.2.0 to >=16.2.0,<21.0.0. + - Update pandas requirement from <1.2.0,>=1.0.0 to >=1.0.0,<1.3.0. + - Update numpy requirement from <1.20.0 to <1.21.0. + + +- v2.4.2(April 03,2021) + + - PUT statements are now thread-safe. + + +- v2.4.1(March 04,2021) + + - Make connection object exit() aware of status of parameter `autocommit` + + +- v2.4.0(March 04,2021) + + - Added support for Python 3.9 and PyArrow 3.0.x. + - Added support for the upcoming multipart PUT threshold keyword. + - Added support for using the PUT command with a file-like object. + - Added some compilation flags to ease building conda community package. + - Removed the pytz pin because it doesn't follow semantic versioning release format. + - Added support for optimizing batch inserts through bulk array binding. + + +- v2.3.10(February 01,2021) + + - Improved query ID logging and added request GUID logging. + - For dependency checking, increased the version condition for the pyjwt package from <2.0.0 to <3.0.0. + + +- v2.3.9(January 27,2021) + + - Added proper proxy CONNECT headers for connections made over proxies. + + +- v2.3.8(January 14,2021) + + - Arrow result conversion speed up. + - Send all Python Connector exceptions to in-band or out-of-band telemetry. + - Vendoring requests and urllib3 to contain OCSP monkey patching to our library only. + - Declare dependency on setuptools. + + +- v2.3.7(December 10,2020) + + - Added support for upcoming downscoped GCS credentials. + - Tightened the pyOpenSSL dependency pin. + - Relaxed the boto3 dependency pin up to the next major release. + - Relaxed the cffi dependency pin up to the next major release. + - Added support for executing asynchronous queries. + - Dropped support for Python 3.5. + +- v2.3.6(November 16,2020) + + - Fixed a bug that was preventing the connector from working on Windows with Python 3.8. + - Improved the string formatting in exception messages. + - For dependency checking, increased the version condition for the cryptography package from <3.0.0 to <4.0.0. + - For dependency checking, increased the version condition for the pandas package from <1.1 to <1.2. + +- v2.3.5(November 03,2020) + + - Updated the dependency on the cryptography package from version 2.9.2 to 3.2.1. + +- v2.3.4(October 26,2020) + + - Added an optional parameter to the write_pandas function to specify that identifiers should not be quoted before being sent to the server. + - The write_pandas function now honors default and auto-increment values for columns when inserting new rows. + - Updated the Python Connector OCSP error messages and accompanying telemetry information. + - Enabled the runtime pyarrow version verification to fail gracefully. Fixed a bug with the AWS Glue environment. + - Upgraded the version of boto3 from 1.14.47 to 1.15.9. + - Upgraded the version of idna from 2.9 to 2.10.
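To illustrate the `write_pandas` option described in v2.3.4 above, a minimal sketch (connection parameters and the table name are placeholders, and the target table is assumed to already exist):

```python
import pandas as pd
import snowflake.connector
from snowflake.connector.pandas_tools import write_pandas

df = pd.DataFrame({"ID": [1, 2], "NAME": ["a", "b"]})

with snowflake.connector.connect(
    user="<user>", password="<password>", account="<account>",
    database="<database>", schema="<schema>",
) as conn:
    # quote_identifiers=False sends column names to the server unquoted,
    # the optional behavior added in v2.3.4.
    success, nchunks, nrows, _ = write_pandas(
        conn, df, table_name="MY_TABLE", quote_identifiers=False
    )
```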
+ +- v2.3.3(October 05,2020) + + - Simplified the configuration files by consolidating test settings. + - In the Connection object, the execute_stream and execute_string methods now filter out empty lines from their inputs. + +- v2.3.2(September 14,2020) + + - Fixed a bug where a file handler was not closed properly. + - Fixed various documentation typos. + +- v2.3.1(August 25,2020) + + - Fixed a bug where 2 constants were removed by mistake. + +- v2.3.0(August 24,2020) + + - When the log level is set to DEBUG, log the OOB telemetry entries that are sent to Snowflake. + - Fixed a bug in the PUT command where long running PUTs would fail to re-authenticate to GCP for storage. + - Updated the minimum build target MacOS version to 10.13. + +- v2.2.10(August 03,2020) + + - Improved an error message for when the "pandas" optional dependency group is not installed and the user tries to fetch data into a pandas DataFrame. It'll now point users to our online documentation. + +- v2.2.9(July 13,2020) + + - Connection parameter validate_default_parameters now verifies known connection parameter names and types. It emits warnings for any unexpected types or names. + - Correct logging messages for compiled C++ code. + - Fixed an issue in write_pandas with location determination when a database or schema name was included. + - Bumped boto3 dependency version. + - Fixed an issue where uploading a file with special UTF-8 characters in its name corrupted the file. + +- v2.2.8(June 22,2020) + + - Switched docstring style to Google from Epydoc and added automated tests to enforce the standard. + - Fixed a memory leak in DictCursor's Arrow format code. + +- v2.2.7(June 1,2020) + + - Support azure-storage-blob v12 as well as v2 (for Python 3.5.0-3.5.1) by Python Connector + - Fixed a bug where the temporary directory path was not Windows compatible in the write_pandas function + - Added out of band telemetry error reporting of unknown errors + +- v2.2.6(May 11,2020) + + - Update Pyarrow version from 0.16.0 to 0.17.0 for Python connector + - Remove more restrictive application name enforcement. + - Missing keyring dependency will not raise an exception, only emit a debug log from now on. + - Bumping boto3 to <1.14 + - Fix flake8 3.8.0 new issues + - Implement Python log interceptor + +- v2.2.5(April 30,2020) + + - Added a more efficient way to ingest a pandas.DataFrame into Snowflake, located in snowflake.connector.pandas_tools + - More restrictive application name enforcement and standardizing it with other Snowflake drivers + - Added checking and warning for users when they have a wrong version of pyarrow installed + +- v2.2.4(April 10,2020) + + - Emit warning only if trying to set a different setting of the use_openssl_only parameter + +- v2.2.3(March 30,2020) + + - Secure SSO ID Token + - Add use_openssl_only connection parameter, which disables the usage of pure Python cryptographic libraries for FIPS + - Add manylinux1 as well as manylinux2010 + - Fix a bug where a certificate file was opened and never closed in snowflake-connector-python. + - Fix a bug where the Python connector skips validating GCP URLs + - Adds additional client driver config information to in band telemetry. + +- v2.2.2(March 9,2020) + + - Fix retry with chunk_downloader.py for stability. + - Support Python 3.8 for Linux and Mac.
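For context on the pandas-related entries above (e.g. v2.2.10's improved error message), a minimal sketch of fetching a result set into a DataFrame; it assumes the optional dependency group is installed via `pip install "snowflake-connector-python[pandas]"` and uses placeholder credentials:

```python
import snowflake.connector

with snowflake.connector.connect(
    user="<user>", password="<password>", account="<account>"
) as conn:
    cur = conn.cursor()
    cur.execute("SELECT CURRENT_VERSION()")
    # Requires the "pandas" extras; without them the connector raises an
    # error pointing to the installation docs (per v2.2.10).
    df = cur.fetch_pandas_all()
    print(df)
```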
+ +- v2.2.1(February 18,2020) + + - Fix use of DictCursor with execute_string #248 + +- v2.2.0(January 27,2020) + + - Drop Python 2.7 support + - AWS: When OVERWRITE is false, which is set by default, the file is uploaded if no file with the same name exists in the stage. This used to check the content signature, but it will no longer check. Azure and GCP already work this way. + - Document Python connector dependencies on our GitHub page in addition to Snowflake docs. + - Fix sqlalchemy and possibly python-connector warnings. + - Fix GCP exception using the Python connector to PUT a file in a stage with auto_compress=false. + - Bump up botocore requirements to 1.14. + - Fix uppercasing authenticator breaking Okta URLs, which may include case-sensitive elements (#257). + - Fix wrong result bug while using fetch_pandas_all() to get fixed numbers with large scales. + - Increase multi part upload threshold for S3 to 64MB. + +- v2.1.3(January 06,2020) + + - Fix GCP PUT failing after hours + +- v2.1.2(December 16,2019) + + - Fix the arrow bundling issue for python connector on Mac. + - Fix the arrow DLL bundle issue on Windows. Add more logging. + +- v2.1.1(December 12,2019) + + - Fix GZIP uncompressed content for Azure GET command. + - Add support for GCS PUT and GET for private preview. + - Support fetch as numpy value in arrow result format. + - Fix NameError: name 'EmptyPyArrowIterator' is not defined for Mac. + - Return empty dataframe for fetch_pandas_all() api if result set is empty. + +- v2.1.0(December 2,2019) + + - Fix default `ssl_context` options + - Pin more dependencies for Python Connector + - Fix import of SnowflakeOCSPAsn1Crypto crashes Python on MacOS Catalina + - Update the release note that 1.9.0 was removed + - Support DictCursor for arrow result format + - Upgrade Python's arrow lib to 0.15.1 + - Raise Exception when PUT fails to Upload Data + - Handle year out of range correctly in arrow result format + +- v2.0.4(November 13,2019) + + - Increase OCSP Cache expiry time from 24 hours to 120 hours. + - Fix pyarrow cxx11 abi compatibility issue + - Use new query result format parameter in python tests + +- v2.0.3(November 1,2019) + + - Fix for Pandas fetch API not correctly handling the case where the first chunk is empty. + - Updated the botocore, boto3 and requests packages to the latest versions. + - Pinned stable versions of Azure urllib3 packages. + +- v2.0.2(October 21,2019) + + - Fix sessions remaining open even if they are disposed manually. Retry deleting session if the connection is explicitly closed. + - Fix memory leak in the new fetch pandas API + - Fix Auditwheel failure with python37 + - Reduce the footprint of Python Connector + - Support asn1crypto 1.1.x + - Ensure that the cython components are present for Conda package + +- v2.0.1(October 04,2019) + + - Add asn1crypto requirement to mitigate incompatibility change + +- v2.0.0(September 30,2019) + + - Release Python Connector 2.0.0 for Arrow format change. + - Fix SF_OCSP_RESPONSE_CACHE_DIR referring to the OCSP cache response file directory and not the top level of directory. + - Fix malformed certificate ID key causing uncaught KeyError. + - No retry for certificate errors.
+ - Fix In-Memory OCSP Response Cache - PythonConnector + - Move AWS_ID and AWS_SECRET_KEY to their newer versions in the Python client + - Fix result set downloader for ijson 2.5 + - Make authenticator field case insensitive earlier + - Update USER-AGENT to be consistent with new format + - Update Python Driver URL Whitelist to support US Gov domain + - Fix memory leak in python connector pandas df fetch API + +- v1.9.1(October 4,2019) + + - Add asn1crypto requirement to mitigate incompatibility change. + +- v1.9.0(August 26,2019) **REMOVED from pypi due to dependency compatibility issues** - Implement converter for all arrow data types in python connector extension - Fix arrow error when returning empty result using python connector @@ -28,7 +410,7 @@ Release Notes - Fix Azure Gov PUT and GET issue - v1.8.6(July 29,2019) - + - Reduce retries for OCSP from Python Driver - Azure PUT issue: ValueError: I/O operation on closed file - Add client information to USER-AGENT HTTP header - PythonConnector @@ -64,13 +446,13 @@ Release Notes - Add Option to Skip Request Pooling - Add OCSP_MODE metric - Fixed PUT URI issue for Windows path - - OCSP SoftFail + - OCSP SoftFail - v1.7.11 (April 22, 2019) - numpy timestamp with timezone support - qmark not binding None - + - v1.7.10 (April 8, 2019) - Fix the incorrect custom Server URL in Python Driver for Privatelink @@ -160,7 +542,7 @@ Release Notes - Enforce virtual host URL for PUT and GET. - Added retryCount, clientStarTime for query-request for better service. - + - v1.6.6 (August 9, 2018) - Replaced ``pycryptodome`` with ``pycryptodomex`` to avoid namespace conflict with ``PyCrypto``. @@ -449,7 +831,7 @@ Release Notes - Added support for the ``BINARY`` data type, which enables support for more Python data types: - - Python 3: + - Python 3: - ``bytes`` and ``bytearray`` can be used for binding. - ``bytes`` is also used for fetching ``BINARY`` data type. @@ -562,12 +944,12 @@ Release Notes - v1.0.3 (Jan 13, 2016) - Added support for the ``BOOLEAN`` data type (i.e. ``TRUE`` or ``FALSE``). This changes the behavior of the binding for the ``bool`` type object: - + - Previously, ``bool`` was bound as a numeric value (i.e. ``1`` for ``True``, ``0`` for ``False``). - Now, ``bool`` is bound as native SQL data (i.e. ``TRUE`` or ``FALSE``). - Added the ``autocommit`` method to the ``Connection`` object: - + - By default, ``autocommit`` mode is ON (i.e. each DML statement commits the change). - If ``autocommit`` mode is OFF, the ``commit`` and ``rollback`` methods are enabled. @@ -585,4 +967,3 @@ Release Notes - v1.0.0 (Dec 1, 2015) - General Availability release.
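To make the v1.0.3 notes above on ``autocommit`` and ``BOOLEAN`` binding concrete, a minimal sketch (placeholder credentials; table `t` and its `flag` column are hypothetical):

```python
import snowflake.connector

conn = snowflake.connector.connect(
    user="<user>", password="<password>", account="<account>"
)
try:
    conn.autocommit(False)  # autocommit off: enables commit()/rollback()
    cur = conn.cursor()
    # Since v1.0.3, a Python bool binds as native SQL TRUE/FALSE.
    cur.execute("INSERT INTO t(flag) VALUES (%s)", (True,))
    conn.commit()
except Exception:
    conn.rollback()
    raise
finally:
    conn.close()
```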
- diff --git a/Jenkinsfile b/Jenkinsfile new file mode 100644 index 000000000..7551e2bb0 --- /dev/null +++ b/Jenkinsfile @@ -0,0 +1,117 @@ +import groovy.json.JsonOutput + + +timestamps { + node('parallelizable-c7') { + stage('checkout') { + scmInfo = checkout scm + println("${scmInfo}") + env.GIT_BRANCH = scmInfo.GIT_BRANCH + env.GIT_COMMIT = scmInfo.GIT_COMMIT + } + + stage('Build') { + withCredentials([ + usernamePassword(credentialsId: '063fc85b-62a6-4181-9d72-873b43488411', usernameVariable: 'AWS_ACCESS_KEY_ID', passwordVariable: 'AWS_SECRET_ACCESS_KEY'), + string(credentialsId: 'a791118f-a1ea-46cd-b876-56da1b9bc71c',variable: 'NEXUS_PASSWORD') + ]) { + sh '''\ + |cd $WORKSPACE + |export GIT_BRANCH=${GIT_BRANCH} + |export GIT_COMMIT=${GIT_COMMIT} + |./ci/build_docker.sh + |cp dist/**/*.txt dist/repaired_wheels/ + |cp dist/*.tar.gz dist/repaired_wheels/ + |aws s3 cp --only-show-errors ./dist/repaired_wheels/ s3://sfc-jenkins/repository/python_connector/linux/${GIT_BRANCH}/${GIT_COMMIT}/ --recursive --include '*' + |echo ${GIT_COMMIT} > latest_commit + |aws s3 cp --only-show-errors latest_commit s3://sfc-jenkins/repository/python_connector/linux/${GIT_BRANCH}/ + '''.stripMargin() + } + } + params = [ + string(name: 'svn_revision', value: 'main'), + string(name: 'branch', value: 'main'), + string(name: 'client_git_commit', value: scmInfo.GIT_COMMIT), + string(name: 'client_git_branch', value: scmInfo.GIT_BRANCH), + string(name: 'parent_job', value: env.JOB_NAME), + string(name: 'parent_build_number', value: env.BUILD_NUMBER) + ] + stage('Test') { + parallel ( + 'Test Python 37': { build job: 'RT-PyConnector37-PC',parameters: params}, + 'Test Python 38': { build job: 'RT-PyConnector38-PC',parameters: params}, + 'Test Python 39': { build job: 'RT-PyConnector39-PC',parameters: params}, + 'Test Python 310': { build job: 'RT-PyConnector310-PC',parameters: params}, + 'Test Python Lambda 37': { build job: 'RT-PyConnector37-PC-Lambda',parameters: params} + ) + } + } + } + + +pipeline { + agent { label 'regular-memory-node' } + options { timestamps() } + environment { + COMMIT_SHA_LONG = sh(returnStdout: true, script: "echo \$(git rev-parse " + "HEAD)").trim() + SEMGREP_DEPLOYMENT_ID = 1 + INPUT_PUBLISHURL = "https://semgrep.snowflake.com" + + // environment variables for semgrep_agent (for findings / analytics page) + // remove .git at the end + SEMGREP_REPO_URL = env.GIT_URL.replaceFirst(/^(.*).git$/,'$1') + SEMGREP_BRANCH = "${CHANGE_BRANCH}" + SEMGREP_JOB_URL = "${BUILD_URL}" + // remove SCM URL + .git at the end + SEMGREP_REPO_NAME = env.GIT_URL.replaceFirst(/^https:\/\/github.com\/(.*).git$/, '$1') + + SEMGREP_COMMIT = "${GIT_COMMIT}" + SEMGREP_PR_ID = "${env.CHANGE_ID}" + BASELINE_BRANCH = "${env.CHANGE_TARGET}" + } + stages { + stage('Checkout') { + steps { + checkout scm + } + } + stage('Semgrep_agent') { + agent { + docker { + label 'parallelizable-c7' + image 'nexus.int.snowflakecomputing.com:8087/returntocorp/semgrep-agent:v1' + args '-u root' + } + } + when { + expression { env.CHANGE_ID && env.BRANCH_NAME.startsWith("PR-") } + } + steps{ + wrap([$class: 'MaskPasswordsBuildWrapper']) { + withCredentials([ + [$class: 'UsernamePasswordMultiBinding', credentialsId: + 'b4f59663-ae0a-4384-9fdc-c7f2fe1c4fca', usernameVariable: + 'GIT_USERNAME', passwordVariable: 'GIT_PASSWORD'], + string(credentialsId:'SEMGREP_APP_TOKEN', variable: 'SEMGREP_APP_TOKEN'), + + ]) { + script { + try { + sh 'export SEMGREP_DIR=semgrep-scan-$(pwd | rev | cut -d \'/\' -f1 | rev) && mkdir -p ../$SEMGREP_DIR && cp 
-R . ../$SEMGREP_DIR && cd ../$SEMGREP_DIR && git fetch https://$GIT_USERNAME:$GIT_PASSWORD@github.com/$SEMGREP_REPO_NAME.git $BASELINE_BRANCH:refs/remotes/origin/$BASELINE_BRANCH && python -m semgrep_agent --baseline-ref $(git merge-base origin/$BASELINE_BRANCH HEAD) --publish-token $SEMGREP_APP_TOKEN --publish-deployment $SEMGREP_DEPLOYMENT_ID && cd ../ && rm -r $SEMGREP_DIR' + wgetUpdateGithub('success', 'semgrep', "${BUILD_URL}", '123') + } catch (err) { + wgetUpdateGithub('failure', 'semgrep', "${BUILD_URL}", '123') + } + } + } + } + } + } + } +} + +def wgetUpdateGithub(String state, String folder, String targetUrl, String seconds) { + def ghURL = "https://api.github.com/repos/snowflakedb/snowflake-connector-python/statuses/$COMMIT_SHA_LONG" + def data = JsonOutput.toJson([state: "${state}", context: "jenkins/${folder}",target_url: "${targetUrl}"]) + sh "wget ${ghURL} --spider -q --header='Authorization: token $GIT_PASSWORD' --post-data='${data}'" +} diff --git a/MANIFEST.in b/MANIFEST.in index de50e659c..874058246 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,2 +1,21 @@ -include *.rst *.py +include *.md +include *.rst +include LICENSE.txt +include NOTICE +include pyproject.toml +recursive-include src/snowflake/connector py.typed *.py *.pyx +recursive-include src/snowflake/connector/vendored LICENSE* + +recursive-include src/snowflake/connector/cpp *.cpp *.hpp +exclude src/snowflake/connector/arrow_iterator.cpp + +exclude .git-blame-ignore-revs +exclude .pre-commit-config.yaml +exclude license_header.txt +exclude tox.ini + +prune ci +prune benchmark prune test +prune tested_requirements +prune src/snowflake/connector/cpp/scripts diff --git a/NOTICE b/NOTICE new file mode 100644 index 000000000..d30101e7b --- /dev/null +++ b/NOTICE @@ -0,0 +1,8 @@ +Snowflake Python Connector +Copyright 2020 Snowflake Inc. + +This software includes software derived from urllib3, licensed under the MIT license (https://urllib3.readthedocs.io). +Copyright (c) 2008-2020 Andrey Petrov and contributors + +This software includes software derived from Requests: HTTP For Humans, licensed under the Apache license, developed by the Python Software Foundation (https://requests.readthedocs.io/) +Requests Copyright 2019 Kenneth Reitz diff --git a/README.md b/README.md new file mode 100644 index 000000000..dd3952a10 --- /dev/null +++ b/README.md @@ -0,0 +1,54 @@ +# Snowflake Connector for Python + +[![Build and Test](https://github.com/snowflakedb/snowflake-connector-python/actions/workflows/build_test.yml/badge.svg)](https://github.com/snowflakedb/snowflake-connector-python/actions/workflows/build_test.yml) +[![codecov](https://codecov.io/gh/snowflakedb/snowflake-connector-python/branch/main/graph/badge.svg?token=MVKSNtnLr0)](https://codecov.io/gh/snowflakedb/snowflake-connector-python) +[![PyPi](https://img.shields.io/pypi/v/snowflake-connector-python.svg)](https://pypi.python.org/pypi/snowflake-connector-python/) +[![License Apache-2.0](https://img.shields.io/:license-Apache%202-brightgreen.svg)](http://www.apache.org/licenses/LICENSE-2.0.txt) +[![Codestyle Black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) + +This package includes the Snowflake Connector for Python, which conforms to the [Python DB API 2.0](https://www.python.org/dev/peps/pep-0249/) specification. + +The Snowflake Connector for Python provides an interface for developing Python +applications that can connect to Snowflake and perform all standard operations. 
It +provides a programming alternative to developing applications in Java or C/C++ +using the Snowflake JDBC or ODBC drivers. + +The connector has **no** dependencies on JDBC or ODBC. +It can be installed using ``pip`` on Linux, macOS, and Windows platforms +where Python 3.7.0 (or higher) is installed. + +Snowflake Documentation is available at: +https://docs.snowflake.com/ + +Feel free to file an issue or submit a PR here for general cases. For official support, contact Snowflake support at: +https://community.snowflake.com/s/article/How-To-Submit-a-Support-Case-in-Snowflake-Lodge + +## How to build + +### Locally + +Install Python 3.7.0 or higher. Clone the Snowflake Connector for Python repository, then run the following commands +to create a wheel package using a PEP 517 build: + +```shell +git clone git@github.com:snowflakedb/snowflake-connector-python.git +cd snowflake-connector-python +python -m pip install -U pip setuptools wheel build +python -m build --wheel . +``` + +Find the `snowflake_connector_python*.whl` package in the `./dist` directory. + +### In Docker +Alternatively, use our Dockerized build script `ci/build_docker.sh` and find the built wheel files in `dist/repaired_wheels`. + +Note: `ci/build_docker.sh` can be used to compile only certain versions, like this: `ci/build_docker.sh "3.7 3.8"` + +## Code hygiene and other utilities +These tools are integrated into `tox` so that they can be set up easily and consistently on any computer. + +* **fix_lint**: Runs `pre-commit` to check for a variety of lint issues. It can be installed to run each + time a commit is created locally; keep an eye out for the hint that this environment prints when it succeeds. +* **coverage**: Runs `coverage.py` to combine generated coverage data files. Useful when multiple test categories were run + and an overall coverage data file should be created for them. +* **flake8**: (Deprecated) Similar to `fix_lint`, but only runs `flake8` checks. diff --git a/README.rst b/README.rst deleted file mode 100644 index 162852bd1..000000000 --- a/README.rst +++ /dev/null @@ -1,35 +0,0 @@ -Snowflake Connector for Python -******************************************************************************** - -.. image:: https://travis-ci.org/snowflakedb/snowflake-connector-python.svg?branch=master - :target: https://travis-ci.org/snowflakedb/snowflake-connector-python - -.. image:: https://ci.appveyor.com/api/projects/status/xb70i4jt8mingig0/branch/master?svg=true - :target: https://ci.appveyor.com/project/smtakeda/snowflake-connector-python/branch/master - -.. image:: https://codecov.io/gh/snowflakedb/snowflake-connector-python/branch/master/graph/badge.svg - :target: https://codecov.io/gh/snowflakedb/snowflake-connector-python - -.. image:: https://img.shields.io/pypi/v/snowflake-connector-python.svg - :target: https://pypi.python.org/pypi/snowflake-connector-python/ - -.. image:: http://img.shields.io/:license-Apache%202-brightgreen.svg - :target: http://www.apache.org/licenses/LICENSE-2.0.txt - -This package includes the Snowflake Connector for Python, which conforms to the Python DB API 2.0 specification: -https://www.python.org/dev/peps/pep-0249/ - -The Snowflake Connector for Python provides an interface for developing Python -applications that can connect to Snowflake and perform all standard operations. It -provides a programming alternative to developing applications in Java or C/C++ -using the Snowflake JDBC or ODBC drivers.
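Both the new README.md above and the README.rst it replaces describe the connector as a PEP 249 (DB API 2.0) implementation. As quick orientation, here is a minimal usage sketch of that interface; the account, user, and password values are placeholders, not anything defined in this diff:

```python
# Minimal DB API 2.0-style usage of the connector described in the READMEs.
# All connection values below are placeholders; substitute real credentials.
import snowflake.connector

con = snowflake.connector.connect(
    account="<account_identifier>",  # placeholder
    user="<user>",                   # placeholder
    password="<password>",           # placeholder
)
try:
    cur = con.cursor()
    cur.execute("SELECT current_version()")
    print(cur.fetchone())  # e.g. ('5.30.1',)
finally:
    con.close()
```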
- -The connector is a native, pure Python package that has no dependencies on JDBC or -ODBC. It can be installed using ``pip`` on Linux, Mac OSX, and Windows platforms -where either Python 2.7.9 (or higher) or Python 3.4.3 (or higher) is installed. - -Snowflake Documentation is available at: -https://docs.snowflake.net/ - -Feel free to file an issue or submit a PR here for general cases. For official support, contact Snowflake support at: -https://community.snowflake.com/s/article/How-To-Submit-a-Support-Case-in-Snowflake-Lodge diff --git a/__init__.py b/__init__.py deleted file mode 100644 index 89cdb0382..000000000 --- a/__init__.py +++ /dev/null @@ -1,71 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. -# - -# -# Python Db API v2 -# -apilevel = u'2.0' -threadsafety = 2 -paramstyle = u'pyformat' - -import logging -from logging import NullHandler - -logging.getLogger(__name__).addHandler(NullHandler()) - -from .version import (VERSION) -from .compat import (TO_UNICODE) -from .connection import SnowflakeConnection -from .cursor import DictCursor -from .errors import ( - Error, Warning, InterfaceError, DatabaseError, - NotSupportedError, DataError, IntegrityError, ProgrammingError, - OperationalError, InternalError) -from .dbapi import (Timestamp, TimeFromTicks, Time, TimestampFromTicks, Date, - DateFromTicks, Binary, Json, - DATETIME, ROWID, STRING, NUMBER, BINARY) - - -def Connect(**kwargs): - return SnowflakeConnection(**kwargs) - - -connect = Connect - -SNOWFLAKE_CONNECTOR_VERSION = u'.'.join(TO_UNICODE(v) for v in VERSION[0:3]) -__version__ = SNOWFLAKE_CONNECTOR_VERSION - -__all__ = [ - # Error handling - u'Error', u'Warning', - u'InterfaceError', u'DatabaseError', - u'NotSupportedError', u'DataError', u'IntegrityError', u'ProgrammingError', - u'OperationalError', u'InternalError', - - # Extended cursor - u'DictCursor', - - # DBAPI PEP 249 required exports - u'connect', - u'apilevel', - u'threadsafety', - u'paramstyle', - u'Date', - u'Time', - u'Timestamp', - u'Binary', - u'DateFromTicks', - u'TimeFromTicks', - u'TimestampFromTicks', - u'STRING', - u'BINARY', - u'NUMBER', - u'DATETIME', - u'ROWID', - - # Extended data type (experimental) - u'Json', -] diff --git a/_config.yml b/_config.yml deleted file mode 100644 index 2f7efbeab..000000000 --- a/_config.yml +++ /dev/null @@ -1 +0,0 @@ -theme: jekyll-theme-minimal \ No newline at end of file diff --git a/appveyor.yml b/appveyor.yml deleted file mode 100644 index 7adbdce95..000000000 --- a/appveyor.yml +++ /dev/null @@ -1,19 +0,0 @@ -platform: - - amd64 - -environment: - global: - VSVER: 14 - my_secret: - secure: UzwZuxCL9RvUOiAZsc+OqN4oAhqtXa5OO7IDm94gzAc= - - matrix: - - PYTHON: "C:\\Python36-x64" - -install: - - .\scripts\install.bat - -build: off - -test_script: - - .\scripts\run_appveyor.bat diff --git a/arrow_context.py b/arrow_context.py deleted file mode 100644 index bd46ed6f6..000000000 --- a/arrow_context.py +++ /dev/null @@ -1,95 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. 
-# - -import time -from datetime import datetime, timedelta -from logging import getLogger -from .constants import ( - PARAMETER_TIMEZONE) -from .converter import ( - _generate_tzinfo_from_tzoffset) - -import pytz - -try: - import tzlocal -except ImportError: - tzlocal = None - -ZERO_EPOCH = datetime.utcfromtimestamp(0) - -logger = getLogger(__name__) - - -class ArrowConverterContext(object): - def __init__(self, session_parameters={}): - self._timezone = None if PARAMETER_TIMEZONE not in session_parameters else session_parameters[PARAMETER_TIMEZONE] - - @property - def timezone(self): - return self._timezone - - @timezone.setter - def timezone(self, tz): - self._timezone = tz - - def _get_session_tz(self): - """ Get the session timezone or use the local computer's timezone. """ - try: - tz = 'UTC' if not self.timezone else self.timezone - return pytz.timezone(tz) - except pytz.exceptions.UnknownTimeZoneError: - logger.warning('converting to tzinfo failed') - if tzlocal is not None: - return tzlocal.get_localzone() - else: - try: - return datetime.timezone.utc - except AttributeError: - return pytz.timezone('UTC') - - def TIMESTAMP_TZ_to_python(self, microseconds, tz): - """ - TIMESTAMP TZ to datetime - - The timezone offset is piggybacked - - @para microseconds : float - @para tz : int - """ - - tzinfo = _generate_tzinfo_from_tzoffset(tz - 1440) - return datetime.fromtimestamp(microseconds, tz=tzinfo) - - def TIMESTAMP_TZ_to_python_windows(self, microseconds, tz): - tzinfo = _generate_tzinfo_from_tzoffset(tz - 1440) - t = ZERO_EPOCH + timedelta(seconds=microseconds) - if pytz.utc != tzinfo: - t += tzinfo.utcoffset(t) - return t.replace(tzinfo=tzinfo) - - def TIMESTAMP_NTZ_to_python(self, microseconds): - return datetime.utcfromtimestamp(microseconds) - - def TIMESTAMP_NTZ_to_python_windows(self, microseconds): - return ZERO_EPOCH + timedelta(seconds=(microseconds)) - - def TIMESTAMP_LTZ_to_python(self, microseconds): - tzinfo = self._get_session_tz() - return datetime.fromtimestamp(microseconds, tz=tzinfo) - - def TIMESTAMP_LTZ_to_python_windows(self, microseconds): - tzinfo = self._get_session_tz() - try: - t0 = ZERO_EPOCH + timedelta(seconds=(microseconds)) - t = pytz.utc.localize(t0, is_dst=False).astimezone(tzinfo) - return t - except OverflowError: - logger.debug( - "OverflowError in converting from epoch time to " - "timestamp_ltz: %s(ms). Falling back to use struct_time." - ) - return time.localtime(microseconds) diff --git a/arrow_iterator.pyx b/arrow_iterator.pyx deleted file mode 100644 index 347f84e41..000000000 --- a/arrow_iterator.pyx +++ /dev/null @@ -1,83 +0,0 @@ -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. 
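The TIMESTAMP_TZ converters in the deleted arrow_context.py above call `_generate_tzinfo_from_tzoffset(tz - 1440)`: the server piggybacks the timezone as an offset in minutes shifted by 1440. A small sketch of that arithmetic, with the stdlib `timezone` class standing in for the connector's helper and illustrative sample values:

```python
# Decoding the piggybacked TIMESTAMP_TZ offset, as in arrow_context.py above:
# the wire value is offset-in-minutes + 1440, so subtracting 1440 recovers a
# signed UTC offset. Sample inputs are illustrative only.
from datetime import datetime, timedelta, timezone

def decode_tz(piggybacked: int) -> timezone:
    # e.g. 960 -> UTC-08:00, 1440 -> UTC, 1500 -> UTC+01:00
    return timezone(timedelta(minutes=piggybacked - 1440))

print(datetime.fromtimestamp(0, tz=decode_tz(960)))   # 1969-12-31 16:00:00-08:00
print(datetime.fromtimestamp(0, tz=decode_tz(1500)))  # 1970-01-01 01:00:00+01:00
```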
-# - -# distutils: language = c++ - -from logging import getLogger -from cpython.ref cimport PyObject - -logger = getLogger(__name__) - -''' -the unit in this iterator -EMPTY_UNIT: default -ROW_UNIT: fetch row by row if the user call `fetchone()` -TABLE_UNIT: fetch one arrow table if the user call `fetch_pandas()` -''' -ROW_UNIT, TABLE_UNIT, EMPTY_UNIT = 'row', 'table', '' - - -cdef extern from "cpp/ArrowIterator/CArrowIterator.hpp" namespace "sf": - cdef cppclass CArrowIterator: - void addRecordBatch(PyObject * rb) - - PyObject* next(); - - void reset(); - - -cdef extern from "cpp/ArrowIterator/CArrowChunkIterator.hpp" namespace "sf": - cdef cppclass CArrowChunkIterator(CArrowIterator): - CArrowChunkIterator(PyObject* context) except + - - -cdef extern from "cpp/ArrowIterator/CArrowTableIterator.hpp" namespace "sf": - cdef cppclass CArrowTableIterator(CArrowIterator): - CArrowTableIterator(PyObject* context) except + - - -cdef class PyArrowIterator: - cdef object reader - cdef object context - cdef CArrowIterator* cIterator - cdef str unit - cdef PyObject* cret - - def __cinit__(self, object arrow_stream_reader, object arrow_context): - self.reader = arrow_stream_reader - self.context = arrow_context - self.cIterator = NULL - self.unit = '' - - def __dealloc__(self): - del self.cIterator - - def __next__(self): - self.cret = self.cIterator.next() - - if not self.cret: - logger.error("Internal error from CArrowIterator\n") - # it looks like this line can help us get into python and detect the global variable immediately - # however, this log will not show up for unclear reason - ret = self.cret - - if ret is None: - raise StopIteration - else: - return ret - - def init(self, str iter_unit): - # init chunk (row) iterator or table iterator - if iter_unit != ROW_UNIT and iter_unit != TABLE_UNIT: - raise NotImplementedError - elif iter_unit == ROW_UNIT: - self.cIterator = new CArrowChunkIterator(self.context) - elif iter_unit == TABLE_UNIT: - self.cIterator = new CArrowTableIterator(self.context) - self.unit = iter_unit - - # read - for rb in self.reader: - self.cIterator.addRecordBatch(rb) - self.cIterator.reset() \ No newline at end of file diff --git a/arrow_result.pyx b/arrow_result.pyx deleted file mode 100644 index ba3fb2214..000000000 --- a/arrow_result.pyx +++ /dev/null @@ -1,244 +0,0 @@ -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. 
-# - -# cython: profile=False - -from base64 import b64decode -from logging import getLogger -from .telemetry import TelemetryField -from .time_util import get_time_millis -try: - from pyarrow.ipc import open_stream - from pyarrow import concat_tables - from .arrow_iterator import PyArrowIterator, ROW_UNIT, TABLE_UNIT, EMPTY_UNIT - from .arrow_context import ArrowConverterContext -except ImportError: - pass - - -logger = getLogger(__name__) - - -cdef class ArrowResult: - cdef: - object _cursor - object _connection - int total_row_index; - int _chunk_index - int _chunk_count - int _current_chunk_row_count - list _description - object _column_idx_to_name - object _current_chunk_row - object _chunk_downloader - object _arrow_context - str _iter_unit - - def __init__(self, raw_response, cursor): - self._reset() - self._cursor = cursor - self._connection = cursor.connection - self._chunk_info(raw_response) - - def _chunk_info(self, data): - self.total_row_index = -1 # last fetched number of rows - - self._chunk_index = 0 - self._chunk_count = 0 - # result as arrow chunk - rowset_b64 = data.get(u'rowsetBase64') - - if rowset_b64: - arrow_bytes = b64decode(rowset_b64) - arrow_reader = open_stream(arrow_bytes) - self._arrow_context = ArrowConverterContext(self._connection._session_parameters) - self._current_chunk_row = PyArrowIterator(arrow_reader, self._arrow_context) - else: - self._current_chunk_row = iter(()) - self._iter_unit = EMPTY_UNIT - - if u'chunks' in data: - chunks = data[u'chunks'] - self._chunk_count = len(chunks) - logger.debug(u'chunk size=%s', self._chunk_count) - # prepare the downloader for further fetch - qrmk = data[u'qrmk'] if u'qrmk' in data else None - chunk_headers = None - if u'chunkHeaders' in data: - chunk_headers = {} - for header_key, header_value in data[ - u'chunkHeaders'].items(): - chunk_headers[header_key] = header_value - logger.debug( - u'added chunk header: key=%s, value=%s', - header_key, - header_value) - - logger.debug(u'qrmk=%s', qrmk) - self._chunk_downloader = self._connection._chunk_downloader_class( - chunks, self._connection, self._cursor, qrmk, chunk_headers, - query_result_format='arrow', - prefetch_threads=self._connection.client_prefetch_threads, - use_ijson=False) - - def __iter__(self): - return self - - def __next__(self): - if self._iter_unit == EMPTY_UNIT: - self._iter_unit = ROW_UNIT - self._current_chunk_row.init(self._iter_unit) - elif self._iter_unit == TABLE_UNIT: - logger.debug(u'The iterator has been built for fetching arrow table') - raise RuntimeError - - is_done = False - try: - row = None - self.total_row_index += 1 - try: - row = self._current_chunk_row.__next__() - except StopIteration: - if self._chunk_index < self._chunk_count: - logger.debug( - u"chunk index: %s, chunk_count: %s", - self._chunk_index, self._chunk_count) - next_chunk = self._chunk_downloader.next_chunk() - self._current_chunk_row = next_chunk.result_data - self._current_chunk_row.init(self._iter_unit) - self._chunk_index += 1 - try: - row = self._current_chunk_row.__next__() - except StopIteration: - is_done = True - raise IndexError - else: - if self._chunk_count > 0 and \ - self._chunk_downloader is not None: - self._chunk_downloader.terminate() - self._cursor._log_telemetry_job_data( - TelemetryField.TIME_DOWNLOADING_CHUNKS, - self._chunk_downloader._total_millis_downloading_chunks) - self._cursor._log_telemetry_job_data( - TelemetryField.TIME_PARSING_CHUNKS, - self._chunk_downloader._total_millis_parsing_chunks) - self._chunk_downloader = None - 
self._chunk_count = 0 - self._current_chunk_row = iter(()) - is_done = True - - if is_done: - raise StopIteration - - return row - - except IndexError: - # returns None if the iteration is completed so that iter() stops - return None - finally: - if is_done and self._cursor._first_chunk_time: - logger.info("fetching data done") - time_consume_last_result = get_time_millis() - self._cursor._first_chunk_time - self._cursor._log_telemetry_job_data( - TelemetryField.TIME_CONSUME_LAST_RESULT, - time_consume_last_result) - - def _reset(self): - self.total_row_index = -1 # last fetched number of rows - self._current_chunk_row_count = 0 - self._current_chunk_row = iter(()) - self._chunk_index = 0 - - if hasattr(self, u'_chunk_count') and self._chunk_count > 0 and \ - self._chunk_downloader is not None: - self._chunk_downloader.terminate() - - self._chunk_count = 0 - self._chunk_downloader = None - self._arrow_context = None - self._iter_unit = EMPTY_UNIT - - def _fetch_arrow_batches(self): - ''' - Fetch Arrow Table in batch, where 'batch' refers to Snowflake Chunk - Thus, the batch size (the number of rows in table) may be different - ''' - if self._iter_unit == EMPTY_UNIT: - self._iter_unit = TABLE_UNIT - elif self._iter_unit == ROW_UNIT: - logger.debug(u'The iterator has been built for fetching row') - raise RuntimeError - - try: - self._current_chunk_row.init(self._iter_unit) # AttributeError if it is iter(()) - while self._chunk_index <= self._chunk_count: - table = self._current_chunk_row.__next__() - if self._chunk_index < self._chunk_count: # multiple chunks - logger.debug( - u"chunk index: %s, chunk_count: %s", - self._chunk_index, self._chunk_count) - next_chunk = self._chunk_downloader.next_chunk() - self._current_chunk_row = next_chunk.result_data - self._current_chunk_row.init(self._iter_unit) - self._chunk_index += 1 - yield table - else: - if self._chunk_count > 0 and \ - self._chunk_downloader is not None: - self._chunk_downloader.terminate() - self._cursor._log_telemetry_job_data( - TelemetryField.TIME_DOWNLOADING_CHUNKS, - self._chunk_downloader._total_millis_downloading_chunks) - self._cursor._log_telemetry_job_data( - TelemetryField.TIME_PARSING_CHUNKS, - self._chunk_downloader._total_millis_parsing_chunks) - self._chunk_downloader = None - self._chunk_count = 0 - self._current_chunk_row = iter(()) - except AttributeError: - # just for handling the case of empty result - return None - finally: - if self._cursor._first_chunk_time: - logger.info("fetching data into pandas dataframe done") - time_consume_last_result = get_time_millis() - self._cursor._first_chunk_time - self._cursor._log_telemetry_job_data( - TelemetryField.TIME_CONSUME_LAST_RESULT, - time_consume_last_result) - - def _fetch_arrow_all(self): - ''' - Fetch a single Arrow Table - ''' - tables = list(self._fetch_arrow_batches()) - if tables: - return concat_tables(tables) - else: - return None - - def _fetch_pandas_batches(self): - ''' - Fetch Pandas dataframes in batch, where 'batch' refers to Snowflake Chunk - Thus, the batch size (the number of rows in dataframe) may be different - TODO: take a look at pyarrow to_pandas() API, which provides some useful arguments - e.g. 1. use `use_threads=true` for acceleration - 2. use `strings_to_categorical` and `categories` to encoding categorical data, - which is really different from `string` in data science. - For example, some data may be marked as 0 and 1 as binary class in dataset, - the user wishes to interpret as categorical data instead of integer. - 3. 
use `zero_copy_only` to capture the potential unnecessary memory copying - we'd better also provide these handy arguments to make data scientists happy :) - ''' - for table in self._fetch_arrow_batches(): - yield table.to_pandas() - - def _fetch_pandas_all(self): - ''' - Fetch a single Pandas dataframe - ''' - table = self._fetch_arrow_all() - if table: - return table.to_pandas() - else: - return None \ No newline at end of file diff --git a/auth.py b/auth.py deleted file mode 100644 index d6fa8d5c0..000000000 --- a/auth.py +++ /dev/null @@ -1,490 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. -# - -import codecs -import copy -from datetime import datetime -import json -import logging -import platform -import tempfile -import time -import uuid -from os import getenv, path, makedirs, mkdir, rmdir, removedirs, remove -from os.path import expanduser -from threading import Lock -from threading import Thread - -from .auth_keypair import AuthByKeyPair -from .compat import (TO_UNICODE, urlencode, IS_LINUX) -from .constants import ( - HTTP_HEADER_CONTENT_TYPE, - HTTP_HEADER_ACCEPT, - HTTP_HEADER_USER_AGENT, - HTTP_HEADER_SERVICE_NAME, - PARAMETER_CLIENT_STORE_TEMPORARY_CREDENTIAL, - PARAMETER_CLIENT_USE_SECURE_STORAGE_FOR_TEMPORARY_CREDENTIAL -) -from .description import ( - OPERATING_SYSTEM, - PYTHON_VERSION, - PLATFORM, - IMPLEMENTATION, - COMPILER -) -from .errorcode import (ER_FAILED_TO_CONNECT_TO_DB) -from .errors import (Error, - DatabaseError, - ServiceUnavailableError, - ForbiddenError, - BadGatewayError) -from .network import (CONTENT_TYPE_APPLICATION_JSON, - ACCEPT_TYPE_APPLICATION_SNOWFLAKE, - PYTHON_CONNECTOR_USER_AGENT, - ReauthenticationRequest) -from .sqlstate import (SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED) -from .version import VERSION - -logger = logging.getLogger(__name__) - -# Cache directory -CACHE_ROOT_DIR = getenv('SF_TEMPORARY_CREDENTIAL_CACHE_DIR') or \ - expanduser("~") or tempfile.gettempdir() -if platform.system() == 'Windows': - CACHE_DIR = path.join(CACHE_ROOT_DIR, 'AppData', 'Local', 'Snowflake', - 'Caches') -elif platform.system() == 'Darwin': - CACHE_DIR = path.join(CACHE_ROOT_DIR, 'Library', 'Caches', 'Snowflake') -else: - CACHE_DIR = path.join(CACHE_ROOT_DIR, '.cache', 'snowflake') - -if not path.exists(CACHE_DIR): - try: - makedirs(CACHE_DIR, mode=0o700) - except Exception as ex: - logger.debug('cannot create a cache directory: [%s], err=[%s]', - CACHE_DIR, ex) - CACHE_DIR = None -logger.debug("cache directory: %s", CACHE_DIR) - -# temporary credential cache -TEMPORARY_CREDENTIAL = {} - -TEMPORARY_CREDENTIAL_LOCK = Lock() - -# temporary credential cache file name -TEMPORARY_CREDENTIAL_FILE = "temporary_credential.json" -TEMPORARY_CREDENTIAL_FILE = path.join( - CACHE_DIR, TEMPORARY_CREDENTIAL_FILE) if CACHE_DIR else "" - -# temporary credential cache lock directory name -TEMPORARY_CREDENTIAL_FILE_LOCK = TEMPORARY_CREDENTIAL_FILE + ".lck" - -# keyring -KEYRING_SERVICE_NAME = "net.snowflake.temporary_token" -KEYRING_USER = "temp_token" - - -class Auth(object): - """ - Snowflake Authenticator - """ - - def __init__(self, rest): - self._rest = rest - - @staticmethod - def base_auth_data(user, account, application, - internal_application_name, - internal_application_version, - ocsp_mode): - return { - u'data': { - u"CLIENT_APP_ID": internal_application_name, - u"CLIENT_APP_VERSION": internal_application_version, - u"SVN_REVISION": VERSION[3], - u"ACCOUNT_NAME": account, - 
u"LOGIN_NAME": user, - u"CLIENT_ENVIRONMENT": { - u"APPLICATION": application, - u"OS": OPERATING_SYSTEM, - u"OS_VERSION": PLATFORM, - u"PYTHON_VERSION": PYTHON_VERSION, - u"PYTHON_RUNTIME": IMPLEMENTATION, - u"PYTHON_COMPILER": COMPILER, - u"OCSP_MODE": ocsp_mode.name, - } - }, - } - - def authenticate(self, auth_instance, account, user, - database=None, schema=None, - warehouse=None, role=None, passcode=None, - passcode_in_password=False, - mfa_callback=None, password_callback=None, - session_parameters=None, timeout=120): - logger.debug(u'authenticate') - - if session_parameters is None: - session_parameters = {} - - request_id = TO_UNICODE(uuid.uuid4()) - headers = { - HTTP_HEADER_CONTENT_TYPE: CONTENT_TYPE_APPLICATION_JSON, - HTTP_HEADER_ACCEPT: ACCEPT_TYPE_APPLICATION_SNOWFLAKE, - HTTP_HEADER_USER_AGENT: PYTHON_CONNECTOR_USER_AGENT, - } - if HTTP_HEADER_SERVICE_NAME in session_parameters: - headers[HTTP_HEADER_SERVICE_NAME] = \ - session_parameters[HTTP_HEADER_SERVICE_NAME] - url = u"/session/v1/login-request" - body_template = Auth.base_auth_data( - user, account, self._rest._connection.application, - self._rest._connection._internal_application_name, - self._rest._connection._internal_application_version, - self._rest._connection._ocsp_mode()) - - body = copy.deepcopy(body_template) - # updating request body - logger.debug(u'assertion content: %s', - auth_instance.assertion_content) - auth_instance.update_body(body) - - logger.debug( - u'account=%s, user=%s, database=%s, schema=%s, ' - u'warehouse=%s, role=%s, request_id=%s', - account, - user, - database, - schema, - warehouse, - role, - request_id, - ) - url_parameters = {u'request_id': request_id} - if database is not None: - url_parameters[u'databaseName'] = database - if schema is not None: - url_parameters[u'schemaName'] = schema - if warehouse is not None: - url_parameters[u'warehouse'] = warehouse - if role is not None: - url_parameters[u'roleName'] = role - - url = url + u'?' + urlencode(url_parameters) - - # first auth request - if passcode_in_password: - body[u'data'][u'EXT_AUTHN_DUO_METHOD'] = u'passcode' - elif passcode: - body[u'data'][u'EXT_AUTHN_DUO_METHOD'] = u'passcode' - body[u'data'][u'PASSCODE'] = passcode - - if session_parameters: - body[u'data'][u'SESSION_PARAMETERS'] = session_parameters - - logger.debug( - "body['data']: %s", - {k: v for (k, v) in body[u'data'].items() if k != u'PASSWORD'}) - - try: - ret = self._rest._post_request( - url, headers, json.dumps(body), - timeout=self._rest._connection.login_timeout, - socket_timeout=self._rest._connection.login_timeout) - except ForbiddenError as err: - # HTTP 403 - raise err.__class__( - msg=(u"Failed to connect to DB. " - u"Verify the account name is correct: {host}:{port}. " - u"{message}").format( - host=self._rest._host, - port=self._rest._port, - message=TO_UNICODE(err) - ), - errno=ER_FAILED_TO_CONNECT_TO_DB, - sqlstate=SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED) - except (ServiceUnavailableError, BadGatewayError) as err: - # HTTP 502/504 - raise err.__class__( - msg=(u"Failed to connect to DB. " - u"Service is unavailable: {host}:{port}. 
" - u"{message}").format( - host=self._rest._host, - port=self._rest._port, - message=TO_UNICODE(err) - ), - errno=ER_FAILED_TO_CONNECT_TO_DB, - sqlstate=SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED) - - # waiting for MFA authentication - if ret[u'data'].get(u'nextAction') == u'EXT_AUTHN_DUO_ALL': - body[u'inFlightCtx'] = ret[u'data'][u'inFlightCtx'] - body[u'data'][u'EXT_AUTHN_DUO_METHOD'] = u'push' - self.ret = {u'message': "Timeout", u'data': {}} - - def post_request_wrapper(self, url, headers, body): - # get the MFA response - self.ret = self._rest._post_request( - url, headers, body, - timeout=self._rest._connection.login_timeout) - - # send new request to wait until MFA is approved - t = Thread(target=post_request_wrapper, - args=[self, url, headers, json.dumps(body)]) - t.daemon = True - t.start() - if callable(mfa_callback): - c = mfa_callback() - while not self.ret or self.ret.get(u'message') == u'Timeout': - next(c) - else: - t.join(timeout=timeout) - - ret = self.ret - if ret and ret[u'data'].get(u'nextAction') == u'EXT_AUTHN_SUCCESS': - body = copy.deepcopy(body_template) - body[u'inFlightCtx'] = ret[u'data'][u'inFlightCtx'] - # final request to get tokens - ret = self._rest._post_request( - url, headers, json.dumps(body), - timeout=self._rest._connection.login_timeout, - socket_timeout=self._rest._connection.login_timeout) - elif not ret or not ret[u'data'].get(u'token'): - # not token is returned. - Error.errorhandler_wrapper( - self._rest._connection, None, DatabaseError, - { - u'msg': (u"Failed to connect to DB. MFA " - u"authentication failed: {" - u"host}:{port}. {message}").format( - host=self._rest._host, - port=self._rest._port, - message=ret[u'message'], - ), - u'errno': ER_FAILED_TO_CONNECT_TO_DB, - u'sqlstate': SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED, - }) - return session_parameters # required for unit test - - elif ret[u'data'].get(u'nextAction') == u'PWD_CHANGE': - if callable(password_callback): - body = copy.deepcopy(body_template) - body[u'inFlightCtx'] = ret[u'data'][u'inFlightCtx'] - body[u'data'][u"LOGIN_NAME"] = user - body[u'data'][u"PASSWORD"] = \ - auth_instance.password if hasattr( - auth_instance, 'password') else None - body[u'data'][u'CHOSEN_NEW_PASSWORD'] = password_callback() - # New Password input - ret = self._rest._post_request( - url, headers, json.dumps(body), - timeout=self._rest._connection.login_timeout, - socket_timeout=self._rest._connection.login_timeout) - - logger.debug(u'completed authentication') - if not ret[u'success']: - if type(auth_instance) is AuthByKeyPair: - logger.debug( - "JWT Token authentication failed. " - "Token expires at: %s. " - "Current Time: %s", - str(auth_instance._jwt_token_exp), - str(datetime.utcnow()) - ) - Error.errorhandler_wrapper( - self._rest._connection, None, DatabaseError, - { - u'msg': (u"Failed to connect to DB: {host}:{port}. 
" - u"{message}").format( - host=self._rest._host, - port=self._rest._port, - message=ret[u'message'], - ), - u'errno': ER_FAILED_TO_CONNECT_TO_DB, - u'sqlstate': SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED, - }) - else: - logger.debug(u'token = %s', - '******' if ret[u'data'][u'token'] is not None else - 'NULL') - logger.debug(u'master_token = %s', - '******' if ret[u'data'][ - u'masterToken'] is not None else - 'NULL') - logger.debug(u'id_token = %s', - '******' if ret[u'data'].get( - u'id_token') is not None else - 'NULL') - self._rest.update_tokens( - ret[u'data'][u'token'], ret[u'data'][u'masterToken'], - master_validity_in_seconds=ret[u'data'].get( - u'masterValidityInSeconds'), - id_token=ret[u'data'].get(u'idToken') - ) - if self._rest._connection.consent_cache_id_token: - write_temporary_credential_file( - account, user, self._rest.id_token, - session_parameters.get( - PARAMETER_CLIENT_USE_SECURE_STORAGE_FOR_TEMPORARY_CREDENTIAL)) - if u'sessionId' in ret[u'data']: - self._rest._connection._session_id = ret[u'data'][u'sessionId'] - if u'sessionInfo' in ret[u'data']: - session_info = ret[u'data'][u'sessionInfo'] - self._rest._connection._database = session_info.get(u'databaseName') - self._rest._connection._schema = session_info.get(u'schemaName') - self._rest._connection._warehouse = session_info.get(u'warehouseName') - self._rest._connection._role = session_info.get(u'roleName') - self._rest._connection._set_parameters(ret, session_parameters) - - return session_parameters - - def read_temporary_credential(self, account, user, session_parameters): - if session_parameters.get(PARAMETER_CLIENT_STORE_TEMPORARY_CREDENTIAL): - read_temporary_credential_file( - session_parameters.get( - PARAMETER_CLIENT_USE_SECURE_STORAGE_FOR_TEMPORARY_CREDENTIAL) - ) - id_token = TEMPORARY_CREDENTIAL.get( - account.upper(), {}).get(user.upper()) - if id_token: - self._rest.id_token = id_token - if self._rest.id_token: - try: - self._rest._id_token_session() - return True - except ReauthenticationRequest as ex: - # catch token expiration error - logger.debug( - "ID token expired. Reauthenticating...: %s", ex) - return False - - -def write_temporary_credential_file( - account, user, id_token, - use_secure_storage_for_temporary_credential=False): - if not CACHE_DIR or not id_token: - # no cache is enabled or no id_token is given - return - global TEMPORARY_CREDENTIAL - global TEMPORARY_CREDENTIAL_LOCK - global TEMPORARY_CREDENTIAL_FILE - with TEMPORARY_CREDENTIAL_LOCK: - # update the cache - account_data = TEMPORARY_CREDENTIAL.get(account.upper(), {}) - account_data[user.upper()] = id_token - TEMPORARY_CREDENTIAL[account.upper()] = account_data - for _ in range(10): - if lock_temporary_credential_file(): - break - time.sleep(1) - else: - logger.debug("The lock file still persists. 
Will ignore and " - "write the temporary credential file: %s", - TEMPORARY_CREDENTIAL_FILE) - try: - if IS_LINUX or not use_secure_storage_for_temporary_credential: - with codecs.open(TEMPORARY_CREDENTIAL_FILE, 'w', - encoding='utf-8', errors='ignore') as f: - json.dump(TEMPORARY_CREDENTIAL, f) - else: - import keyring - keyring.set_password( - KEYRING_SERVICE_NAME, KEYRING_USER, - json.dumps(TEMPORARY_CREDENTIAL)) - - except Exception as ex: - logger.debug("Failed to write a credential file: " - "file=[%s], err=[%s]", TEMPORARY_CREDENTIAL_FILE, ex) - finally: - unlock_temporary_credential_file() - - -def read_temporary_credential_file( - use_secure_storage_for_temporary_credential=False): - """ - Read temporary credential file - """ - if not CACHE_DIR: - # no cache is enabled - return - - global TEMPORARY_CREDENTIAL - global TEMPORARY_CREDENTIAL_LOCK - global TEMPORARY_CREDENTIAL_FILE - with TEMPORARY_CREDENTIAL_LOCK: - for _ in range(10): - if lock_temporary_credential_file(): - break - time.sleep(1) - else: - logger.debug("The lock file still persists. Will ignore and " - "write the temporary credential file: %s", - TEMPORARY_CREDENTIAL_FILE) - try: - if IS_LINUX or not use_secure_storage_for_temporary_credential: - with codecs.open(TEMPORARY_CREDENTIAL_FILE, 'r', - encoding='utf-8', errors='ignore') as f: - TEMPORARY_CREDENTIAL = json.load(f) - else: - import keyring - f = keyring.get_password( - KEYRING_SERVICE_NAME, KEYRING_USER) or "{}" - TEMPORARY_CREDENTIAL = json.loads(f) - return TEMPORARY_CREDENTIAL - except Exception as ex: - logger.debug("Failed to read a credential file. The file may not" - "exists: file=[%s], err=[%s]", - TEMPORARY_CREDENTIAL_FILE, ex) - finally: - unlock_temporary_credential_file() - return None - - -def lock_temporary_credential_file(): - global TEMPORARY_CREDENTIAL_FILE_LOCK - try: - mkdir(TEMPORARY_CREDENTIAL_FILE_LOCK) - return True - except OSError: - logger.debug("Temporary cache file lock already exists. Other " - "process may be updating the temporary ") - return False - - -def unlock_temporary_credential_file(): - global TEMPORARY_CREDENTIAL_FILE_LOCK - try: - rmdir(TEMPORARY_CREDENTIAL_FILE_LOCK) - return True - except OSError: - logger.debug("Temporary cache file lock no longer exists.") - return False - - -def delete_temporary_credential_file( - use_secure_storage_for_temporary_credential=False): - """ - Delete temporary credential file and its lock file - """ - global TEMPORARY_CREDENTIAL_FILE - if IS_LINUX or not use_secure_storage_for_temporary_credential: - try: - remove(TEMPORARY_CREDENTIAL_FILE) - except Exception as ex: - logger.debug("Failed to delete a credential file: " - "file=[%s], err=[%s]", TEMPORARY_CREDENTIAL_FILE, ex) - else: - try: - import keyring - keyring.delete_password(KEYRING_SERVICE_NAME, KEYRING_USER) - except Exception as ex: - logger.debug("Failed to delete credential in the keyring: err=[%s]", - ex) - try: - removedirs(TEMPORARY_CREDENTIAL_FILE_LOCK) - except Exception as ex: - logger.debug("Failed to delete credential lock file: err=[%s]", ex) diff --git a/auth_by_plugin.py b/auth_by_plugin.py deleted file mode 100644 index de2bc1912..000000000 --- a/auth_by_plugin.py +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. -# - -from .errors import Error, DatabaseError -from .sqlstate import SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED - - -class AuthByPlugin(object): - """ - External Authenticator interface. 
- """ - - @property - def assertion_content(self): - raise NotImplementedError - - def update_body(self, body): - raise NotImplementedError - - def authenticate( - self, authenticator, service_name, account, user, password): - raise NotImplementedError - - def handle_failure(self, ret): - """ Handles a failure when connecting to Snowflake - - Args: - ret: dictionary returned from Snowflake. - """ - Error.errorhandler_wrapper( - self._rest._connection, None, DatabaseError, - { - u'msg': (u"Failed to connect to DB: {host}:{port}, " - u"{message}").format( - host=self._rest._host, - port=self._rest._port, - message=ret[u'message'], - ), - u'errno': int(ret.get(u'code', -1)), - u'sqlstate': SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED, - }) diff --git a/auth_default.py b/auth_default.py deleted file mode 100644 index 2eb0e84e4..000000000 --- a/auth_default.py +++ /dev/null @@ -1,37 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. -# - -from .auth_by_plugin import AuthByPlugin - - -class AuthByDefault(AuthByPlugin): - """ - Default username and password authenticator - """ - - @property - def assertion_content(self): - return "*********" - - def __init__(self, password): - """ - Initializes an instance with a password - """ - self._password = password - - def authenticate( - self, authenticator, service_name, account, user, password): - """ - NOP. - """ - pass - - def update_body(self, body): - """ - Set the password if available - """ - if self._password: - body[u'data'][u"PASSWORD"] = self._password diff --git a/auth_keypair.py b/auth_keypair.py deleted file mode 100644 index b728158c3..000000000 --- a/auth_keypair.py +++ /dev/null @@ -1,101 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2017 Snowflake Computing Inc. All right reserved. 
-# - -import jwt -from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey -from cryptography.hazmat.primitives.serialization import Encoding -from cryptography.hazmat.primitives.serialization import load_der_private_key -from cryptography.hazmat.primitives.serialization import PublicFormat -from cryptography.hazmat.backends import default_backend -from datetime import datetime, timedelta -from logging import getLogger -from .auth_by_plugin import AuthByPlugin -from .network import KEY_PAIR_AUTHENTICATOR -from .errorcode import ER_INVALID_PRIVATE_KEY -from .errors import ProgrammingError -import base64 -import hashlib - -logger = getLogger(__name__) - - -class AuthByKeyPair(AuthByPlugin): - """ - Key pair based authentication - """ - LIFETIME = timedelta(seconds=120) - ALGORITHM = 'RS256' - ISSUER = 'iss' - SUBJECT = 'sub' - EXPIRE_TIME = 'exp' - ISSUE_TIME = 'iat' - - def __init__(self, private_key): - """ - :param private_key: a byte array of der formats of private key - """ - self._private_key = private_key - self._jwt_token = '' - self._jwt_token_exp = 0 - - def authenticate( - self, authenticator, service_name, account, user, password): - account = account.upper() - user = user.upper() - - now = datetime.utcnow() - - try: - private_key = load_der_private_key(data=self._private_key, password=None, backend=default_backend()) - except Exception as e: - raise ProgrammingError( - msg=u'Failed to load private key: {}\nPlease provide a valid unencrypted rsa private ' - u'key in DER format as bytes object'.format(str(e)), - errno=ER_INVALID_PRIVATE_KEY - ) - - if not isinstance(private_key, RSAPrivateKey): - raise ProgrammingError( - msg=u'Private key type ({}) not supported.\nPlease provide a valid rsa private ' - u'key in DER format as bytes object'.format(private_key.__class__.__name__), - errno=ER_INVALID_PRIVATE_KEY - ) - - public_key_fp = self.calculate_public_key_fingerprint(private_key) - - self._jwt_token_exp = now + self.LIFETIME - payload = { - self.ISSUER: "{}.{}.{}".format(account, user, public_key_fp), - self.SUBJECT: "{}.{}".format(account, user), - self.ISSUE_TIME: now, - self.EXPIRE_TIME: self._jwt_token_exp - } - - self._jwt_token = jwt.encode(payload, private_key, - algorithm=self.ALGORITHM).decode('utf-8') - - return self._jwt_token - - @staticmethod - def calculate_public_key_fingerprint(private_key): - # get public key bytes - public_key_der = private_key.public_key().public_bytes(Encoding.DER, PublicFormat.SubjectPublicKeyInfo) - - # take sha256 on raw bytes and then do base64 encode - sha256hash = hashlib.sha256() - sha256hash.update(public_key_der) - - public_key_fp = 'SHA256:' + base64.b64encode(sha256hash.digest()).decode('utf-8') - logger.debug("Public key fingerprint is %s", public_key_fp) - - return public_key_fp - - def update_body(self, body): - body[u'data'][u'AUTHENTICATOR'] = KEY_PAIR_AUTHENTICATOR - body[u'data'][u'TOKEN'] = self._jwt_token - - def assertion_content(self): - return self._jwt_token diff --git a/auth_oauth.py b/auth_oauth.py deleted file mode 100644 index 815fb95d0..000000000 --- a/auth_oauth.py +++ /dev/null @@ -1,40 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. -# - -from .auth_by_plugin import AuthByPlugin -from .network import OAUTH_AUTHENTICATOR - - -class AuthByOAuth(AuthByPlugin): - """ - OAuth Based Authentication. Works by accepting an OAuth token and - using that to authenticate. 
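The AuthByOAuth class being deleted here simply forwards a token obtained outside the driver. At the public API level that corresponds to passing `authenticator="oauth"` together with the token; a minimal sketch with placeholder values:

```python
# Connection-level counterpart of AuthByOAuth: the token is obtained outside
# the driver and passed straight through. All values are placeholders.
import snowflake.connector

con = snowflake.connector.connect(
    account="<account_identifier>",  # placeholder
    user="<user>",                   # placeholder
    authenticator="oauth",
    token="<oauth_access_token>",    # obtained outside the driver
)
```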
- """ - - @property - def assertion_content(self): - """ Returns the token.""" - return self._oauth_token - - def __init__(self, oauth_token): - """ - Initializes an instance with an OAuth Token. - """ - self._oauth_token = oauth_token - - def authenticate( - self, authenticator, service_name, account, user, password): - """ - Nothing to do here, token should be obtained outside of the driver. - """ - pass - - def update_body(self, body): - """ - OAuth needs the authenticator and token attributes set, as well as - loginname, which is set already in auth.py .""" - body[u'data'][u'AUTHENTICATOR'] = OAUTH_AUTHENTICATOR - body[u'data'][u'TOKEN'] = self._oauth_token diff --git a/auth_okta.py b/auth_okta.py deleted file mode 100644 index 66d1ab66d..000000000 --- a/auth_okta.py +++ /dev/null @@ -1,241 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. -# -import json -import logging - -from .auth import Auth -from .auth_by_plugin import AuthByPlugin -from .compat import (urlsplit, unescape, urlencode) -from .constants import ( - HTTP_HEADER_CONTENT_TYPE, - HTTP_HEADER_ACCEPT, - HTTP_HEADER_USER_AGENT, - HTTP_HEADER_SERVICE_NAME, -) -from .errorcode import (ER_IDP_CONNECTION_ERROR, - ER_INCORRECT_DESTINATION) -from .errors import (Error, DatabaseError) -from .network import (CONTENT_TYPE_APPLICATION_JSON, - PYTHON_CONNECTOR_USER_AGENT) -from .sqlstate import (SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED) - -logger = logging.getLogger(__name__) - - -def _is_prefix_equal(url1, url2): - """ - Checks if URL prefixes are identical. The scheme, hostname and port number - are compared. If the port number is not specified and the scheme is https, - the port number is assumed to be 443. - """ - parsed_url1 = urlsplit(url1) - parsed_url2 = urlsplit(url2) - - port1 = parsed_url1.port - if not port1 and parsed_url1.scheme == 'https': - port1 = '443' - port2 = parsed_url1.port - if not port2 and parsed_url2.scheme == 'https': - port2 = '443' - - return parsed_url1.hostname == parsed_url2.hostname and \ - port1 == port2 and \ - parsed_url1.scheme == parsed_url2.scheme - - -def _get_post_back_url_from_html(html): - """ - Gets the post back URL. - - Since the HTML is not well formed, minidom cannot be used to convert to - DOM. The first discovered form is assumed to be the form to post back - and the URL is taken from action attributes. 
- """ - logger.debug(html) - - idx = html.find(' 0: - # clean up the previously fetched data - n = self._next_chunk_to_consume - 1 - self._chunks[n] = self._chunks[n]._replace(result_data=None, ready=False) - - if self._next_chunk_to_download < self._chunk_size: - self._pool.apply_async( - self._download_chunk, - [self._next_chunk_to_download]) - self._next_chunk_to_download += 1 - - if self._downloader_error is not None: - raise self._downloader_error - - for attempt in range(MAX_RETRY_DOWNLOAD): - logger.debug(u'waiting for chunk %s/%s' - u' in %s/%s download attempt', - self._next_chunk_to_consume + 1, - self._chunk_size, - attempt + 1, - MAX_RETRY_DOWNLOAD) - done = False - for wait_counter in range(MAX_WAIT): - with self._chunk_cond: - if self._downloader_error: - raise self._downloader_error - if self._chunks[self._next_chunk_to_consume].ready: - done = True - break - logger.debug(u'chunk %s/%s is NOT ready to consume' - u' in %s/%s(s)', - self._next_chunk_to_consume + 1, - self._chunk_size, - (wait_counter + 1) * WAIT_TIME_IN_SECONDS, - MAX_WAIT * WAIT_TIME_IN_SECONDS) - self._chunk_cond.wait(WAIT_TIME_IN_SECONDS) - else: - logger.debug( - u'chunk %s/%s is still NOT ready. Restarting chunk ' - u'downloader threads', - self._next_chunk_to_consume + 1, - self._chunk_size) - self._pool.terminate() # terminate the thread pool - self._pool = ThreadPool(self._effective_threads) - for idx0 in range(self._effective_threads): - idx = idx0 + self._next_chunk_to_consume - self._pool.apply_async(self._download_chunk, [idx]) - if done: - break - else: - Error.errorhandler_wrapper( - self._connection, - self._cursor, - OperationalError, - { - u'msg': u'The result set chunk download fails or hang for ' - u'unknown reason.', - u'errno': ER_CHUNK_DOWNLOAD_FAILED - }) - logger.debug(u'chunk %s/%s is ready to consume', - self._next_chunk_to_consume + 1, - self._chunk_size) - - ret = self._chunks[self._next_chunk_to_consume] - self._next_chunk_to_consume += 1 - return ret - - def terminate(self): - """ - Terminates downloading the chunks. - """ - if hasattr(self, u'_pool') and self._pool is not None: - self._pool.close() - self._pool.join() - self._pool = None - - def __del__(self): - try: - self.terminate() - except: - # ignore all errors in the destructor - pass - - def _fetch_chunk(self, url, headers): - """ - Fetch the chunk from S3. 
- """ - handler = JsonBinaryHandler(is_raw_binary_iterator=True, - use_ijson=self._use_ijson) \ - if self._query_result_format == 'json' else \ - ArrowBinaryHandler(self._cursor.description, self._connection) - - return self._connection.rest.fetch( - u'get', url, headers, - timeout=DEFAULT_REQUEST_TIMEOUT, - is_raw_binary=True, - binary_data_handler=handler) - - -class ResultIterWithTimings: - DOWNLOAD = u"download" - PARSE = u"parse" - - def __init__(self, it, timings): - self._it = it - self._timings = timings - - def __next__(self): - return next(self._it) - - def next(self): - return self.__next__() - - def get_timings(self): - return self._timings - - -class RawBinaryDataHandler: - """ - Abstract class being passed to network.py to handle raw binary data - """ - def to_iterator(self, raw_data_fd, download_time): - pass - - -class JsonBinaryHandler(RawBinaryDataHandler): - """ - Convert result chunk in json format into interator - """ - def __init__(self, is_raw_binary_iterator, use_ijson): - self._is_raw_binary_iterator = is_raw_binary_iterator - self._use_ijson = use_ijson - - def to_iterator(self, raw_data_fd, download_time): - parse_start_time = get_time_millis() - raw_data = decompress_raw_data( - raw_data_fd, add_bracket=True - ).decode('utf-8', 'replace') - if not self._is_raw_binary_iterator: - ret = json.loads(raw_data) - elif not self._use_ijson: - ret = iter(json.loads(raw_data)) - else: - ret = split_rows_from_stream(StringIO(raw_data)) - - parse_end_time = get_time_millis() - - timing_metrics = { - ResultIterWithTimings.DOWNLOAD: download_time, - ResultIterWithTimings.PARSE: parse_end_time - parse_start_time - } - - return ResultIterWithTimings(ret, timing_metrics) - - -class ArrowBinaryHandler(RawBinaryDataHandler): - - def __init__(self, meta, connection): - self._meta = meta - self._arrow_context = ArrowConverterContext(connection._session_parameters) - - """ - Handler to consume data as arrow stream - """ - def to_iterator(self, raw_data_fd, download_time): - gzip_decoder = GzipFile(fileobj=raw_data_fd, mode='r') - reader = open_stream(gzip_decoder) - it = PyArrowIterator(reader, self._arrow_context) - return it diff --git a/ci/anaconda/bld.bat b/ci/anaconda/bld.bat new file mode 100644 index 000000000..5a5aeeb48 --- /dev/null +++ b/ci/anaconda/bld.bat @@ -0,0 +1 @@ +$PYTHON setup.py install diff --git a/ci/anaconda/build.sh b/ci/anaconda/build.sh new file mode 100644 index 000000000..a6609066d --- /dev/null +++ b/ci/anaconda/build.sh @@ -0,0 +1 @@ +$PYTHON setup.py install --single-version-externally-managed --record=record.txt diff --git a/ci/anaconda/meta.yaml b/ci/anaconda/meta.yaml new file mode 100644 index 000000000..09f764849 --- /dev/null +++ b/ci/anaconda/meta.yaml @@ -0,0 +1,29 @@ +package: + name: snowflake_connector_python + version: "1.2.3" + +source: + path: /tmp/anaconda_workspace/src + +requirements: + build: + - python + - setuptools + + run: + - python + - boto3 ==1.3.1 + - botocore ==1.4.26 + - future + - six + - pytz + - pycrypto ==2.6.1 + - pyopenssl ==0.15.1 + - cryptography ==1.2.3 + - cffi ==1.6.0 + +about: + home: https://www.snowflake.com/ + license: Apache 2.0 + license_file: /tmp/anaconda_workspace/src/LICENSE.txt + summary: Snowflake Connector for Python diff --git a/ci/build_darwin.sh b/ci/build_darwin.sh new file mode 100755 index 000000000..abeaf98f3 --- /dev/null +++ b/ci/build_darwin.sh @@ -0,0 +1,43 @@ +#!/bin/bash -e +# +# Build Snowflake Python Connector on Mac +# NOTES: +# - To compile only a specific version(s) pass in versions 
like: `./build_darwin.sh "3.7 3.8"` +PYTHON_VERSIONS="${1:-3.7 3.8 3.9 3.10}" +THIS_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +CONNECTOR_DIR="$(dirname "${THIS_DIR}")" +DIST_DIR="$CONNECTOR_DIR/dist" + +cd $CONNECTOR_DIR +# Clean up previously built DIST_DIR +if [ -d "${DIST_DIR}" ]; then + echo "[WARN] ${DIST_DIR} already exists, deleting it..." + rm -rf "${DIST_DIR}" +fi +mkdir -p ${DIST_DIR} + +# Make sure we build for our lowest target +# Should be kept in sync with .github/workflows/build_test.yml +export MACOSX_DEPLOYMENT_TARGET="10.14" +for PYTHON_VERSION in ${PYTHON_VERSIONS}; do + # Constants and setup + PYTHON="python${PYTHON_VERSION}" + VENV_DIR="${CONNECTOR_DIR}/venv-${PYTHON_VERSION}" + + # Need to create a venv to update build dependencies + ${PYTHON} -m venv ${VENV_DIR} + source ${VENV_DIR}/bin/activate + echo "[Info] Created and activated new venv at ${VENV_DIR}" + + # Build + echo "[Info] Creating a wheel: snowflake_connector using $PYTHON" + # Clean up possible build artifacts + rm -rf build generated_version.py + # Update PEP-517 dependencies + python -m pip install -U pip setuptools wheel build + # Use new PEP-517 build + python -m build --wheel . + deactivate + echo "[Info] Deleting venv at ${VENV_DIR}" + rm -rf ${VENV_DIR} +done diff --git a/ci/build_docker.sh b/ci/build_docker.sh new file mode 100755 index 000000000..471775fec --- /dev/null +++ b/ci/build_docker.sh @@ -0,0 +1,37 @@ +#!/bin/bash -e +# +# Build Snowflake Python Connector in Docker +# NOTES: +# - To compile only a specific version(s) pass in versions like: `./build_docker.sh "3.7 3.8"` +set -o pipefail + +THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +source $THIS_DIR/set_base_image.sh +CONNECTOR_DIR="$( dirname "${THIS_DIR}")" + +mkdir -p $CONNECTOR_DIR/dist +cd $THIS_DIR/docker/connector_build + +CONTAINER_NAME=build_pyconnector +arch=$(uname -p) + +echo "[Info] Building docker image" +if [[ "$arch" == "aarch64" ]]; then + BASE_IMAGE=$BASE_IMAGE_MANYLINUX2014AARCH64 + GOSU_URL=https://github.com/tianon/gosu/releases/download/1.11/gosu-arm64 +else + BASE_IMAGE=$BASE_IMAGE_MANYLINUX2010 + GOSU_URL=https://github.com/tianon/gosu/releases/download/1.11/gosu-amd64 +fi + +docker build --pull -t ${CONTAINER_NAME}:1.0 --build-arg BASE_IMAGE=$BASE_IMAGE --build-arg GOSU_URL="$GOSU_URL" . -f Dockerfile + +echo "[Info] Building Python Connector" +user_id=$(id -u ${USER}) +docker run \ + -e TERM=vt102 \ + -e PIP_DISABLE_PIP_VERSION_CHECK=1 \ + -e LOCAL_USER_ID=${user_id} \ + --mount type=bind,source="${CONNECTOR_DIR}",target=/home/user/snowflake-connector-python \ + ${CONTAINER_NAME}:1.0 \ + /home/user/snowflake-connector-python/ci/build_linux.sh $1 diff --git a/ci/build_linux.sh b/ci/build_linux.sh new file mode 100755 index 000000000..dbf10b014 --- /dev/null +++ b/ci/build_linux.sh @@ -0,0 +1,59 @@ +#!/bin/bash -e +# +# Build Snowflake Python Connector on Linux +# NOTES: +# - This is designed to ONLY be called in our build docker image +# - To compile only a specific version(s) pass in versions like: `./build_linux.sh "3.7 3.8"` +set -o pipefail + +U_WIDTH=16 +PYTHON_VERSIONS="${1:-3.7 3.8 3.9 3.10}" +THIS_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +CONNECTOR_DIR="$(dirname "${THIS_DIR}")" +DIST_DIR="${CONNECTOR_DIR}/dist" +REPAIRED_DIR=${DIST_DIR}/repaired_wheels + +cd "$CONNECTOR_DIR" +# Clean up previously built DIST_DIR +if [ -d "${DIST_DIR}" ]; then + echo "[WARN] ${DIST_DIR} already exists, deleting it..."
+ rm -rf "${DIST_DIR}" +fi +mkdir -p ${REPAIRED_DIR} + +# Necessary for cpython_path +source /home/user/multibuild/manylinux_utils.sh + +for PYTHON_VERSION in ${PYTHON_VERSIONS}; do + # Constants and setup + PYTHON="$(cpython_path ${PYTHON_VERSION} ${U_WIDTH})/bin/python" + BUILD_DIR="${DIST_DIR}/$PYTHON_VERSION" + + # Build + echo "[Info] Building for ${PYTHON_VERSION} with $PYTHON" + # Clean up possible build artifacts + rm -rf build generated_version.py + # Update PEP-517 dependencies + ${PYTHON} -m pip install --upgrade pip setuptools wheel build + # Use new PEP-517 build + ${PYTHON} -m build --outdir ${BUILD_DIR} . + # On Linux we should repair wheel(s) generated +arch=$(uname -p) +if [[ $arch == x86_64 ]]; then + auditwheel repair --plat manylinux2014_x86_64 ${BUILD_DIR}/*.whl -w ${REPAIRED_DIR} +else + auditwheel repair --plat manylinux2014_aarch64 ${BUILD_DIR}/*.whl -w ${REPAIRED_DIR} +fi + + # Generate reqs files + FULL_PYTHON_VERSION="$(${PYTHON} --version | cut -d' ' -f2-)" + REQS_FILE="${BUILD_DIR}/requirements_$(${PYTHON} -c 'from sys import version_info;print(str(version_info.major)+str(version_info.minor))').txt" + ${PYTHON} -m pip install ${BUILD_DIR}/*.whl + echo "# Generated on: $(${PYTHON} --version)" >${REQS_FILE} + echo "# With snowflake-connector-python version: $(${PYTHON} -m pip show snowflake-connector-python | grep ^Version | cut -d' ' -f2-)" >>${REQS_FILE} + ${PYTHON} -m pip freeze | grep -v snowflake-connector-python 1>>${REQS_FILE} 2>/dev/null +done + +# Move lowest Python version generated sdist to right location +LOWEST_SDIST="$(find dist -iname '*.tar.gz' | sort | head -n 1)" +mv "${LOWEST_SDIST}" dist diff --git a/ci/build_windows.bat b/ci/build_windows.bat new file mode 100644 index 000000000..f7c254aaf --- /dev/null +++ b/ci/build_windows.bat @@ -0,0 +1,48 @@ +:: +:: Build Snowflake Python Connector on Windows +:: NOTES: +:: - This is designed to ONLY be called in our Windows workers in Jenkins +:: - To restrict what version gets created edit this file +SET SCRIPT_DIR=%~dp0 +SET CONNECTOR_DIR=%~dp0\..\ + +set python_versions= 3.7 3.8 3.9 3.10 + +cd %CONNECTOR_DIR% + +set venv_dir=%WORKSPACE%\venv-flake8 +if %errorlevel% neq 0 goto :error + +py -3.7 -m venv %venv_dir% +if %errorlevel% neq 0 goto :error + +call %venv_dir%\scripts\activate +if %errorlevel% neq 0 goto :error + +python -m pip install --upgrade pip setuptools wheel +if %errorlevel% neq 0 goto :error + +(for %%v in (%python_versions%) do ( + call :build_wheel_file %%v || goto :error +)) + +call deactivate + +dir dist + +EXIT /B %ERRORLEVEL% + +:build_wheel_file +set pv=%~1 + +echo Going to compile wheel for Python %pv% +py -%pv% -m pip install --upgrade pip setuptools wheel build +if %errorlevel% neq 0 goto :error + +py -%pv% -m build --wheel . 
+if %errorlevel% neq 0 goto :error + +EXIT /B 0 + +:error +exit /b %errorlevel% diff --git a/ci/change_snowflake_test_pwd.py b/ci/change_snowflake_test_pwd.py new file mode 100644 index 000000000..b7f1953e2 --- /dev/null +++ b/ci/change_snowflake_test_pwd.py @@ -0,0 +1,53 @@ +#!/usr/bin/env python +# +# Set a complex password for test user snowman +# +from __future__ import annotations + +import os +import sys + +import snowflake.connector + +sys.path.append( + os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "test") +) + +CLIENT_KNOWN_SSM_FILE_PATH_DOCKER = "CLIENT_KNOWN_SSM_FILE_PATH_DOCKER" + + +def change_password(): + params = { + "account": "", + "user": "", + "password": "", + "database": "", + "schema": "", + "protocol": "https", + "host": "", + "port": "443", + } + + for k, v in CONNECTION_PARAMETERS.items(): + params[k] = v + + conn = snowflake.connector.connect(**params) + conn.cursor().execute("use role accountadmin") + cmd = f"alter user set password = '{SNOWFLAKE_TEST_PASSWORD_NEW}'" + print(cmd) + conn.cursor().execute(cmd) + conn.close() + + +def generate_known_ssm_file(): + with open(os.getenv(CLIENT_KNOWN_SSM_FILE_PATH_DOCKER), "w") as f: + f.write(SNOWFLAKE_TEST_PASSWORD_NEW + "\n") + + +if __name__ == "__main__": + from jenkins_test_parameters import SNOWFLAKE_TEST_PASSWORD_NEW + + from parameters import CONNECTION_PARAMETERS + + change_password() + generate_known_ssm_file() diff --git a/ci/docker/connector_build/Dockerfile b/ci/docker/connector_build/Dockerfile new file mode 100644 index 000000000..7074b95bc --- /dev/null +++ b/ci/docker/connector_build/Dockerfile @@ -0,0 +1,20 @@ +# We use the manylinux1 base image because pyarrow_manylinux2010 has a bug and the wheel failed to be audited +ARG BASE_IMAGE=quay.io/pypa/manylinux2010_x86_64 +FROM $BASE_IMAGE + +# This is to solve a permission issue; read https://denibertovic.com/posts/handling-permissions-with-docker-volumes/ +ARG GOSU_URL=https://github.com/tianon/gosu/releases/download/1.11/gosu-amd64 +ENV GOSU_PATH $GOSU_URL +RUN curl -o /usr/local/bin/gosu -SL $GOSU_PATH +RUN chmod +x /usr/local/bin/gosu + +COPY scripts/entrypoint.sh /usr/local/bin/entrypoint.sh +RUN chmod +x /usr/local/bin/entrypoint.sh + +WORKDIR /home/user +RUN chmod 777 /home/user +RUN git clone https://github.com/matthew-brett/multibuild.git && cd /home/user/multibuild && git checkout bfc6d8b82d8c37b8ca1e386081fd800e81c6ab4a + +ENV PATH="${PATH}:/opt/python/cp37-cp37m/bin:/opt/python/cp38-cp38/bin:/opt/python/cp39-cp39/bin:/opt/python/cp310-cp310/bin" + +ENTRYPOINT ["/usr/local/bin/entrypoint.sh"] diff --git a/ci/docker/connector_build/scripts/entrypoint.sh b/ci/docker/connector_build/scripts/entrypoint.sh new file mode 100755 index 000000000..f84205e74 --- /dev/null +++ b/ci/docker/connector_build/scripts/entrypoint.sh @@ -0,0 +1,13 @@ +#!/bin/bash + +# Add local user +# Either use the LOCAL_USER_ID if passed in at runtime or +# fall back to a default + +USER_ID=${LOCAL_USER_ID:-9001} + +echo "Starting with UID : $USER_ID" +useradd --shell /bin/bash -u $USER_ID -o -c "" -m user +export HOME=/home/user + +/usr/local/bin/gosu user "$@" diff --git a/ci/docker/connector_test/Dockerfile b/ci/docker/connector_test/Dockerfile new file mode 100644 index 000000000..ef56caec7 --- /dev/null +++ b/ci/docker/connector_test/Dockerfile @@ -0,0 +1,18 @@ +# We use the manylinux1 base image because pyarrow_manylinux2010 has a bug and the wheel failed to be audited +ARG BASE_IMAGE=quay.io/pypa/manylinux2010_x86_64 +FROM $BASE_IMAGE + +# This is to solve a permission
issue, read https://denibertovic.com/posts/handling-permissions-with-docker-volumes/ +ARG GOSU_URL=https://github.com/tianon/gosu/releases/download/1.11/gosu-amd64 +ENV GOSU_PATH $GOSU_URL +RUN curl -o /usr/local/bin/gosu -SL $GOSU_PATH +RUN chmod +x /usr/local/bin/gosu + +COPY scripts/entrypoint.sh /usr/local/bin/entrypoint.sh +RUN chmod +x /usr/local/bin/entrypoint.sh + +WORKDIR /home/user +RUN chmod 777 /home/user +ENV PATH="${PATH}:/opt/python/cp37-cp37m/bin:/opt/python/cp38-cp38/bin/:/opt/python/cp39-cp39/bin/:/opt/python/cp310-cp310/bin/" + +ENTRYPOINT ["/usr/local/bin/entrypoint.sh"] diff --git a/ci/docker/connector_test/scripts/entrypoint.sh b/ci/docker/connector_test/scripts/entrypoint.sh new file mode 100755 index 000000000..f84205e74 --- /dev/null +++ b/ci/docker/connector_test/scripts/entrypoint.sh @@ -0,0 +1,13 @@ +#!/bin/bash + +# Add local user +# Either use the LOCAL_USER_ID if passed in at runtime or +# fallback + +USER_ID=${LOCAL_USER_ID:-9001} + +echo "Starting with UID : $USER_ID" +useradd --shell /bin/bash -u $USER_ID -o -c "" -m user +export HOME=/home/user + +/usr/local/bin/gosu user "$@" diff --git a/ci/docker/connector_test_fips/Dockerfile b/ci/docker/connector_test_fips/Dockerfile new file mode 100644 index 000000000..461bb0dde --- /dev/null +++ b/ci/docker/connector_test_fips/Dockerfile @@ -0,0 +1,17 @@ +FROM centos:7 + +# This is to solve permission issue, read https://denibertovic.com/posts/handling-permissions-with-docker-volumes/ +RUN curl -o /usr/local/bin/gosu -SL "https://github.com/tianon/gosu/releases/download/1.11/gosu-amd64" +RUN chmod +x /usr/local/bin/gosu + +COPY scripts/entrypoint.sh /usr/local/bin/entrypoint.sh +RUN chmod +x /usr/local/bin/entrypoint.sh + +WORKDIR /home/user +RUN chmod 777 /home/user + +ENTRYPOINT ["/usr/local/bin/entrypoint.sh"] + +RUN yum install -y redhat-rpm-config gcc libffi-devel openssl openssl-devel centos-release-scl +RUN yum install -y rh-python38 rh-python38-python-devel +RUN scl enable rh-python38 "python3.8 -m pip install --user --upgrade pip setuptools wheel" diff --git a/ci/docker/connector_test_fips/scripts/entrypoint.sh b/ci/docker/connector_test_fips/scripts/entrypoint.sh new file mode 100755 index 000000000..055e17b82 --- /dev/null +++ b/ci/docker/connector_test_fips/scripts/entrypoint.sh @@ -0,0 +1,14 @@ +#!/bin/bash + +# Add local user +# Either use the LOCAL_USER_ID if passed in at runtime or +# fallback + +USER_ID=${LOCAL_USER_ID:-9001} + +echo "Starting with UID : $USER_ID" +useradd --shell /bin/bash -u $USER_ID -o -c "" -m user +export HOME=/home/user + +. 
/opt/rh/rh-python38/enable
+/usr/local/bin/gosu user "$@"
diff --git a/ci/log_analyze_setup.sh b/ci/log_analyze_setup.sh
new file mode 100644
index 000000000..729cfea27
--- /dev/null
+++ b/ci/log_analyze_setup.sh
@@ -0,0 +1,23 @@
+#!/bin/bash -e
+#
+# preparation for log analysis
+#
+
+# DOCKER ROOT /home/user/snowflake-connector-python
+
+export CLIENT_LOG_DIR_PATH_DOCKER=/home/user/snowflake-connector-python/ssm_rt_log
+export CLIENT_LOG_DIR_PATH=$WORKSPACE/target_client/ssm_rt_log
+echo "[INFO] CLIENT_LOG_DIR_PATH=$CLIENT_LOG_DIR_PATH"
+echo "[INFO] CLIENT_LOG_DIR_PATH_DOCKER=$CLIENT_LOG_DIR_PATH_DOCKER"
+
+export CLIENT_KNOWN_SSM_FILE_PATH_DOCKER=$CLIENT_LOG_DIR_PATH_DOCKER/rt_jenkins_log_known_ssm.txt
+export CLIENT_KNOWN_SSM_FILE_PATH=$CLIENT_LOG_DIR_PATH/rt_jenkins_log_known_ssm.txt
+echo "[INFO] CLIENT_KNOWN_SSM_FILE_PATH=$CLIENT_KNOWN_SSM_FILE_PATH"
+echo "[INFO] CLIENT_KNOWN_SSM_FILE_PATH_DOCKER=$CLIENT_KNOWN_SSM_FILE_PATH_DOCKER"
+
+# [required envs]
+# To disable log analysis, set ENABLE_CLIENT_LOG_ANALYZE to anything other than "true", e.g. "false".
+export ENABLE_CLIENT_LOG_ANALYZE="true"
+
+# The environment variable used by the log analysis module
+export CLIENT_DRIVER_NAME=PYTHON
diff --git a/ci/set_base_image.sh b/ci/set_base_image.sh
new file mode 100644
index 000000000..e0249118b
--- /dev/null
+++ b/ci/set_base_image.sh
@@ -0,0 +1,19 @@
+#!/bin/bash -e
+#
+# Use the internal docker registry if running on Jenkins
+#
+set -o pipefail
+INTERNAL_REPO=nexus.int.snowflakecomputing.com:8086
+if [[ -n "$NEXUS_PASSWORD" ]]; then
+  echo "[INFO] Pull docker images from $INTERNAL_REPO"
+  NEXUS_USER=${USERNAME:-jenkins}
+  docker login --username "$NEXUS_USER" --password "$NEXUS_PASSWORD" $INTERNAL_REPO
+  export BASE_IMAGE_MANYLINUX2010=nexus.int.snowflakecomputing.com:8086/docker/manylinux2010_x86_64
+  export BASE_IMAGE_MANYLINUX2014=nexus.int.snowflakecomputing.com:8086/docker/manylinux2014_x86_64
+  export BASE_IMAGE_MANYLINUX2014AARCH64=nexus.int.snowflakecomputing.com:8086/docker/manylinux2014_aarch64
+else
+  echo "[INFO] Pull docker images from public registry"
+  export BASE_IMAGE_MANYLINUX2010=quay.io/pypa/manylinux2010_x86_64
+  export BASE_IMAGE_MANYLINUX2014=quay.io/pypa/manylinux2014_x86_64
+  export BASE_IMAGE_MANYLINUX2014AARCH64=quay.io/pypa/manylinux2014_aarch64
+fi
diff --git a/ci/test.sh b/ci/test.sh
new file mode 100755
index 000000000..101dd6379
--- /dev/null
+++ b/ci/test.sh
@@ -0,0 +1,43 @@
+#!/bin/bash -e
+# Start Snowflake Python Connector tests
+# NOTES:
+#   - This script is used by Jenkins to start various tests
+#   - Assumes that py_test_mode and python_env (not required for FIPS tests as of now) were previously set
+THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+CONNECTOR_DIR="$( dirname "${THIS_DIR}")"
+PARAMETERS_DIR="${CONNECTOR_DIR}/.github/workflows/parameters/private"
+
+cd "${CONNECTOR_DIR}"
+
+# Check Requirements
+if [ -z "${PARAMETERS_SECRET}" ]; then
+  echo "Missing PARAMETERS_SECRET, failing..."
+  exit 1
+fi
+
+# Decrypt parameters file
+PARAMS_FILE="${PARAMETERS_DIR}/parameters_aws.py.gpg"
+[ ${cloud_provider} == azure ] && PARAMS_FILE="${PARAMETERS_DIR}/parameters_azure.py.gpg"
+[ ${cloud_provider} == gcp ] && PARAMS_FILE="${PARAMETERS_DIR}/parameters_gcp.py.gpg"
+gpg --quiet --batch --yes --decrypt --passphrase="${PARAMETERS_SECRET}" ${PARAMS_FILE} > test/parameters.py
+
+# Decrypt jenkins version parameters file
+PARAMS_FILE="${PARAMETERS_DIR}/parameters_aws_jenkins.py.gpg"
+[ ${cloud_provider} == azure ] && PARAMS_FILE="${PARAMETERS_DIR}/parameters_azure_jenkins.py.gpg"
+[ ${cloud_provider} == gcp ] && PARAMS_FILE="${PARAMETERS_DIR}/parameters_gcp_jenkins.py.gpg"
+gpg --quiet --batch --yes --decrypt --passphrase="${PARAMETERS_SECRET}" ${PARAMS_FILE} > test/parameters_jenkins.py
+
+# Decrypt to get the new test password
+gpg --quiet --batch --yes --decrypt --passphrase="${PARAMETERS_SECRET}" ${PARAMETERS_DIR}/jenkins_test_parameters.py.gpg > ci/jenkins_test_parameters.py
+
+# Download artifacts made by the build
+aws s3 cp --recursive --only-show-errors s3://sfc-jenkins/repository/python_connector/linux/${client_git_branch}/${client_git_commit}/ dist
+
+# Run one of the tests
+if [ "${py_test_mode}" = "fips" ]; then
+  echo "[Info] Going to run FIPS tests"
+  ${THIS_DIR}/test_fips_docker.sh
+else
+  echo "[Info] Going to run regular tests for Python ${python_env}"
+  ${THIS_DIR}/test_docker.sh ${python_env}
+fi
diff --git a/ci/test_darwin.sh b/ci/test_darwin.sh
new file mode 100755
index 000000000..a0135e48b
--- /dev/null
+++ b/ci/test_darwin.sh
@@ -0,0 +1,34 @@
+#!/bin/bash -e
+#
+# Test Snowflake Connector on a Darwin Jenkins slave
+# NOTES:
+#   - Versions to be tested should be passed in as the first argument, e.g.: "3.7 3.8". If omitted, 3.7-3.10 will be assumed.
+#   - This script uses .. to download the newest wheel files from S3
+
+PYTHON_VERSIONS="${1:-3.7 3.8 3.9 3.10}"
+THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+CONNECTOR_DIR="$( dirname "${THIS_DIR}")"
+PARAMETERS_DIR="${CONNECTOR_DIR}/.github/workflows/parameters/public"
+
+export JUNIT_REPORT_DIR=${SF_REGRESS_LOGS:-$CONNECTOR_DIR}
+export COV_REPORT_DIR=${CONNECTOR_DIR}
+
+# Decrypt parameters file
+PARAMS_FILE="${PARAMETERS_DIR}/parameters_aws.py.gpg"
+[ ${cloud_provider} == azure ] && PARAMS_FILE="${PARAMETERS_DIR}/parameters_azure.py.gpg"
+[ ${cloud_provider} == gcp ] && PARAMS_FILE="${PARAMETERS_DIR}/parameters_gcp.py.gpg"
+gpg --quiet --batch --yes --decrypt --passphrase="${PARAMETERS_SECRET}" ${PARAMS_FILE} > test/parameters.py
+
+# Run tests
+cd $CONNECTOR_DIR
+for PYTHON_VERSION in ${PYTHON_VERSIONS}; do
+  echo "[Info] Testing with ${PYTHON_VERSION}"
+  SHORT_VERSION=$(python3 -c "print('${PYTHON_VERSION}'.replace('.', ''))")
+  CONNECTOR_WHL=$(ls ${CONNECTOR_DIR}/dist/snowflake_connector_python*cp${SHORT_VERSION}*.whl)
+  TEST_ENVLIST=py${SHORT_VERSION}-{unit,integ,pandas,sso}-ci
+  echo "[Info] Running tox for ${TEST_ENVLIST}"
+
+  # https://github.com/tox-dev/tox/issues/1485
+  # tox does not seem to work inside a virtualenv, so we manually installed tox and trigger the system-default tox here
+  /Library/Frameworks/Python.framework/Versions/3.5/bin/tox -e ${TEST_ENVLIST} --external_wheels ${CONNECTOR_WHL}
+done
diff --git a/ci/test_docker.sh b/ci/test_docker.sh
new file mode 100755
index 000000000..cde440256
--- /dev/null
+++ b/ci/test_docker.sh
@@ -0,0 +1,51 @@
+#!/bin/bash -e
+# Test Snowflake Python Connector in Docker
+# NOTES:
+#   - By default this script runs Python 3.7 tests, as these are installed in dev VMs
+#   - To test only specific version(s), pass them in like: `./test_docker.sh "3.7 3.8"`
+
+set -o pipefail
+
+# In case this is run from a dev VM
+PYTHON_ENV=${1:-3.7}
+
+# Set constants
+THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+CONNECTOR_DIR="$( dirname "${THIS_DIR}")"
+WORKSPACE=${WORKSPACE:-${CONNECTOR_DIR}}
+source $THIS_DIR/set_base_image.sh
+
+cd $THIS_DIR/docker/connector_test
+
+CONTAINER_NAME=test_pyconnector
+
+echo "[Info] Building docker image"
+arch=$(uname -p)
+
+if [[ "$arch" == "aarch64" ]]; then
+  BASE_IMAGE=$BASE_IMAGE_MANYLINUX2014AARCH64
+  GOSU_URL=https://github.com/tianon/gosu/releases/download/1.11/gosu-arm64
+else
+  BASE_IMAGE=$BASE_IMAGE_MANYLINUX2010
+  GOSU_URL=https://github.com/tianon/gosu/releases/download/1.11/gosu-amd64
+fi
+
+docker build --pull -t ${CONTAINER_NAME}:1.0 --build-arg BASE_IMAGE=$BASE_IMAGE --build-arg GOSU_URL="$GOSU_URL" . -f Dockerfile
+
+user_id=$(id -u ${USER})
+docker run --network=host \
+  -e TERM=vt102 \
+  -e PIP_DISABLE_PIP_VERSION_CHECK=1 \
+  -e OPENSSL_FIPS=1 \
+  -e LOCAL_USER_ID=${user_id} \
+  -e AWS_ACCESS_KEY_ID \
+  -e AWS_SECRET_ACCESS_KEY \
+  -e SF_REGRESS_LOGS \
+  -e SF_PROJECT_ROOT \
+  -e cloud_provider \
+  -e JENKINS_HOME \
+  -e is_old_driver \
+  -e GITHUB_ACTIONS \
+  --mount type=bind,source="${CONNECTOR_DIR}",target=/home/user/snowflake-connector-python \
+  ${CONTAINER_NAME}:1.0 \
+  /home/user/snowflake-connector-python/ci/test_linux.sh ${PYTHON_ENV}
diff --git a/ci/test_fips.sh b/ci/test_fips.sh
new file mode 100755
index 000000000..ab4c9d385
--- /dev/null
+++ b/ci/test_fips.sh
@@ -0,0 +1,27 @@
+#!/bin/bash -e
+#
+# Test Snowflake Connector
+# Note this is the script that test_fips_docker.sh runs inside of the docker container
+#
+THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+# shellcheck disable=SC1090
+CONNECTOR_DIR="$( dirname "${THIS_DIR}")"
+CONNECTOR_WHL="$(ls $CONNECTOR_DIR/dist/*cp38*manylinux2010*.whl | sort -r | head -n 1)"
+
+python3.8 -m venv fips_env
+source fips_env/bin/activate
+pip install -U setuptools pip
+pip install "${CONNECTOR_WHL}[pandas,secure-local-storage,development]"
+pip install "cryptography<3.3.0" --force-reinstall --no-binary cryptography
+
+echo "!!! Environment description !!!"
+echo "Default installed OpenSSL version"
+openssl version
+python -c "import ssl; print('Python openssl library: ' + ssl.OPENSSL_VERSION)"
+python -c "from cryptography.hazmat.backends.openssl import backend;print('Cryptography openssl library: ' + backend.openssl_version_text())"
+pip freeze
+
+cd $CONNECTOR_DIR
+pytest -vvv --cov=snowflake.connector --cov-report=xml:coverage.xml test
+
+deactivate
diff --git a/ci/test_fips_docker.sh b/ci/test_fips_docker.sh
new file mode 100755
index 000000000..3405a0f8b
--- /dev/null
+++ b/ci/test_fips_docker.sh
@@ -0,0 +1,37 @@
+#!/bin/bash -x
+
+THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+CONNECTOR_DIR="$( dirname "${THIS_DIR}")"
+# In case this is not run locally and not on Jenkins
+
+if [[ ! -d "$CONNECTOR_DIR/dist/" ]] || [[ $(ls $CONNECTOR_DIR/dist/*cp38*manylinux2010*.whl) == '' ]]; then
+  echo "Missing wheel files, going to compile Python connector in Docker..."
+  $THIS_DIR/build_docker.sh 3.8
+  cp $CONNECTOR_DIR/dist/repaired_wheels/*cp38*manylinux2010*.whl $CONNECTOR_DIR/dist/
+fi
+
+cd $THIS_DIR/docker/connector_test_fips
+
+CONTAINER_NAME=test_fips_connector
+
+echo "[Info] Start building docker image"
+docker build -t ${CONTAINER_NAME}:1.0 -f Dockerfile .
+
+user_id=$(id -u $USER)
+docker run --network=host \
+  -e LANG=en_US.UTF-8 \
+  -e TERM=vt102 \
+  -e SF_USE_OPENSSL_ONLY=True \
+  -e PIP_DISABLE_PIP_VERSION_CHECK=1 \
+  -e LOCAL_USER_ID=$user_id \
+  -e CRYPTOGRAPHY_ALLOW_OPENSSL_102=1 \
+  -e AWS_ACCESS_KEY_ID \
+  -e AWS_SECRET_ACCESS_KEY \
+  -e SF_REGRESS_LOGS \
+  -e SF_PROJECT_ROOT \
+  -e cloud_provider \
+  -e PYTEST_ADDOPTS \
+  -e GITHUB_ACTIONS \
+  --mount type=bind,source="${CONNECTOR_DIR}",target=/home/user/snowflake-connector-python \
+  ${CONTAINER_NAME}:1.0 \
+  /home/user/snowflake-connector-python/ci/test_fips.sh $1
diff --git a/ci/test_linux.sh b/ci/test_linux.sh
new file mode 100755
index 000000000..d4318953e
--- /dev/null
+++ b/ci/test_linux.sh
@@ -0,0 +1,45 @@
+#!/bin/bash -e
+#
+# Test Snowflake Connector in Linux
+# NOTES:
+#   - Versions to be tested should be passed in as the first argument, e.g.: "3.7 3.8". If omitted, 3.7-3.10 will be assumed.
+# - This script assumes that ../dist/repaired_wheels has the wheel(s) built for all versions to be tested +# - This is the script that test_docker.sh runs inside of the docker container + +PYTHON_VERSIONS="${1:-3.7 3.8 3.9 3.10}" +THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +CONNECTOR_DIR="$( dirname "${THIS_DIR}")" + +# Install one copy of tox +python3 -m pip install -U tox tox-external-wheels + +source ${THIS_DIR}/log_analyze_setup.sh + +if [[ -d ${CLIENT_LOG_DIR_PATH_DOCKER} ]]; then + rm -rf ${CLIENT_LOG_DIR_PATH_DOCKER}/* +else + mkdir ${CLIENT_LOG_DIR_PATH_DOCKER} +fi + +# replace test password with a more complex one, and generate known ssm file +pip3 install -U snowflake-connector-python >& /dev/null +python3 ${THIS_DIR}/change_snowflake_test_pwd.py +mv ${CONNECTOR_DIR}/test/parameters_jenkins.py ${CONNECTOR_DIR}/test/parameters.py + +# Run tests +cd $CONNECTOR_DIR +if [[ "$is_old_driver" == "true" ]]; then + # Old Driver Test + echo "[Info] Running old connector tests" + python3 -m tox -e olddriver +else + for PYTHON_VERSION in ${PYTHON_VERSIONS}; do + echo "[Info] Testing with ${PYTHON_VERSION}" + SHORT_VERSION=$(python3 -c "print('${PYTHON_VERSION}'.replace('.', ''))") + CONNECTOR_WHL=$(ls $CONNECTOR_DIR/dist/snowflake_connector_python*cp${SHORT_VERSION}*manylinux2014*.whl | sort -r | head -n 1) + TEST_ENVLIST=fix_lint,py${SHORT_VERSION}-{unit,integ,pandas,sso}-ci,py${SHORT_VERSION}-coverage + echo "[Info] Running tox for ${TEST_ENVLIST}" + + python3 -m tox -e ${TEST_ENVLIST} --external_wheels ${CONNECTOR_WHL} + done +fi diff --git a/ci/test_windows.bat b/ci/test_windows.bat new file mode 100644 index 000000000..a02f2c20d --- /dev/null +++ b/ci/test_windows.bat @@ -0,0 +1,53 @@ +:: +:: Test PythonConnector on Windows +:: + + +SET SCRIPT_DIR=%~dp0 +SET CONNECTOR_DIR=%~dp0\..\ +:: E.g.: 35 +set pv=%1 + +cd %CONNECTOR_DIR% + +dir /b * | findstr ^snowflake_connector_python.*%pv%.*whl$ > whl_name +if %errorlevel% neq 0 goto :error + +set /p connector_whl= test\parameters.py + +:: create tox execution virtual env +set venv_dir=%WORKSPACE%\tox_venv +py -3.7 -m venv %venv_dir% +if %errorlevel% neq 0 goto :error + +call %venv_dir%\scripts\activate +if %errorlevel% neq 0 goto :error + +python -m pip install -U pip tox tox-external-wheels +if %errorlevel% neq 0 goto :error + +cd %CONNECTOR_DIR% + +set JUNIT_REPORT_DIR=%workspace% +set COV_REPORT_DIR=%workspace% +tox -e py%pv%-{unit,integ,pandas,sso}-ci --external_wheels %connector_whl% -- --basetemp=%workspace%\pytest-tmp\ +if %errorlevel% neq 0 goto :error + +call deactivate +EXIT /B 0 + +:error +exit /b %errorlevel% diff --git a/ci/wss.sh b/ci/wss.sh new file mode 100755 index 000000000..a32a34abf --- /dev/null +++ b/ci/wss.sh @@ -0,0 +1,178 @@ +#!/usr/bin/env bash +# +# Run whitesource for components which need versioning +set -e +set -o pipefail + +THIS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +CONNECTOR_DIR="$( dirname "${THIS_DIR}")" +SCAN_DIRECTORIES="${CONNECTOR_DIR}" + +[[ -z "$WHITESOURCE_API_KEY" ]] && echo "[WARNING] No WHITESOURCE_API_KEY is set. No WhiteSource scan will occur." 
&& exit 1 + +export PRODUCT_NAME=snowflake-connector-python +export PROD_BRANCH=main +export PROJECT_VERSION="${GITHUB_SHA}" + +BRANCH_OR_PR_NUMBER="$(echo "${GITHUB_REF}" | awk 'BEGIN { FS = "/" } ; { print $3 }')" + +# GITHUB_EVENT_NAME should either be 'push', or 'pull_request' +if [[ "$GITHUB_EVENT_NAME" == "pull_request" ]]; then + echo "[INFO] Pull Request" + export PROJECT_NAME="PR-${BRANCH_OR_PR_NUMBER}" +elif [[ "${BRANCH_OR_PR_NUMBER}" == "$PROD_BRANCH" ]]; then + echo "[INFO] Production branch" + export PROJECT_NAME="$PROD_BRANCH" +else + echo "[INFO] Non Production branch. Skipping wss..." + export PROJECT_NAME="" +fi + +if [[ -n "$PROJECT_NAME" ]]; then + rm -f wss-unified-agent.jar + curl -LO https://github.com/whitesource/unified-agent-distribution/releases/latest/download/wss-unified-agent.jar +fi +SCAN_CONFIG=wss-agent.config +cat > $SCAN_CONFIG < or **//** +excludes=**/*sources.jar **/*javadoc.jar + +case.sensitive.glob=false +followSymbolicLinks=true +CONFIG + +set +e +echo "[INFO] Running wss.sh for ${PRODUCT_NAME}-${PROJECT_NAME} under ${SCAN_DIRECTORIES}" +if [[ "$PROJECT_NAME" == "$PROD_BRANCH" ]]; then + # Prod branch + java -jar wss-unified-agent.jar -apiKey ${WHITESOURCE_API_KEY} \ + -c ${SCAN_CONFIG} \ + -d ${SCAN_DIRECTORIES} \ + -product ${PRODUCT_NAME} \ + -project ${PROJECT_NAME} \ + -projectVersion ${PROJECT_VERSION} \ + -offline true + ERR=$? + if [[ "$ERR" != "254" && "$ERR" != "0" ]]; then + echo "failed to run wss for PROJECT_VERSION=${PROJECT_VERSION} in ${PROJECT_VERSION}..." + exit 1 + fi + + java -jar wss-unified-agent.jar -apiKey ${WHITESOURCE_API_KEY} \ + -c ${SCAN_CONFIG} \ + -product ${PRODUCT_NAME} \ + -project ${PROJECT_NAME} \ + -projectVersion baseline \ + -requestFiles whitesource/update-request.txt + ERR=$? + if [[ "$ERR" != "254" && "$ERR" != "0" ]]; then + echo "failed to run wss for PROJECT_VERSION=${PROJECT_VERSION} in baseline" + exit 1 + fi + java -jar wss-unified-agent.jar -apiKey ${WHITESOURCE_API_KEY} \ + -c ${SCAN_CONFIG} \ + -product ${PRODUCT_NAME} \ + -project ${PROJECT_NAME} \ + -projectVersion ${PROJECT_VERSION} \ + -requestFiles whitesource/update-request.txt + ERR=$? + if [[ "$ERR" != "254" && "$ERR" != "0" ]]; then + echo "failed to run wss for PROJECT_VERSION=${PROJECT_VERSION} in ${PROJECT_VERSION}" + exit 1 + fi +elif [[ -n "$PROJECT_NAME" ]]; then + # PR + java -jar wss-unified-agent.jar -apiKey ${WHITESOURCE_API_KEY} \ + -c ${SCAN_CONFIG} \ + -d ${SCAN_DIRECTORIES} \ + -product ${PRODUCT_NAME} \ + -project ${PROJECT_NAME} \ + -projectVersion ${PROJECT_VERSION} + ERR=$? + if [[ "$ERR" != "254" && "$ERR" != "0" ]]; then + echo "failed to run wss for PROJECT_VERSION=${PROJECT_VERSION} in ${PROJECT_VERSION}..." + exit 1 + fi +fi +set -e diff --git a/compat.py b/compat.py deleted file mode 100644 index 22c586282..000000000 --- a/compat.py +++ /dev/null @@ -1,168 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. 
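For readers who do not speak awk, the branch/PR selection in ci/wss.sh above amounts to roughly the following. This is an illustrative Python rendering, not code from this PR; GITHUB_REF and GITHUB_EVENT_NAME are supplied by GitHub Actions.

```
import os


def wss_project_name(prod_branch="main"):
    # GITHUB_REF looks like refs/heads/<branch> or refs/pull/<number>/merge,
    # so the third slash-separated field (awk's $3) is the branch or PR number.
    parts = os.environ.get("GITHUB_REF", "").split("/")
    branch_or_pr = parts[2] if len(parts) > 2 else ""
    if os.environ.get("GITHUB_EVENT_NAME") == "pull_request":
        return "PR-" + branch_or_pr
    if branch_or_pr == prod_branch:
        return prod_branch
    return ""  # any other branch: the scan is skipped
```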
-# -import decimal -import os -import platform -import sys - -from six import string_types, text_type, binary_type, PY2 - -from snowflake.connector.constants import UTF8 - -IS_LINUX = platform.system() == 'Linux' -IS_WINDOWS = platform.system() == 'Windows' - -NUM_DATA_TYPES = [] -try: - import numpy - - NUM_DATA_TYPES = [numpy.int8, numpy.int16, numpy.int32, numpy.int64, - numpy.float16, numpy.float32, numpy.float64, - numpy.uint8, numpy.uint16, numpy.uint32, numpy.uint64, numpy.bool_] -except: - numpy = None - -STR_DATA_TYPE = string_types -UNICODE_DATA_TYPE = text_type -BYTE_DATA_TYPE = binary_type -if PY2: - import urlparse - import urllib - import httplib - import Queue - from HTMLParser import HTMLParser - import collections - - GET_CWD = os.getcwdu - BASE_EXCEPTION_CLASS = StandardError # noqa: F821 - TO_UNICODE = unicode # noqa: F821 - ITERATOR = collections.Iterator - MAPPING = collections.Mapping - - urlsplit = urlparse.urlsplit - urlunsplit = urlparse.urlunsplit - parse_qs = urlparse.parse_qs - urlparse = urlparse.urlparse - - NUM_DATA_TYPES += [int, float, long, decimal.Decimal] # noqa: F821 - PKCS5_UNPAD = lambda v: v[0:-ord(v[-1])] - PKCS5_OFFSET = lambda v: ord(v[-1]) - IS_BINARY = lambda v: isinstance(v, bytearray) - - METHOD_NOT_ALLOWED = httplib.METHOD_NOT_ALLOWED - BAD_GATEWAY = httplib.BAD_GATEWAY - BAD_REQUEST = httplib.BAD_REQUEST - REQUEST_TIMEOUT = httplib.REQUEST_TIMEOUT - SERVICE_UNAVAILABLE = httplib.SERVICE_UNAVAILABLE - GATEWAY_TIMEOUT = httplib.GATEWAY_TIMEOUT - FORBIDDEN = httplib.FORBIDDEN - UNAUTHORIZED = httplib.UNAUTHORIZED - INTERNAL_SERVER_ERROR = httplib.INTERNAL_SERVER_ERROR - IncompleteRead = httplib.IncompleteRead - OK = httplib.OK - BadStatusLine = httplib.BadStatusLine - - urlencode = urllib.urlencode - unquote = urllib.unquote - unescape = HTMLParser().unescape - - EmptyQueue = Queue.Empty - Queue = Queue.Queue - - -else: - import urllib.parse - import http.client - import urllib.request - import queue - import html - import collections.abc - - GET_CWD = os.getcwd - BASE_EXCEPTION_CLASS = Exception - TO_UNICODE = str - ITERATOR = collections.abc.Iterator - MAPPING = collections.abc.Mapping - - urlsplit = urllib.parse.urlsplit - urlunsplit = urllib.parse.urlunsplit - parse_qs = urllib.parse.parse_qs - urlparse = urllib.parse.urlparse - - NUM_DATA_TYPES += [int, float, decimal.Decimal] - PKCS5_UNPAD = lambda v: v[0:-v[-1]] - PKCS5_OFFSET = lambda v: v[-1] - IS_BINARY = lambda v: isinstance(v, (bytes, bytearray)) - - METHOD_NOT_ALLOWED = http.client.METHOD_NOT_ALLOWED - BAD_GATEWAY = http.client.BAD_GATEWAY - BAD_REQUEST = http.client.BAD_REQUEST - REQUEST_TIMEOUT = http.client.REQUEST_TIMEOUT - SERVICE_UNAVAILABLE = http.client.SERVICE_UNAVAILABLE - GATEWAY_TIMEOUT = http.client.GATEWAY_TIMEOUT - FORBIDDEN = http.client.FORBIDDEN - UNAUTHORIZED = http.client.UNAUTHORIZED - INTERNAL_SERVER_ERROR = http.client.INTERNAL_SERVER_ERROR - IncompleteRead = http.client.IncompleteRead - OK = http.client.OK - BadStatusLine = http.client.BadStatusLine - - urlencode = urllib.parse.urlencode - unquote = urllib.parse.unquote - unescape = html.unescape - - EmptyQueue = queue.Empty - Queue = queue.Queue - -IS_BYTES = lambda v: isinstance(v, BYTE_DATA_TYPE) -IS_STR = lambda v: isinstance(v, STR_DATA_TYPE) -IS_UNICODE = lambda v: isinstance(v, UNICODE_DATA_TYPE) -IS_NUMERIC = lambda v: isinstance(v, tuple(NUM_DATA_TYPES)) - -# Some tests don't need to run on Python34, because SnowSQL specific. 
-# SnowSQL runs on Python 3.5+ -PY34_EXACT = sys.version_info[0:2] == (3, 4) - - -def PKCS5_PAD(value, block_size): - return b"".join( - [value, (block_size - len(value) % block_size) * chr( - block_size - len(value) % block_size).encode(UTF8)]) - - -def PRINT(msg): - if PY2: - if isinstance(msg, unicode): # noqa: F821 - print(msg.encode(UTF8)) - else: - print(msg) - else: - print(msg) - - -def INPUT(prompt): - if PY2: - return raw_input(prompt).decode(UTF8) # noqa: F821 - else: - return input(prompt) - - -def IS_OLD_PYTHON(): - """ - Is old Python - """ - return PY2 and sys.hexversion < 0x02070900 or \ - not PY2 and sys.hexversion < 0x03040300 - - -""" -Is Python 3.4.3 or 3.5.0 -This is to check if a workaround for http://bugs.python.org/issue23517 -is required or not. 3.6.0 already has the fix. -No RC or dev version will be checked. -""" -PY_ISSUE_23517 = 0x03040300 <= sys.hexversion < 0x03040400 or \ - 0x03050000 <= sys.hexversion < 0x03050100 diff --git a/connection.py b/connection.py deleted file mode 100644 index 9810d36bb..000000000 --- a/connection.py +++ /dev/null @@ -1,1172 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. -# -import logging -import os -import re -import sys -import uuid -from io import StringIO -from logging import getLogger -from threading import Lock -from time import strptime - -from .incident import IncidentAPI -from . import errors -from . import proxy -from .auth import Auth -from .auth_default import AuthByDefault -from .auth_keypair import AuthByKeyPair -from .auth_oauth import AuthByOAuth -from .auth_okta import AuthByOkta -from .auth_webbrowser import AuthByWebBrowser -from .chunk_downloader import ( - SnowflakeChunkDownloader, - DEFAULT_CLIENT_PREFETCH_THREADS, - MAX_CLIENT_PREFETCH_THREADS) -from .compat import ( - TO_UNICODE, IS_OLD_PYTHON, urlencode, PY2, PY_ISSUE_23517, IS_WINDOWS) -from .constants import ( - PARAMETER_AUTOCOMMIT, - PARAMETER_CLIENT_SESSION_KEEP_ALIVE_HEARTBEAT_FREQUENCY, - PARAMETER_CLIENT_SESSION_KEEP_ALIVE, - PARAMETER_CLIENT_TELEMETRY_ENABLED, - PARAMETER_CLIENT_TELEMETRY_OOB_ENABLED, - PARAMETER_TIMEZONE, - PARAMETER_SERVICE_NAME, - PARAMETER_CLIENT_STORE_TEMPORARY_CREDENTIAL, - PARAMETER_CLIENT_PREFETCH_THREADS, - PARAMETER_CLIENT_VALIDATE_DEFAULT_PARAMETERS, - OCSPMode -) -from .cursor import SnowflakeCursor, LOG_MAX_QUERY_LENGTH -from .description import ( - SNOWFLAKE_CONNECTOR_VERSION, - PYTHON_VERSION, - PLATFORM, - CLIENT_NAME, - CLIENT_VERSION -) -from .errorcode import (ER_CONNECTION_IS_CLOSED, - ER_NO_ACCOUNT_NAME, ER_OLD_PYTHON, ER_NO_USER, - ER_NO_PASSWORD, ER_INVALID_VALUE, - ER_FAILED_PROCESSING_PYFORMAT, - ER_NOT_IMPLICITY_SNOWFLAKE_DATATYPE) -from .errors import (Error, ProgrammingError, InterfaceError, - DatabaseError) -from .network import ( - DEFAULT_AUTHENTICATOR, - EXTERNAL_BROWSER_AUTHENTICATOR, - KEY_PAIR_AUTHENTICATOR, - OAUTH_AUTHENTICATOR, - REQUEST_ID, - SnowflakeRestful, -) -from .sqlstate import (SQLSTATE_CONNECTION_NOT_EXISTS, - SQLSTATE_FEATURE_NOT_SUPPORTED) -from .telemetry import (TelemetryClient) -from .telemetry_oob import TelemetryService -from .time_util import ( - DEFAULT_MASTER_VALIDITY_IN_SECONDS, - HeartBeatTimer, get_time_millis) -from .util_text import split_statements, construct_hostname, parse_account - - -def DefaultConverterClass(): - if PY_ISSUE_23517 or IS_WINDOWS: - from .converter_issue23517 import SnowflakeConverterIssue23517 - return SnowflakeConverterIssue23517 - else: - from .converter import 
SnowflakeConverter - return SnowflakeConverter - - -SUPPORTED_PARAMSTYLES = { - u"qmark", - u'numeric', - u'format', - u'pyformat', -} -# default configs -DEFAULT_CONFIGURATION = { - u'dsn': None, # standard - u'user': u'', # standard - u'password': u'', # standard - u'host': u'127.0.0.1', # standard - u'port': 8080, # standard - u'database': None, # standard - u'proxy_host': None, # snowflake - u'proxy_port': None, # snowflake - u'proxy_user': None, # snowflake - u'proxy_password': None, # snowflake - u'protocol': u'http', # snowflake - u'warehouse': None, # snowflake - u'region': None, # snowflake - u'account': None, # snowflake - u'schema': None, # snowflake - u'role': None, # snowflake - u'session_id': None, # snowflake - u'login_timeout': 120, # login timeout - u'network_timeout': None, # network timeout (infinite by default) - u'passcode_in_password': False, # Snowflake MFA - u'passcode': None, # Snowflake MFA - u'private_key': None, - u'token': None, # OAuth or JWT Token - u'authenticator': DEFAULT_AUTHENTICATOR, - u'mfa_callback': None, - u'password_callback': None, - u'application': CLIENT_NAME, - u'internal_application_name': CLIENT_NAME, - u'internal_application_version': CLIENT_VERSION, - - u'insecure_mode': False, # Error security fix requirement - u'ocsp_fail_open': True, # fail open on ocsp issues, default true - u'inject_client_pause': 0, # snowflake internal - u'session_parameters': None, # snowflake session parameters - u'autocommit': None, # snowflake - u'client_session_keep_alive': False, # snowflake - u'client_session_keep_alive_heartbeat_frequency': None, # snowflake - u'client_prefetch_threads': 4, # snowflake - u'numpy': False, # snowflake - u'ocsp_response_cache_filename': None, # snowflake internal - u'converter_class': DefaultConverterClass(), - u'chunk_downloader_class': SnowflakeChunkDownloader, # snowflake internal - u'validate_default_parameters': False, # snowflake - u'probe_connection': False, # snowflake - u'paramstyle': None, # standard/snowflake - u'timezone': None, # snowflake - u'consent_cache_id_token': True, # snowflake - u'service_name': None, # snowflake, - u'support_negative_year': True, # snowflake - u'log_max_query_length': LOG_MAX_QUERY_LENGTH, # snowflake - u'disable_request_pooling': False, # snowflake -} - -APPLICATION_RE = re.compile(r'[\w\d_]+') - -# adding the exception class to Connection class -for m in [method for method in dir(errors) if - callable(getattr(errors, method))]: - setattr(sys.modules[__name__], m, getattr(errors, m)) - -# Workaround for https://bugs.python.org/issue7980 -strptime('20150102030405', '%Y%m%d%H%M%S') - -logger = getLogger(__name__) - - -class SnowflakeConnection(object): - u""" - Implementation of the connection object for the Snowflake Database. Use - connect(..) to get the object. 
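A minimal usage sketch of the public entry point this docstring describes (placeholder credentials, illustrative only; not code from this PR):

```
import snowflake.connector

# Hypothetical placeholders; substitute real credentials.
con = snowflake.connector.connect(
    user="<user>",
    password="<password>",
    account="<account>",
)
try:
    cur = con.cursor()
    cur.execute("SELECT current_version()")
    print(cur.fetchone())
finally:
    con.close()
```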
- """ - - OCSP_ENV_LOCK = Lock() - - def __init__(self, **kwargs): - self._lock_sequence_counter = Lock() - self.sequence_counter = 0 - self._errorhandler = Error.default_errorhandler - self._lock_converter = Lock() - self.messages = [] - logger.info( - u"Snowflake Connector for Python Version: %s, " - u"Python Version: %s, Platform: %s", - SNOWFLAKE_CONNECTOR_VERSION, - PYTHON_VERSION, PLATFORM) - - self._rest = None - for name, value in DEFAULT_CONFIGURATION.items(): - setattr(self, u'_' + name, value) - - self.heartbeat_thread = None - - self.converter = None - self.connect(**kwargs) - self._telemetry = TelemetryClient(self._rest) - self.telemetry_enabled = False - self.incident = IncidentAPI(self._rest) - - def __del__(self): - try: - self.close() - except: - pass - - @property - def insecure_mode(self): - u""" - insecure mode. It validates the TLS certificate but doesn't check - a revocation status. - :return: - """ - return self._insecure_mode - - @property - def ocsp_fail_open(self): - u""" - fail open mode. TLS cerificates continue to be validated. Revoked - certificates are blocked. Any other exceptions are disregarded. - :return: - """ - return self._ocsp_fail_open - - def _ocsp_mode(self): - """ - OCSP mode. INSECURE, FAIL_OPEN or FAIL_CLOSED - :return: - """ - if self.insecure_mode: - return OCSPMode.INSECURE - return OCSPMode.FAIL_OPEN \ - if self.ocsp_fail_open else OCSPMode.FAIL_CLOSED - - @property - def session_id(self): - u""" - session id - """ - return self._session_id - - @property - def user(self): - u""" - User name - """ - return self._user - - @property - def host(self): - u""" - Host name - """ - return self._host - - @property - def port(self): - u""" - Port number - """ - return self._port - - @property - def region(self): - u""" - Region name if not the default Snowflake Database deployment - """ - return self._region - - @property - def proxy_host(self): - u""" - Proxy host name - """ - return self._proxy_host - - @property - def proxy_port(self): - u""" - Proxy port number - """ - return self._proxy_port - - @property - def proxy_user(self): - u""" - Proxy user name - """ - return self._proxy_user - - @property - def proxy_password(self): - u""" - Proxy password - """ - return self._proxy_password - - @property - def account(self): - u""" - Account name - """ - return self._account - - @property - def database(self): - u""" - Database name - """ - return self._database - - @property - def schema(self): - u""" - Schema name - """ - return self._schema - - @property - def warehouse(self): - u""" - Schema name - """ - return self._warehouse - - @property - def role(self): - u""" - Role name - """ - return self._role - - @property - def login_timeout(self): - """ - Login timeout. Used in authentication - """ - return int(self._login_timeout) if self._login_timeout is not None \ - else None - - @property - def network_timeout(self): - """ - Network timeout. Used for general purpose - """ - return int(self._network_timeout) if self._network_timeout is not \ - None else None - - @property - def client_session_keep_alive(self): - u""" - Keep connection alive by issuing a heartbeat. - """ - return self._client_session_keep_alive - - @client_session_keep_alive.setter - def client_session_keep_alive(self, value): - u""" - Keep connection alive by issuing a heartbeat. - """ - self._client_session_keep_alive = True if value else False - - @property - def client_session_keep_alive_heartbeat_frequency(self): - u""" - Heartbeat frequency to keep connection alive in seconds. 
- """ - return self._client_session_keep_alive_heartbeat_frequency if \ - self._client_session_keep_alive_heartbeat_frequency else \ - DEFAULT_MASTER_VALIDITY_IN_SECONDS / 16 - - @client_session_keep_alive_heartbeat_frequency.setter - def client_session_keep_alive_heartbeat_frequency(self, value): - u""" - Specify the heartbeat frequency to keep connection alive in seconds. - """ - self._client_session_keep_alive_heartbeat_frequency = value - self._validate_client_session_keep_alive_heartbeat_frequency() - - @property - def client_prefetch_threads(self): - u""" - Number of threads to download the result set - """ - return self._client_prefetch_threads if \ - self._client_prefetch_threads else DEFAULT_CLIENT_PREFETCH_THREADS - - @client_prefetch_threads.setter - def client_prefetch_threads(self, value): - """ - Number of threads to download the result set - """ - self._client_prefetch_threads = value - self._validate_client_prefetch_threads() - - @property - def rest(self): - u""" - Snowflake REST API object. Internal use only. Maybe removed in the - later release - """ - return self._rest - - @property - def application(self): - u""" - Application name. By default, PythonConnector. - Set this for Snowflake to identify the application by name - """ - return self._application - - @property - def errorhandler(self): - u""" - Error handler. By default, an exception is raised on error. - """ - return self._errorhandler - - @errorhandler.setter - def errorhandler(self, value): - if value is None: - raise ProgrammingError(u'None errorhandler is specified') - self._errorhandler = value - - @property - def converter_class(self): - """ - Converter Class - """ - return self._converter_class - - @property - def validate_default_parameters(self): - """ - Validate default database, schema, role and warehouse? - """ - return self._validate_default_parameters - - @property - def is_pyformat(self): - """ - Is binding parameter style pyformat or format? - - The default value should be True. - """ - return self._paramstyle in (u'pyformat', u'format') - - @property - def consent_cache_id_token(self): - """ - Consented cache ID token - """ - return self._consent_cache_id_token - - @property - def telemetry_enabled(self): - return self._telemetry_enabled - - @telemetry_enabled.setter - def telemetry_enabled(self, value): - self._telemetry_enabled = True if value else False - - @property - def service_name(self): - return self._service_name - - @service_name.setter - def service_name(self, value): - self._service_name = value - - @property - def log_max_query_length(self): - return self._log_max_query_length - - @property - def disable_request_pooling(self): - return self._disable_request_pooling - - @disable_request_pooling.setter - def disable_request_pooling(self, value): - self._disable_request_pooling = True if value else False - - def connect(self, **kwargs): - u""" - Connects to the database - """ - logger.debug(u'connect') - if len(kwargs) > 0: - self.__config(**kwargs) - TelemetryService.get_instance().update_context(kwargs) - - self.__set_error_attributes() - self.__open_connection() - - def close(self): - u""" - Closes the connection. - """ - try: - if not self.rest: - return - - # will hang if the application doesn't close the connection and - # CLIENT_SESSION_KEEP_ALIVE is set, because the heartbeat runs on - # a separate thread. 
- self._cancel_heartbeat() - - # close telemetry first, since it needs rest to send remaining data - logger.info('closed') - self._telemetry.close() - self.rest.delete_session() - self.rest.close() - self._rest = None - del self.messages[:] - except: - pass - - def is_closed(self): - u""" - Is closed? - """ - return self.rest is None - - def autocommit(self, mode): - u""" - Sets autocommit mode. True/False. Default: True - """ - if not self.rest: - Error.errorhandler_wrapper( - self, None, DatabaseError, - { - u'msg': u"Connection is closed", - u'errno': ER_CONNECTION_IS_CLOSED, - u'sqlstate': SQLSTATE_CONNECTION_NOT_EXISTS, - }) - if not isinstance(mode, bool): - Error.errorhandler_wrapper( - self, None, ProgrammingError, - { - u'msg': u'Invalid parameter: {0}'.format(mode), - u'errno': ER_INVALID_VALUE, - } - ) - try: - self.cursor().execute( - "ALTER SESSION SET autocommit={0}".format(mode)) - except Error as e: - if e.sqlstate == SQLSTATE_FEATURE_NOT_SUPPORTED: - logger.debug(u"Autocommit feature is not enabled for this " - u"connection. Ignored") - else: - raise e - - def commit(self): - u"""Commits the current transaction. - """ - self.cursor().execute("COMMIT") - - def rollback(self): - u"""Rollbacks the current transaction. - """ - self.cursor().execute("ROLLBACK") - - def cursor(self, cursor_class=SnowflakeCursor): - u"""Creates a cursor object. Each statement should create a new cursor - object. - """ - logger.debug(u'cursor') - if not self.rest: - Error.errorhandler_wrapper( - self, None, DatabaseError, - { - u'msg': u"Connection is closed", - u'errno': ER_CONNECTION_IS_CLOSED, - u'sqlstate': SQLSTATE_CONNECTION_NOT_EXISTS, - - }) - return cursor_class(self) - - def execute_string(self, sql_text, - remove_comments=False, - return_cursors=True, - **kwargs): - """ - Executes a SQL text including multiple statements. - This is a non-standard convenient method. - """ - ret = [] - if PY2: - stream = StringIO(sql_text.decode('utf-8') if isinstance( - sql_text, str) else sql_text) - else: - stream = StringIO(sql_text) - for sql, is_put_or_get in split_statements( - stream, remove_comments=remove_comments): - cur = self.cursor() - if return_cursors: - ret.append(cur) - cur.execute(sql, _is_put_get=is_put_or_get, **kwargs) - return ret - - def execute_stream(self, stream, - remove_comments=False): - """ - Executes a stream of SQL statements. - This is a non-standard convenient method. 
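Illustrative use, reusing the `con` object from the earlier connect sketch; each statement in the stream yields its own cursor:

```
from io import StringIO

statements = StringIO(
    "CREATE TEMPORARY TABLE t (c INT); INSERT INTO t VALUES (1), (2);"
)
for cur in con.execute_stream(statements):
    print(cur.fetchall())
```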
- """ - for sql, is_put_or_get in split_statements( - stream, remove_comments=remove_comments): - cur = self.cursor() - cur.execute(sql, _is_put_get=is_put_or_get) - yield cur - - def __set_error_attributes(self): - for m in [method for method in dir(errors) if - callable(getattr(errors, method))]: - setattr(self, m, getattr(errors, m)) - - @staticmethod - def setup_ocsp_privatelink(app, hostname): - SnowflakeConnection.OCSP_ENV_LOCK.acquire() - ocsp_cache_server = \ - u'http://ocsp.{}/ocsp_response_cache.json'.format( - hostname) - os.environ[ - 'SF_OCSP_RESPONSE_CACHE_SERVER_URL'] = ocsp_cache_server - logger.debug(u"OCSP Cache Server is updated: %s", ocsp_cache_server) - SnowflakeConnection.OCSP_ENV_LOCK.release() - - def __open_connection(self): - u""" - Opens a new network connection - """ - self.converter = self._converter_class( - use_numpy=self._numpy, - support_negative_year=self._support_negative_year) - - proxy.set_proxies( - self.proxy_host, self.proxy_port, self.proxy_user, - self.proxy_password) - - self._rest = SnowflakeRestful( - host=self.host, - port=self.port, - protocol=self._protocol, - inject_client_pause=self._inject_client_pause, - connection=self) - logger.debug(u'REST API object was created: %s:%s', - self.host, - self.port) - - if 'SF_OCSP_RESPONSE_CACHE_SERVER_URL' in os.environ: - logger.debug( - u"Custom OCSP Cache Server URL found in environment - %s", - os.environ['SF_OCSP_RESPONSE_CACHE_SERVER_URL']) - - if self.host.endswith(u".privatelink.snowflakecomputing.com"): - SnowflakeConnection.setup_ocsp_privatelink(self.application, self.host) - else: - if 'SF_OCSP_RESPONSE_CACHE_SERVER_URL' in os.environ: - del os.environ['SF_OCSP_RESPONSE_CACHE_SERVER_URL'] - - if self._authenticator == DEFAULT_AUTHENTICATOR: - auth_instance = AuthByDefault(self._password) - elif self._authenticator == EXTERNAL_BROWSER_AUTHENTICATOR: - auth_instance = AuthByWebBrowser( - self.rest, self.application, protocol=self._protocol, - host=self.host, port=self.port) - elif self._authenticator == KEY_PAIR_AUTHENTICATOR: - auth_instance = AuthByKeyPair(self._private_key) - elif self._authenticator == OAUTH_AUTHENTICATOR: - auth_instance = AuthByOAuth(self._token) - else: - # okta URL, e.g., https://.okta.com/ - auth_instance = AuthByOkta(self.rest, self.application) - - if self._session_parameters is None: - self._session_parameters = {} - if self._autocommit is not None: - self._session_parameters[PARAMETER_AUTOCOMMIT] = self._autocommit - - if self._timezone is not None: - self._session_parameters[PARAMETER_TIMEZONE] = self._timezone - - if self._validate_default_parameters: - # Snowflake will validate the requested database, schema, and warehouse - self._session_parameters[PARAMETER_CLIENT_VALIDATE_DEFAULT_PARAMETERS] = True - - if self.client_session_keep_alive: - self._session_parameters[PARAMETER_CLIENT_SESSION_KEEP_ALIVE] = True - - if self.client_session_keep_alive_heartbeat_frequency: - self._session_parameters[ - PARAMETER_CLIENT_SESSION_KEEP_ALIVE_HEARTBEAT_FREQUENCY] = \ - self._validate_client_session_keep_alive_heartbeat_frequency() - - if self.client_prefetch_threads: - self._session_parameters[PARAMETER_CLIENT_PREFETCH_THREADS] = \ - self._validate_client_prefetch_threads() - - if self._authenticator == EXTERNAL_BROWSER_AUTHENTICATOR: - # enable storing temporary credential in a file - self._session_parameters[ - PARAMETER_CLIENT_STORE_TEMPORARY_CREDENTIAL] = True - - auth = Auth(self.rest) - if not auth.read_temporary_credential( - self.account, self.user, 
self._session_parameters): - self.__authenticate(auth_instance) - else: - # set the current objects as the session is derived from the id - # token, and the current objects may be different. - self._set_current_objects() - - self._password = None # ensure password won't persist - - if self.client_session_keep_alive: - self._add_heartbeat() - - def __config(self, **kwargs): - u""" - Sets the parameters - """ - logger.debug(u'__config') - for name, value in kwargs.items(): - if name == u'sequence_counter': - self.sequence_counter = value - elif name == u'application': - if not APPLICATION_RE.match(value): - msg = u'Invalid application name: {0}'.format(value) - raise ProgrammingError( - msg=msg, - errno=0 - ) - else: - setattr(self, u'_' + name, value) - else: - setattr(self, u'_' + name, value) - - if self._paramstyle is None: - import snowflake.connector - self._paramstyle = snowflake.connector.paramstyle - elif self._paramstyle not in SUPPORTED_PARAMSTYLES: - raise ProgrammingError( - msg=u'Invalid paramstyle is specified', - errno=ER_INVALID_VALUE - ) - - if u'account' in kwargs: - if u'host' not in kwargs: - setattr(self, u'_host', - construct_hostname( - kwargs.get(u'region'), self._account)) - if u'port' not in kwargs: - setattr(self, u'_port', u'443') - if u'protocol' not in kwargs: - setattr(self, u'_protocol', u'https') - - if not self.user and self._authenticator != OAUTH_AUTHENTICATOR: - # OAuth Authentication does not require a username - Error.errorhandler_wrapper( - self, None, ProgrammingError, - { - u'msg': u"User is empty", - u'errno': ER_NO_USER - }) - - if self._private_key: - self._authenticator = KEY_PAIR_AUTHENTICATOR - - if self._authenticator: - self._authenticator = self._authenticator.upper() - - if self._authenticator != EXTERNAL_BROWSER_AUTHENTICATOR and \ - self._authenticator != OAUTH_AUTHENTICATOR and \ - self._authenticator != KEY_PAIR_AUTHENTICATOR: - # authentication is done by the browser if the authenticator - # is externalbrowser - if not self._password: - Error.errorhandler_wrapper( - self, None, ProgrammingError, - { - u'msg': u"Password is empty", - u'errno': ER_NO_PASSWORD - }) - - if not self._account: - Error.errorhandler_wrapper( - self, None, ProgrammingError, - { - u'msg': u"Account must be specified", - u'errno': ER_NO_ACCOUNT_NAME - }) - if u'.' in self._account: - self._account = parse_account(self._account) - - if self.ocsp_fail_open: - logger.info( - u'This connection is in OCSP Fail Open Mode. ' - u'TLS Certificates would be checked for validity ' - u'and revocation status. Any other Certificate ' - u'Revocation related exceptions or OCSP Responder ' - u'failures would be disregarded in favor of ' - u'connectivity.') - - if self.insecure_mode: - logger.info( - u'THIS CONNECTION IS IN INSECURE MODE. IT ' - u'MEANS THE CERTIFICATE WILL BE VALIDATED BUT THE ' - u'CERTIFICATE REVOCATION STATUS WILL NOT BE ' - u'CHECKED.') - - elif self._protocol == u'https': - if IS_OLD_PYTHON(): - msg = (u"ERROR: The ssl package installed with your Python " - u"- version {0} - does not have the security fix. " - u"Upgrade to Python 2.7.9/3.4.3 or higher.\n").format( - PYTHON_VERSION) - raise InterfaceError( - msg=msg, - errno=ER_OLD_PYTHON) - - def cmd_query(self, sql, sequence_counter, request_id, - binding_params=None, - is_file_transfer=False, statement_params=None, - is_internal=False, _no_results=False, - _update_current_object=True): - u""" - Executes a query with a sequence counter. 
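For orientation, the `bindings` this method forwards are keyed by 1-based parameter index, with values as produced by `_process_params_qmarks` below; roughly:

```
# Illustrative payload for execute("INSERT INTO t VALUES (?, ?)", (42, "hi")):
binding_params = {
    "1": {"type": "FIXED", "value": "42"},
    "2": {"type": "TEXT", "value": "hi"},
}
```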
- """ - logger.debug(u'_cmd_query') - data = { - u'sqlText': sql, - u'asyncExec': _no_results, - u'sequenceId': sequence_counter, - u'querySubmissionTime': get_time_millis(), - } - if statement_params is not None: - data[u'parameters'] = statement_params - if is_internal: - data[u'isInternal'] = is_internal - if binding_params is not None: - # binding parameters. This is for qmarks paramstyle. - data[u'bindings'] = binding_params - - client = u'sfsql_file_transfer' if is_file_transfer else u'sfsql' - - if logger.getEffectiveLevel() <= logging.DEBUG: - logger.debug( - u'sql=[%s], sequence_id=[%s], is_file_transfer=[%s]', - self._format_query_for_log(data[u'sqlText']), - data[u'sequenceId'], - is_file_transfer - ) - - url_parameters = {REQUEST_ID: request_id} - - ret = self.rest.request( - u'/queries/v1/query-request?' + urlencode(url_parameters), - data, client=client, _no_results=_no_results, - _include_retry_params=True) - - if ret is None: - ret = {u'data': {}} - if ret.get(u'data') is None: - ret[u'data'] = {} - if _update_current_object: - data = ret['data'] - if u'finalDatabaseName' in data: - self._database = data[u'finalDatabaseName'] - if u'finalSchemaName' in data: - self._schema = data[u'finalSchemaName'] - if u'finalWarehouseName' in data: - self._warehouse = data[u'finalWarehouseName'] - if u'finalRoleName' in data: - self._role = data[u'finalRoleName'] - - return ret - - def _set_current_objects(self): - """ - Sets the current objects to the specified ones. This is mainly used - when a session token is derived from an id token. - """ - - def cmd(sql, params, _update_current_object=False): - processed_params = self._process_params_qmarks(params) - sequence_counter = self._next_sequence_counter() - request_id = uuid.uuid4() - self.cmd_query( - sql, - sequence_counter, - request_id, - binding_params=processed_params, - is_internal=True, - _update_current_object=_update_current_object) - - if self._role: - cmd(u"USE ROLE IDENTIFIER(?)", (self._role,)) - if self._warehouse: - cmd(u"USE WAREHOUSE IDENTIFIER(?)", (self._warehouse,)) - if self._database: - cmd(u"USE DATABASE IDENTIFIER(?)", (self._database,)) - if self._schema: - cmd(u'USE SCHEMA IDENTIFIER(?)', (self._schema,)) - cmd(u"SELECT 1", (), _update_current_object=True) - - def _reauthenticate_by_webbrowser(self): - auth_instance = AuthByWebBrowser( - self.rest, self.application, protocol=self._protocol, - host=self.host, port=self.port) - self.__authenticate(auth_instance) - self._set_current_objects() - return {u'success': True} - - def __authenticate(self, auth_instance): - auth_instance.authenticate( - authenticator=self._authenticator, - service_name=self.service_name, - account=self.account, - user=self.user, - password=self._password, - ) - self._consent_cache_id_token = getattr( - auth_instance, 'consent_cache_id_token', True) - - auth = Auth(self.rest) - self._session_parameters = auth.authenticate( - auth_instance=auth_instance, - account=self.account, - user=self.user, - database=self.database, - schema=self.schema, - warehouse=self.warehouse, - role=self.role, - passcode=self._passcode, - passcode_in_password=self._passcode_in_password, - mfa_callback=self._mfa_callback, - password_callback=self._password_callback, - session_parameters=self._session_parameters, - ) - - def _process_params_qmarks(self, params, cursor=None): - if not params: - return None - processed_params = {} - if not isinstance(params, (list, tuple)): - errorvalue = { - u'msg': u"Binding parameters must be a list: {0}".format( - params - ), - 
u'errno': ER_FAILED_PROCESSING_PYFORMAT - } - Error.errorhandler_wrapper(self, cursor, - ProgrammingError, - errorvalue) - return None - for idx, v in enumerate(params): - if isinstance(v, tuple): - if len(v) != 2: - Error.errorhandler_wrapper( - self, cursor, - ProgrammingError, - { - u'msg': u"Binding parameters must be a list " - u"where one element is a single value or " - u"a pair of Snowflake datatype and a value", - u'errno': ER_FAILED_PROCESSING_PYFORMAT, - } - ) - return None - processed_params[TO_UNICODE(idx + 1)] = { - 'type': v[0], - 'value': self.converter.to_snowflake_bindings( - v[0], v[1])} - else: - snowflake_type = self.converter.snowflake_type( - v) - if snowflake_type is None: - Error.errorhandler_wrapper( - self, cursor, - ProgrammingError, - { - u'msg': u"Python data type [{0}] cannot be " - u"automatically mapped to Snowflake data " - u"type. Specify the snowflake data type " - u"explicitly.".format( - v.__class__.__name__.lower()), - u'errno': ER_NOT_IMPLICITY_SNOWFLAKE_DATATYPE - } - ) - return None - if isinstance(v, list): - vv = [ - self.converter.to_snowflake_bindings( - self.converter.snowflake_type(v0), v0) for v0 in v] - else: - vv = self.converter.to_snowflake_bindings( - snowflake_type, v) - processed_params[TO_UNICODE(idx + 1)] = { - 'type': snowflake_type, - 'value': vv} - if logger.getEffectiveLevel() <= logging.DEBUG: - for k, v in processed_params.items(): - logger.debug("idx: %s, type: %s", k, v.get('type')) - return processed_params - - def _process_params(self, params, cursor=None): - if params is None: - return {} - if isinstance(params, dict): - return self.__process_params_dict(params) - - if not isinstance(params, (tuple, list)): - params = [params, ] - - try: - res = params - res = map(self.converter.to_snowflake, res) - res = map(self.converter.escape, res) - res = map(self.converter.quote, res) - ret = tuple(res) - logger.debug(u'parameters: %s', ret) - return ret - except Exception as e: - errorvalue = { - u'msg': u"Failed processing pyformat-parameters; {0}".format( - e), - u'errno': ER_FAILED_PROCESSING_PYFORMAT} - Error.errorhandler_wrapper(self, cursor, - ProgrammingError, - errorvalue) - - def __process_params_dict(self, params, cursor=None): - try: - to_snowflake = self.converter.to_snowflake - escape = self.converter.escape - quote = self.converter.quote - res = {} - for k, v in params.items(): - c = v - c = to_snowflake(c) - c = escape(c) - c = quote(c) - res[k] = c - logger.debug(u'parameters: %s', res) - return res - except Exception as e: - errorvalue = { - u'msg': u"Failed processing pyformat-parameters; {0}".format( - e), - u'errno': ER_FAILED_PROCESSING_PYFORMAT} - Error.errorhandler_wrapper( - self, cursor, ProgrammingError, errorvalue) - - def _cancel_query(self, sql, request_id): - u""" - Cancels the query by the query and requestId - """ - logger.debug(u'_cancel_query sql=[%s], request_id=[%s]', sql, - request_id) - url_parameters = {REQUEST_ID: TO_UNICODE(uuid.uuid4())} - - return self.rest.request( - u'/queries/v1/abort-request?' + urlencode(url_parameters), { - u'sqlText': sql, - REQUEST_ID: TO_UNICODE(request_id), - }) - - def _next_sequence_counter(self): - u"""Gets next sequence counter. Used internally. 
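Distilled, this is a lock-guarded monotonically increasing counter; an equivalent standalone sketch:

```
import threading


class SequenceCounter:
    """One counter per connection; every statement gets the next value."""

    def __init__(self):
        self._lock = threading.Lock()
        self._value = 0

    def next(self):
        with self._lock:
            self._value += 1
            return self._value
```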
- """ - with self._lock_sequence_counter: - self.sequence_counter += 1 - logger.debug(u'sequence counter: %s', self.sequence_counter) - return self.sequence_counter - - def _log_telemetry(self, telemetry_data): - u""" - Logs data to telemetry - """ - if self.telemetry_enabled: - self._telemetry.try_add_log_to_batch(telemetry_data) - - def _add_heartbeat(self): - """Add an hourly heartbeat query in order to keep connection alive.""" - if not self.heartbeat_thread: - self._validate_client_session_keep_alive_heartbeat_frequency() - self.heartbeat_thread = HeartBeatTimer( - self.client_session_keep_alive_heartbeat_frequency, - self._heartbeat_tick) - self.heartbeat_thread.start() - logger.debug("started heartbeat") - - def _cancel_heartbeat(self): - """Cancel a heartbeat thread.""" - if self.heartbeat_thread: - self.heartbeat_thread.cancel() - self.heartbeat_thread.join() - self.heartbeat_thread = None - logger.debug("stopped heartbeat") - - def _heartbeat_tick(self): - """Execute a hearbeat if connection isn't closed yet.""" - if not self.is_closed(): - logger.debug("heartbeating!") - self.rest._heartbeat() - - def _validate_client_session_keep_alive_heartbeat_frequency(self): - """Validate and return heartbeat frequency in seconds""" - real_max = int(self.rest.master_validity_in_seconds / 4) - real_min = int(real_max / 4) - if self.client_session_keep_alive_heartbeat_frequency > real_max: - self._client_session_keep_alive_heartbeat_frequency = real_max - elif self.client_session_keep_alive_heartbeat_frequency < real_min: - self._client_session_keep_alive_heartbeat_frequency = real_min - - # ensure the type is integer - self._client_session_keep_alive_heartbeat_frequency = int( - self.client_session_keep_alive_heartbeat_frequency) - return self.client_session_keep_alive_heartbeat_frequency - - def _validate_client_prefetch_threads(self): - if self.client_prefetch_threads <= 0: - self._client_prefetch_threads = 1 - elif self.client_prefetch_threads > MAX_CLIENT_PREFETCH_THREADS: - self._client_prefetch_threads = MAX_CLIENT_PREFETCH_THREADS - self._client_prefetch_threads = int( - self.client_prefetch_threads) - return self.client_prefetch_threads - - def _set_parameters(self, ret, session_parameters): - """ - Set session parameters - """ - if u'parameters' not in ret[u'data']: - return - parameters = ret[u'data'][u'parameters'] - with self._lock_converter: - self.converter.set_parameters(parameters) - for kv in parameters: - name = kv['name'] - value = kv['value'] - session_parameters[name] = value - if PARAMETER_CLIENT_TELEMETRY_ENABLED == name: - self.telemetry_enabled = value - elif PARAMETER_CLIENT_TELEMETRY_OOB_ENABLED == name: - if value: - TelemetryService.get_instance().enable() - else: - TelemetryService.get_instance().disable() - elif PARAMETER_CLIENT_SESSION_KEEP_ALIVE == name: - self.client_session_keep_alive = value - elif PARAMETER_CLIENT_SESSION_KEEP_ALIVE_HEARTBEAT_FREQUENCY == \ - name: - self.client_session_keep_alive_heartbeat_frequency = value - elif PARAMETER_SERVICE_NAME == name: - self.service_name = value - elif PARAMETER_CLIENT_PREFETCH_THREADS == name: - self.client_prefetch_threads = value - - def _format_query_for_log(self, query): - ret = u' '.join(line.strip() for line in query.split(u'\n')) - return (ret if len(ret) < self.log_max_query_length - else ret[0:self.log_max_query_length] + '...') - - def __enter__(self): - u""" - context manager - """ - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - u""" - context manager with commit or rollback - 
""" - if exc_tb is None: - self.commit() - else: - self.rollback() - self.close() diff --git a/connector_python2 b/connector_python2 deleted file mode 120000 index 30d74d258..000000000 --- a/connector_python2 +++ /dev/null @@ -1 +0,0 @@ -test \ No newline at end of file diff --git a/connector_python3 b/connector_python3 deleted file mode 120000 index 30d74d258..000000000 --- a/connector_python3 +++ /dev/null @@ -1 +0,0 @@ -test \ No newline at end of file diff --git a/constants.py b/constants.py deleted file mode 100644 index 7322309a6..000000000 --- a/constants.py +++ /dev/null @@ -1,162 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. -# -""" -Various constants -""" - -from collections import defaultdict -from collections import namedtuple -from enum import Enum - -from six import PY2 - -DBAPI_TYPE_STRING = 0 -DBAPI_TYPE_BINARY = 1 -DBAPI_TYPE_NUMBER = 2 -DBAPI_TYPE_TIMESTAMP = 3 - -FIELD_TYPES = [ - {'name': 'FIXED', 'dbapi_type': [DBAPI_TYPE_NUMBER]}, - {'name': 'REAL', 'dbapi_type': [DBAPI_TYPE_NUMBER]}, - {'name': 'TEXT', 'dbapi_type': [DBAPI_TYPE_STRING]}, - {'name': 'DATE', 'dbapi_type': [DBAPI_TYPE_TIMESTAMP]}, - {'name': 'TIMESTAMP', 'dbapi_type': [DBAPI_TYPE_TIMESTAMP]}, - {'name': 'VARIANT', 'dbapi_type': [DBAPI_TYPE_BINARY]}, - {'name': 'TIMESTAMP_LTZ', 'dbapi_type': [DBAPI_TYPE_TIMESTAMP]}, - {'name': 'TIMESTAMP_TZ', 'dbapi_type': [DBAPI_TYPE_TIMESTAMP]}, - {'name': 'TIMESTAMP_NTZ', 'dbapi_type': [DBAPI_TYPE_TIMESTAMP]}, - {'name': 'OBJECT', 'dbapi_type': [DBAPI_TYPE_BINARY]}, - {'name': 'ARRAY', 'dbapi_type': [DBAPI_TYPE_BINARY]}, - {'name': 'BINARY', 'dbapi_type': [DBAPI_TYPE_BINARY]}, - {'name': 'TIME', 'dbapi_type': [DBAPI_TYPE_TIMESTAMP]}, - {'name': 'BOOLEAN', 'dbapi_type': []}, -] - -FIELD_NAME_TO_ID = defaultdict(int) -FIELD_ID_TO_NAME = defaultdict(unicode if PY2 else str) # noqa: F821 - -__binary_types = [] -__binary_type_names = [] -__string_types = [] -__string_type_names = [] -__number_types = [] -__number_type_names = [] -__timestamp_types = [] -__timestamp_type_names = [] - -for idx, type in enumerate(FIELD_TYPES): - FIELD_ID_TO_NAME[idx] = type['name'] - FIELD_NAME_TO_ID[type['name']] = idx - - dbapi_types = type['dbapi_type'] - for dbapi_type in dbapi_types: - if dbapi_type == DBAPI_TYPE_BINARY: - __binary_types.append(idx) - __binary_type_names.append(type['name']) - elif dbapi_type == DBAPI_TYPE_TIMESTAMP: - __timestamp_types.append(idx) - __timestamp_type_names.append(type['name']) - elif dbapi_type == DBAPI_TYPE_NUMBER: - __number_types.append(idx) - __number_type_names.append(type['name']) - elif dbapi_type == DBAPI_TYPE_STRING: - __string_types.append(idx) - __string_type_names.append(type['name']) - - -def get_binary_types(): - return __binary_types - - -def is_binary_type_name(type_name): - return type_name in __binary_type_names - - -def get_string_types(): - return __string_types - - -def is_string_type_name(type_name): - return type_name in __string_type_names - - -def get_number_types(): - return __number_types - - -def is_number_type_name(type_name): - return type_name in __number_type_names - - -def get_timestamp_types(): - return __timestamp_types - - -def is_timestamp_type_name(type_name): - return type_name in __timestamp_type_names - - -def is_date_type_name(type_name): - return type_name == u'DATE' - - -# Log format -LOG_FORMAT = (u'%(asctime)s - %(filename)s:%(lineno)d - ' - u'%(funcName)s() - %(levelname)s - %(message)s') - -# String literals -UTF8 = 
u'utf-8' -SHA256_DIGEST = u'sha256_digest' - - -class ResultStatus(Enum): - ERROR = u'ERROR' - UPLOADED = u'UPLOADED' - DOWNLOADED = u'DOWNLOADED' - COLLISION = u'COLLISION' - SKIPPED = u'SKIPPED' - RENEW_TOKEN = u'RENEW_TOKEN' - NOT_FOUND_FILE = u'NOT_FOUND_FILE' - NEED_RETRY = u'NEED_RETRY' - NEED_RETRY_WITH_LOWER_CONCURRENCY = u'NEED_RETRY_WITH_LOWER_CONCURRENCY' - - -FileHeader = namedtuple( - "FileReader", [ - "digest", - "content_length", - "encryption_metadata" - ] -) - -PARAMETER_AUTOCOMMIT = u'AUTOCOMMIT' -PARAMETER_CLIENT_SESSION_KEEP_ALIVE_HEARTBEAT_FREQUENCY = u'CLIENT_SESSION_KEEP_ALIVE_HEARTBEAT_FREQUENCY' -PARAMETER_CLIENT_SESSION_KEEP_ALIVE = u'CLIENT_SESSION_KEEP_ALIVE' -PARAMETER_CLIENT_PREFETCH_THREADS = u'CLIENT_PREFETCH_THREADS' -PARAMETER_CLIENT_TELEMETRY_ENABLED = u'CLIENT_TELEMETRY_ENABLED' -PARAMETER_CLIENT_TELEMETRY_OOB_ENABLED = u'CLIENT_OUT_OF_BAND_TELEMETRY_ENABLED' -PARAMETER_CLIENT_STORE_TEMPORARY_CREDENTIAL = u'CLIENT_STORE_TEMPORARY_CREDENTIAL' -PARAMETER_CLIENT_USE_SECURE_STORAGE_FOR_TEMPORARY_CREDENTIAL = \ - u'CLIENT_USE_SECURE_STORAGE_FOR_TEMPORARY_CREDENTAIL' -PARAMETER_TIMEZONE = u'TIMEZONE' -PARAMETER_SERVICE_NAME = u'SERVICE_NAME' -PARAMETER_CLIENT_VALIDATE_DEFAULT_PARAMETERS = u'CLIENT_VALIDATE_DEFAULT_PARAMETERS' - -HTTP_HEADER_CONTENT_TYPE = u'Content-Type' -HTTP_HEADER_ACCEPT = u"accept" -HTTP_HEADER_USER_AGENT = u"User-Agent" -HTTP_HEADER_SERVICE_NAME = u'X-Snowflake-Service' - -HTTP_HEADER_VALUE_OCTET_STREAM = u'application/octet-stream' - - -class OCSPMode(Enum): - """ - OCSP Mode - """ - FAIL_CLOSED = u'FAIL_CLOSED' - FAIL_OPEN = u'FAIL_OPEN' - INSECURE = u'INSECURE' diff --git a/converter.py b/converter.py deleted file mode 100644 index 92bc0374e..000000000 --- a/converter.py +++ /dev/null @@ -1,670 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. 
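For context on the `FIELD_TYPES` loop in `constants.py` above: it derives the id/name lookup tables and the per-DBAPI-type predicates from list order (FIXED is index 0, REAL 1, TEXT 2, and so on). A sketch of the expected behaviour, assuming the module's namespace:

```
FIELD_NAME_TO_ID['TIMESTAMP_LTZ']  # -> 6
FIELD_ID_TO_NAME[2]                # -> 'TEXT'
is_timestamp_type_name('DATE')     # -> True, DATE maps to DBAPI_TYPE_TIMESTAMP
is_number_type_name('BOOLEAN')     # -> False, BOOLEAN declares no dbapi_type
```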
-# -import binascii -import decimal -import time -from datetime import datetime, timedelta, date -from logging import getLogger - -import pytz - -from .compat import (PY2, IS_BINARY, TO_UNICODE, IS_NUMERIC) -from .errorcode import ( - ER_NOT_SUPPORT_DATA_TYPE) -from .errors import (ProgrammingError) -from .sfbinaryformat import (binary_to_python, - binary_to_snowflake) -from .sfdatetime import sfdatetime_total_seconds_from_timedelta - -try: - import numpy -except ImportError: - numpy = None -try: - import tzlocal -except ImportError: - tzlocal = None - -BITS_FOR_TIMEZONE = 14 -ZERO_TIMEDELTA = timedelta(seconds=0) -ZERO_EPOCH_DATE = date(1970, 1, 1) -ZERO_EPOCH = datetime.utcfromtimestamp(0) -ZERO_FILL = u'000000000' - -logger = getLogger(__name__) - -PYTHON_TO_SNOWFLAKE_TYPE = { - u'int': u'FIXED', - u'long': u'FIXED', - u'decimal': u'FIXED', - u'float': u'REAL', - u'str': u'TEXT', - u'unicode': u'TEXT', - u'bytes': u'BINARY', - u'bytearray': u'BINARY', - u'bool': u'BOOLEAN', - u'bool_': u'BOOLEAN', - u'nonetype': u'ANY', - u'datetime': u'TIMESTAMP_NTZ', - u'sfdatetime': u'TIMESTAMP_NTZ', - u'date': u'DATE', - u'time': u'TIME', - u'struct_time': u'TIMESTAMP_NTZ', - u'timedelta': u'TIME', - u'list': u'TEXT', - u'tuple': u'TEXT', - u'int8': u'FIXED', - u'int16': u'FIXED', - u'int32': u'FIXED', - u'int64': u'FIXED', - u'uint8': u'FIXED', - u'uint16': u'FIXED', - u'uint32': u'FIXED', - u'uint64': u'FIXED', - u'float16': u'REAL', - u'float32': u'REAL', - u'float64': u'REAL', - u'datetime64': u'TIMESTAMP_NTZ', - u'quoted_name': u'TEXT', -} - - -def convert_datetime_to_epoch(dt): - """ - Converts datetime to epoch time in seconds. - If Python > 3.3, you may use timestamp() method - """ - if dt.tzinfo is not None: - dt0 = dt.astimezone(pytz.UTC).replace(tzinfo=None) - else: - dt0 = dt - return (dt0 - ZERO_EPOCH).total_seconds() - - -def _convert_datetime_to_epoch_nanoseconds(dt): - return u"{:f}".format( - convert_datetime_to_epoch(dt)).replace(u'.', u'') + u'000' - - -def _convert_date_to_epoch_milliseconds(dt): - return u'{:.3f}'.format( - (dt - ZERO_EPOCH_DATE).total_seconds()).replace(u'.', u'') - - -def _convert_time_to_epoch_nanoseconds(tm): - return TO_UNICODE(tm.hour * 3600 + tm.minute * 60 + tm.second) + \ - "{:06d}".format(tm.microsecond) + u'000' - - -def _extract_timestamp(value, ctx): - """ - Extracts timestamp from a raw data - """ - scale = ctx['scale'] - microseconds = float( - value[0:-scale + 6]) if scale > 6 else float(value) - fraction_of_nanoseconds = _adjust_fraction_of_nanoseconds( - value, ctx['max_fraction'], scale) - - return microseconds, fraction_of_nanoseconds - - -def _adjust_fraction_of_nanoseconds(value, max_fraction, scale): - if scale == 0: - return 0 - if value[0] != '-': - return int((value[-scale:] + ZERO_FILL[:9 - scale])) - - frac = int(value[-scale:]) - if frac == 0: - return 0 - else: - return int(TO_UNICODE(max_fraction - frac) + ZERO_FILL[:9 - scale]) - - -def _generate_tzinfo_from_tzoffset(tzoffset_minutes): - """ - Generates tzinfo object from tzoffset. 
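A worked example of the epoch helpers defined above, as a sketch run inside `converter.py`'s namespace:

```
from datetime import datetime

dt = datetime(1970, 1, 1, 0, 0, 1, 500000)   # 1.5 s after the epoch, naive
convert_datetime_to_epoch(dt)                # -> 1.5
# '{:f}' renders six decimal places, so stripping the dot yields microseconds,
# and the trailing '000' pads them out to nanoseconds:
_convert_datetime_to_epoch_nanoseconds(dt)   # -> '1500000000'
```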
- """ - return pytz.FixedOffset(tzoffset_minutes) - - -class SnowflakeConverter(object): - def __init__(self, **kwargs): - self._parameters = {} - self._use_numpy = kwargs.get('use_numpy', False) and numpy is not None - - logger.debug('use_numpy: %s', self._use_numpy) - - def set_parameters(self, parameters): - self._parameters = {} - for kv in parameters: - self._parameters[kv[u'name']] = kv[u'value'] - - def set_parameter(self, param, value): - self._parameters[param] = value - - def get_parameters(self): - return self._parameters - - def get_parameter(self, param): - return self._parameters[param] if param in self._parameters else None - - # - # FROM Snowflake to Python Objects - # - def to_python_method(self, type_name, column): - ctx = column.copy() - if ctx.get('scale') is not None: - ctx['max_fraction'] = int(10 ** ctx['scale']) - ctx['zero_fill'] = '0' * (9 - ctx['scale']) - converters = [u'_{type_name}_to_python'.format(type_name=type_name)] - if self._use_numpy: - converters.insert(0, u'_{type_name}_numpy_to_python'.format( - type_name=type_name)) - for conv in converters: - try: - return getattr(self, conv)(ctx) - except AttributeError: - pass - logger.warning( - "No column converter found for type: %s", type_name) - return None # Skip conversion - - def _FIXED_to_python(self, ctx): - return int if ctx['scale'] == 0 else decimal.Decimal - - def _FIXED_numpy_to_python(self, ctx): - if ctx['scale']: - return numpy.float64 - else: - - def conv(value): - try: - return numpy.int64(value) - except OverflowError: - return int(value) - - return conv - - def _REAL_to_python(self, _): - return float - - def _REAL_numpy_to_python(self, _): - return numpy.float64 - - def _TEXT_to_python(self, _): - return None # skip conv - - def _BINARY_to_python(self, _): - return binary_to_python - - def _DATE_to_python(self, _): - """ - DATE to datetime - - No timezone is attached. - """ - - def conv(value): - try: - return datetime.utcfromtimestamp(int(value) * 86400).date() - except OSError as e: - logger.debug("Failed to convert: %s", e) - ts = ZERO_EPOCH + timedelta( - seconds=int(value) * (24 * 60 * 60)) - return date(ts.year, ts.month, ts.day) - - return conv - - def _DATE_numpy_to_python(self, _): - """ - DATE to datetime - - No timezone is attached. - """ - return lambda x: numpy.datetime64(int(x), 'D') - - def _TIMESTAMP_TZ_to_python(self, ctx): - """ - TIMESTAMP TZ to datetime - - The timezone offset is piggybacked. - """ - - scale = ctx['scale'] - - def conv0(encoded_value): - value, tz = encoded_value.split() - tzinfo = _generate_tzinfo_from_tzoffset(int(tz) - 1440) - return datetime.fromtimestamp(float(value), tz=tzinfo) - - def conv(encoded_value): - value, tz = encoded_value.split() - microseconds = float(value[0:-scale + 6]) - tzinfo = _generate_tzinfo_from_tzoffset(int(tz) - 1440) - return datetime.fromtimestamp(microseconds, tz=tzinfo) - - return conv if scale > 6 else conv0 - - def _get_session_tz(self): - """ Get the session timezone or use the local computer's timezone. 
""" - try: - tz = self.get_parameter(u'TIMEZONE') - if not tz: - tz = 'UTC' - return pytz.timezone(tz) - except pytz.exceptions.UnknownTimeZoneError: - logger.warning('converting to tzinfo failed') - if tzlocal is not None: - return tzlocal.get_localzone() - else: - try: - return datetime.timezone.utc - except AttributeError: # py2k - return pytz.timezone('UTC') - - def _pre_TIMESTAMP_LTZ_to_python(self, value, ctx): - """ - TIMESTAMP LTZ to datetime - - This takes consideration of the session parameter TIMEZONE if - available. If not, tzlocal is used - """ - microseconds, fraction_of_nanoseconds = _extract_timestamp(value, ctx) - tzinfo_value = self._get_session_tz() - - try: - t0 = ZERO_EPOCH + timedelta(seconds=(microseconds)) - t = pytz.utc.localize(t0, is_dst=False).astimezone(tzinfo_value) - return t, fraction_of_nanoseconds - except OverflowError: - logger.debug( - "OverflowError in converting from epoch time to " - "timestamp_ltz: %s(ms). Falling back to use struct_time." - ) - return time.localtime(microseconds), fraction_of_nanoseconds - - def _TIMESTAMP_LTZ_to_python(self, ctx): - tzinfo = self._get_session_tz() - scale = ctx['scale'] - - conv0 = lambda value: datetime.fromtimestamp(float(value), tz=tzinfo) - - def conv(value): - microseconds = float(value[0:-scale + 6]) - return datetime.fromtimestamp(microseconds, tz=tzinfo) - - return conv if scale > 6 else conv0 - - _TIMESTAMP_to_python = _TIMESTAMP_LTZ_to_python - - def _TIMESTAMP_NTZ_to_python(self, ctx): - """ - TIMESTAMP NTZ to datetime - - No timezone info is attached. - """ - - scale = ctx['scale'] - - conv0 = lambda value: datetime.utcfromtimestamp(float(value)) - - def conv(value): - microseconds = float(value[0:-scale + 6]) - return datetime.utcfromtimestamp(microseconds) - - return conv if scale > 6 else conv0 - - def _TIMESTAMP_NTZ_numpy_to_python(self, ctx): - """ - TIMESTAMP NTZ to datetime64 - - No timezone info is attached. - """ - - scale = ctx['scale'] - - def conv(value): - nanoseconds = int(decimal.Decimal(value).scaleb(scale)) - return numpy.datetime64(nanoseconds, 'ns') - - return conv - - def _TIME_to_python(self, ctx): - """ - TIME to formatted string, SnowflakeDateTime, or datetime.time - - No timezone is attached. - """ - - scale = ctx['scale'] - - conv0 = lambda value: datetime.utcfromtimestamp(float(value)).time() - - def conv(value): - microseconds = float(value[0:-scale + 6]) - return datetime.utcfromtimestamp(microseconds).time() - - return conv if scale > 6 else conv0 - - def _VARIANT_to_python(self, _): - return None # skip conv - - _OBJECT_to_python = _VARIANT_to_python - - _ARRAY_to_python = _VARIANT_to_python - - def _BOOLEAN_to_python(self, ctx): - return lambda value: value in (u'1', u'TRUE') - - def snowflake_type(self, value): - """ - Returns Snowflake data type for the value. This is used for qmark - parameter style - """ - type_name = value.__class__.__name__.lower() - return PYTHON_TO_SNOWFLAKE_TYPE.get(type_name) - - def to_snowflake_bindings(self, snowflake_type, value): - """ - Converts Python data to snowflake data for qmark and numeric - parameter style - - The output is bound in a query in the server side. 
- """ - type_name = value.__class__.__name__.lower() - return getattr(self, u"_{type_name}_to_snowflake_bindings".format( - type_name=type_name))(snowflake_type, value) - - def _str_to_snowflake_bindings(self, _, value): - # NOTE: str type is always taken as a text data and never binary - return TO_UNICODE(value) - - _int_to_snowflake_bindings = _str_to_snowflake_bindings - _long_to_snowflake_bindings = _str_to_snowflake_bindings - _float_to_snowflake_bindings = _str_to_snowflake_bindings - _unicode_to_snowflake_bindings = _str_to_snowflake_bindings - _decimal_to_snowflake_bindings = _str_to_snowflake_bindings - - def _bytes_to_snowflake_bindings(self, _, value): - return binascii.hexlify(value).decode(u'utf-8') - - _bytearray_to_snowflake_bindings = _bytes_to_snowflake_bindings - - def _bool_to_snowflake_bindings(self, _, value): - return TO_UNICODE(value).lower() - - def _nonetype_to_snowflake_bindings(self, *_): - return None - - def _date_to_snowflake_bindings(self, _, value): - # milliseconds - return _convert_date_to_epoch_milliseconds(value) - - def _time_to_snowflake_bindings(self, _, value): - # nanoseconds - return _convert_time_to_epoch_nanoseconds(value) - - def _datetime_to_snowflake_bindings(self, snowflake_type, value): - snowflake_type = snowflake_type.upper() - if snowflake_type == 'TIMESTAMP_LTZ': - _, t = self._derive_offset_timestamp(value) - return _convert_datetime_to_epoch_nanoseconds(t) - elif snowflake_type == 'TIMESTAMP_NTZ': - # nanoseconds - return _convert_datetime_to_epoch_nanoseconds(value) - elif snowflake_type == 'TIMESTAMP_TZ': - offset, t = self._derive_offset_timestamp(value, is_utc=True) - return _convert_datetime_to_epoch_nanoseconds(t) + \ - u' {:04d}'.format(int(offset)) - else: - raise ProgrammingError( - msg=u'Binding datetime object with Snowflake data type {} is ' - u'not supported.'.format(snowflake_type), - errno=ER_NOT_SUPPORT_DATA_TYPE) - - def _derive_offset_timestamp(self, value, is_utc=False): - """ - Derives TZ offset and timestamp from the datatime object - """ - tzinfo = value.tzinfo - if tzinfo is None: - # If no tzinfo is attached, use local timezone. - tzinfo = self._get_session_tz() if not is_utc else pytz.UTC - t = pytz.utc.localize(value, is_dst=False).astimezone(tzinfo) - else: - # if tzinfo is attached, just covert to epoch time - # as the server expects it in UTC anyway - t = value - offset = tzinfo.utcoffset( - t.replace(tzinfo=None)).total_seconds() / 60 + 1440 - return offset, t - - def _struct_time_to_snowflake_bindings(self, snowflake_type, value): - return self._datetime_to_snowflake_bindings( - snowflake_type, - datetime.fromtimestamp(time.mktime(value))) - - def _timedelta_to_snowflake_bindings(self, snowflake_type, value): - snowflake_type = snowflake_type.upper() - if snowflake_type != u'TIME': - raise ProgrammingError( - msg=u'Binding timedelta object with Snowflake data type {} is ' - u'not supported.'.format(snowflake_type), - errno=ER_NOT_SUPPORT_DATA_TYPE) - (hours, r) = divmod(value.seconds, 3600) - (mins, secs) = divmod(r, 60) - hours += value.days * 24 - return TO_UNICODE(hours * 3600 + mins * 60 + secs) + \ - "{:06d}".format(value.microseconds) + u'000' - - def to_snowflake(self, value): - """ - Converts Python data to Snowflake data for pyformat/format style. - - The output is bound in a query in the client side. 
- """ - type_name = value.__class__.__name__.lower() - return getattr(self, u"_{type_name}_to_snowflake".format( - type_name=type_name))(value) - - def _int_to_snowflake(self, value): - return int(value) - - def _long_to_snowflake(self, value): - return long(value) # noqa: F821 - - def _float_to_snowflake(self, value): - return float(value) - - def _str_to_snowflake(self, value): - return TO_UNICODE(value) - - _unicode_to_snowflake = _str_to_snowflake - - def _bytes_to_snowflake(self, value): - return binary_to_snowflake(value) - - _bytearray_to_snowflake = _bytes_to_snowflake - - def _bool_to_snowflake(self, value): - return value - - def _bool__to_snowflake(self, value): - return bool(value) - - def _nonetype_to_snowflake(self, _): - return None - - def _total_seconds_from_timedelta(self, td): - return sfdatetime_total_seconds_from_timedelta(td) - - def _datetime_to_snowflake(self, value): - tzinfo_value = value.tzinfo - if tzinfo_value: - if pytz.utc != tzinfo_value: - try: - td = tzinfo_value.utcoffset(value) - except pytz.exceptions.AmbiguousTimeError: - td = tzinfo_value.utcoffset(value, is_dst=False) - else: - td = ZERO_TIMEDELTA - sign = u'+' if td >= ZERO_TIMEDELTA else u'-' - td_secs = sfdatetime_total_seconds_from_timedelta(td) - h, m = divmod(abs(td_secs // 60), 60) - if value.microsecond: - return ( - u'{year:d}-{month:02d}-{day:02d} ' - u'{hour:02d}:{minute:02d}:{second:02d}.' - u'{microsecond:06d}{sign}{tzh:02d}:{tzm:02d}').format( - year=value.year, month=value.month, day=value.day, - hour=value.hour, minute=value.minute, - second=value.second, - microsecond=value.microsecond, sign=sign, tzh=h, - tzm=m - ) - return ( - u'{year:d}-{month:02d}-{day:02d} ' - u'{hour:02d}:{minute:02d}:{second:02d}' - u'{sign}{tzh:02d}:{tzm:02d}').format( - year=value.year, month=value.month, day=value.day, - hour=value.hour, minute=value.minute, - second=value.second, - sign=sign, tzh=h, tzm=m - ) - else: - if value.microsecond: - return (u'{year:d}-{month:02d}-{day:02d} ' - u'{hour:02d}:{minute:02d}:{second:02d}.' - u'{microsecond:06d}').format( - year=value.year, month=value.month, day=value.day, - hour=value.hour, minute=value.minute, - second=value.second, - microsecond=value.microsecond - ) - return (u'{year:d}-{month:02d}-{day:02d} ' - u'{hour:02d}:{minute:02d}:{second:02d}').format( - year=value.year, month=value.month, day=value.day, - hour=value.hour, minute=value.minute, - second=value.second - ) - - def date_to_snowflake(self, value): - """ - Converts Date object to Snowflake object - """ - return self._date_to_snowflake(value) - - def _date_to_snowflake(self, value): - return u'{year:d}-{month:02d}-{day:02d}'.format( - year=value.year, month=value.month, day=value.day) - - def _time_to_snowflake(self, value): - if value.microsecond: - return value.strftime(u'%H:%M:%S.%%06d') % value.microsecond - return value.strftime(u'%H:%M:%S') - - def _struct_time_to_snowflake(self, value): - tzinfo_value = _generate_tzinfo_from_tzoffset(time.timezone // 60) - t = datetime.fromtimestamp(time.mktime(value)) - if pytz.utc != tzinfo_value: - t += tzinfo_value.utcoffset(t) - t = t.replace(tzinfo=tzinfo_value) - return self._datetime_to_snowflake(t) - - def _timedelta_to_snowflake(self, value): - (hours, r) = divmod(value.seconds, 3600) - (mins, secs) = divmod(r, 60) - hours += value.days * 24 - if value.microseconds: - return (u'{hour:02d}:{minute:02d}:{second:02d}.' 
- u'{microsecond:06d}').format( - hour=hours, minute=mins, - second=secs, - microsecond=value.microseconds) - return u'{hour:02d}:{minute:02d}:{second:02d}'.format(hour=hours, - minute=mins, - second=secs) - - def _decimal_to_snowflake(self, value): - if isinstance(value, decimal.Decimal): - return TO_UNICODE(value) - - return None - - def _list_to_snowflake(self, value): - return [SnowflakeConverter.quote(v0) for v0 in - [SnowflakeConverter.escape(v) for v in value]] - - _tuple_to_snowflake = _list_to_snowflake - - def __numpy_to_snowflake(self, value): - return value - - _int8_to_snowflake = __numpy_to_snowflake - _int16_to_snowflake = __numpy_to_snowflake - _int32_to_snowflake = __numpy_to_snowflake - _int64_to_snowflake = __numpy_to_snowflake - _uint8_to_snowflake = __numpy_to_snowflake - _uint16_to_snowflake = __numpy_to_snowflake - _uint32_to_snowflake = __numpy_to_snowflake - _uint64_to_snowflake = __numpy_to_snowflake - _float16_to_snowflake = __numpy_to_snowflake - _float32_to_snowflake = __numpy_to_snowflake - _float64_to_snowflake = __numpy_to_snowflake - - def _datetime64_to_snowflake(self, value): - return TO_UNICODE(value) + u'+00:00' - - def _quoted_name_to_snowflake(self, value): - return TO_UNICODE(value) - - def __getattr__(self, item): - if item.endswith('_to_snowflake'): - raise ProgrammingError( - msg=u"Binding data in type ({0}) is not supported.".format( - item[1:item.find('_to_snowflake')]), - errno=ER_NOT_SUPPORT_DATA_TYPE - ) - elif item.endswith('to_snowflake_bindings'): - raise ProgrammingError( - msg=u"Binding data in type ({0}) is not supported.".format( - item[1:item.find('_to_snowflake_bindings')]), - errno=ER_NOT_SUPPORT_DATA_TYPE - ) - raise AttributeError('No method is available: {0}'.format(item)) - - @staticmethod - def escape(value): - if isinstance(value, list): - return value - if value is None or IS_NUMERIC(value) or IS_BINARY(value): - return value - res = value - res = res.replace(u'\\', u'\\\\') - res = res.replace(u'\n', u'\\n') - res = res.replace(u'\r', u'\\r') - res = res.replace(u'\047', u'\134\047') # single quotes - return res - - @staticmethod - def quote(value): - if isinstance(value, list): - return ','.join(value) - if value is None: - return u'NULL' - elif isinstance(value, bool): - return u'TRUE' if value else u'FALSE' - elif PY2 and isinstance(value, long): - return TO_UNICODE(str(value)) - elif IS_NUMERIC(value): - return TO_UNICODE(repr(value)) - elif IS_BINARY(value): - # Binary literal syntax - return u"X'{0}'".format(value.decode('ascii')) - - return u"'{0}'".format(value) diff --git a/converter_issue23517.py b/converter_issue23517.py deleted file mode 100644 index 1a496ca2e..000000000 --- a/converter_issue23517.py +++ /dev/null @@ -1,101 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. -# -from datetime import timedelta -from logging import getLogger - -import pytz - -from .converter import ( - SnowflakeConverter, - ZERO_EPOCH, - _generate_tzinfo_from_tzoffset) - -logger = getLogger(__name__) - - -class SnowflakeConverterIssue23517(SnowflakeConverter): - """ - Converter for Python 3.4.3 and 3.5.0 - This is to address http://bugs.python.org/issue23517 - """ - - def __init__(self, **kwargs): - super(SnowflakeConverterIssue23517, self).__init__(**kwargs) - logger.debug('initialized') - - def _TIMESTAMP_TZ_to_python(self, ctx): - """ - TIMESTAMP TZ to datetime - - The timezone offset is piggybacked. 
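How `escape()` and `quote()` above cooperate for client-side binding: the value is backslash-escaped first, then wrapped as a SQL literal. A sketch:

```
s = SnowflakeConverter.escape("O'Brien\n")  # backslash-escapes quote and newline
SnowflakeConverter.quote(s)   # -> the SQL literal 'O\'Brien\n', quoted text
SnowflakeConverter.quote(None)   # -> 'NULL'
SnowflakeConverter.quote(False)  # -> 'FALSE'
SnowflakeConverter.quote(3.14)   # -> '3.14' (numerics stay unquoted)
```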
- """ - - scale = ctx['scale'] - - def conv0(encoded_value): - value, tz = encoded_value.split() - tzinfo = _generate_tzinfo_from_tzoffset(int(tz) - 1440) - microseconds = float(value) - t = ZERO_EPOCH + timedelta(seconds=microseconds) - if pytz.utc != tzinfo: - t += tzinfo.utcoffset(t) - return t.replace(tzinfo=tzinfo) - - def conv(encoded_value): - value, tz = encoded_value.split() - tzinfo = _generate_tzinfo_from_tzoffset(int(tz) - 1440) - microseconds = float(value[0:-scale + 6]) - t = ZERO_EPOCH + timedelta(seconds=microseconds) - if pytz.utc != tzinfo: - t += tzinfo.utcoffset(t) - return t.replace(tzinfo=tzinfo) - - return conv if scale > 6 else conv0 - - def _TIMESTAMP_NTZ_to_python(self, ctx): - """ - TIMESTAMP NTZ to datetime - - No timezone info is attached. - """ - - scale = ctx['scale'] - - def conv0(value): - logger.debug('timestamp_ntz: %s', value) - return ZERO_EPOCH + timedelta(seconds=(float(value))) - - def conv(value): - logger.debug('timestamp_ntz: %s', value) - microseconds = float(value[0:-scale + 6]) - return ZERO_EPOCH + timedelta(seconds=(microseconds)) - - return conv if scale > 6 else conv0 - - def _TIMESTAMP_LTZ_to_python(self, ctx): - def conv(value): - t, _ = self._pre_TIMESTAMP_LTZ_to_python(value, ctx) - return t - - return conv - - def _TIME_to_python(self, ctx): - """ - TIME to formatted string, SnowflakeDateTime, or datetime.time - - No timezone is attached. - """ - - scale = ctx['scale'] - - conv0 = lambda value: ( - ZERO_EPOCH + timedelta(seconds=(float(value)))).time() - - def conv(value): - microseconds = float(value[0:-scale + 6]) - return (ZERO_EPOCH + timedelta(seconds=(microseconds))).time() - - return conv if scale > 6 else conv0 diff --git a/cpp/ArrowIterator/CArrowIterator.cpp b/cpp/ArrowIterator/CArrowIterator.cpp deleted file mode 100644 index 6ce412719..000000000 --- a/cpp/ArrowIterator/CArrowIterator.cpp +++ /dev/null @@ -1,19 +0,0 @@ -/* - * Copyright (c) 2013-2019 Snowflake Computing - */ - -#include "CArrowIterator.hpp" - -namespace sf -{ - -Logger CArrowIterator::logger("snowflake.connector.CArrowIterator"); - -void CArrowIterator::addRecordBatch(PyObject* rb) -{ - std::shared_ptr cRecordBatch; - arrow::Status status = arrow::py::unwrap_record_batch(rb, &cRecordBatch); - m_cRecordBatches.push_back(cRecordBatch); -} - -} \ No newline at end of file diff --git a/cpp/ArrowIterator/CArrowIterator.hpp b/cpp/ArrowIterator/CArrowIterator.hpp deleted file mode 100644 index 746ade59c..000000000 --- a/cpp/ArrowIterator/CArrowIterator.hpp +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright (c) 2013-2019 Snowflake Computing - */ -#ifndef PC_ARROWITERATOR_HPP -#define PC_ARROWITERATOR_HPP - -#include -#include -#include -#include -#include -#include "logging.hpp" - -namespace sf -{ - -/** - * Arrow base iterator implementation in C++. 
- */ - -class CArrowIterator -{ -public: - CArrowIterator() = default; - - virtual ~CArrowIterator() = default; - - /** - * Add Arrow RecordBach to current chunk - * @param rb recordbatch to be added - */ - virtual void addRecordBatch(PyObject* rb); - - /** - * @return a python object which might be current row or an Arrow Table - */ - virtual PyObject* next() = 0; - - virtual void reset() = 0; - -protected: - /** list of all record batch in current chunk */ - std::vector> m_cRecordBatches; - - static Logger logger; -}; -} - -#endif // PC_ARROWITERATOR_HPP diff --git a/cpp/ArrowIterator/CArrowTableIterator.cpp b/cpp/ArrowIterator/CArrowTableIterator.cpp deleted file mode 100644 index 43e3c34c8..000000000 --- a/cpp/ArrowIterator/CArrowTableIterator.cpp +++ /dev/null @@ -1,320 +0,0 @@ -/* - * Copyright (c) 2013-2019 Snowflake Computing - */ -#include "CArrowTableIterator.hpp" -#include "SnowflakeType.hpp" -#include - -namespace sf -{ - -CArrowTableIterator::CArrowTableIterator(PyObject* context) -: m_context(context) -{ -} - -void CArrowTableIterator::addRecordBatch(PyObject* rb) -{ - // may add some specific behaviors for this iterator - // e.g. support retrieve table with row size - CArrowIterator::addRecordBatch(rb); -} - -void CArrowTableIterator::reset() -{ -} - -PyObject* CArrowTableIterator::next() -{ - bool firstDone = this->convertRecordBatchesToTable(); - return (firstDone && m_cTable) ? arrow::py::wrap_table(m_cTable) : Py_None; -} - -void CArrowTableIterator::reconstructRecordBatches() -{ - // TODO: type conversion, the code needs to be optimized - for (unsigned int batchIdx = 0; batchIdx < m_cRecordBatches.size(); batchIdx++) - { - std::shared_ptr currentBatch = m_cRecordBatches[batchIdx]; - std::shared_ptr schema = currentBatch->schema(); - for (int colIdx = 0; colIdx < currentBatch->num_columns(); colIdx++) - { - std::shared_ptr columnArray = currentBatch->column(colIdx); - std::shared_ptr dt = schema->field(colIdx)->type(); - std::shared_ptr metaData = - schema->field(colIdx)->metadata(); - SnowflakeType::Type st = SnowflakeType::snowflakeTypeFromString( - metaData->value(metaData->FindKey("logicalType"))); - // TODO: reconstruct columnArray in place, use method like - // columnArray->SetData(const std::shared_ptr& data) - switch (st) - { - case SnowflakeType::Type::FIXED: - { - int scale = metaData - ? std::stoi(metaData->value(metaData->FindKey("scale"))) - : 0; -// int precision = -// metaData -// ? 
std::stoi(metaData->value(metaData->FindKey("precision"))) -// : 38; - switch (dt->id()) - { - - case arrow::Type::type::INT8: - { - if (scale > 0) - { - // TODO: convert to arrow float64 - } - - // Do nothing if scale = 0, but may have edge case - break; - } - - case arrow::Type::type::INT16: - { - if (scale > 0) - { - // TODO: convert to arrow float64 - } - - // Do nothing if scale = 0, but may have edge case - break; - } - - case arrow::Type::type::INT32: - { - if (scale > 0) - { - // TODO: convert to arrow float64 - } - - // Do nothing if scale = 0, but may have edge case - break; - } - - case arrow::Type::type::INT64: - { - if (scale > 0) - { - // TODO: convert to arrow float64 - } - - // Do nothing if scale = 0, but may have edge case - break; - } - - case arrow::Type::type::DECIMAL: - { - // TODO: convert to arrow float64 - break; - } - - default: - { - std::string errorInfo = Logger::formatString( - "[Snowflake Exception] unknown arrow internal data type(%d) " - "for FIXED data", - dt->id()); - logger.error(errorInfo.c_str()); - PyErr_SetString(PyExc_Exception, errorInfo.c_str()); - return; - } - } - break; - } - - case SnowflakeType::Type::ANY: - case SnowflakeType::Type::CHAR: - case SnowflakeType::Type::OBJECT: - case SnowflakeType::Type::BINARY: - case SnowflakeType::Type::VARIANT: - case SnowflakeType::Type::TEXT: - { - // TODO: convert to arrow string (utf8) - break; - } - - case SnowflakeType::Type::BOOLEAN: - { - // Do nothing - break; - } - - case SnowflakeType::Type::REAL: - { - // TODO: convert to arrow float64 - break; - } - - case SnowflakeType::Type::DATE: - { - // TODO: convert to arrow dateDay - break; - } - - case SnowflakeType::Type::TIME: - { -// int scale = metaData -// ? std::stoi(metaData->value(metaData->FindKey("scale"))) -// : 9; - switch (dt->id()) - { - case arrow::Type::type::INT32: - { - // TODO: convert to arrow timestamp - break; - } - - case arrow::Type::type::INT64: - { - // TODO: convert to arrow timestamp - break; - } - - default: - { - std::string errorInfo = Logger::formatString( - "[Snowflake Exception] unknown arrow internal data type(%d) " - "for TIME data", - dt->id()); - logger.error(errorInfo.c_str()); - PyErr_SetString(PyExc_Exception, errorInfo.c_str()); - return; - } - } - break; - } - - case SnowflakeType::Type::TIMESTAMP_NTZ: - { -// int scale = metaData -// ? std::stoi(metaData->value(metaData->FindKey("scale"))) -// : 9; - switch (dt->id()) - { - case arrow::Type::type::INT64: - { - // TODO: convert to arrow timestamp - break; - } - - case arrow::Type::type::STRUCT: - { - // TODO: convert to arrow timestamp - break; - } - - default: - { - std::string errorInfo = Logger::formatString( - "[Snowflake Exception] unknown arrow internal data type(%d) " - "for TIMESTAMP_NTZ data", - dt->id()); - logger.error(errorInfo.c_str()); - PyErr_SetString(PyExc_Exception, errorInfo.c_str()); - return; - } - } - break; - } - - case SnowflakeType::Type::TIMESTAMP_LTZ: - { -// int scale = metaData -// ? 
std::stoi(metaData->value(metaData->FindKey("scale"))) -// : 9; - switch (dt->id()) - { - case arrow::Type::type::INT64: - { - // TODO: convert to arrow timestamp - break; - } - - case arrow::Type::type::STRUCT: - { - // TODO: convert to arrow timestamp - break; - } - - default: - { - std::string errorInfo = Logger::formatString( - "[Snowflake Exception] unknown arrow internal data type(%d) " - "for TIMESTAMP_LTZ data", - dt->id()); - logger.error(errorInfo.c_str()); - PyErr_SetString(PyExc_Exception, errorInfo.c_str()); - return; - } - } - break; - } - - case SnowflakeType::Type::TIMESTAMP_TZ: - { - int scale = metaData - ? std::stoi(metaData->value(metaData->FindKey("scale"))) - : 9; - int byteLength = - metaData - ? std::stoi(metaData->value(metaData->FindKey("byteLength"))) - : 16; - switch (byteLength) - { - case 8: - { - // TODO: convert to arrow timestamp - break; - } - - case 16: - { - // TODO: convert to arrow timestamp - break; - } - - default: - { - std::string errorInfo = Logger::formatString( - "[Snowflake Exception] unknown arrow internal data type(%d) " - "for TIMESTAMP_TZ data", - dt->id()); - logger.error(errorInfo.c_str()); - PyErr_SetString(PyExc_Exception, errorInfo.c_str()); - return; - } - } - - break; - } - - default: - { - std::string errorInfo = Logger::formatString( - "[Snowflake Exception] unknown snowflake data type : %d", - metaData->value(metaData->FindKey("logicalType"))); - logger.error(errorInfo.c_str()); - PyErr_SetString(PyExc_Exception, errorInfo.c_str()); - return; - } - } - } - } -} - -bool CArrowTableIterator::convertRecordBatchesToTable() -{ - // only do conversion once and there exist some record batches - if (!m_cTable && !m_cRecordBatches.empty()) - { - reconstructRecordBatches(); - arrow::Table::FromRecordBatches(m_cRecordBatches, &m_cTable); - return true; - } - return false; -} - -} // namespace sf \ No newline at end of file diff --git a/cpp/ArrowIterator/CArrowTableIterator.hpp b/cpp/ArrowIterator/CArrowTableIterator.hpp deleted file mode 100644 index 6b3a6b8e8..000000000 --- a/cpp/ArrowIterator/CArrowTableIterator.hpp +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright (c) 2013-2019 Snowflake Computing - */ -#ifndef PC_ARROWTABLEITERATOR_HPP -#define PC_ARROWTABLEITERATOR_HPP - -#include -#include "CArrowIterator.hpp" - -namespace sf -{ - -/** - * Arrow table iterator implementation in C++. - * The caller will ask for an Arrow Table to be returned back to Python - * This conversion is zero-copy, just aggregate every columns from mutiple record batches - * and build a new table. 
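The zero-copy aggregation described in the header comment above has a direct counterpart in the pyarrow API on the Python side; a minimal sketch (pyarrow usage, not this C++ class):

```
import pyarrow as pa

batches = [pa.RecordBatch.from_arrays([pa.array([1, 2])], names=['c']),
           pa.RecordBatch.from_arrays([pa.array([3])], names=['c'])]
table = pa.Table.from_batches(batches)  # aggregates columns without copying
table.num_rows                          # -> 3
```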
- */ -class CArrowTableIterator : public CArrowIterator -{ -public: - /** - * Constructor - */ - CArrowTableIterator(PyObject* context); - - /** - * Desctructor - */ - ~CArrowTableIterator() = default; - - /** - * Add Arrow RecordBach to current chunk - * @param rb recordbatch to be added - */ - void addRecordBatch(PyObject* rb) override; - - /** - * @return an arrow table containing all data in all record batches - */ - PyObject* next() override; - - void reset() override; - -private: - /* arrow table of all record batches in current chunk */ - std::shared_ptr m_cTable; - - /** arrow format convert context for the current session */ - PyObject* m_context; - - /** - * Reconstruct record batches with type conversion in place - */ - void reconstructRecordBatches(); - - /** - * Convert all current RecordBatches to Arrow Table - * @return if conversion is executed at first time and sucessfully - */ - bool convertRecordBatchesToTable(); -}; -} - -#endif // PC_ARROWTABLEITERATOR_HPP diff --git a/cpp/ArrowIterator/FloatConverter.cpp b/cpp/ArrowIterator/FloatConverter.cpp deleted file mode 100644 index 29f923699..000000000 --- a/cpp/ArrowIterator/FloatConverter.cpp +++ /dev/null @@ -1,27 +0,0 @@ -/* - * Copyright (c) 2013-2019 Snowflake Computing - */ -#include "FloatConverter.hpp" - -namespace sf -{ - -/** snowflake float is 64-precision, which refers to double here */ -FloatConverter::FloatConverter(std::shared_ptr array) -: m_array(std::dynamic_pointer_cast(array)) -{ -} - -PyObject* FloatConverter::toPyObject(int64_t rowIndex) const -{ - if (m_array->IsValid(rowIndex)) - { - return PyFloat_FromDouble(m_array->Value(rowIndex)); - } - else - { - Py_RETURN_NONE; - } -} - -} // namespace sf diff --git a/cpp/ArrowIterator/TimeStampConverter.cpp b/cpp/ArrowIterator/TimeStampConverter.cpp deleted file mode 100644 index 99ce2976b..000000000 --- a/cpp/ArrowIterator/TimeStampConverter.cpp +++ /dev/null @@ -1,197 +0,0 @@ -/* - * Copyright (c) 2013-2019 Snowflake Computing - */ - -#include "TimeStampConverter.hpp" -#include "Python/Helpers.hpp" -#include "Util/time.hpp" - -namespace sf -{ -TimeStampBaseConverter::TimeStampBaseConverter(PyObject* context, int32_t scale) -: m_context(context), m_scale(scale) -{ -} - -OneFieldTimeStampNTZConverter::OneFieldTimeStampNTZConverter( - std::shared_ptr array, int32_t scale, PyObject* context) -: TimeStampBaseConverter(context, scale), - m_array(std::dynamic_pointer_cast(array)) -{ -} - -PyObject* OneFieldTimeStampNTZConverter::toPyObject(int64_t rowIndex) const -{ - if (m_array->IsValid(rowIndex)) - { - double microseconds = internal::getFormattedDoubleFromEpoch( - m_array->Value(rowIndex), m_scale); -#ifdef _WIN32 - return PyObject_CallMethod(m_context, "TIMESTAMP_NTZ_to_python_windows", - "d", microseconds); -#else - return PyObject_CallMethod(m_context, "TIMESTAMP_NTZ_to_python", "d", - microseconds); -#endif - } - else - { - Py_RETURN_NONE; - } -} - -TwoFieldTimeStampNTZConverter::TwoFieldTimeStampNTZConverter( - std::shared_ptr array, int32_t scale, PyObject* context) -: TimeStampBaseConverter(context, scale), - m_array(std::dynamic_pointer_cast(array)), - m_epoch(std::dynamic_pointer_cast( - m_array->GetFieldByName(internal::FIELD_NAME_EPOCH))), - m_fraction(std::dynamic_pointer_cast( - m_array->GetFieldByName(internal::FIELD_NAME_FRACTION))) -{ -} - -PyObject* TwoFieldTimeStampNTZConverter::toPyObject(int64_t rowIndex) const -{ - if (m_array->IsValid(rowIndex)) - { - int64_t epoch = m_epoch->Value(rowIndex); - int32_t frac = m_fraction->Value(rowIndex); - 
double microseconds = - internal::getFormattedDoubleFromEpochFraction(epoch, frac, m_scale); -#ifdef _WIN32 - return PyObject_CallMethod(m_context, "TIMESTAMP_NTZ_to_python_windows", - "d", microseconds); -#else - return PyObject_CallMethod(m_context, "TIMESTAMP_NTZ_to_python", "d", - microseconds); -#endif - } - else - { - Py_RETURN_NONE; - } -} - -OneFieldTimeStampLTZConverter::OneFieldTimeStampLTZConverter( - std::shared_ptr array, int32_t scale, PyObject* context) -: TimeStampBaseConverter(context, scale), - m_array(std::dynamic_pointer_cast(array)) -{ -} - -PyObject* OneFieldTimeStampLTZConverter::toPyObject(int64_t rowIndex) const -{ - if (m_array->IsValid(rowIndex)) - { - double microseconds = internal::getFormattedDoubleFromEpoch( - m_array->Value(rowIndex), m_scale); -#ifdef _WIN32 - // this macro is enough for both win32 and win64 - return PyObject_CallMethod(m_context, "TIMESTAMP_LTZ_to_python_windows", - "d", microseconds); -#else - return PyObject_CallMethod(m_context, "TIMESTAMP_LTZ_to_python", "d", - microseconds); -#endif - } - - Py_RETURN_NONE; -} - -TwoFieldTimeStampLTZConverter::TwoFieldTimeStampLTZConverter( - std::shared_ptr array, int32_t scale, PyObject* context) -: TimeStampBaseConverter(context, scale), - m_array(std::dynamic_pointer_cast(array)), - m_epoch(std::dynamic_pointer_cast( - m_array->GetFieldByName(internal::FIELD_NAME_EPOCH))), - m_fraction(std::dynamic_pointer_cast( - m_array->GetFieldByName(internal::FIELD_NAME_FRACTION))) -{ -} - -PyObject* TwoFieldTimeStampLTZConverter::toPyObject(int64_t rowIndex) const -{ - if (m_array->IsValid(rowIndex)) - { - int64_t epoch = m_epoch->Value(rowIndex); - int32_t frac = m_fraction->Value(rowIndex); - double microseconds = - internal::getFormattedDoubleFromEpochFraction(epoch, frac, m_scale); -#ifdef _WIN32 - return PyObject_CallMethod(m_context, "TIMESTAMP_LTZ_to_python_windows", - "d", microseconds); -#else - return PyObject_CallMethod(m_context, "TIMESTAMP_LTZ_to_python", "d", - microseconds); -#endif - } - - Py_RETURN_NONE; -} - -TwoFieldTimeStampTZConverter::TwoFieldTimeStampTZConverter( - std::shared_ptr array, int32_t scale, PyObject* context) -: TimeStampBaseConverter(context, scale), - m_array(std::dynamic_pointer_cast(array)), - m_epoch(std::dynamic_pointer_cast( - m_array->GetFieldByName(internal::FIELD_NAME_EPOCH))), - m_timezone(std::dynamic_pointer_cast( - m_array->GetFieldByName(internal::FIELD_NAME_TIME_ZONE))) -{ -} - -PyObject* TwoFieldTimeStampTZConverter::toPyObject(int64_t rowIndex) const -{ - if (m_array->IsValid(rowIndex)) - { - int64_t epoch = m_epoch->Value(rowIndex); - double microseconds = internal::getFormattedDoubleFromEpoch(epoch, m_scale); - int32_t timezone = m_timezone->Value(rowIndex); -#ifdef _WIN32 - return PyObject_CallMethod(m_context, "TIMESTAMP_TZ_to_python_windows", - "di", microseconds, timezone); -#else - return PyObject_CallMethod(m_context, "TIMESTAMP_TZ_to_python", "di", - microseconds, timezone); -#endif - } - - Py_RETURN_NONE; -} - -ThreeFieldTimeStampTZConverter::ThreeFieldTimeStampTZConverter( - std::shared_ptr array, int32_t scale, PyObject* context) -: TimeStampBaseConverter(context, scale), - m_array(std::dynamic_pointer_cast(array)), - m_epoch(std::dynamic_pointer_cast( - m_array->GetFieldByName(internal::FIELD_NAME_EPOCH))), - m_timezone(std::dynamic_pointer_cast( - m_array->GetFieldByName(internal::FIELD_NAME_TIME_ZONE))), - m_fraction(std::dynamic_pointer_cast( - m_array->GetFieldByName(internal::FIELD_NAME_FRACTION))) -{ -} - -PyObject* 
ThreeFieldTimeStampTZConverter::toPyObject(int64_t rowIndex) const -{ - if (m_array->IsValid(rowIndex)) - { - int64_t epoch = m_epoch->Value(rowIndex); - int32_t frac = m_fraction->Value(rowIndex); - double microseconds = - internal::getFormattedDoubleFromEpochFraction(epoch, frac, m_scale); - int32_t timezone = m_timezone->Value(rowIndex); -#ifdef _WIN32 - return PyObject_CallMethod(m_context, "TIMESTAMP_TZ_to_python_windows", - "di", microseconds, timezone); -#else - return PyObject_CallMethod(m_context, "TIMESTAMP_TZ_to_python", "di", - microseconds, timezone); -#endif - } - - Py_RETURN_NONE; -} - -} // namespace sf diff --git a/cpp/ArrowIterator/Util/time.cpp b/cpp/ArrowIterator/Util/time.cpp deleted file mode 100644 index 09564d62c..000000000 --- a/cpp/ArrowIterator/Util/time.cpp +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright (c) 2013-2019 Snowflake Computing - */ -#include "time.hpp" - -namespace sf -{ - -namespace internal -{ - -int32_t getNumberOfDigit(int32_t num) -{ - return (num >= 100000000) - ? 9 - : (num >= 10000000) - ? 8 - : (num >= 1000000) - ? 7 - : (num >= 100000) - ? 6 - : (num >= 10000) - ? 5 - : (num >= 1000) - ? 4 - : (num >= 100) ? 3 : (num >= 10) - ? 2 - : (num >= 1) - ? 1 - : 0; -} - -int32_t getHourFromSeconds(int64_t seconds, int32_t scale) -{ - return seconds / powTenSB4[scale] / SECONDS_PER_HOUR; -} - -int32_t getHourFromSeconds(int32_t seconds, int32_t scale) -{ - return seconds / powTenSB4[scale] / SECONDS_PER_HOUR; -} - -int32_t getMinuteFromSeconds(int64_t seconds, int32_t scale) -{ - return seconds / powTenSB4[scale] % SECONDS_PER_HOUR / SECONDS_PER_MINUTE; -} - -int32_t getMinuteFromSeconds(int32_t seconds, int32_t scale) -{ - return seconds / powTenSB4[scale] % SECONDS_PER_HOUR / SECONDS_PER_MINUTE; -} - -int32_t getSecondFromSeconds(int64_t seconds, int32_t scale) -{ - return seconds / powTenSB4[scale] % SECONDS_PER_MINUTE; -} - -int32_t getSecondFromSeconds(int32_t seconds, int32_t scale) -{ - return seconds / powTenSB4[scale] % SECONDS_PER_MINUTE; -} - -int32_t getMicrosecondFromSeconds(int64_t seconds, int32_t scale) -{ - int32_t microsec = seconds % powTenSB4[scale]; - return scale > PYTHON_DATETIME_TIME_MICROSEC_DIGIT ? microsec /= - powTenSB4[scale - PYTHON_DATETIME_TIME_MICROSEC_DIGIT] : microsec *= - powTenSB4[PYTHON_DATETIME_TIME_MICROSEC_DIGIT - scale]; -} - -double getFormattedDoubleFromEpoch(int64_t epoch, int32_t scale) -{ - return scale > PYTHON_DATETIME_TIME_MICROSEC_DIGIT - ? static_cast( - epoch / - powTenSB4[scale - PYTHON_DATETIME_TIME_MICROSEC_DIGIT]) / - powTenSB4[PYTHON_DATETIME_TIME_MICROSEC_DIGIT] - : static_cast(epoch) / powTenSB4[scale]; -} - -double getFormattedDoubleFromEpochFraction(int64_t epoch, int32_t frac, - int32_t scale) -{ - return static_cast(epoch) + - static_cast(castToFormattedFraction(frac, epoch > 0, scale)) / - powTenSB4[std::min(scale, PYTHON_DATETIME_TIME_MICROSEC_DIGIT)]; -} - -int32_t castToFormattedFraction(int32_t frac, bool isPositive, int32_t scale) -{ - // if scale > 6 or not - constexpr int DIFF_DIGIT = - NANOSEC_DIGIT - PYTHON_DATETIME_TIME_MICROSEC_DIGIT; - if (scale > 6) - { - return isPositive - ? (frac / powTenSB4[DIFF_DIGIT]) - : (powTenSB4[PYTHON_DATETIME_TIME_MICROSEC_DIGIT] - - (powTenSB4[NANOSEC_DIGIT] - frac) / powTenSB4[DIFF_DIGIT]); - } - else - { - return isPositive - ? 
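Restating the arithmetic of `getFormattedDoubleFromEpochFraction` above in Python: for a positive two-field timestamp with scale 9 (NANOSEC_DIGIT 9, microsecond digits 6), the nanosecond fraction is truncated to microseconds and added onto the integer epoch:

```
epoch, frac, scale = 1577836800, 123456789, 9
formatted = epoch + (frac // 10 ** (9 - 6)) / 10 ** min(scale, 6)
# -> 1577836800.123456, the double handed to the TIMESTAMP_*_to_python callbacks
```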
(frac / powTenSB4[NANOSEC_DIGIT - scale]) - : (powTenSB4[scale] - (powTenSB4[NANOSEC_DIGIT] - frac) / - powTenSB4[NANOSEC_DIGIT - scale]); - } -} - -} // namespace internal -} // namespace sf diff --git a/cpp/Logging/logging.cpp b/cpp/Logging/logging.cpp deleted file mode 100644 index 2b3cf6c55..000000000 --- a/cpp/Logging/logging.cpp +++ /dev/null @@ -1,70 +0,0 @@ -#include "logging.hpp" -#include "Python/Helpers.hpp" -#include - -namespace sf -{ -std::string Logger::formatString(const char *format, ...) -{ - char msg[1000] = {0}; - va_list args; - va_start(args, format); - vsnprintf(msg, sizeof(msg), format, args); - va_end(args); - - return std::string(msg); -} - -Logger::Logger(const char *name) -{ - py::UniqueRef pyLoggingModule; - py::importPythonModule("logging", pyLoggingModule); - PyObject *logger = - PyObject_CallMethod(pyLoggingModule.get(), "getLogger", "s", name); - m_pyLogger.reset(logger); -} - -void Logger::debug(const char *format, ...) -{ - char msg[1000] = {0}; - va_list args; - va_start(args, format); - vsnprintf(msg, sizeof(msg), format, args); - va_end(args); - - PyObject_CallMethod(m_pyLogger.get(), "debug", "s", msg); -} - -void Logger::info(const char *format, ...) -{ - char msg[1000] = {0}; - va_list args; - va_start(args, format); - vsnprintf(msg, sizeof(msg), format, args); - va_end(args); - - PyObject_CallMethod(m_pyLogger.get(), "info", "s", msg); -} - -void Logger::warn(const char *format, ...) -{ - char msg[1000] = {0}; - va_list args; - va_start(args, format); - vsnprintf(msg, sizeof(msg), format, args); - va_end(args); - - PyObject_CallMethod(m_pyLogger.get(), "warn", "s", msg); -} - -void Logger::error(const char *format, ...) -{ - char msg[1000] = {0}; - va_list args; - va_start(args, format); - vsnprintf(msg, sizeof(msg), format, args); - va_end(args); - - PyObject_CallMethod(m_pyLogger.get(), "error", "s", msg); -} -} diff --git a/cpp/Logging/logging.hpp b/cpp/Logging/logging.hpp deleted file mode 100644 index 632e9809e..000000000 --- a/cpp/Logging/logging.hpp +++ /dev/null @@ -1,34 +0,0 @@ -/* - * Copyright (c) 2013-2019 Snowflake Computing - */ -#ifndef PC_LOGGING_HPP -#define PC_LOGGING_HPP - -#include "Python/Common.hpp" -#include - -namespace sf -{ - -class Logger -{ -public: - explicit Logger(const char *name); - - void debug(const char *fmt, ...); - - void info(const char *fmt, ...); - - void warn(const char *fmt, ...); - - void error(const char *fmt, ...); - - static std::string formatString(const char *fmt, ...); - -private: - py::UniqueRef m_pyLogger; -}; - -} // namespace sf - -#endif // PC_LOGGING_HPP diff --git a/cursor.py b/cursor.py deleted file mode 100644 index 91a0275dd..000000000 --- a/cursor.py +++ /dev/null @@ -1,875 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. 
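The C++ `Logger` above is only a thin bridge: each call formats its message with `vsnprintf`, then forwards it to the named logger in Python's stdlib `logging` module. Its Python-side effect is roughly:

```
import logging

logging.getLogger('snowflake.connector.CArrowIterator').debug('%s', 'row done')
```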
-# -import logging -import re -import signal -import sys -import uuid -from logging import getLogger -from threading import (Timer, Lock) -from six import u - -from .compat import (BASE_EXCEPTION_CLASS) -from .constants import ( - FIELD_NAME_TO_ID, -) -from .errorcode import (ER_UNSUPPORTED_METHOD, - ER_CURSOR_IS_CLOSED, - ER_FAILED_TO_REWRITE_MULTI_ROW_INSERT, - ER_NOT_POSITIVE_SIZE, - ER_INVALID_VALUE) -from .errors import (Error, ProgrammingError, NotSupportedError, - DatabaseError, InterfaceError) -from .file_transfer_agent import (SnowflakeFileTransferAgent) -from .json_result import JsonResult, DictJsonResult -from .sqlstate import (SQLSTATE_FEATURE_NOT_SUPPORTED) -from .telemetry import (TelemetryData, TelemetryField) -from .time_util import get_time_millis - -try: - from .arrow_result import ArrowResult -except ImportError: - pass - -STATEMENT_TYPE_ID_DML = 0x3000 -STATEMENT_TYPE_ID_INSERT = STATEMENT_TYPE_ID_DML + 0x100 -STATEMENT_TYPE_ID_UPDATE = STATEMENT_TYPE_ID_DML + 0x200 -STATEMENT_TYPE_ID_DELETE = STATEMENT_TYPE_ID_DML + 0x300 -STATEMENT_TYPE_ID_MERGE = STATEMENT_TYPE_ID_DML + 0x400 -STATEMENT_TYPE_ID_MULTI_TABLE_INSERT = STATEMENT_TYPE_ID_DML + 0x500 - -STATEMENT_TYPE_ID_DML_SET = frozenset( - [STATEMENT_TYPE_ID_DML, STATEMENT_TYPE_ID_INSERT, - STATEMENT_TYPE_ID_UPDATE, - STATEMENT_TYPE_ID_DELETE, STATEMENT_TYPE_ID_MERGE, - STATEMENT_TYPE_ID_MULTI_TABLE_INSERT]) - -DESC_TABLE_RE = re.compile(u(r'desc(?:ribe)?\s+([\w_]+)\s*;?\s*$'), - flags=re.IGNORECASE) - -logger = getLogger(__name__) - -LOG_MAX_QUERY_LENGTH = 80 - - -class SnowflakeCursor(object): - u""" - Implementation of Cursor object that is returned from Connection.cursor() - method. - """ - PUT_SQL_RE = re.compile(u(r'^(?:/\*.*\*/\s*)*put\s+'), flags=re.IGNORECASE) - GET_SQL_RE = re.compile(u(r'^(?:/\*.*\*/\s*)*get\s+'), flags=re.IGNORECASE) - INSERT_SQL_RE = re.compile(u(r'^insert\s+into'), flags=re.IGNORECASE) - COMMENT_SQL_RE = re.compile(r"/\*.*\*/") - INSERT_SQL_VALUES_RE = re.compile(u(r'.*VALUES\s*(\(.*\)).*'), - re.IGNORECASE | re.MULTILINE | re.DOTALL) - ALTER_SESSION_RE = re.compile( - u(r'alter\s+session\s+set\s+(.*)=\'?([^\']+)\'?\s*;'), - flags=re.IGNORECASE | re.MULTILINE | re.DOTALL) - - def __init__(self, connection, json_result_class=JsonResult): - self._connection = connection - - self._errorhandler = Error.default_errorhandler - self.messages = [] - self._timebomb = None # must be here for abort_exit method - self._description = None - self._column_idx_to_name = None - self._sfqid = None - self._sqlstate = None - self._total_rowcount = -1 - self._sequence_counter = -1 - self._request_id = None - self._is_file_transfer = False - - self._timestamp_output_format = None - self._timestamp_ltz_output_format = None - self._timestamp_ntz_output_format = None - self._timestamp_tz_output_format = None - self._date_output_format = None - self._time_output_format = None - self._timezone = None - self._binary_output_format = None - self._result = None - self._json_result_class = json_result_class - - self._arraysize = 1 # PEP-0249: defaults to 1 - - self._lock_canceling = Lock() - - self._first_chunk_time = None - - self._log_max_query_length = connection.log_max_query_length - - self.reset() - - def __del__(self): - try: - self.close() - except BASE_EXCEPTION_CLASS as e: - logger = getLogger(__name__) - if logger.getEffectiveLevel() <= logging.INFO: - logger.info(e) - - @property - def description(self): - u""" - Columns information in a tuple: - - name - - type_code - - display_size - - internal_size - - 
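The DML statement-type ids above are `0x3000` plus a per-command offset, which is what `_is_dml` later checks against. For instance:

```
hex(STATEMENT_TYPE_ID_INSERT)        # -> '0x3100'
hex(STATEMENT_TYPE_ID_MERGE)         # -> '0x3400'
0x3200 in STATEMENT_TYPE_ID_DML_SET  # -> True: an UPDATE result counts as DML
```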
precision - - scale - - null_ok - """ - return self._description - - @property - def rowcount(self): - u""" - The number of records updated or selected. - If not clear, -1 is returned - """ - return self._total_rowcount if self._total_rowcount >= 0 else None - - @property - def rownumber(self): - u""" - The current 0-based index of the cursor in the result set or None if - the index cannot be determined. - """ - return self._result.total_row_index if self._result.total_row_index >= 0 else None - - @property - def sfqid(self): - u""" - Snowflake query id in UUID form. Include this in the problem report to - the customer support - """ - return self._sfqid - - @property - def sqlstate(self): - u""" - SQL State code - """ - return self._sqlstate - - @property - def timestamp_output_format(self): - u""" - Snowflake timestamp_output_format - """ - return self._timestamp_output_format - - @property - def timestamp_ltz_output_format(self): - u""" - Snowflake timestamp_output_format - """ - return self._timestamp_ltz_output_format if \ - self._timestamp_ltz_output_format else \ - self._timestamp_output_format - - @property - def timestamp_tz_output_format(self): - u""" - Snowflake timestamp_output_format - """ - return self._timestamp_tz_output_format if \ - self._timestamp_tz_output_format else \ - self._timestamp_output_format - - @property - def timestamp_ntz_output_format(self): - u""" - Snowflake timestamp_output_format - """ - return self._timestamp_ntz_output_format if \ - self._timestamp_ntz_output_format else \ - self._timestamp_output_format - - @property - def date_output_format(self): - u""" - Snowflake date_output_format - """ - return self._date_output_format - - @property - def time_output_format(self): - u""" - Snowflake time_output_format - """ - return self._time_output_format - - @property - def timezone(self): - u""" - Snowflake timezone - """ - return self._timezone - - @property - def binary_output_format(self): - u""" - Snowflake binary_output_format - """ - return self._binary_output_format - - @property - def arraysize(self): - u""" - The default number of rows fetched in fetchmany - """ - return self._arraysize - - @arraysize.setter - def arraysize(self, value): - self._arraysize = int(value) - - @property - def connection(self): - u""" - The connection object on which the cursor was created - """ - return self._connection - - @property - def errorhandler(self): - return self._errorhandler - - @errorhandler.setter - def errorhandler(self, value): - logger.debug(u'setting errorhandler: %s', value) - if value is None: - raise ProgrammingError(u'Invalid errorhandler is specified') - self._errorhandler = value - - @property - def is_file_transfer(self): - """ - Is PUT or GET command? 
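A note on the `timestamp_*_output_format` properties above: each type-specific format falls back to the generic `timestamp_output_format` when unset. Schematically, with `cur` assumed to be a `SnowflakeCursor`:

```
cur._timestamp_output_format = 'YYYY-MM-DD HH24:MI:SS'  # generic format set
cur._timestamp_ltz_output_format = None                 # LTZ-specific unset
cur.timestamp_ltz_output_format  # -> 'YYYY-MM-DD HH24:MI:SS' (fallback)
```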
- """ - return hasattr(self, '_is_file_transfer') and self._is_file_transfer - - def callproc(self, procname, args=()): - u""" - Not supported - """ - Error.errorhandler_wrapper( - self.connection, self, - NotSupportedError, - { - u'msg': u"callproc is not supported.", - u'errno': ER_UNSUPPORTED_METHOD, - u'sqlstate': SQLSTATE_FEATURE_NOT_SUPPORTED}) - - def close(self): - u""" - Closes the cursor object - """ - try: - if self.is_closed(): - return False - - with self._lock_canceling: - self.reset() - self._connection = None - del self.messages[:] - return True - except: - pass - - def is_closed(self): - return self._connection is None or self._connection.is_closed() - - def _execute_helper( - self, query, timeout=0, statement_params=None, - binding_params=None, - is_internal=False, _no_results=False, _is_put_get=None): - del self.messages[:] - - if statement_params is not None and not isinstance( - statement_params, dict): - Error.errorhandler_wrapper( - self.connection, self, - ProgrammingError, - { - u'msg': u"The data type of statement params is invalid. " - u"It must be dict.", - u'errno': ER_INVALID_VALUE, - }) - - self._sequence_counter = self._connection._next_sequence_counter() - self._request_id = uuid.uuid4() - - if logger.getEffectiveLevel() <= logging.DEBUG: - logger.debug( - u'running query [%s]', self._format_query_for_log(query)) - if _is_put_get is not None: - # if told the query is PUT or GET, use the information - self._is_file_transfer = _is_put_get - else: - # or detect it. - self._is_file_transfer = self.PUT_SQL_RE.match( - query) or self.GET_SQL_RE.match(query) - logger.debug(u'is_file_transfer: %s', - self._is_file_transfer is not None) - - real_timeout = timeout if timeout and timeout > 0 \ - else self._connection.network_timeout - - if real_timeout is not None: - self._timebomb = Timer( - real_timeout, self.__cancel_query, [query]) - self._timebomb.start() - logger.debug(u'started timebomb in %ss', real_timeout) - else: - self._timebomb = None - - original_sigint = signal.getsignal(signal.SIGINT) - - def abort_exit(*_): - try: - signal.signal(signal.SIGINT, signal.SIG_IGN) - except (ValueError, TypeError): - # ignore failures - pass - try: - if self._timebomb is not None: - self._timebomb.cancel() - logger.debug(u'cancelled timebomb in finally') - self._timebomb = None - self.__cancel_query(query) - finally: - if original_sigint: - try: - signal.signal(signal.SIGINT, original_sigint) - except (ValueError, TypeError): - # ignore failures - pass - raise KeyboardInterrupt - - try: - signal.signal(signal.SIGINT, abort_exit) - except ValueError: - logger.debug( - u'Failed to set SIGINT handler. ' - u'Not in main thread. Ignored...') - ret = {u'data': {}} - try: - ret = self._connection.cmd_query( - query, - self._sequence_counter, - self._request_id, - binding_params=binding_params, - is_file_transfer=self._is_file_transfer, - statement_params=statement_params, - is_internal=is_internal, - _no_results=_no_results) - finally: - try: - if original_sigint: - signal.signal(signal.SIGINT, original_sigint) - except (ValueError, TypeError): - logger.debug( - u'Failed to reset SIGINT handler. Not in main ' - u'thread. 
Ignored...') - except Exception: - self.connection.incident.report_incident() - raise - if self._timebomb is not None: - self._timebomb.cancel() - logger.debug(u'cancelled timebomb in finally') - - if u'data' in ret and u'parameters' in ret[u'data']: - for kv in ret[u'data'][u'parameters']: - if u'TIMESTAMP_OUTPUT_FORMAT' in kv[u'name']: - self._timestamp_output_format = kv[u'value'] - if u'TIMESTAMP_NTZ_OUTPUT_FORMAT' in kv[u'name']: - self._timestamp_ntz_output_format = kv[u'value'] - if u'TIMESTAMP_LTZ_OUTPUT_FORMAT' in kv[u'name']: - self._timestamp_ltz_output_format = kv[u'value'] - if u'TIMESTAMP_TZ_OUTPUT_FORMAT' in kv[u'name']: - self._timestamp_tz_output_format = kv[u'value'] - if u'DATE_OUTPUT_FORMAT' in kv[u'name']: - self._date_output_format = kv[u'value'] - if u'TIME_OUTPUT_FORMAT' in kv[u'name']: - self._time_output_format = kv[u'value'] - if u'TIMEZONE' in kv[u'name']: - self._timezone = kv[u'value'] - if u'BINARY_OUTPUT_FORMAT' in kv[u'name']: - self._binary_output_format = kv[u'value'] - self._connection._set_parameters( - ret, self._connection._session_parameters) - - self._sequence_counter = -1 - return ret - - def execute(self, command, params=None, timeout=None, - _do_reset=True, - _put_callback=None, - _put_azure_callback=None, - _put_callback_output_stream=sys.stdout, - _get_callback=None, - _get_azure_callback=None, - _get_callback_output_stream=sys.stdout, - _show_progress_bar=True, - _statement_params=None, - _is_internal=False, - _no_results=False, - _use_ijson=False, - _is_put_get=None, - _raise_put_get_error=False, - _force_put_overwrite=False): - u""" - Executes a command/query - """ - logger.debug(u'executing SQL/command') - if self.is_closed(): - Error.errorhandler_wrapper( - self.connection, self, - DatabaseError, - {u'msg': u"Cursor is closed in execute.", - u'errno': ER_CURSOR_IS_CLOSED}) - - if _do_reset: - self.reset() - command = command.strip(u' \t\n\r') if command else None - if not command: - logger.warning(u'execute: no query is given to execute') - return - - try: - if self._connection.is_pyformat: - # pyformat/format paramstyle - # client side binding - processed_params = self._connection._process_params(params, self) - if logger.getEffectiveLevel() <= logging.DEBUG: - logger.debug(u'binding: [%s] with input=[%s], processed=[%s]', - self._format_query_for_log(command), - params, processed_params) - if len(processed_params) > 0: - query = command % processed_params - else: - query = command - processed_params = None # reset to None - else: - # qmark and numeric paramstyle - # server side binding - query = command - processed_params = self._connection._process_params_qmarks( - params, self) - # Skip reporting Key, Value and Type errors - except KeyError: - raise - except ValueError: - raise - except TypeError: - raise - except Exception: - self.connection.incident.report_incident() - raise - - m = DESC_TABLE_RE.match(query) - if m: - query1 = u'describe table {0}'.format(m.group(1)) - if logger.getEffectiveLevel() <= logging.WARNING: - logger.info( - u'query was rewritten: org=%s, new=%s', - u' '.join(line.strip() for line in query.split(u'\n')), - query1 - ) - query = query1 - - if logger.getEffectiveLevel() <= logging.INFO: - logger.info( - u'query: [%s]', self._format_query_for_log(query)) - ret = self._execute_helper( - query, - timeout=timeout, - binding_params=processed_params, - statement_params=_statement_params, - is_internal=_is_internal, - _no_results=_no_results, - _is_put_get=_is_put_get) - self._sfqid = ret[u'data'][ - u'queryId'] if 
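`execute()` above takes one of two binding paths. With the pyformat/format paramstyles the client interpolates the processed (escaped and quoted) parameters into the SQL text itself; with qmark/numeric the text is sent unchanged and the bindings travel separately. Illustratively, with `cur` assumed to be a cursor:

```
# client-side binding: the statement the server sees is already interpolated
'select * from t where id = %(id)s' % {'id': 42}
# -> 'select * from t where id = 42'

# server-side binding: the placeholder stays in the text
cur.execute('select * from t where id = ?', (42,))
```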
u'data' in ret and u'queryId' in ret[ - u'data'] else None - self._sqlstate = ret[u'data'][ - u'sqlState'] if u'data' in ret and u'sqlState' in ret[ - u'data'] else None - self._first_chunk_time = get_time_millis() - - # if server gives a send time, log the time it took to arrive - if u'data' in ret and u'sendResultTime' in ret[u'data']: - time_consume_first_result = self._first_chunk_time - ret[u'data'][ - u'sendResultTime'] - self._log_telemetry_job_data( - TelemetryField.TIME_CONSUME_FIRST_RESULT, - time_consume_first_result) - logger.debug('sfqid: %s', self.sfqid) - - logger.info('query execution done') - if ret[u'success']: - logger.debug(u'SUCCESS') - data = ret[u'data'] - - # logger.debug(ret) - logger.debug(u"PUT OR GET: %s", self.is_file_transfer) - if self.is_file_transfer: - sf_file_transfer_agent = SnowflakeFileTransferAgent( - self, query, ret, - put_callback=_put_callback, - put_azure_callback=_put_azure_callback, - put_callback_output_stream=_put_callback_output_stream, - get_callback=_get_callback, - get_azure_callback=_get_azure_callback, - get_callback_output_stream=_get_callback_output_stream, - show_progress_bar=_show_progress_bar, - raise_put_get_error=_raise_put_get_error, - force_put_overwrite=_force_put_overwrite) - sf_file_transfer_agent.execute() - data = sf_file_transfer_agent.result() - self._total_rowcount = len(data[u'rowset']) if \ - u'rowset' in data else -1 - m = self.ALTER_SESSION_RE.match(query) - if m: - # session parameters - param = m.group(1).upper() - value = m.group(2) - self._connection.converter.set_parameter(param, value) - - if _no_results: - self._total_rowcount = ret[u'data'][ - u'total'] if u'data' in ret and u'total' in ret[ - u'data'] else -1 - return data - self._init_result_and_meta(data, _use_ijson) - else: - self._total_rowcount = ret[u'data'][ - u'total'] if u'data' in ret and u'total' in ret[u'data'] else -1 - logger.debug(ret) - err = ret[u'message'] - code = ret.get(u'code', -1) - if u'data' in ret: - err += ret[u'data'].get(u'errorMessage', '') - errvalue = { - u'msg': err, - u'errno': int(code), - u'sqlstate': self._sqlstate, - u'sfqid': self._sfqid - } - Error.errorhandler_wrapper(self.connection, self, - ProgrammingError, - errvalue) - return self - - def _format_query_for_log(self, query): - return self._connection._format_query_for_log(query) - - def _is_dml(self, data): - return u'statementTypeId' in data \ - and int(data[u'statementTypeId']) in \ - STATEMENT_TYPE_ID_DML_SET - - def _init_result_and_meta(self, data, use_ijson=False): - is_dml = self._is_dml(data) - self._query_result_format = data.get(u'queryResultFormat', u'json') - - if self._total_rowcount == -1 and not is_dml and data.get(u'total') \ - is not None: - self._total_rowcount = data['total'] - - self._description = [] - - for column in data[u'rowtype']: - type_value = FIELD_NAME_TO_ID[column[u'type'].upper()] - self._description.append((column[u'name'], - type_value, - None, - column[u'length'], - column[u'precision'], - column[u'scale'], - column[u'nullable'])) - - self._result = ArrowResult(data, self) if self._query_result_format == 'arrow' \ - else self._json_result_class(data, self, use_ijson) - - if is_dml: - updated_rows = 0 - for idx, desc in enumerate(self._description): - if desc[0] in ( - u'number of rows updated', - u'number of multi-joined rows updated', - u'number of rows deleted') or \ - desc[0].startswith(u'number of rows inserted'): - updated_rows += int(data[u'rowset'][0][idx]) - if self._total_rowcount == -1: - self._total_rowcount = 
updated_rows
-            else:
-                self._total_rowcount += updated_rows
-
-    def query_result(self, qid, _use_ijson=False):
-        url = '/queries/{qid}/result'.format(qid=qid)
-        ret = self._connection.rest.request(url=url, method='get')
-        self._sfqid = ret[u'data'][
-            u'queryId'] if u'data' in ret and u'queryId' in ret[
-            u'data'] else None
-        self._sqlstate = ret[u'data'][
-            u'sqlState'] if u'data' in ret and u'sqlState' in ret[
-            u'data'] else None
-        logger.debug(u'sfqid=%s', self._sfqid)
-
-        if ret.get(u'success'):
-            data = ret.get(u'data')
-            self._init_result_and_meta(data, _use_ijson)
-        else:
-            logger.info(u'failed')
-            logger.debug(ret)
-            err = ret[u'message']
-            code = ret.get(u'code', -1)
-            if u'data' in ret:
-                err += ret[u'data'].get(u'errorMessage', '')
-            errvalue = {
-                u'msg': err,
-                u'errno': int(code),
-                u'sqlstate': self._sqlstate,
-                u'sfqid': self._sfqid
-            }
-            Error.errorhandler_wrapper(self.connection, self,
-                                       ProgrammingError,
-                                       errvalue)
-        return self
-
-    def fetch_pandas_batches(self):
-        u"""
-        Fetch Pandas dataframes in batches, where 'batch' refers to Snowflake Chunk
-        """
-        if self._query_result_format != 'arrow':
-            raise NotSupportedError
-        for df in self._result._fetch_pandas_batches():
-            yield df
-
-    def fetch_pandas_all(self):
-        u"""
-        Fetch a single Pandas dataframe containing all of the result set
-        """
-        if self._query_result_format != 'arrow':
-            raise NotSupportedError
-        return self._result._fetch_pandas_all()
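The two corrected docstrings above are easiest to see in use. A minimal, hypothetical sketch (assumes `ctx` is an open `snowflake.connector` connection, pandas is installed, and the server returned the result in Arrow format; `my_table` is illustrative):

```
cur = ctx.cursor()
cur.execute("SELECT * FROM my_table")

# fetch_pandas_batches(): one dataframe per Snowflake result chunk,
# so memory use stays bounded for large result sets.
for df in cur.fetch_pandas_batches():
    print(len(df))

# fetch_pandas_all(): materializes the whole result as one dataframe.
df_all = cur.execute("SELECT * FROM my_table").fetch_pandas_all()
```

Both methods raise `NotSupportedError` when the result arrived in JSON rather than Arrow format.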
-
-    def abort_query(self, qid):
-        url = '/queries/{qid}/abort-request'.format(qid=qid)
-        ret = self._connection.rest.request(url=url, method='post')
-        return ret.get(u'success')
-
-    def executemany(self, command, seqparams):
-        u"""
-        Executes a command/query with the given set of parameters sequentially.
-        """
-        logger.debug(u'executing many SQLs/commands')
-        command = command.strip(u' \t\n\r') if command else None
-
-        if len(seqparams) == 0:
-            errorvalue = {
-                u'msg': u"No parameters are specified for the command: "
-                        u"{}".format(command),
-                u'errno': ER_INVALID_VALUE,
-            }
-            Error.errorhandler_wrapper(
-                self.connection, self, InterfaceError, errorvalue
-            )
-            return self
-
-        if self.INSERT_SQL_RE.match(command):
-            if self._connection.is_pyformat:
-                logger.debug(u'rewriting INSERT query')
-                command_wo_comments = re.sub(self.COMMENT_SQL_RE, u'', command)
-                m = self.INSERT_SQL_VALUES_RE.match(command_wo_comments)
-                if not m:
-                    errorvalue = {
-                        u'msg': u"Failed to rewrite multi-row insert",
-                        u'errno': ER_FAILED_TO_REWRITE_MULTI_ROW_INSERT
-                    }
-                    Error.errorhandler_wrapper(
-                        self.connection, self, InterfaceError, errorvalue
-                    )
-
-                fmt = m.group(1)
-                values = []
-                for param in seqparams:
-                    logger.debug(u'parameter: %s', param)
-                    values.append(fmt % self._connection._process_params(
-                        param, self))
-                command = command.replace(fmt, u','.join(values), 1)
-                self.execute(command)
-                return self
-            else:
-                logger.debug(u'bulk insert')
-                num_params = len(seqparams[0])
-                pivot_param = []
-                for idx in range(num_params):
-                    pivot_param.append([])
-                for row in seqparams:
-                    if len(row) != num_params:
-                        errorvalue = {
-                            u'msg':
-                                u"Bulk data sizes don't match. Expected: {0}, "
-                                u"got: {1}, command: {2}".format(
-                                    num_params, len(row), command),
-                            u'errno': ER_INVALID_VALUE,
-                        }
-                        Error.errorhandler_wrapper(
-                            self.connection, self, InterfaceError, errorvalue
-                        )
-                        return self
-                    for idx, value in enumerate(row):
-                        pivot_param[idx].append(value)
-                self.execute(command, params=pivot_param)
-                return self
-
-        self.reset()
-        for param in seqparams:
-            self.execute(command, param, _do_reset=False)
-        return self
-
-    def fetchone(self):
-        """
-        Fetch one row
-        """
-        try:
-            return next(self._result)
-        except StopIteration:
-            return None
-
-    def fetchmany(self, size=None):
-        u"""
-        Fetch the specified number of rows
-        """
-        if size is None:
-            size = self.arraysize
-
-        if size < 0:
-            errorvalue = {
-                u'msg': (u"The number of rows is not zero or a "
-                         u"positive number: {0}").format(
-                    size),
-                u'errno': ER_NOT_POSITIVE_SIZE}
-            Error.errorhandler_wrapper(
-                self.connection, self, ProgrammingError, errorvalue)
-        ret = []
-        while size > 0:
-            row = self.fetchone()
-            if row is None:
-                break
-            ret.append(row)
-            if size is not None:
-                size -= 1
-
-        return ret
-
-    def fetchall(self):
-        u"""
-        Fetch all data
-        """
-        ret = []
-        while True:
-            row = self.fetchone()
-            if row is None:
-                break
-            ret.append(row)
-        return ret
-
-    def nextset(self):
-        u"""
-        Not supported
-        """
-        logger.debug(u'nop')
-        return None
-
-    def setinputsizes(self, _):
-        u"""
-        Not supported
-        """
-        logger.debug(u'nop')
-
-    def setoutputsize(self, _, column=None):
-        u"""
-        Not supported
-        """
-        del column
-        logger.debug(u'nop')
-
-    def scroll(self, value, mode=u'relative'):
-        Error.errorhandler_wrapper(
-            self.connection, self,
-            NotSupportedError,
-            {
-                u'msg': u"scroll is not supported.",
-                u'errno': ER_UNSUPPORTED_METHOD,
-                u'sqlstate': SQLSTATE_FEATURE_NOT_SUPPORTED})
-
-    def reset(self):
-        u"""
-        Reset the result set
-        """
-        self._total_rowcount = -1  # reset the rowcount
-        if self._result is not None:
-            self._result._reset()
-
-    def __iter__(self):
-        u"""
-        Iteration over the result set
-        """
-        return iter(self._result)
-
-    def __cancel_query(self, query):
-        if self._sequence_counter >= 0 and not self.is_closed():
-            logger.debug(u'canceled. %s, request_id: %s',
-                         query, self._request_id)
-            with self._lock_canceling:
-                self._connection._cancel_query(query, self._request_id)
-
-    def _log_telemetry_job_data(self, telemetry_field, value):
-        u"""
-        Builds an instance of TelemetryData with the given field and logs it
-        """
-        obj = {
-            'type': telemetry_field,
-            'query_id': self._sfqid,
-            'value': int(value)
-        }
-        ts = get_time_millis()
-        try:
-            self._connection._log_telemetry(TelemetryData(obj, ts))
-        except AttributeError:
-            logger.warning(
-                "Cursor failed to log to telemetry. Connection object may be None.",
-                exc_info=True)
-
-    def __enter__(self):
-        """
-        Context manager entry point
-        """
-        return self
-
-    def __exit__(self, exc_type, exc_val, exc_tb):
-        """
-        Context manager exit point: closes the cursor
-        """
-        self.close()
-
-
-class DictCursor(SnowflakeCursor):
-    """
-    Cursor returning results in a dictionary
-    """
-
-    def __init__(self, connection):
-        SnowflakeCursor.__init__(self, connection, DictJsonResult)
diff --git a/description.py b/description.py
deleted file mode 100644
index e6e433111..000000000
--- a/description.py
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-#
-# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved.
-#
-"""
-Various constants
-"""
-
-import platform
-import sys
-
-from .compat import TO_UNICODE
-from .version import VERSION
-
-
-SNOWFLAKE_CONNECTOR_VERSION = u'.'.join(TO_UNICODE(v) for v in VERSION[0:3])
-PYTHON_VERSION = u'.'.join(TO_UNICODE(v) for v in sys.version_info[:3])
-OPERATING_SYSTEM = platform.system()
-PLATFORM = platform.platform()
-IMPLEMENTATION = platform.python_implementation()
-COMPILER = platform.python_compiler()
-
-CLIENT_NAME = u"PythonConnector"  # don't change!
-CLIENT_VERSION = u'.'.join([TO_UNICODE(v) for v in VERSION[:3]])
diff --git a/encryption_util.py b/encryption_util.py
deleted file mode 100644
index 77ead1587..000000000
--- a/encryption_util.py
+++ /dev/null
@@ -1,164 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-#
-# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved.
-#
-
-
-import base64
-
-from logging import getLogger
-import json
-from Cryptodome.Cipher import AES
-import os
-import tempfile
-from collections import namedtuple
-from .compat import (PKCS5_PAD, PKCS5_UNPAD, PKCS5_OFFSET, TO_UNICODE)
-from .constants import UTF8
-
-
-def matdesc_to_unicode(matdesc):
-    """
-    Convert Material Descriptor to Unicode String
-    """
-    return TO_UNICODE(
-        json.dumps({
-            u'queryId': matdesc.query_id,
-            u'smkId': str(matdesc.smk_id),
-            u'keySize': str(matdesc.key_size)
-        },
-            separators=(',', ':')))
-
-
-"""
-Material Description
-"""
-MaterialDescriptor = namedtuple(
-    "MaterialDescriptor", [
-        "smk_id",  # SMK id
-        "query_id",  # query id
-        "key_size"  # key size, 128 or 256
-    ]
-)
-
-"""
-Metadata for encryption
-"""
-EncryptionMetadata = namedtuple(
-    "EncryptionMetadata", [
-        "key",
-        "iv",
-        "matdesc"
-    ]
-)
-
-
-class SnowflakeEncryptionUtil(object):
-    @staticmethod
-    def get_secure_random(byte_length):
-        return os.urandom(byte_length)
-
-    @staticmethod
-    def encrypt_file(encryption_material, in_filename,
-                     chunk_size=AES.block_size * 4 * 1024, tmp_dir=None):
-        """
-        Encrypts a file
-        :param encryption_material: encryption material
-        :param in_filename: input file name
-        :param chunk_size: read chunk size
-        :param tmp_dir: temporary directory, optional
-        :return: a pair of encryption metadata and the encrypted file name
-        """
-        logger = getLogger(__name__)
-        decoded_key = base64.standard_b64decode(
-            encryption_material.query_stage_master_key)
-        key_size = len(decoded_key)
-        logger.debug(u'key_size = %s', key_size)
-
-        # Generate key for data encryption
-        iv_data = SnowflakeEncryptionUtil.get_secure_random(AES.block_size)
-        file_key = SnowflakeEncryptionUtil.get_secure_random(key_size)
-        data_cipher = AES.new(key=file_key, mode=AES.MODE_CBC, IV=iv_data)
-
-        temp_output_fd, temp_output_file = tempfile.mkstemp(
-            text=False, dir=tmp_dir,
-            prefix=os.path.basename(in_filename) + "#")
-        padded = False
-        logger.debug(u'unencrypted file: %s, temp file: %s, tmp_dir: %s',
-                     in_filename, temp_output_file, tmp_dir)
-        with open(in_filename, u'rb') as infile:
-            with os.fdopen(temp_output_fd, u'wb') as outfile:
-                while True:
-                    chunk = infile.read(chunk_size)
-                    if len(chunk) == 0:
-                        break
-                    elif len(chunk) % AES.block_size != 0:
-                        chunk = PKCS5_PAD(chunk, AES.block_size)
-                        padded = True
-                    outfile.write(data_cipher.encrypt(chunk))
-                if not padded:
-                    outfile.write(data_cipher.encrypt(
-                        AES.block_size * chr(AES.block_size).encode(UTF8)))
-
-        # encrypt key with QRMK
-        key_cipher = AES.new(key=decoded_key, mode=AES.MODE_ECB)
-        enc_kek = key_cipher.encrypt(PKCS5_PAD(file_key, AES.block_size))
-
-        mat_desc = MaterialDescriptor(
-            smk_id=encryption_material.smk_id,
-            query_id=encryption_material.query_id,
-            key_size=key_size * 8)
-        metadata = EncryptionMetadata(
-            key=base64.b64encode(enc_kek).decode('utf-8'),
-            iv=base64.b64encode(iv_data).decode('utf-8'),
-            matdesc=matdesc_to_unicode(mat_desc),
-        )
-        return (metadata, temp_output_file)
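encrypt_file above implements client-side envelope encryption. A hedged, self-contained sketch of the same scheme using PyCryptodome directly (all names are illustrative; the connector's own code uses its PKCS5_PAD helpers rather than Cryptodome.Util.Padding):

```
import base64
import os

from Cryptodome.Cipher import AES
from Cryptodome.Util.Padding import pad

# Stand-ins for the connector's inputs: the base64-decoded query-stage
# master key (QRMK) and the plaintext to protect.
qrmk = os.urandom(32)
plaintext = b'example file contents'

# Fresh random IV and per-file key, as encrypt_file does.
iv = os.urandom(AES.block_size)
file_key = os.urandom(len(qrmk))

# 1. Encrypt the data with AES-CBC under the per-file key.
ciphertext = AES.new(file_key, AES.MODE_CBC, iv=iv).encrypt(
    pad(plaintext, AES.block_size))

# 2. Wrap the per-file key with the master key (AES-ECB, PKCS#5 padding)
#    and ship it, base64-encoded, alongside the data as metadata.
wrapped_key = base64.b64encode(
    AES.new(qrmk, AES.MODE_ECB).encrypt(pad(file_key, AES.block_size)))
```

Only the wrapped key travels with the file; whoever holds the master key can unwrap it and decrypt, which is exactly what decrypt_file below reverses.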
-
-    @staticmethod
-    def decrypt_file(metadata, encryption_material, in_filename,
-                     chunk_size=AES.block_size * 4 * 1024, tmp_dir=None):
-        """
-        Decrypts a file and stores the output in the temporary directory
-        :param metadata: metadata input
-        :param encryption_material: encryption material
-        :param in_filename: input file name
-        :param chunk_size: read chunk size
-        :param tmp_dir: temporary directory, optional
-        :return: a decrypted file name
-        """
-        logger = getLogger(__name__)
-        key_base64 = metadata.key
-        iv_base64 = metadata.iv
-        decoded_key = base64.standard_b64decode(
-            encryption_material.query_stage_master_key)
-        key_bytes = base64.standard_b64decode(key_base64)
-        iv_bytes = base64.standard_b64decode(iv_base64)
-
-        key_cipher = AES.new(key=decoded_key, mode=AES.MODE_ECB)
-        file_key = PKCS5_UNPAD(key_cipher.decrypt(key_bytes))
-
-        data_cipher = AES.new(key=file_key, mode=AES.MODE_CBC, IV=iv_bytes)
-
-        temp_output_fd, temp_output_file = tempfile.mkstemp(
-            text=False, dir=tmp_dir,
-            prefix=os.path.basename(in_filename) + "#")
-        total_file_size = 0
-        prev_chunk = None
-        logger.debug(u'encrypted file: %s, tmp file: %s',
-                     in_filename, temp_output_file)
-        with open(in_filename, u'rb') as infile:
-            with os.fdopen(temp_output_fd, u'wb') as outfile:
-                while True:
-                    chunk = infile.read(chunk_size)
-                    if len(chunk) == 0:
-                        break
-                    total_file_size += len(chunk)
-                    d = data_cipher.decrypt(chunk)
-                    outfile.write(d)
-                    prev_chunk = d
-                if prev_chunk is not None:
-                    total_file_size -= PKCS5_OFFSET(prev_chunk)
-                outfile.truncate(total_file_size)
-        return temp_output_file
diff --git a/errorcode.py b/errorcode.py
deleted file mode 100644
index 5f739d0e8..000000000
--- a/errorcode.py
+++ /dev/null
@@ -1,74 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-#
-# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved.
-# -u"""This module contains Snowflake error codes""" - -# network -ER_FAILED_TO_CONNECT_TO_DB = 250001 -ER_CONNECTION_IS_CLOSED = 250002 -ER_FAILED_TO_REQUEST = 250003 -ER_SERVER_CERTIFICATE_REVOKED = 250004 -ER_NOT_HTTPS_USED = 250005 -ER_FAILED_TO_SERVER = 250006 -ER_IDP_CONNECTION_ERROR = 250007 -ER_INCORRECT_DESTINATION = 250008 -ER_UNABLE_TO_OPEN_BROWSER = 250009 -ER_UNABLE_TO_START_WEBSERVER = 250010 -ER_INVALID_CERTIFICATE = 250011 # not used but keep here to reserve errno - -# connection -ER_NO_ACCOUNT_NAME = 251001 -ER_OLD_PYTHON = 251002 -ER_NO_WINDOWS_SUPPORT = 251003 -ER_FAILED_TO_GET_BOOTSTRAP = 251004 -ER_NO_USER = 251005 -ER_NO_PASSWORD = 251006 -ER_INVALID_VALUE = 251007 -ER_INVALID_PRIVATE_KEY = 251008 -ER_NO_HOSTNAME_FOUND = 251009 - -# cursor -ER_FAILED_TO_REWRITE_MULTI_ROW_INSERT = 252001 -ER_NO_ADDITIONAL_CHUNK = 252002 -ER_NOT_POSITIVE_SIZE = 252003 -ER_FAILED_PROCESSING_PYFORMAT = 252004 -ER_FAILED_TO_CONVERT_ROW_TO_PYTHON_TYPE = 252005 -ER_CURSOR_IS_CLOSED = 252006 -ER_FAILED_TO_RENEW_SESSION = 252007 -ER_UNSUPPORTED_METHOD = 252008 -ER_NO_DATA_FOUND = 252009 -ER_CHUNK_DOWNLOAD_FAILED = 252010 -ER_NOT_IMPLICITY_SNOWFLAKE_DATATYPE = 252011 - -# sfdatetime - -# file_transfer -ER_INVALID_STAGE_FS = 253001 -ER_FAILED_TO_DOWNLOAD_FROM_STAGE = 253002 -ER_FAILED_TO_UPLOAD_TO_STAGE = 253003 -ER_INVALID_STAGE_LOCATION = 253004 -ER_LOCAL_PATH_NOT_DIRECTORY = 253005 -ER_FILE_NOT_EXISTS = 253006 -ER_COMPRESSION_NOT_SUPPORTED = 253007 -ER_INTERNAL_NOT_MATCH_ENCRYPT_MATERIAL = 253008 -ER_FAILED_TO_CHECK_EXISTING_FILES = 253009 - -# chunk_downloader - -# ocsp -ER_FAILED_TO_GET_X509 = 254001 -ER_NO_CURL_CONFIG_FOUND = 254002 -ER_FAILED_TO_GET_CERTIFICATE_CHAIN = 254003 -ER_FAILED_TO_GET_OCSP_URI = 254004 -ER_OCSP_FAILED_TO_CONNECT_HOST = 254005 -ER_OPENSSL_IS_NOT_ACCESSIBLE = 254006 -ER_INVALID_OCSP_RESPONSE = 254007 -ER_CA_CERTIFICATE_NOT_FOUND = 254008 -ER_SERVER_CERTIFICATE_UNKNOWN = 254009 -ER_INVALID_OCSP_RESPONSE_CODE = 254010 -ER_INVALID_SSD = 254011 - -# converter -ER_NOT_SUPPORT_DATA_TYPE = 255001 diff --git a/errors.py b/errors.py deleted file mode 100644 index b6fd9c6d0..000000000 --- a/errors.py +++ /dev/null @@ -1,269 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. 
-#
-import logging
-from logging import getLogger
-
-from snowflake.connector.constants import UTF8
-from .compat import BASE_EXCEPTION_CLASS, PY2
-
-logger = getLogger(__name__)
-
-
-class Error(BASE_EXCEPTION_CLASS):
-    u"""
-    Base exception class for all other error exceptions
-    """
-
-    def __init__(self, msg=None, errno=None, sqlstate=None, sfqid=None,
-                 done_format_msg=False):
-        self.msg = msg
-        self.raw_msg = msg
-        self.errno = errno or -1
-        self.sqlstate = sqlstate or "n/a"
-        self.sfqid = sfqid
-
-        if not self.msg:
-            self.msg = u'Unknown error'
-
-        if self.errno != -1 and not done_format_msg:
-            if self.sqlstate != "n/a":
-                if logger.getEffectiveLevel() in (logging.INFO,
-                                                  logging.DEBUG):
-                    self.msg = u'{errno:06d} ({sqlstate}): {sfqid}: {msg}'.format(
-                        errno=self.errno, msg=self.msg,
-                        sqlstate=self.sqlstate,
-                        sfqid=self.sfqid)
-                else:
-                    self.msg = u'{errno:06d} ({sqlstate}): {msg}'.format(
-                        errno=self.errno,
-                        sqlstate=self.sqlstate,
-                        msg=self.msg)
-            else:
-                if logger.getEffectiveLevel() in (logging.INFO,
-                                                  logging.DEBUG):
-                    self.msg = u'{errno:06d}: {sfqid}: {msg}'.format(
-                        errno=self.errno, msg=self.msg,
-                        sfqid=self.sfqid)
-                else:
-                    self.msg = u'{errno:06d}: {msg}'.format(errno=self.errno,
-                                                            msg=self.msg)
-
-    def __repr__(self):
-        return self.__str__()
-
-    def __unicode__(self):
-        return self.msg
-
-    def __bytes__(self):
-        return self.__unicode__().encode(UTF8)
-
-    @staticmethod
-    def default_errorhandler(connection, cursor, errorclass, errorvalue):
-        u"""
-        Default error handler that raises an error
-        """
-        raise errorclass(
-            msg=errorvalue.get(u'msg'),
-            errno=errorvalue.get(u'errno'),
-            sqlstate=errorvalue.get(u'sqlstate'),
-            sfqid=errorvalue.get(u'sfqid'),
-            done_format_msg=errorvalue.get(u'done_format_msg'))
-
-    @staticmethod
-    def errorhandler_wrapper(connection, cursor, errorclass, errorvalue=None):
-        u"""
-        Error handler wrapper that calls the errorhandler method
-        """
-        if errorvalue is None:
-            # no value indicates errorclass is errorobject
-            errorobject = errorclass
-            errorclass = type(errorobject)
-            errorvalue = {
-                u'msg': errorobject.msg,
-                u'errno': errorobject.errno,
-                u'sqlstate': errorobject.sqlstate,
-                u'done_format_msg': True
-            }
-        else:
-            errorvalue[u'done_format_msg'] = False
-
-        if connection is not None:
-            connection.messages.append((errorclass, errorvalue))
-        if cursor is not None:
-            cursor.messages.append((errorclass, errorvalue))
-            cursor.errorhandler(connection, cursor, errorclass, errorvalue)
-            return
-        elif connection is not None:
-            connection.errorhandler(connection, cursor, errorclass, errorvalue)
-            return
-
-        if issubclass(errorclass, Error):
-            raise errorclass(msg=errorvalue[u'msg'],
-                             errno=errorvalue.get(u'errno'),
-                             sqlstate=errorvalue.get(u'sqlstate'),
-                             sfqid=errorvalue.get(u'sfqid'))
-        else:
-            raise errorclass(errorvalue)
-
-
-if PY2:
-    Error.__str__ = lambda self: self.__unicode__().encode(UTF8)
-else:
-    Error.__str__ = lambda self: self.__unicode__()
-
-
-class Warning(BASE_EXCEPTION_CLASS):
-    u"""Exception for important warnings"""
-    pass
-
-
-class InterfaceError(Error):
-    u"""Exception for errors related to the interface"""
-    pass
-
-
-class DatabaseError(Error):
-    u"""Exception for errors related to the database"""
-    pass
-
-
-class InternalError(DatabaseError):
-    u"""Exception for internal database errors"""
-    pass
-
-
-class OperationalError(DatabaseError):
-    u"""Exception for errors related to the database's operation"""
-    pass
-
-
-class ProgrammingError(DatabaseError):
-    u"""Exception for programming
errors""" - pass - - -class IntegrityError(DatabaseError): - u"""Exception for errors regarding relational integrity""" - pass - - -class DataError(DatabaseError): - u"""Exception for errors reporting problems with processed data""" - pass - - -class NotSupportedError(DatabaseError): - u"""Exception for errors when an unsupported database feature was used""" - pass - - -class RevocationCheckError(OperationalError): - u"""Exception for errors during certificate revocation check""" - pass - - -# internal errors -class InternalServerError(Error): - u"""Exception for 500 HTTP code for retry""" - - def __init__(self, **kwargs): - Error.__init__( - self, - msg=kwargs.get('msg') or u'HTTP 500: Internal Server Error', - errno=kwargs.get('errno'), - sqlstate=kwargs.get('sqlstate'), - sfqid=kwargs.get('sfqid')) - - -class ServiceUnavailableError(Error): - u"""Exception for 503 HTTP code for retry""" - - def __init__(self, **kwargs): - Error.__init__( - self, msg=kwargs.get('msg') or u'HTTP 503: Service Unavailable', - errno=kwargs.get('errno'), - sqlstate=kwargs.get('sqlstate'), - sfqid=kwargs.get('sfqid')) - - -class GatewayTimeoutError(Error): - u"""Exception for 504 HTTP error for retry""" - - def __init__(self, **kwargs): - Error.__init__( - self, msg=kwargs.get('msg') or u'HTTP 504: Gateway Timeout', - errno=kwargs.get('errno'), - sqlstate=kwargs.get('sqlstate'), - sfqid=kwargs.get('sfqid')) - - -class ForbiddenError(Error): - """Exception for 403 HTTP error for retry""" - - def __init__(self, **kwargs): - Error.__init__( - self, msg=kwargs.get('msg') or u'HTTP 403: Forbidden', - errno=kwargs.get('errno'), - sqlstate=kwargs.get('sqlstate'), - sfqid=kwargs.get('sfqid')) - - -class RequestTimeoutError(Error): - u"""Exception for 408 HTTP error for retry""" - - def __init__(self, **kwargs): - Error.__init__( - self, msg=kwargs.get('msg') or u'HTTP 408: Request Timeout', - errno=kwargs.get('errno'), - sqlstate=kwargs.get('sqlstate'), - sfqid=kwargs.get('sfqid')) - - -class BadRequest(Error): - u"""Exception for 400 HTTP error for retry""" - - def __init__(self, **kwargs): - Error.__init__( - self, msg=kwargs.get('msg') or u'HTTP 400: Bad Request', - errno=kwargs.get('errno'), - sqlstate=kwargs.get('sqlstate'), - sfqid=kwargs.get('sfqid')) - - -class BadGatewayError(Error): - u"""Exception for 502 HTTP error for retry""" - - def __init__(self, **kwargs): - Error.__init__( - self, msg=kwargs.get('msg') or u'HTTP 502: Bad Gateway', - errno=kwargs.get('errno'), - sqlstate=kwargs.get('sqlstate'), - sfqid=kwargs.get('sfqid')) - - -class MethodNotAllowed(Error): - u"""Exception for 405 HTTP error for retry""" - - def __init__(self, **kwargs): - Error.__init__( - self, msg=kwargs.get('msg') or u'HTTP 405: Method not allowed', - errno=kwargs.get('errno'), - sqlstate=kwargs.get('sqlstate'), - sfqid=kwargs.get('sfqid')) - - -class OtherHTTPRetryableError(Error): - """ - Exception for other HTTP error for retry - """ - - def __init__(self, **kwargs): - code = kwargs.get('code', 'n/a') - Error.__init__( - self, msg=kwargs.get('msg') or u'HTTP {0}'.format(code), - errno=kwargs.get('errno'), - sqlstate=kwargs.get('sqlstate'), - sfqid=kwargs.get('sfqid')) diff --git a/file_compression_type.py b/file_compression_type.py deleted file mode 100644 index 39a8a9bf1..000000000 --- a/file_compression_type.py +++ /dev/null @@ -1,125 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. 
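A hedged usage sketch for the exception hierarchy defined in errors.py above (`ctx` stands in for an open connection; the table name is illustrative):

```
from snowflake.connector import errors

try:
    ctx.cursor().execute("SELECT * FROM nonexistent_table")
except errors.ProgrammingError as e:
    # Every Error subclass carries the formatted message plus the raw
    # errno, sqlstate and query id that were used to build it.
    print(e.errno, e.sqlstate, e.sfqid)
    print(e.msg)
except errors.Error:
    raise  # any other connector error
```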
-#
-
-
-class FileCompressionType():
-    def __init__(self):
-        pass
-
-    Types = {
-        u'GZIP': {
-            u'name': u'GZIP',
-            u'file_extension': u'.gz',
-            u'mime_type': u'application',
-            u'mime_subtypes': [u'gzip', u'x-gzip'],
-            u'is_supported': True,
-        },
-        u'DEFLATE': {
-            u'name': u'DEFLATE',
-            u'file_extension': u'.deflate',
-            u'mime_type': u'application',
-            u'mime_subtypes': [u'zlib', u'deflate'],
-            u'is_supported': True,
-        },
-        u'RAW_DEFLATE': {
-            u'name': u'RAW_DEFLATE',
-            u'file_extension': u'.raw_deflate',
-            u'mime_type': u'application',
-            u'mime_subtypes': [u'raw_deflate'],
-            u'is_supported': True,
-        },
-        u'BZIP2': {
-            u'name': u'BZIP2',
-            u'file_extension': u'.bz2',
-            u'mime_type': u'application',
-            u'mime_subtypes': [u'bzip2', u'x-bzip2', u'x-bz2', u'x-bzip', u'bz2'],
-            u'is_supported': True,
-        },
-        u'LZIP': {
-            u'name': u'LZIP',
-            u'file_extension': u'.lz',
-            u'mime_type': u'application',
-            u'mime_subtypes': [u'lzip', u'x-lzip'],
-            u'is_supported': False,
-        },
-        u'LZMA': {
-            u'name': u'LZMA',
-            u'file_extension': u'.lzma',
-            u'mime_type': u'application',
-            u'mime_subtypes': [u'lzma', u'x-lzma'],
-            u'is_supported': False,
-        },
-        u'LZO': {
-            u'name': u'LZO',
-            u'file_extension': u'.lzo',
-            u'mime_type': u'application',
-            u'mime_subtypes': [u'lzo', u'x-lzo'],
-            u'is_supported': False,
-        },
-        u'XZ': {
-            u'name': u'XZ',
-            u'file_extension': u'.xz',
-            u'mime_type': u'application',
-            u'mime_subtypes': [u'xz', u'x-xz'],
-            u'is_supported': False,
-        },
-        u'COMPRESS': {
-            u'name': u'COMPRESS',
-            u'file_extension': u'.Z',
-            u'mime_type': u'application',
-            u'mime_subtypes': [u'compress', u'x-compress'],
-            u'is_supported': False,
-        },
-        u'PARQUET': {
-            u'name': u'PARQUET',
-            u'file_extension': u'.parquet',
-            u'mime_type': u'snowflake',
-            u'mime_subtypes': [u'parquet'],
-            u'is_supported': True,
-        },
-        u'ZSTD': {
-            u'name': u'ZSTD',
-            u'file_extension': u'.zst',
-            u'mime_type': u'application',
-            u'mime_subtypes': [u'zstd', u'x-zstd'],
-            u'is_supported': True,
-        },
-        u'BROTLI': {
-            u'name': u'BROTLI',
-            u'file_extension': u'.br',
-            u'mime_type': u'application',
-            u'mime_subtypes': [u'br', u'x-br'],
-            u'is_supported': True,
-        },
-        u'ORC': {
-            u'name': u'ORC',
-            u'file_extension': u'.orc',
-            u'mime_type': u'snowflake',
-            u'mime_subtypes': [u'orc'],
-            u'is_supported': True,
-        },
-    }
-
-    subtype_to_meta = {}
-
-    # TODO: Snappy avro doesn't need to be compressed again
-
-    @classmethod
-    def init(cls):
-        for meta in cls.Types.values():
-            for ms in meta[u'mime_subtypes']:
-                cls.subtype_to_meta[ms] = meta
-
-    @classmethod
-    def lookupByMimeSubType(cls, mime_subtype):
-        if mime_subtype.lower() in cls.subtype_to_meta:
-            return cls.subtype_to_meta[mime_subtype.lower()]
-        else:
-            return None
-
-
-# do init once
-FileCompressionType.init()
diff --git a/file_transfer_agent.py b/file_transfer_agent.py
deleted file mode 100644
index dc869d80c..000000000
--- a/file_transfer_agent.py
+++ /dev/null
@@ -1,973 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-#
-# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved.
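To make the FileCompressionType table above concrete, a small hypothetical lookup: `mimetypes` reports a compression `encoding` for a path (e.g. 'gzip' for a `.gz` file), which then resolves through the `subtype_to_meta` index built by `init()`:

```
import mimetypes

mimetypes.init()
# guess_type returns (mime_type, encoding); encoding is 'gzip' here.
_, encoding = mimetypes.guess_type('data/export.csv.gz')

meta = FileCompressionType.lookupByMimeSubType(encoding or '')
if meta is not None and meta[u'is_supported']:
    print(meta[u'name'], meta[u'file_extension'])  # GZIP .gz
else:
    print('unknown or unsupported compression')
```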
-# -import binascii -import glob -import mimetypes -import os -import shutil -import sys -import tempfile -import threading -from logging import getLogger -from multiprocessing.pool import ThreadPool -from time import (time, sleep) - -import botocore.exceptions - -from .azure_util import SnowflakeAzureUtil -from .compat import (GET_CWD, TO_UNICODE, IS_WINDOWS) -from .constants import (SHA256_DIGEST, ResultStatus) -from .converter_snowsql import SnowflakeConverterSnowSQL -from .errorcode import (ER_INVALID_STAGE_FS, ER_INVALID_STAGE_LOCATION, - ER_LOCAL_PATH_NOT_DIRECTORY, - ER_FILE_NOT_EXISTS, - ER_COMPRESSION_NOT_SUPPORTED, - ER_INTERNAL_NOT_MATCH_ENCRYPT_MATERIAL, - ER_FAILED_TO_DOWNLOAD_FROM_STAGE, - ER_FAILED_TO_UPLOAD_TO_STAGE) -from .errors import (Error, OperationalError, InternalError, DatabaseError, - ProgrammingError) -from .file_compression_type import FileCompressionType -from .file_util import SnowflakeFileUtil -from .local_util import SnowflakeLocalUtil -from .remote_storage_util import (SnowflakeFileEncryptionMaterial, - SnowflakeRemoteStorageUtil, - ) -from .s3_util import SnowflakeS3Util - -S3_FS = u'S3' -AZURE_FS = u'AZURE' -LOCAL_FS = u'LOCAL_FS' -CMD_TYPE_UPLOAD = u'UPLOAD' -CMD_TYPE_DOWNLOAD = u'DOWNLOAD' - -RESULT_TEXT_COLUMN_DESC = lambda name: { - u'name': name, u'type': u'text', - u'length': 16777216, u'precision': None, - u'scale': None, u'nullable': False} -RESULT_FIXED_COLUMN_DESC = lambda name: { - u'name': name, u'type': u'fixed', - u'length': 5, u'precision': 0, - u'scale': 0, - u'nullable': False} - -MB = 1024.0 * 1024.0 - -INJECT_WAIT_IN_PUT = 0 - -logger = getLogger(__name__) - - -def _update_progress( - file_name, start_time, total_size, progress, - output_stream=sys.stdout, show_progress_bar=True): - barLength = 10 # Modify this to change the length of the progress bar - total_size /= MB - status = "" - elapsed_time = time() - start_time - throughput = (total_size / elapsed_time) if elapsed_time != 0.0 else 0.0 - if isinstance(progress, int): - progress = float(progress) - if not isinstance(progress, float): - progress = 0 - status = "error: progress var must be float\r\n" - if progress < 0: - progress = 0 - status = "Halt...\r\n" - if progress >= 1: - progress = 1 - status = "Done ({elapsed_time:.3f}s, {throughput:.2f}MB/s).\r\n".format( - elapsed_time=elapsed_time, - throughput=throughput) - if not status and show_progress_bar: - status = "({elapsed_time:.3f}s, {throughput:.2f}MB/s)".format( - elapsed_time=elapsed_time, - throughput=throughput) - if status: - block = int(round(barLength * progress)) - text = "\r{file_name}({size:.2f}MB): [{bar}] {percentage:.2f}% {status}".format( - file_name=file_name, - size=total_size, - bar="#" * block + "-" * (barLength - block), - percentage=progress * 100.0, - status=status) - output_stream.write(text) - output_stream.flush() - logger.debug('filename: %s, start_time: %s, total_size: %s, progress: %s, ' - 'show_progress_bar: %s', - file_name, start_time, total_size, progress, show_progress_bar) - return progress == 1.0 - - -class SnowflakeProgressPercentage(object): - """ - Built-in Progress bar for PUT commands. 
- """ - - def __init__( - self, filename, filesize, - output_stream=sys.stdout, - show_progress_bar=True): - last_pound_char = filename.rfind('#') - if last_pound_char < 0: - last_pound_char = len(filename) - self._filename = os.path.basename(filename[0:last_pound_char]) - self._output_stream = output_stream - self._show_progress_bar = show_progress_bar - self._size = float(filesize) - self._seen_so_far = 0 - self._done = False - self._start_time = time() - self._lock = threading.Lock() - - def __call__(self, bytes_amount): - raise NotImplementedError - - -class SnowflakeS3ProgressPercentage(SnowflakeProgressPercentage): - def __init__( - self, filename, filesize, - output_stream=sys.stdout, - show_progress_bar=True): - super(SnowflakeS3ProgressPercentage, self).__init__( - filename, filesize, - output_stream=output_stream, - show_progress_bar=show_progress_bar) - - def __call__(self, bytes_amount): - # logger.debug("Bytes returned from callback %s", bytes_amount) - with self._lock: - if self._output_stream: - self._seen_so_far += bytes_amount - percentage = float(self._seen_so_far / self._size) - if not self._done: - self._done = _update_progress( - self._filename, self._start_time, - self._size, percentage, - output_stream=self._output_stream, - show_progress_bar=self._show_progress_bar) - - -class SnowflakeAzureProgressPercentage(SnowflakeProgressPercentage): - def __init__(self, filename, filesize, - output_stream=sys.stdout, - show_progress_bar=True): - super(SnowflakeAzureProgressPercentage, self).__init__( - filename, filesize, - output_stream=output_stream, - show_progress_bar=show_progress_bar) - - def __call__(self, current): - with self._lock: - if self._output_stream: - self._seen_so_far = current - percentage = float(self._seen_so_far / self._size) - if not self._done: - self._done = _update_progress( - self._filename, self._start_time, - self._size, percentage, - output_stream=self._output_stream, - show_progress_bar=self._show_progress_bar) - - -class SnowflakeFileTransferAgent(object): - """ - Snowflake File Transfer Agent """ - - def __init__(self, cursor, command, ret, - put_callback=None, - put_azure_callback=None, - put_callback_output_stream=sys.stdout, - get_callback=None, - get_azure_callback=None, - get_callback_output_stream=sys.stdout, - show_progress_bar=True, - raise_put_get_error=False, - force_put_overwrite=True): - self._cursor = cursor - self._command = command - self._ret = ret - self._put_callback = put_callback - self._put_azure_callback = \ - put_azure_callback if put_azure_callback else put_callback - self._put_callback_output_stream = put_callback_output_stream - self._get_callback = get_callback - self._get_azure_callback = \ - get_azure_callback if get_azure_callback else get_callback - self._get_callback_output_stream = get_callback_output_stream - self._use_accelerate_endpoint = False - self._raise_put_get_error = raise_put_get_error - self._show_progress_bar = show_progress_bar - self._force_put_overwrite = force_put_overwrite - - def execute(self): - self._parse_command() - - self._init_file_metadata() - - if self._command_type == CMD_TYPE_UPLOAD: - self._process_file_compression_type() - - self._transfer_accelerate_config() - - if self._command_type == CMD_TYPE_DOWNLOAD: - if not os.path.isdir(self._local_location): - os.makedirs(self._local_location) - - if self._stage_location_type == LOCAL_FS: - if not os.path.isdir(self._stage_info[u'location']): - os.makedirs(self._stage_info[u'location']) - - small_file_metas = [] - large_file_metas = [] - 
for meta in self._file_metadata.values(): - meta[u'overwrite'] = self._overwrite - meta[u'self'] = self - if self._stage_location_type != LOCAL_FS: - meta[u'put_callback'] = self._put_callback - meta[u'put_azure_callback'] = self._put_azure_callback - meta[u'put_callback_output_stream'] = \ - self._put_callback_output_stream - meta[u'get_callback'] = self._get_callback - meta[u'get_azure_callback'] = self._get_azure_callback - meta[u'get_callback_output_stream'] = \ - self._get_callback_output_stream - meta[u'show_progress_bar'] = self._show_progress_bar - - # multichunk uploader threshold - if self._stage_location_type == S3_FS: - size_threshold = SnowflakeS3Util.DATA_SIZE_THRESHOLD - else: - size_threshold = SnowflakeAzureUtil.DATA_SIZE_THRESHOLD - if meta.get(u'src_file_size', 1) > size_threshold: - meta[u'parallel'] = self._parallel - large_file_metas.append(meta) - else: - meta[u'parallel'] = 1 - small_file_metas.append(meta) - else: - meta[u'parallel'] = 1 - small_file_metas.append(meta) - - logger.debug(u'parallel=[%s]', self._parallel) - self._results = [] - if self._command_type == CMD_TYPE_UPLOAD: - self.upload(large_file_metas, small_file_metas) - else: - self.download(large_file_metas, small_file_metas) - - # turn enum to string, in order to have backward compatible interface - for result in self._results: - result[u'result_status'] = result[u'result_status'].value - - def upload(self, large_file_metas, small_file_metas): - storage_client = SnowflakeFileTransferAgent.get_storage_client( - self._stage_location_type) - client = storage_client.create_client( - self._stage_info, - use_accelerate_endpoint=self._use_accelerate_endpoint - ) - for meta in small_file_metas: - meta[u'client'] = client - for meta in large_file_metas: - meta[u'client'] = client - - if len(small_file_metas) > 0: - self._upload_files_in_parallel(small_file_metas) - if len(large_file_metas) > 0: - self._upload_files_in_sequential(large_file_metas) - - def _transfer_accelerate_config(self): - if self._stage_location_type == S3_FS: - client = SnowflakeRemoteStorageUtil.create_client( - self._stage_info, - use_accelerate_endpoint=False) - s3location = SnowflakeS3Util.extract_bucket_name_and_path( - self._stage_info[u'location'] - ) - try: - ret = client.meta.client.get_bucket_accelerate_configuration( - Bucket=s3location.bucket_name) - self._use_accelerate_endpoint = \ - ret and 'Status' in ret and \ - ret['Status'] == 'Enabled' - except botocore.exceptions.ClientError as e: - if e.response['Error'].get('Code', 'Unknown') == \ - 'AccessDenied': - logger.debug(e) - else: - # unknown error - logger.debug(e, exc_info=True) - - logger.debug( - 'use_accelerate_endpoint: %s', - self._use_accelerate_endpoint) - - def _upload_files_in_parallel(self, file_metas): - """ - Uploads files in parallel - """ - idx = 0 - len_file_metas = len(file_metas) - while idx < len_file_metas: - end_of_idx = idx + self._parallel if \ - idx + self._parallel <= len_file_metas else \ - len_file_metas - - logger.debug( - u'uploading files idx: {0}/{1}'.format(idx + 1, end_of_idx)) - - target_meta = file_metas[idx:end_of_idx] - while True: - pool = ThreadPool(processes=len(target_meta)) - results = pool.map( - SnowflakeFileTransferAgent.upload_one_file, - target_meta) - pool.close() - pool.join() - - # need renew AWS token? 
-                retry_meta = []
-                for result_meta in results:
-                    if result_meta[
-                        u'result_status'] == ResultStatus.RENEW_TOKEN:
-                        retry_meta.append(result_meta)
-                    else:
-                        self._results.append(result_meta)
-
-                if len(retry_meta) == 0:
-                    # no new AWS token is required
-                    break
-                client = self.renew_expired_aws_token()
-                for result_meta in retry_meta:
-                    result_meta[u'client'] = client
-                if end_of_idx < len_file_metas:
-                    for idx0 in range(idx + self._parallel, len_file_metas):
-                        file_metas[idx0][u'client'] = client
-                target_meta = retry_meta
-
-            if end_of_idx == len_file_metas:
-                break
-            idx += self._parallel
-
-    def _upload_files_in_sequential(self, file_metas):
-        """
-        Uploads files sequentially. Retries if the AWS token expires
-        """
-        idx = 0
-        len_file_metas = len(file_metas)
-        while idx < len_file_metas:
-            logger.debug(
-                u'uploading files idx: {0}/{1}'.format(idx + 1, len_file_metas))
-            result = SnowflakeFileTransferAgent.upload_one_file(
-                file_metas[idx])
-            if result[u'result_status'] == ResultStatus.RENEW_TOKEN:
-                client = self.renew_expired_aws_token()
-                for idx0 in range(idx, len_file_metas):
-                    file_metas[idx0][u'client'] = client
-                continue
-            self._results.append(result)
-            idx += 1
-            if INJECT_WAIT_IN_PUT > 0:
-                logger.debug('LONGEVITY TEST: waiting for %s',
-                             INJECT_WAIT_IN_PUT)
-                sleep(INJECT_WAIT_IN_PUT)
-
-    @staticmethod
-    def get_storage_client(stage_location_type):
-        if (stage_location_type == LOCAL_FS):
-            return SnowflakeLocalUtil
-        elif (stage_location_type in [S3_FS, AZURE_FS]):
-            return SnowflakeRemoteStorageUtil
-        else:
-            return None
-
-    @staticmethod
-    def upload_one_file(meta):
-        """
-        Uploads one file
-        """
-        logger = getLogger(__name__)
-
-        logger.debug(u"uploading file=%s", meta[u'src_file_name'])
-        meta[u'real_src_file_name'] = meta[u'src_file_name']
-        tmp_dir = tempfile.mkdtemp()
-        meta[u'tmp_dir'] = tmp_dir
-        try:
-            if meta[u'require_compress']:
-                logger.debug(u'compressing file=%s', meta[u'src_file_name'])
-                meta[u'real_src_file_name'], upload_size = \
-                    SnowflakeFileUtil.compress_file_with_gzip(
-                        meta[u'src_file_name'], tmp_dir)
-            logger.debug(
-                u'getting digest file=%s', meta[u'real_src_file_name'])
-            sha256_digest, upload_size = \
-                SnowflakeFileUtil.get_digest_and_size_for_file(
-                    meta[u'real_src_file_name'])
-            meta[SHA256_DIGEST] = sha256_digest
-            meta[u'upload_size'] = upload_size
-            logger.debug(u'really uploading data')
-            storage_client = SnowflakeFileTransferAgent.get_storage_client(
-                meta[u'stage_location_type'])
-            storage_client.upload_one_file_with_retry(meta)
-            logger.debug(
-                u'done: status=%s, file=%s, real file=%s',
-                meta[u'result_status'],
-                meta[u'src_file_name'],
-                meta[u'real_src_file_name'])
-        except Exception as e:
-            logger.exception(
-                u'Failed to upload a file: file=%s, real file=%s',
-                meta[u'src_file_name'],
-                meta[u'real_src_file_name'])
-            meta[u'dst_file_size'] = 0
-            if u'result_status' not in meta:
-                meta[u'result_status'] = ResultStatus.ERROR
-            meta[u'error_details'] = TO_UNICODE(e)
-            meta[u'error_details'] += \
-                u", file={}, real file={}".format(
-                    meta.get(u'src_file_name'), meta.get(u'real_src_file_name'))
-        finally:
-            logger.debug(u'cleaning up tmp dir: %s', tmp_dir)
-            shutil.rmtree(tmp_dir)
-        return meta
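The parallel paths above all share one retry idiom: run a batch of transfers in a ThreadPool, collect the metas that came back with RENEW_TOKEN, refresh the client, and re-run only those. A condensed, hedged sketch of that pattern (standalone, with plain dicts and strings standing in for the agent's metas and ResultStatus):

```
from multiprocessing.pool import ThreadPool

def run_batch_with_token_renewal(batch, do_one, renew_client):
    completed = []
    while batch:
        pool = ThreadPool(processes=len(batch))
        results = pool.map(do_one, batch)
        pool.close()
        pool.join()
        # Split finished transfers from those whose credentials expired.
        retry = []
        for meta in results:
            target = retry if meta['result_status'] == 'RENEW_TOKEN' else completed
            target.append(meta)
        if retry:
            client = renew_client()  # fresh credentials for the retried files
            for meta in retry:
                meta['client'] = client
        batch = retry
    return completed
```

The design choice is the same as in the agent: failed transfers are never dropped, they simply go around the loop again with a renewed client, so a token expiry mid-batch costs only the affected files.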
-
-    def download(self, large_file_metas, small_file_metas):
-        storage_client = SnowflakeFileTransferAgent.get_storage_client(
-            self._stage_location_type)
-        client = storage_client.create_client(
-            self._stage_info,
-            use_accelerate_endpoint=self._use_accelerate_endpoint
-        )
-        for meta in small_file_metas:
-            meta[u'client'] = client
-        for meta in large_file_metas:
-            meta[u'client'] = client
-
-        if len(small_file_metas) > 0:
-            self._download_files_in_parallel(small_file_metas)
-        if len(large_file_metas) > 0:
-            self._download_files_in_sequential(large_file_metas)
-
-    def _download_files_in_parallel(self, file_metas):
-        """
-        Download files in parallel
-        """
-        idx = 0
-        len_file_metas = len(file_metas)
-        while idx < len_file_metas:
-            end_of_idx = idx + self._parallel if \
-                idx + self._parallel <= len_file_metas else \
-                len_file_metas
-
-            logger.debug(
-                'downloading files idx: {0} to {1}'.format(idx, end_of_idx))
-
-            target_meta = file_metas[idx:end_of_idx]
-            while True:
-                pool = ThreadPool(processes=len(target_meta))
-                results = pool.map(
-                    SnowflakeFileTransferAgent.download_one_file,
-                    target_meta)
-                pool.close()
-                pool.join()
-
-                # need renew AWS token?
-                retry_meta = []
-                for result_meta in results:
-                    if result_meta[
-                        u'result_status'] == ResultStatus.RENEW_TOKEN:
-                        retry_meta.append(result_meta)
-                    else:
-                        self._results.append(result_meta)
-
-                if len(retry_meta) == 0:
-                    # no new AWS token is required
-                    break
-                client = self.renew_expired_aws_token()
-                for result_meta in retry_meta:
-                    result_meta[u'client'] = client
-                if end_of_idx < len_file_metas:
-                    for idx0 in range(idx + self._parallel, len_file_metas):
-                        file_metas[idx0][u'client'] = client
-                target_meta = retry_meta
-
-            if end_of_idx == len_file_metas:
-                break
-            idx += self._parallel
-
-    def _download_files_in_sequential(self, file_metas):
-        """
-        Downloads files sequentially. Retries if the AWS token expires
-        """
-        idx = 0
-        len_file_metas = len(file_metas)
-        while idx < len_file_metas:
-            result = SnowflakeFileTransferAgent.download_one_file(
-                file_metas[idx])
-            if result[u'result_status'] == ResultStatus.RENEW_TOKEN:
-                client = self.renew_expired_aws_token()
-                for idx0 in range(idx, len_file_metas):
-                    file_metas[idx0][u'client'] = client
-                continue
-            self._results.append(result)
-            idx += 1
-            if INJECT_WAIT_IN_PUT > 0:
-                logger.debug('LONGEVITY TEST: waiting for %s',
-                             INJECT_WAIT_IN_PUT)
-                sleep(INJECT_WAIT_IN_PUT)
-
-    @staticmethod
-    def download_one_file(meta):
-        """
-        Downloads one file
-        """
-        logger = getLogger(__name__)
-
-        tmp_dir = tempfile.mkdtemp()
-        meta[u'tmp_dir'] = tmp_dir
-        try:
-            storage_client = SnowflakeFileTransferAgent.get_storage_client(
-                meta[u'stage_location_type'])
-            storage_client.download_one_file(meta)
-            logger.debug(
-                u'done: status=%s, file=%s',
-                meta.get(u'result_status'),
-                meta.get(u'dst_file_name'))
-        except Exception as e:
-            logger.exception(u'Failed to download a file: %s',
-                             meta[u'dst_file_name'])
-            meta[u'dst_file_size'] = -1
-            if u'result_status' not in meta:
-                meta[u'result_status'] = ResultStatus.ERROR
-            meta[u'error_details'] = TO_UNICODE(e)
-            meta[u'error_details'] += \
-                u', file={}'.format(meta.get(u'dst_file_name'))
-        finally:
-            logger.debug(u'cleaning up tmp dir: %s', tmp_dir)
-            shutil.rmtree(tmp_dir)
-        return meta
-
-    def renew_expired_aws_token(self):
-        logger = getLogger(__name__)
-        logger.debug(u'renewing expired aws token')
-        ret = self._cursor._execute_helper(
-            self._command)  # rerun the command to get the credential
-        stage_info = ret[u'data'][u'stageInfo']
-        storage_client = SnowflakeFileTransferAgent.get_storage_client(
-            self._stage_location_type)
-        return storage_client.create_client(
-            stage_info,
-            use_accelerate_endpoint=self._use_accelerate_endpoint)
-
-    def result(self):
-        converter_class = self._cursor._connection.converter_class
-        rowset = []
-        if self._command_type == CMD_TYPE_UPLOAD:
-
if hasattr(self, u'_results'): - for meta in self._results: - if meta[u'src_compression_type'] is not None: - src_compression_type = meta[u'src_compression_type'][ - u'name'] - else: - src_compression_type = u'NONE' - - if meta[u'dst_compression_type'] is not None: - dst_compression_type = meta[u'dst_compression_type'][ - u'name'] - else: - dst_compression_type = u'NONE' - - error_details = meta.get(u'error_details', u'') - - src_file_size = meta[u'src_file_size'] \ - if converter_class != SnowflakeConverterSnowSQL \ - else TO_UNICODE(meta[u'src_file_size']) - - dst_file_size = meta[u'dst_file_size'] \ - if converter_class != SnowflakeConverterSnowSQL \ - else TO_UNICODE(meta[u'dst_file_size']) - - logger.debug("raise_put_get_error: %s, %s, %s, %s, %s", - self._raise_put_get_error, - meta[u'result_status'], - type(meta[u'result_status']), - ResultStatus.ERROR, - type(ResultStatus.ERROR)) - if self._raise_put_get_error and error_details: - Error.errorhandler_wrapper( - self._cursor.connection, self._cursor, - OperationalError, - { - u'msg': error_details, - u'errno': ER_FAILED_TO_UPLOAD_TO_STAGE, - } - ) - rowset.append([ - meta[u'name'], - meta[u'dst_file_name'], - src_file_size, - dst_file_size, - src_compression_type, - dst_compression_type, - meta[u'result_status'], - error_details - ]) - return { - u'rowtype': [ - RESULT_TEXT_COLUMN_DESC(u'source'), - RESULT_TEXT_COLUMN_DESC(u'target'), - RESULT_FIXED_COLUMN_DESC(u'source_size'), - RESULT_FIXED_COLUMN_DESC(u'target_size'), - RESULT_TEXT_COLUMN_DESC(u'source_compression'), - RESULT_TEXT_COLUMN_DESC(u'target_compression'), - RESULT_TEXT_COLUMN_DESC(u'status'), - RESULT_TEXT_COLUMN_DESC(u'message'), - ], - u'rowset': sorted(rowset), - } - else: # DOWNLOAD - if hasattr(self, u'_results'): - for meta in self._results: - dst_file_size = meta[u'dst_file_size'] \ - if converter_class != SnowflakeConverterSnowSQL \ - else TO_UNICODE(meta[u'dst_file_size']) - - error_details = meta.get(u'error_details', u'') - - if self._raise_put_get_error and error_details: - Error.errorhandler_wrapper( - self._cursor.connection, self._cursor, - OperationalError, - { - u'msg': error_details, - u'errno': ER_FAILED_TO_DOWNLOAD_FROM_STAGE, - } - ) - - rowset.append([ - meta[u'dst_file_name'], - dst_file_size, - meta[u'result_status'], - error_details - ]) - return { - u'rowtype': [ - RESULT_TEXT_COLUMN_DESC(u'file'), - RESULT_FIXED_COLUMN_DESC(u'size'), - RESULT_TEXT_COLUMN_DESC(u'status'), - RESULT_TEXT_COLUMN_DESC(u'message'), - ], - u'rowset': sorted(rowset), - } - - def _expand_filenames(self, locations): - canonical_locations = [] - for file_name in locations: - if self._command_type == CMD_TYPE_UPLOAD: - file_name = os.path.expanduser(file_name) - if not os.path.isabs(file_name): - file_name = os.path.join(GET_CWD(), file_name) - if IS_WINDOWS and len(file_name) > 2 \ - and file_name[0] == u'/' and file_name[2] == u':': - # Windows path: /C:/data/file1.txt where it starts with slash - # followed by a drive letter and colon. 
- file_name = file_name[1:] - files = glob.glob(file_name) - canonical_locations += files - else: - canonical_locations.append(file_name) - - return canonical_locations - - def _init_encryption_material(self): - self._encryption_material = [] - - if u'data' in self._ret and \ - u'encryptionMaterial' in self._ret[u'data'] and \ - self._ret[u'data'][u'encryptionMaterial'] is not None: - root_node = self._ret[u'data'][u'encryptionMaterial'] - logger.debug(self._command_type) - logger.debug(u'root_node=%s', root_node) - - if self._command_type == CMD_TYPE_UPLOAD: - self._encryption_material.append( - SnowflakeFileEncryptionMaterial( - query_stage_master_key=root_node[ - u'queryStageMasterKey'], - query_id=root_node[u'queryId'], - smk_id=root_node[u'smkId'])) - else: - for elem in root_node: - if elem is not None: - self._encryption_material.append( - SnowflakeFileEncryptionMaterial( - query_stage_master_key=elem[ - u'queryStageMasterKey'], - query_id=elem[u'queryId'], - smk_id=elem[u'smkId'])) - - def _parse_command(self): - if u'data' in self._ret: - self._command_type = self._ret[u'data'][u'command'] - else: - self._command_type = u'Unknown' - - self._init_encryption_material() - if u'data' in self._ret and \ - u'src_locations' in self._ret[u'data'] and \ - isinstance(self._ret[u'data'][u'src_locations'], list): - self._src_locations = self._ret[u'data'][u'src_locations'] - else: - Error.errorhandler_wrapper( - self._cursor.connection, self._cursor, - DatabaseError, - { - u'msg': u'Failed to parse the location', - u'errno': ER_INVALID_STAGE_LOCATION - } - ) - - if self._command_type == CMD_TYPE_UPLOAD: - self._src_files = self._expand_filenames(self._src_locations) - self._auto_compress = \ - u'autoCompress' not in self._ret[u'data'] or \ - self._ret[u'data'][u'autoCompress'] - self._source_compression = self._ret[u'data'][ - u'sourceCompression'].lower() \ - if u'sourceCompression' in self._ret[u'data'] else u'' - else: - self._src_files = set(self._src_locations) - self._src_file_to_encryption_material = {} - if len(self._ret[u'data'][u'src_locations']) == len( - self._encryption_material): - for idx, src_location in enumerate(self._src_locations): - logger.debug(src_location) - logger.debug(self._encryption_material[idx]) - self._src_file_to_encryption_material[src_location] = \ - self._encryption_material[idx] - elif len(self._encryption_material) != 0: - # some encryption material exists. 
Zero means no encryption - Error.errorhandler_wrapper( - self._cursor.connection, self._cursor, - InternalError, - { - u'msg': ( - u"The number of downloading files doesn't match " - u"the encryption materials: " - u"files={files}, encmat={encmat}").format( - files=len(self._ret[u'data'][u'src_locations']), - encmat=len(self._encryption_material)), - u'errno': - ER_INTERNAL_NOT_MATCH_ENCRYPT_MATERIAL - }) - - self._local_location = os.path.expanduser( - self._ret[u'data'][u'localLocation']) - if not os.path.isdir(self._local_location): - # NOTE: isdir follows the symlink - Error.errorhandler_wrapper( - self._cursor.connection, self._cursor, - ProgrammingError, - { - u'msg': - u'The local path is not a directory: {0}'.format( - self._local_location), - u'errno': ER_LOCAL_PATH_NOT_DIRECTORY - }) - - self._parallel = self._ret[u'data'].get(u'parallel', 1) - self._overwrite = self._force_put_overwrite or \ - self._ret[u'data'].get(u'overwrite', False) - self._stage_location_type = self._ret[u'data'][u'stageInfo'][ - u'locationType'].upper() - self._stage_location = self._ret[u'data'][u'stageInfo'][u'location'] - self._stage_info = self._ret[u'data'][u'stageInfo'] - - if self.get_storage_client(self._stage_location_type) is None: - Error.errorhandler_wrapper( - self._cursor.connection, self._cursor, - OperationalError, - { - u'msg': (u'Destination location type is not valid: ' - u'{stage_location_type}').format( - stage_location_type=self._stage_location_type, - ), - u'errno': ER_INVALID_STAGE_FS - }) - - def _init_file_metadata(self): - logger.debug(u"command type: %s", self._command_type) - self._file_metadata = {} - if self._command_type == CMD_TYPE_UPLOAD: - if len(self._src_files) == 0: - file_name = self._ret[u'data'][u'src_locations'] \ - if u'data' in self._ret and u'src_locations' in \ - self._ret[u'data'] else u'None' - Error.errorhandler_wrapper( - self._cursor.connection, self._cursor, - ProgrammingError, - { - u'msg': u"File doesn't exist: {file}".format( - file=file_name), - u'errno': ER_FILE_NOT_EXISTS - }) - for file_name in self._src_files: - if not os.path.exists(file_name): - Error.errorhandler_wrapper( - self._cursor.connection, self._cursor, - ProgrammingError, - { - u'msg': u"File doesn't exist: {file}".format( - file=file_name), - u'errno': ER_FILE_NOT_EXISTS - }) - elif os.path.isdir(file_name): - Error.errorhandler_wrapper( - self._cursor.connection, self._cursor, - ProgrammingError, - { - u'msg': (u"Not a file but " - u"a directory: {file}").format( - file=file_name), - u'errno': ER_FILE_NOT_EXISTS - }) - statinfo = os.stat(file_name) - self._file_metadata[file_name] = { - u'name': os.path.basename(file_name), - u'src_file_name': file_name, - u'src_file_size': statinfo.st_size, - u'stage_location_type': self._stage_location_type, - u'stage_info': self._stage_info, - } - if len(self._encryption_material) > 0: - self._file_metadata[file_name][u'encryption_material'] = \ - self._encryption_material[0] - elif self._command_type == CMD_TYPE_DOWNLOAD: - for file_name in self._src_files: - if len(file_name) > 0: - logger.debug(file_name) - first_path_sep = file_name.find(u'/') - dst_file_name = file_name[first_path_sep + 1:] \ - if first_path_sep >= 0 else file_name - self._file_metadata[file_name] = { - u'name': os.path.basename(file_name), - u'src_file_name': file_name, - u'dst_file_name': dst_file_name, - u'stage_location_type': self._stage_location_type, - u'stage_info': self._stage_info, - u'local_location': self._local_location, - } - if file_name in 
self._src_file_to_encryption_material: - self._file_metadata[file_name][ - u'encryption_material'] = \ - self._src_file_to_encryption_material[file_name] - - def _process_file_compression_type(self): - user_specified_source_compression = None - if self._source_compression == u'auto_detect': - auto_detect = True - elif self._source_compression == u'none': - auto_detect = False - else: - user_specified_source_compression = \ - FileCompressionType.lookupByMimeSubType( - self._source_compression) - if user_specified_source_compression is None or not \ - user_specified_source_compression[u'is_supported']: - Error.errorhandler_wrapper( - self._cursor.connection, self._cursor, - ProgrammingError, - { - u'msg': (u'Feature is not supported: ' - u'{0}').format( - user_specified_source_compression - ), - u'errno': ER_COMPRESSION_NOT_SUPPORTED - }) - - auto_detect = False - - for file_name in self._src_files: - meta = self._file_metadata[file_name] - - current_file_compression_type = None - if auto_detect: - mimetypes.init() - _, encoding = mimetypes.guess_type(file_name) - - if encoding is None: - test = None - with open(file_name, 'rb') as f: - test = f.read(4) - if file_name.endswith('.br'): - encoding = 'br' - elif test and test[:3] == b'ORC': - encoding = 'orc' - elif test and test == b'PAR1': - encoding = 'parquet' - elif test and ( - int(binascii.hexlify(test), 16) == 0x28B52FFD): - encoding = 'zstd' - - if encoding is not None: - logger.debug(u'detected the encoding %s: file=%s', - encoding, file_name) - current_file_compression_type = \ - FileCompressionType.lookupByMimeSubType(encoding) - else: - logger.debug(u'no file encoding was detected: file=%s', - file_name) - - if current_file_compression_type is not None and not \ - current_file_compression_type[u'is_supported']: - Error.errorhandler_wrapper( - self._cursor.connection, self._cursor, - ProgrammingError, - { - u'msg': (u'Feature is not supported: ' - u'{0}').format( - current_file_compression_type - ), - u'errno': ER_COMPRESSION_NOT_SUPPORTED - }) - else: - current_file_compression_type = \ - user_specified_source_compression - - if current_file_compression_type is not None: - meta[u'src_compression_type'] = current_file_compression_type - if current_file_compression_type[u'is_supported']: - meta[u'dst_compression_type'] = \ - current_file_compression_type - meta[u'require_compress'] = False - meta[u'dst_file_name'] = meta[u'name'] - else: - Error.errorhandler_wrapper( - self._cursor.connection, self._cursor, - ProgrammingError, - { - u'msg': (u'Feature is not supported: ' - u'{0}').format( - current_file_compression_type - ), u'errno': ER_COMPRESSION_NOT_SUPPORTED - }) - else: - # src is not compressed but the destination want to be - # compressed unless the users disable it - meta[u'require_compress'] = self._auto_compress - meta[u'src_compression_type'] = None - if self._auto_compress: - meta[u'dst_file_name'] = \ - meta[u'name'] + \ - FileCompressionType.Types[u'GZIP'][u'file_extension'] - meta[u'dst_compression_type'] = \ - FileCompressionType.Types[u'GZIP'] - else: - meta[u'dst_file_name'] = meta[u'name'] - meta[u'dst_compression_type'] = None - - self._file_metadata[file_name] = meta diff --git a/file_util.py b/file_util.py deleted file mode 100644 index 26a5efd32..000000000 --- a/file_util.py +++ /dev/null @@ -1,86 +0,0 @@ - -from __future__ import division - -import base64 -import gzip -import os -import shutil -import struct -from io import open -from logging import getLogger -from Cryptodome.Hash import SHA256 - -from 
.constants import UTF8 - - -class SnowflakeFileUtil(object): - - @staticmethod - def compress_file_with_gzip(file_name, tmp_dir): - """ - Compresses a file by GZIP - :param file_name: a file name - :param tmp_dir: temporary directory where a GZIP file will be created - :return: a pair of gzip file name and size - """ - logger = getLogger(__name__) - base_name = os.path.basename(file_name) - gzip_file_name = os.path.join(tmp_dir, base_name + u'_c.gz') - logger.debug(u'gzip file: %s, original file: %s', gzip_file_name, - file_name) - fr = open(file_name, u'rb') - fw = gzip.GzipFile(gzip_file_name, u'wb') - shutil.copyfileobj(fr, fw) - fw.close() - fr.close() - SnowflakeFileUtil.normalize_gzip_header(gzip_file_name) - - statinfo = os.stat(gzip_file_name) - return gzip_file_name, statinfo.st_size - - @staticmethod - def normalize_gzip_header(gzip_file_name): - """ - Normalize GZIP file header. For consistent file digest, this removes - creation timestamp from the header. - :param gzip_file_name: gzip file name - """ - with open(gzip_file_name, u'r+b') as f: - # reset the timestamp in gzip header - f.seek(4, 0) - f.write(struct.pack('<I', 0)) - - if len(data) > size: - self.leftover = data[size:] - - return data[:size] - - -def decompress_raw_data_to_unicode_stream(raw_data_fd): - """ - Decompresses raw data in a file-like object and yields - Unicode strings. - """ - obj = zlib.decompressobj(MAGIC_NUMBER + zlib.MAX_WBITS) - yield u'[' - d = raw_data_fd.read(CHUNK_SIZE) - while d: - yield obj.decompress(d).decode(u'utf-8') - while obj.unused_data != b'': - unused_data = obj.unused_data - obj = zlib.decompressobj(MAGIC_NUMBER + zlib.MAX_WBITS) - yield obj.decompress(unused_data).decode(u'utf-8') - d = raw_data_fd.read(CHUNK_SIZE) - yield obj.flush().decode(u'utf-8') + u']' diff --git a/incident.py b/incident.py deleted file mode 100644 index 25794597c..000000000 --- a/incident.py +++ /dev/null @@ -1,162 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2019 Snowflake Computing Inc. All rights reserved.
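A side note on the `normalize_gzip_header` logic deleted above: RFC 1952 puts a four-byte little-endian MTIME field at offset 4 of every gzip stream, so compressing identical data twice yields different bytes, and therefore different digests, unless that field is cleared. A minimal standalone sketch of the same trick (illustrative names only, not the connector's API):

```
# Illustrative sketch -- mirrors the idea behind the deleted
# SnowflakeFileUtil.normalize_gzip_header, not the connector's API.
import gzip
import hashlib
import struct


def normalize_gzip_header(blob):
    # Zero the 4-byte little-endian MTIME field at offset 4 (RFC 1952).
    return blob[:4] + struct.pack('<I', 0) + blob[8:]


payload = b'hello world\n' * 100
a = gzip.compress(payload, mtime=1)  # the mtime kwarg needs Python >= 3.8
b = gzip.compress(payload, mtime=2)

# Same content, different digests, until the timestamp is zeroed out.
assert hashlib.sha256(a).digest() != hashlib.sha256(b).digest()
assert hashlib.sha256(normalize_gzip_header(a)).digest() == \
    hashlib.sha256(normalize_gzip_header(b)).digest()
```

Zeroing in place with `f.seek(4); f.write(struct.pack('<I', 0))`, as the deleted code does, is equivalent for files already on disk.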
-# - -import logging -import platform -from sys import exc_info -from traceback import format_exc -from uuid import uuid4 -from datetime import datetime - - -from .network import REQUEST_ID -from .compat import TO_UNICODE, urlencode -from .constants import (HTTP_HEADER_CONTENT_TYPE, - HTTP_HEADER_ACCEPT, - HTTP_HEADER_USER_AGENT, - HTTP_HEADER_SERVICE_NAME) -from .network import (CONTENT_TYPE_APPLICATION_JSON, - ACCEPT_TYPE_APPLICATION_SNOWFLAKE, - PYTHON_CONNECTOR_USER_AGENT) -from .errors import (ServiceUnavailableError, - ForbiddenError, ProgrammingError) - -logger = logging.getLogger(__name__) -URL = u'/incidents/v2/create-incident' -CLS_BLACKLIST = frozenset({ProgrammingError}) - - -class Incident(object): - - def __init__(self, - job_id, - request_id, - driver, - driver_version, - error_message, - error_stack_trace, - os=platform.system(), - os_version=platform.release()): - self.uuid = TO_UNICODE(uuid4()) - self.createdOn = TO_UNICODE(datetime.utcnow())[:-3] # utcnow returns 6 ms digits, we only want 3 - self.jobId = TO_UNICODE(job_id) if job_id is not None else None - self.requestId = TO_UNICODE(request_id) if request_id is not None else None - self.errorMessage = TO_UNICODE(error_message) - self.errorStackTrace = TO_UNICODE(error_stack_trace) - self.os = TO_UNICODE(os) if os is not None else None - self.osVersion = TO_UNICODE(os_version) if os_version is not None else None - self.signature = TO_UNICODE(self.__generate_signature(error_message, error_stack_trace)) - self.driver = TO_UNICODE(driver) - self.driverVersion = TO_UNICODE(driver_version) - - def to_dict(self): - ret = {u"Tags": [{u"Name": u"driver", u"Value": self.driver}, - {u"Name": u"version", u"Value": self.driverVersion}], - u"Name": self.signature, - u"UUID": self.uuid, - u"Created_On": self.createdOn, - u"Value": { - u"exceptionMessage": self.errorMessage, - u"exceptionStackTrace": self.errorStackTrace - }} - # Add optional values - if self.os: - ret[u"Tags"].append({u"Name": u"os", u"Value": self.os}) - if self.osVersion: - ret[u"Tags"].append({u"Name": u"osVersion", u"Value": self.osVersion}) - if self.requestId: - ret[u"Value"][u"requestId"] = self.requestId - if self.jobId: - ret[u"Value"][u"jobId"] = self.jobId - return ret - - def __str__(self): - return str(self.to_dict()) - - def __repr__(self): - return "Incident {id}".format(id=self.uuid) - - @staticmethod - def __generate_signature(error_message, error_stack_trace): - """Automatically generate signature of Incident""" - return error_message - - @classmethod - def from_exception(cls, exc): - """Generate an incident from an Exception""" - pass - - -class IncidentAPI(object): - """Snowflake Incident""" - - def __init__(self, rest): - self._rest = rest - - def report_incident(self, incident=None, job_id=None, request_id=None, session_parameters=None): - """ - Report an incident created - - Example usage: - - from traceback import format_exc - - try: - doing_my_thing() - except Exception as e: - incident = Incident(None, requestId, e.message, format_exc) - incidentAPI.report_automatic_incident(incident) - raise - - -- or -- - - try: - doing_my_thing() - except Exception: - incidentAPI.report_incident() - raise - """ - if incident is None: - cls, exc, trace = exc_info() - if cls in CLS_BLACKLIST: - logger.warn("Ignoring blacklisted exception type: {type}".format(type=cls)) - return - incident = Incident(job_id, - request_id, - self._rest._connection._internal_application_name, - self._rest._connection._internal_application_version, - str(exc), - format_exc()) - 
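Note that the usage example in the `report_incident` docstring above has drifted from the code it documents: it calls `report_automatic_incident`, while the method is named `report_incident`, and it builds `Incident` with four arguments although `__init__` also requires `driver` and `driver_version`. The underlying capture pattern (`sys.exc_info()` plus `traceback.format_exc()` from inside an `except` block) looks like this in isolation, with a stub class standing in for the connector's `Incident`:

```
# Standalone sketch of the capture pattern; IncidentStub is a stand-in
# for the connector's Incident class, not part of its API.
import sys
import traceback
from dataclasses import dataclass


@dataclass
class IncidentStub:
    error_message: str
    error_stack_trace: str


def capture_incident():
    # Only meaningful inside an except block, while sys.exc_info()
    # still refers to the exception being handled.
    _, exc, _ = sys.exc_info()
    return IncidentStub(str(exc), traceback.format_exc())


try:
    raise ValueError('boom')
except ValueError:
    incident = capture_incident()

print(incident.error_message)  # boom
```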
- if session_parameters is None: - session_parameters = {} - headers = {HTTP_HEADER_CONTENT_TYPE: CONTENT_TYPE_APPLICATION_JSON, - HTTP_HEADER_ACCEPT: ACCEPT_TYPE_APPLICATION_SNOWFLAKE, - HTTP_HEADER_USER_AGENT: PYTHON_CONNECTOR_USER_AGENT} - if HTTP_HEADER_SERVICE_NAME in session_parameters: - headers[HTTP_HEADER_SERVICE_NAME] = \ - session_parameters[HTTP_HEADER_SERVICE_NAME] - body = incident.to_dict() - logger.debug(u"Going to report incident with body: {}".format(body)) - try: - ret = self._rest.request( - u'/incidents/v2/create-incident?' + urlencode({REQUEST_ID: uuid4()}), - body, _include_retry_params=True) - except (ForbiddenError, ServiceUnavailableError): - logger.error("Unable to reach endpoint to report incident at url: '{url}' with headers='{headers}' " - "and body: '{body}'".format(url=URL, - headers=headers, - body=body)) - raise - if not ret[u'success']: - logger.warn(u"Reporting incident failed for reason: '{reason}'".format(reason=ret)) - return - new_incident_id = ret[u'data'][u'incidentId'] if ret.get(u'data') else None - if not new_incident_id: - logger.debug(u"Reported incident was ignored") - else: - logger.info(u"Incident has been reported with new incident id: {}".format(ret[u'data'][u'incidentId'])) - return new_incident_id diff --git a/json_result.py b/json_result.py deleted file mode 100644 index 8949056f8..000000000 --- a/json_result.py +++ /dev/null @@ -1,198 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. -# - -from logging import getLogger -from .telemetry import TelemetryField -from .constants import FIELD_ID_TO_NAME -from .errors import Error, InterfaceError -from .errorcode import ER_FAILED_TO_CONVERT_ROW_TO_PYTHON_TYPE -from .time_util import get_time_millis - -logger = getLogger(__name__) - - -class JsonResult: - def __init__(self, raw_response, cursor, use_ijson=False): - self._reset() - self._cursor = cursor - self._connection = cursor.connection - self._init_from_meta(raw_response, use_ijson) - - def _init_from_meta(self, data, use_ijson): - self._total_row_index = -1 # last fetched number of rows - self._chunk_index = 0 - self._chunk_count = 0 - - self._current_chunk_row = iter(data.get(u'rowset')) - self._current_chunk_row_count = len(data.get(u'rowset')) - - self._column_converter = [] - self._column_idx_to_name = {} - for idx, column in enumerate(data[u'rowtype']): - self._column_idx_to_name[idx] = column[u'name'] - self._column_converter.append( - self._connection.converter.to_python_method( - column[u'type'].upper(), column)) - - if u'chunks' in data: - chunks = data[u'chunks'] - self._chunk_count = len(chunks) - logger.debug(u'chunk size=%s', self._chunk_count) - # prepare the downloader for further fetch - qrmk = data[u'qrmk'] if u'qrmk' in data else None - chunk_headers = None - if u'chunkHeaders' in data: - chunk_headers = {} - for header_key, header_value in data[ - u'chunkHeaders'].items(): - chunk_headers[header_key] = header_value - logger.debug( - u'added chunk header: key=%s, value=%s', - header_key, - header_value) - - logger.debug(u'qrmk=%s', qrmk) - self._chunk_downloader = self._connection._chunk_downloader_class( - chunks, self._connection, self._cursor, qrmk, chunk_headers, - query_result_format='json', - prefetch_threads=self._connection.client_prefetch_threads, - use_ijson=use_ijson) - - def __iter__(self): - return self - - def next(self): - return self.__next__() - - def __next__(self): - is_done = False - try: - row = None - 
self.total_row_index += 1 - try: - row = next(self._current_chunk_row) - except StopIteration: - if self._chunk_index < self._chunk_count: - logger.debug( - u"chunk index: %s, chunk_count: %s", - self._chunk_index, self._chunk_count) - next_chunk = self._chunk_downloader.next_chunk() - self._current_chunk_row_count = next_chunk.row_count - self._current_chunk_row = next_chunk.result_data - self._chunk_index += 1 - try: - row = next(self._current_chunk_row) - except StopIteration: - is_done = True - raise IndexError - else: - if self._chunk_count > 0 and \ - self._chunk_downloader is not None: - self._chunk_downloader.terminate() - self._cursor._log_telemetry_job_data( - TelemetryField.TIME_DOWNLOADING_CHUNKS, - self._chunk_downloader._total_millis_downloading_chunks) - self._cursor._log_telemetry_job_data( - TelemetryField.TIME_PARSING_CHUNKS, - self._chunk_downloader._total_millis_parsing_chunks) - self._chunk_downloader = None - self._chunk_count = 0 - self._current_chunk_row = iter(()) - is_done = True - - if is_done: - raise StopIteration - - return self._row_to_python(row) if row is not None else None - - except IndexError: - # returns None if the iteration is completed so that iter() stops - return None - finally: - if is_done and self._cursor._first_chunk_time: - logger.info("fetching data done") - time_consume_last_result = get_time_millis() - self._cursor._first_chunk_time - self._cursor._log_telemetry_job_data( - TelemetryField.TIME_CONSUME_LAST_RESULT, - time_consume_last_result) - - def _row_to_python(self, row): - """ - Converts data in row if required. - - NOTE: surprisingly using idx+1 is faster than enumerate here. Also - removing generator improved performance even better. - """ - idx = 0 - for col in row: - conv = self._column_converter[idx] - try: - row[idx] = col if conv is None or col is None else conv(col) - except Exception as e: - col_desc = self._cursor.description[idx] - msg = u'Failed to convert: ' \ - u'field {name}: {type}::{value}, Error: ' \ - u'{error}'.format( - name=col_desc[0], - type=FIELD_ID_TO_NAME[col_desc[1]], - value=col, - error=e) - logger.exception(msg) - Error.errorhandler_wrapper( - self._connection, self._cursor, InterfaceError, { - u'msg': msg, - u'errno': ER_FAILED_TO_CONVERT_ROW_TO_PYTHON_TYPE, - }) - idx += 1 - return tuple(row) - - def _reset(self): - self.total_row_index = -1 # last fetched number of rows - self._current_chunk_row_count = 0 - self._current_chunk_row = iter(()) - self._chunk_index = 0 - - if hasattr(self, u'_chunk_count') and self._chunk_count > 0 and \ - self._chunk_downloader is not None: - self._chunk_downloader.terminate() - - self._chunk_count = 0 - self._chunk_downloader = None - - -class DictJsonResult(JsonResult): - - def __init__(self, raw_response, cursor, use_ijson): - JsonResult.__init__(self, raw_response, cursor, use_ijson) - - def _row_to_python(self, row): - # see the base class - res = {} - idx = 0 - for col in row: - col_name = self._column_idx_to_name[idx] - conv = self._column_converter[idx] - try: - res[col_name] = col if conv is None or col is None else conv( - col) - except Exception as e: - col_desc = self._cursor.description[idx] - msg = u'Failed to convert: ' \ - u'field {name}: {type}::{value}, Error: ' \ - u'{error}'.format( - name=col_desc[0], - type=FIELD_ID_TO_NAME[col_desc[1]], - value=col, - error=e - ) - logger.exception(msg) - Error.errorhandler_wrapper( - self._connection, self._cursor, InterfaceError, { - u'msg': msg, - u'errno': ER_FAILED_TO_CONVERT_ROW_TO_PYTHON_TYPE, - }) - idx += 1 
- return res diff --git a/license_header.txt b/license_header.txt new file mode 100644 index 000000000..42f7a8ee2 --- /dev/null +++ b/license_header.txt @@ -0,0 +1,3 @@ + +Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. + diff --git a/local_util.py b/local_util.py deleted file mode 100644 index 5fadf8333..000000000 --- a/local_util.py +++ /dev/null @@ -1,61 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All rights reserved. -# - -from __future__ import division - -import os -from logging import getLogger - -from .constants import ResultStatus - - -class SnowflakeLocalUtil(object): - @staticmethod - def create_client(stage_info, use_accelerate_endpoint=False): - return None - - @staticmethod - def upload_one_file_with_retry(meta): - logger = getLogger(__name__) - logger.debug( - u"src_file_name=[%s], " - u"real_src_file_name=[%s], " - u"stage_info=[%s], " - u"dst_file_name=[%s]", - meta[u'src_file_name'], - meta[u'real_src_file_name'], - meta[u'stage_info'], - meta[u'dst_file_name'] - ) - with open(meta[u'real_src_file_name'], u'rb') as frd: - with open(os.path.join( - os.path.expanduser(meta[u'stage_info'][u'location']), - meta[u'dst_file_name']), u'wb') as output: - output.writelines(frd) - - meta[u'dst_file_size'] = meta[u'upload_size'] - meta[u'result_status'] = ResultStatus.UPLOADED - - @staticmethod - def download_one_file(meta): - full_src_file_name = os.path.join( - os.path.expanduser(meta[u'stage_info'][u'location']), - meta[u'src_file_name'] if not meta[u'src_file_name'].startswith( - os.sep) else - meta[u'src_file_name'][1:]) - full_dst_file_name = os.path.join( - meta[u'local_location'], - os.path.basename(meta[u'dst_file_name'])) - base_dir = os.path.dirname(full_dst_file_name) - if not os.path.exists(base_dir): - os.makedirs(base_dir) - - with open(full_src_file_name, u'rb') as frd: - with open(full_dst_file_name, u'wb+') as output: - output.writelines(frd) - statinfo = os.stat(full_dst_file_name) - meta[u'dst_file_size'] = statinfo.st_size - meta[u'result_status'] = ResultStatus.DOWNLOADED diff --git a/mixin.py b/mixin.py deleted file mode 100644 index d72cdc574..000000000 --- a/mixin.py +++ /dev/null @@ -1,15 +0,0 @@ -from .compat import PY2 - - -class UnicodeMixin(object): - u""" - Mixin class to handle defining the proper __str__/__unicode__ - methods in Python 2 or 3. - """ - - if PY2: - def __str__(self): - return self.__unicode__().encode('utf8') - else: # Python 3 - def __str__(self): - return self.__unicode__() diff --git a/network.py b/network.py deleted file mode 100644 index a85c12a25..000000000 --- a/network.py +++ /dev/null @@ -1,930 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All rights reserved. -# -import collections -import contextlib -import gzip -import itertools -import json -import logging -import sys -import time -import traceback -import uuid -from io import BytesIO -from threading import Lock - -import OpenSSL.SSL -from botocore.vendored import requests -from botocore.vendored.requests.adapters import HTTPAdapter -from botocore.vendored.requests.auth import AuthBase -from botocore.vendored.requests.exceptions import ( - ConnectionError, ConnectTimeout, ReadTimeout, SSLError) -from botocore.vendored.requests.packages.urllib3.exceptions import ( - ProtocolError, ReadTimeoutError) - -from snowflake.connector.time_util import get_time_millis -from . 
import ssl_wrap_socket -from .compat import ( - PY2, - METHOD_NOT_ALLOWED, BAD_REQUEST, SERVICE_UNAVAILABLE, GATEWAY_TIMEOUT, - FORBIDDEN, BAD_GATEWAY, REQUEST_TIMEOUT, - UNAUTHORIZED, INTERNAL_SERVER_ERROR, OK, BadStatusLine) -from .compat import (TO_UNICODE, urlencode, urlparse, IncompleteRead) -from .constants import ( - HTTP_HEADER_CONTENT_TYPE, - HTTP_HEADER_ACCEPT, - HTTP_HEADER_USER_AGENT, - HTTP_HEADER_SERVICE_NAME -) -from .description import ( - SNOWFLAKE_CONNECTOR_VERSION, - PYTHON_VERSION, - OPERATING_SYSTEM, - PLATFORM, - IMPLEMENTATION, - COMPILER, - CLIENT_NAME, - CLIENT_VERSION -) -from .errorcode import (ER_FAILED_TO_CONNECT_TO_DB, ER_CONNECTION_IS_CLOSED, - ER_FAILED_TO_REQUEST, ER_FAILED_TO_RENEW_SESSION) -from .errors import (Error, OperationalError, DatabaseError, ProgrammingError, - GatewayTimeoutError, ServiceUnavailableError, - InterfaceError, InternalServerError, ForbiddenError, - BadGatewayError, BadRequest, MethodNotAllowed, - OtherHTTPRetryableError) -from .sqlstate import (SQLSTATE_CONNECTION_NOT_EXISTS, - SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED, - SQLSTATE_CONNECTION_REJECTED, - SQLSTATE_IO_ERROR) -from .telemetry_oob import TelemetryService -from .time_util import ( - DecorrelateJitterBackoff, - DEFAULT_MASTER_VALIDITY_IN_SECONDS -) -from .tool.probe_connection import probe_connection - -if PY2: - from pyasn1.error import PyAsn1Error - -logger = logging.getLogger(__name__) - -""" -Monkey patch for PyOpenSSL Socket wrapper -""" -ssl_wrap_socket.inject_into_urllib3() - -# known applications -APPLICATION_SNOWSQL = u'SnowSQL' - -# requests parameters -REQUESTS_RETRY = 1 # requests library builtin retry -DEFAULT_SOCKET_CONNECT_TIMEOUT = 1 * 60 # don't reduce less than 45 seconds - -# return codes -QUERY_IN_PROGRESS_CODE = u'333333' # GS code: the query is in progress -QUERY_IN_PROGRESS_ASYNC_CODE = u'333334' # GS code: the query is detached - -ID_TOKEN_EXPIRED_GS_CODE = u'390110' -SESSION_EXPIRED_GS_CODE = u'390112' # GS code: session expired. need to renew -MASTER_TOKEN_NOTFOUND_GS_CODE = u'390113' -MASTER_TOKEN_EXPIRED_GS_CODE = u'390114' -MASTER_TOKEN_INVALD_GS_CODE = u'390115' -BAD_REQUEST_GS_CODE = u'390400' - -# other constants -CONTENT_TYPE_APPLICATION_JSON = u'application/json' -ACCEPT_TYPE_APPLICATION_SNOWFLAKE = u'application/snowflake' - -REQUEST_TYPE_RENEW = u'RENEW' -REQUEST_TYPE_ISSUE = u'ISSUE' - -UPDATED_BY_ID_TOKEN = u'updated_by_id_token' - -HEADER_AUTHORIZATION_KEY = u"Authorization" -HEADER_SNOWFLAKE_TOKEN = u'Snowflake Token="{token}"' - -REQUEST_ID = u'requestId' -REQUEST_GUID = u'request_guid' -SNOWFLAKE_HOST_SUFFIX = u'.snowflakecomputing.com' - -SNOWFLAKE_CONNECTOR_VERSION = SNOWFLAKE_CONNECTOR_VERSION -PYTHON_VERSION = PYTHON_VERSION -OPERATING_SYSTEM = OPERATING_SYSTEM -PLATFORM = PLATFORM -IMPLEMENTATION = IMPLEMENTATION -COMPILER = COMPILER - -CLIENT_NAME = CLIENT_NAME # don't change! 
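`DecorrelateJitterBackoff`, imported from `time_util` above and instantiated later in this file as `DecorrelateJitterBackoff(1, 16)` ("backoff between 1 and 16 seconds"), names the decorrelated-jitter retry strategy. Its implementation is not part of this diff, so the sketch below uses the textbook formula, `sleep = min(cap, uniform(base, 3 * previous_sleep))`; the real `time_util` version may differ in detail:

```
# Sketch of decorrelated jitter, the strategy named by the
# DecorrelateJitterBackoff import above; not a verified copy of the
# connector's time_util implementation.
import random


class DecorrelateJitterBackoffSketch:
    def __init__(self, base=1, cap=16):
        self.base = base  # lower bound, in seconds
        self.cap = cap    # upper bound, in seconds

    def next_sleep(self, cnt, sleep):
        # cnt is accepted for signature parity with the connector's
        # RetryCtx usage; plain decorrelated jitter ignores it.
        return min(self.cap, random.uniform(self.base, sleep * 3))


backoff = DecorrelateJitterBackoffSketch(1, 16)
sleep = 1.0
for cnt in range(5):
    sleep = backoff.next_sleep(cnt, sleep)
    print('retry %d: sleep %.2fs' % (cnt, sleep))
```

The point of decorrelating from the attempt number is that concurrent clients spread out instead of retrying in synchronized waves.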
-CLIENT_VERSION = CLIENT_VERSION -PYTHON_CONNECTOR_USER_AGENT = \ - u'{name}/{version}/{python_implementation}/{python_version}/{platform}'.format( - name=CLIENT_NAME, - version=SNOWFLAKE_CONNECTOR_VERSION, - python_implementation=IMPLEMENTATION, - python_version=PYTHON_VERSION, - platform=PLATFORM) - -NO_TOKEN = u'no-token' - -STATUS_TO_EXCEPTION = { - INTERNAL_SERVER_ERROR: InternalServerError, - FORBIDDEN: ForbiddenError, - SERVICE_UNAVAILABLE: ServiceUnavailableError, - GATEWAY_TIMEOUT: GatewayTimeoutError, - BAD_REQUEST: BadRequest, - BAD_GATEWAY: BadGatewayError, - METHOD_NOT_ALLOWED: MethodNotAllowed, -} - -DEFAULT_AUTHENTICATOR = u'SNOWFLAKE' # default authenticator name -EXTERNAL_BROWSER_AUTHENTICATOR = u'EXTERNALBROWSER' -KEY_PAIR_AUTHENTICATOR = u'SNOWFLAKE_JWT' -OAUTH_AUTHENTICATOR = u'OAUTH' - - -def is_retryable_http_code(code): - """ - Is retryable HTTP code? - """ - return 500 <= code < 600 or code in ( - BAD_REQUEST, # 400 - FORBIDDEN, # 403 - METHOD_NOT_ALLOWED, # 405 - REQUEST_TIMEOUT, # 408 - ) - - -class RetryRequest(Exception): - """ - Signal to retry request - """ - pass - - -class ReauthenticationRequest(Exception): - """ - Signal to reauthenticate - """ - - def __init__(self, cause): - self.cause = cause - - -class SnowflakeAuth(AuthBase): - """ - Attaches HTTP Authorization header for Snowflake - """ - - def __init__(self, token): - # setup any auth-related data here - self.token = token - - def __call__(self, r): - # modify and return the request - if HEADER_AUTHORIZATION_KEY in r.headers: - del r.headers[HEADER_AUTHORIZATION_KEY] - if self.token != NO_TOKEN: - r.headers[ - HEADER_AUTHORIZATION_KEY] = HEADER_SNOWFLAKE_TOKEN.format( - token=self.token) - return r - - -class SnowflakeRestful(object): - """ - Snowflake Restful class - """ - - def __init__(self, host=u'127.0.0.1', port=8080, - protocol=u'http', - inject_client_pause=0, - connection=None): - self._host = host - self._port = port - self._protocol = protocol - self._inject_client_pause = inject_client_pause - self._connection = connection - self._lock_token = Lock() - self._idle_sessions = collections.deque() - self._active_sessions = set() - - # OCSP mode (OCSPMode.FAIL_OPEN by default) - ssl_wrap_socket.FEATURE_OCSP_MODE = \ - self._connection and self._connection._ocsp_mode() - # cache file name (enabled by default) - ssl_wrap_socket.FEATURE_OCSP_RESPONSE_CACHE_FILE_NAME = \ - self._connection and self._connection._ocsp_response_cache_filename - - # This is to address the issue where requests hangs - _ = 'dummy'.encode('idna').decode('utf-8') # noqa - - @property - def token(self): - return self._token if hasattr(self, u'_token') else None - - @property - def master_token(self): - return self._master_token if hasattr(self, u'_master_token') else None - - @property - def master_validity_in_seconds(self): - return self._master_validity_in_seconds \ - if hasattr(self, u'_master_validity_in_seconds') and \ - self._master_validity_in_seconds \ - else DEFAULT_MASTER_VALIDITY_IN_SECONDS - - @master_validity_in_seconds.setter - def master_validity_in_seconds(self, value): - self._master_validity_in_seconds = value \ - if value else DEFAULT_MASTER_VALIDITY_IN_SECONDS - - @property - def id_token(self): - return self._id_token if hasattr(self, u'_id_token') else None - - @id_token.setter - def id_token(self, value): - self._id_token = value - - def close(self): - if hasattr(self, u'_token'): - del self._token - if hasattr(self, u'_master_token'): - del self._master_token - if hasattr(self, u'_id_token'): - del 
self._id_token - sessions = list(self._active_sessions) - if sessions: - logger.debug("Closing %s active sessions", len(sessions)) - sessions.extend(self._idle_sessions) - self._active_sessions.clear() - self._idle_sessions.clear() - for s in sessions: - try: - s.close() - except Exception as e: - logger.info("Session cleanup failed: %s", e) - - def request(self, url, body=None, method=u'post', client=u'sfsql', - _no_results=False, timeout=None, _include_retry_params=False): - if body is None: - body = {} - if self.master_token is None and self.token is None: - Error.errorhandler_wrapper( - self._connection, None, DatabaseError, - { - u'msg': u"Connection is closed", - u'errno': ER_CONNECTION_IS_CLOSED, - u'sqlstate': SQLSTATE_CONNECTION_NOT_EXISTS, - }) - - if client == u'sfsql': - accept_type = ACCEPT_TYPE_APPLICATION_SNOWFLAKE - else: - accept_type = CONTENT_TYPE_APPLICATION_JSON - - if timeout is None: - timeout = self._connection.network_timeout - - headers = { - HTTP_HEADER_CONTENT_TYPE: CONTENT_TYPE_APPLICATION_JSON, - HTTP_HEADER_ACCEPT: accept_type, - HTTP_HEADER_USER_AGENT: PYTHON_CONNECTOR_USER_AGENT, - } - if self._connection.service_name: - headers[HTTP_HEADER_SERVICE_NAME] = self._connection.service_name - if method == u'post': - return self._post_request( - url, headers, json.dumps(body), - token=self.token, _no_results=_no_results, - timeout=timeout, _include_retry_params=_include_retry_params) - else: - return self._get_request( - url, headers, token=self.token, - timeout=timeout) - - def update_tokens(self, session_token, master_token, - master_validity_in_seconds=None, - id_token=None): - """ - Update session and master tokens and optionally temporary credential - """ - with self._lock_token: - self._token = session_token - self._master_token = master_token - self._id_token = id_token - self._master_validity_in_seconds = master_validity_in_seconds - - def _renew_session(self): - """ - Renew a session and master token. - """ - try: - return self._token_request(REQUEST_TYPE_RENEW) - except ReauthenticationRequest as ex: - if not self.id_token: - raise ex.cause - return self._token_request(REQUEST_TYPE_ISSUE) - - def _id_token_session(self): - """ - Issue a session token by the id token. No master token is returned. - As a result, the session token is not renewable. - """ - return self._token_request(REQUEST_TYPE_ISSUE) - - def _token_request(self, request_type): - logger.debug( - u'updating session. master_token: %s, id_token: %s', - u'****' if self.master_token else None, - u'****' if self.id_token else None) - headers = { - HTTP_HEADER_CONTENT_TYPE: CONTENT_TYPE_APPLICATION_JSON, - HTTP_HEADER_ACCEPT: CONTENT_TYPE_APPLICATION_JSON, - HTTP_HEADER_USER_AGENT: PYTHON_CONNECTOR_USER_AGENT, - } - if self._connection.service_name: - headers[HTTP_HEADER_SERVICE_NAME] = self._connection.service_name - request_id = TO_UNICODE(uuid.uuid4()) - logger.debug(u'request_id: %s', request_id) - url = u'/session/token-request?' + urlencode({ - REQUEST_ID: request_id}) - - if request_type == REQUEST_TYPE_ISSUE: - header_token = self.id_token - body = { - u"idToken": self.id_token, - u"requestType": REQUEST_TYPE_ISSUE, - } - else: - # NOTE: ensure an empty key if master token is not set. - # This avoids HTTP 400. 
- header_token = self.master_token or "" - body = { - u"oldSessionToken": self.token, - u"requestType": request_type, - } - ret = self._post_request( - url, headers, json.dumps(body), - token=header_token, - timeout=self._connection.network_timeout) - if ret.get(u'success') and ret.get(u'data', {}).get(u'sessionToken'): - logger.debug(u'success: %s', ret) - self.update_tokens( - ret[u'data'][u'sessionToken'], - ret[u'data'].get(u'masterToken'), - master_validity_in_seconds=ret[u'data'].get( - u'masterValidityInSeconds'), - id_token=self.id_token) - logger.debug(u'updating session completed') - ret[UPDATED_BY_ID_TOKEN] = request_type == REQUEST_TYPE_ISSUE - return ret - else: - logger.debug(u'failed: %s', ret) - err = ret.get(u'message') - if err is not None and ret.get(u'data'): - err += ret[u'data'].get(u'errorMessage', '') - errno = ret.get(u'code') or ER_FAILED_TO_RENEW_SESSION - if errno in ( - ID_TOKEN_EXPIRED_GS_CODE, - SESSION_EXPIRED_GS_CODE, - MASTER_TOKEN_NOTFOUND_GS_CODE, - MASTER_TOKEN_EXPIRED_GS_CODE, - MASTER_TOKEN_INVALD_GS_CODE, - BAD_REQUEST_GS_CODE): - raise ReauthenticationRequest( - ProgrammingError( - msg=err, - errno=int(errno), - sqlstate=SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED)) - Error.errorhandler_wrapper( - self._connection, None, ProgrammingError, - { - u'msg': err, - u'errno': int(errno), - u'sqlstate': SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED, - }) - - def _heartbeat(self): - headers = { - HTTP_HEADER_CONTENT_TYPE: CONTENT_TYPE_APPLICATION_JSON, - HTTP_HEADER_ACCEPT: CONTENT_TYPE_APPLICATION_JSON, - HTTP_HEADER_USER_AGENT: PYTHON_CONNECTOR_USER_AGENT, - } - if self._connection.service_name: - headers[HTTP_HEADER_SERVICE_NAME] = self._connection.service_name - request_id = TO_UNICODE(uuid.uuid4()) - logger.debug(u'request_id: %s', request_id) - url = u'/session/heartbeat?' + urlencode({ - REQUEST_ID: request_id}) - ret = self._post_request( - url, headers, None, - token=self.token, - timeout=self._connection.network_timeout) - if not ret.get(u'success'): - logger.error("Failed to heartbeat. code: %s, url: %s", - ret.get(u'code'), url) - - def delete_session(self): - """ - Deletes the session - """ - if self.master_token is None: - Error.errorhandler_wrapper( - self._connection, None, DatabaseError, - { - u'msg': u"Connection is closed", - u'errno': ER_CONNECTION_IS_CLOSED, - u'sqlstate': SQLSTATE_CONNECTION_NOT_EXISTS, - }) - - url = u'/session?' + urlencode({u'delete': u'true'}) - headers = { - HTTP_HEADER_CONTENT_TYPE: CONTENT_TYPE_APPLICATION_JSON, - HTTP_HEADER_ACCEPT: CONTENT_TYPE_APPLICATION_JSON, - HTTP_HEADER_USER_AGENT: PYTHON_CONNECTOR_USER_AGENT, - } - if self._connection.service_name: - headers[HTTP_HEADER_SERVICE_NAME] = self._connection.service_name - - body = {} - try: - ret = self._post_request( - url, headers, json.dumps(body), - token=self.token, timeout=5, no_retry=True) - if not ret or ret.get(u'success'): - return - err = ret.get(u'message') - if err is not None and ret.get(u'data'): - err += ret[u'data'].get(u'errorMessage', '') - # no exception is raised - logger.debug('error in deleting session. ignoring...: %s', err) - except Exception as e: - logger.debug('error in deleting session. 
ignoring...: %s', e) - - def _get_request(self, url, headers, token=None, - timeout=None, - socket_timeout=DEFAULT_SOCKET_CONNECT_TIMEOUT): - if 'Content-Encoding' in headers: - del headers['Content-Encoding'] - if 'Content-Length' in headers: - del headers['Content-Length'] - - full_url = u'{protocol}://{host}:{port}{url}'.format( - protocol=self._protocol, - host=self._host, - port=self._port, - url=url, - ) - ret = self.fetch(u'get', full_url, headers, timeout=timeout, - token=token, socket_timeout=socket_timeout) - if ret.get(u'code') == SESSION_EXPIRED_GS_CODE: - try: - ret = self._renew_session() - if ret.get(UPDATED_BY_ID_TOKEN): - self._connection._set_current_objects() - except ReauthenticationRequest as ex: - if self._connection._authenticator != \ - EXTERNAL_BROWSER_AUTHENTICATOR: - raise ex.cause - ret = self._connection._reauthenticate_by_webbrowser() - logger.debug( - u'ret[code] = {code} after renew_session'.format( - code=(ret.get(u'code', u'N/A')))) - if ret.get(u'success'): - return self._get_request(url, headers, token=self.token) - - return ret - - def _post_request(self, url, headers, body, token=None, - timeout=None, _no_results=False, no_retry=False, - socket_timeout=DEFAULT_SOCKET_CONNECT_TIMEOUT, - _include_retry_params=False): - full_url = u'{protocol}://{host}:{port}{url}'.format( - protocol=self._protocol, - host=self._host, - port=self._port, - url=url, - ) - if self._connection._probe_connection: - from pprint import pprint - ret = probe_connection(full_url) - pprint(ret) - - ret = self.fetch(u'post', full_url, headers, data=body, - timeout=timeout, token=token, - no_retry=no_retry, socket_timeout=socket_timeout, - _include_retry_params=_include_retry_params) - logger.debug( - u'ret[code] = {code}, after post request'.format( - code=(ret.get(u'code', u'N/A')))) - - if ret.get(u'code') == SESSION_EXPIRED_GS_CODE: - try: - ret = self._renew_session() - if ret.get(UPDATED_BY_ID_TOKEN): - self._connection._set_current_objects() - except ReauthenticationRequest as ex: - if self._connection._authenticator != \ - EXTERNAL_BROWSER_AUTHENTICATOR: - raise ex.cause - ret = self._connection._reauthenticate_by_webbrowser() - logger.debug( - u'ret[code] = {code} after renew_session'.format( - code=(ret.get(u'code', u'N/A')))) - if ret.get(u'success'): - return self._post_request( - url, headers, body, token=self.token, timeout=timeout) - - if ret.get(u'code') == QUERY_IN_PROGRESS_ASYNC_CODE and _no_results: - return ret - - while ret.get(u'code') in \ - (QUERY_IN_PROGRESS_CODE, QUERY_IN_PROGRESS_ASYNC_CODE): - if self._inject_client_pause > 0: - logger.debug( - u'waiting for %s...', self._inject_client_pause) - time.sleep(self._inject_client_pause) - # ping pong - result_url = ret[u'data'][u'getResultUrl'] - logger.debug(u'ping pong starting...') - ret = self._get_request( - result_url, headers, token=self.token, timeout=timeout) - logger.debug(u'ret[code] = %s', ret.get(u'code', u'N/A')) - logger.debug(u'ping pong done') - - return ret - - def fetch(self, method, full_url, headers, data=None, timeout=None, - **kwargs): - """ Curried API request with session management. 
""" - - class RetryCtx(object): - def __init__(self, timeout, _include_retry_params=False): - self.total_timeout = timeout - self.timeout = timeout - self.cnt = 0 - self.sleeping_time = 1 - self.start_time = get_time_millis() - self._include_retry_params = _include_retry_params - # backoff between 1 and 16 seconds - self._backoff = DecorrelateJitterBackoff(1, 16) - - def next_sleep(self): - self.sleeping_time = self._backoff.next_sleep( - self.cnt, self.sleeping_time) - return self.sleeping_time - - def add_retry_params(self, full_url): - if self._include_retry_params and self.cnt > 0: - suffix = urlencode({ - 'clientStartTime': self.start_time, - 'retryCount': self.cnt - }) - sep = '&' if urlparse(full_url).query else '?' - return full_url + sep + suffix - else: - return full_url - - include_retry_params = kwargs.pop('_include_retry_params', False) - - with self._use_requests_session() as session: - retry_ctx = RetryCtx(timeout, include_retry_params) - while True: - ret = self._request_exec_wrapper( - session, method, full_url, headers, data, retry_ctx, - **kwargs) - if ret is not None: - return ret - - @staticmethod - def add_request_guid(full_url): - """ - Add request_guid parameter for HTTP request tracing - """ - parsed_url = urlparse(full_url) - if not parsed_url.hostname.endswith(SNOWFLAKE_HOST_SUFFIX): - return full_url - suffix = urlencode({ - REQUEST_GUID: TO_UNICODE(uuid.uuid4()) - }) - sep = '&' if parsed_url.query else '?' - # url has query string already, just add fields - return full_url + sep + suffix - - def _request_exec_wrapper( - self, - session, method, full_url, headers, data, retry_ctx, - no_retry=False, token=NO_TOKEN, - **kwargs): - - conn = self._connection - logger.debug('remaining request timeout: %s, retry cnt: %s', - retry_ctx.timeout, retry_ctx.cnt + 1) - - start_request_thread = time.time() - full_url = retry_ctx.add_retry_params(full_url) - full_url = SnowflakeRestful.add_request_guid(full_url) - try: - return_object = self._request_exec( - session=session, - method=method, - full_url=full_url, - headers=headers, - data=data, - token=token, - **kwargs) - if return_object is not None: - return return_object - self._handle_unknown_error( - method, full_url, headers, data, conn) - return {} - except RetryRequest as e: - if retry_ctx.cnt == TelemetryService.get_instance().num_of_retry_to_trigger_telemetry: - _, _, stack_trace = sys.exc_info() - TelemetryService.get_instance().log_http_request( - "HttpRequestRetry%dTimes" % retry_ctx.cnt, - full_url, - method, - SQLSTATE_IO_ERROR, - ER_FAILED_TO_REQUEST, - retry_timeout=retry_ctx.total_timeout, - retry_count=retry_ctx.cnt, - exception=str(e), - stack_trace=traceback.format_exc() - ) - if no_retry: - return {} - cause = e.args[0] - if retry_ctx.timeout is not None: - retry_ctx.timeout -= int(time.time() - start_request_thread) - if retry_ctx.timeout <= 0: - logger.error(cause, exc_info=True) - _, _, stack_trace = sys.exc_info() - TelemetryService.get_instance().log_http_request( - "HttpRequestRetryTimeout", - full_url, - method, - SQLSTATE_IO_ERROR, - ER_FAILED_TO_REQUEST, - retry_timeout=retry_ctx.total_timeout, - retry_count=retry_ctx.cnt, - exception=str(e), - stack_trace=traceback.format_exc() - ) - if isinstance(cause, Error): - Error.errorhandler_wrapper(conn, None, cause) - else: - self.handle_invalid_certificate_error( - conn, full_url, cause) - return {} # required for tests - sleeping_time = retry_ctx.next_sleep() - logger.debug( - u'retrying: errorclass=%s, ' - u'error=%s, ' - u'counter=%s, ' - 
u'sleeping=%s(s)', - type(cause), - cause, - retry_ctx.cnt + 1, - sleeping_time) - time.sleep(sleeping_time) - retry_ctx.cnt += 1 - if retry_ctx.timeout is not None: - retry_ctx.timeout -= sleeping_time - return None # retry - except Exception as e: - if not no_retry: - raise e - logger.debug("Ignored error", exc_info=True) - return {} - - def handle_invalid_certificate_error(self, conn, full_url, cause): - # all other errors raise exception - Error.errorhandler_wrapper( - conn, None, OperationalError, - { - u'msg': u'Failed to execute request: {0}'.format( - cause), - u'errno': ER_FAILED_TO_REQUEST, - }) - - def _handle_unknown_error( - self, method, full_url, headers, data, conn): - """ - Handle unknown error - """ - if data: - try: - decoded_data = json.loads(data) - if decoded_data.get( - 'data') and decoded_data['data'].get('PASSWORD'): - # masking the password - decoded_data['data']['PASSWORD'] = '********' - data = json.dumps(decoded_data) - except: - logger.info("data is not JSON") - logger.error( - u'Failed to get the response. Hanging? ' - u'method: {method}, url: {url}, headers:{headers}, ' - u'data: {data}'.format( - method=method, - url=full_url, - headers=headers, - data=data, - ) - ) - Error.errorhandler_wrapper( - conn, None, OperationalError, - { - u'msg': u'Failed to get the response. Hanging? ' - u'method: {method}, url: {url}'.format( - method=method, - url=full_url, - ), - u'errno': ER_FAILED_TO_REQUEST, - }) - - def _request_exec( - self, - session, method, full_url, headers, data, - token, - catch_okta_unauthorized_error=False, - is_raw_text=False, - is_raw_binary=False, - binary_data_handler=None, - socket_timeout=DEFAULT_SOCKET_CONNECT_TIMEOUT): - if socket_timeout > DEFAULT_SOCKET_CONNECT_TIMEOUT: - # socket timeout should not be more than the default. - # A shorter timeout may be specified for login time, but - # for query, it should be at least 45 seconds. - socket_timeout = DEFAULT_SOCKET_CONNECT_TIMEOUT - logger.debug('socket timeout: %s', socket_timeout) - try: - if not catch_okta_unauthorized_error and data and len(data) > 0: - gzdata = BytesIO() - gzip.GzipFile(fileobj=gzdata, mode=u'wb').write( - data.encode(u'utf-8')) - gzdata.seek(0, 0) - headers['Content-Encoding'] = 'gzip' - input_data = gzdata - else: - input_data = data - - download_start_time = get_time_millis() - # socket timeout is constant. You should be able to receive - # the response within the time. If not, ConnectReadTimeout or - # ReadTimeout is raised. - raw_ret = session.request( - method=method, - url=full_url, - headers=headers, - data=input_data, - timeout=socket_timeout, - verify=True, - stream=is_raw_binary, - auth=SnowflakeAuth(token), - ) - download_end_time = get_time_millis() - - try: - if raw_ret.status_code == OK: - logger.debug(u'SUCCESS') - if is_raw_text: - ret = raw_ret.text - elif is_raw_binary: - ret = binary_data_handler.to_iterator(raw_ret.raw, - download_end_time - download_start_time) - else: - ret = raw_ret.json() - return ret - - if is_retryable_http_code(raw_ret.status_code): - ex = STATUS_TO_EXCEPTION.get( - raw_ret.status_code, OtherHTTPRetryableError) - exi = ex(code=raw_ret.status_code) - logger.debug('%s. 
Retrying...', exi) - # retryable server exceptions - raise RetryRequest(exi) - - elif raw_ret.status_code == UNAUTHORIZED and \ - catch_okta_unauthorized_error: - # OKTA Unauthorized errors - Error.errorhandler_wrapper( - self._connection, None, DatabaseError, - { - u'msg': (u'Failed to get authentication by OKTA: ' - u'{status}: {reason}').format( - status=raw_ret.status_code, - reason=raw_ret.reason), - u'errno': ER_FAILED_TO_CONNECT_TO_DB, - u'sqlstate': SQLSTATE_CONNECTION_REJECTED - } - ) - return None # required for tests - else: - TelemetryService.get_instance().log_http_request( - "HttpError%s" % str(raw_ret.status_code), - full_url, - method, - SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED, - ER_FAILED_TO_REQUEST, - response=raw_ret - ) - Error.errorhandler_wrapper( - self._connection, None, InterfaceError, - { - u'msg': (u"{status} {reason}: " - u"{method} {url}").format( - status=raw_ret.status_code, - reason=raw_ret.reason, - method=method, - url=full_url), - u'errno': ER_FAILED_TO_REQUEST, - u'sqlstate': SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED - } - ) - return None # required for tests - finally: - raw_ret.close() # ensure response is closed - except (ConnectTimeout, - ReadTimeout, - BadStatusLine, - ConnectionError, - IncompleteRead, - SSLError, - ProtocolError, # from urllib3 - ReadTimeoutError, # from urllib3 - OpenSSL.SSL.SysCallError, - KeyError, # SNOW-39175: asn1crypto.keys.PublicKeyInfo - ValueError, - RuntimeError, - AttributeError, # json decoding error - ) as err: - logger.debug( - "Hit retryable client error. Retrying... Ignore the following " - "error stack: %s", err, - exc_info=True) - raise RetryRequest(err) - except Exception as err: - if PY2 and isinstance(err, PyAsn1Error): - logger.debug( - "Hit retryable client error. Retrying... " - "Ignore the following error stack: %s", err, - exc_info=True) - raise RetryRequest(err) - _, _, stack_trace = sys.exc_info() - TelemetryService.get_instance().log_http_request( - "HttpException%s" % str(err), - full_url, - method, - SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED, - ER_FAILED_TO_REQUEST, - exception=err, - stack_trace=traceback.format_exc() - ) - raise err - - def make_requests_session(self): - s = requests.Session() - s.mount(u'http://', HTTPAdapter(max_retries=REQUESTS_RETRY)) - s.mount(u'https://', HTTPAdapter(max_retries=REQUESTS_RETRY)) - s._reuse_count = itertools.count() - return s - - @contextlib.contextmanager - def _use_requests_session(self): - """ Session caching context manager. Note that the session is not - closed until close() is called so each session may be used multiple - times. """ - if self._connection.disable_request_pooling: - session = self.make_requests_session() - try: - yield session - finally: - session.close() - else: - try: - session = self._idle_sessions.pop() - except IndexError: - session = self.make_requests_session() - self._active_sessions.add(session) - logger.debug("Active requests sessions: %s, idle: %s", - len(self._active_sessions), len(self._idle_sessions)) - try: - yield session - finally: - self._idle_sessions.appendleft(session) - try: - self._active_sessions.remove(session) - except KeyError: - logger.debug( - "session doesn't exist in the active session pool. 
" - "Ignored...") - logger.debug("Active requests sessions: %s, idle: %s", - len(self._active_sessions), - len(self._idle_sessions)) diff --git a/ocsp_asn1crypto.py b/ocsp_asn1crypto.py deleted file mode 100644 index 6a4c8e97f..000000000 --- a/ocsp_asn1crypto.py +++ /dev/null @@ -1,386 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. -# - -from datetime import datetime, timezone - -from base64 import b64encode, b64decode -from logging import getLogger - -from Cryptodome.Hash import SHA256, SHA384, SHA1, SHA512 -from Cryptodome.PublicKey import RSA -from Cryptodome.Signature import PKCS1_v1_5 -from asn1crypto.algos import DigestAlgorithm -from asn1crypto.core import OctetString, Integer -from asn1crypto.ocsp import CertId, OCSPRequest, TBSRequest, Requests, \ - Request, OCSPResponse, Version -from asn1crypto.x509 import Certificate - -from snowflake.connector.errorcode import ( - ER_INVALID_OCSP_RESPONSE, - ER_INVALID_OCSP_RESPONSE_CODE) -from snowflake.connector.errors import RevocationCheckError -from snowflake.connector.ocsp_snowflake import SnowflakeOCSP -from collections import OrderedDict -from snowflake.connector.ssd_internal_keys import ret_wildcard_hkey -from os import getenv - -logger = getLogger(__name__) - - -class SnowflakeOCSPAsn1Crypto(SnowflakeOCSP): - """ - OCSP checks by asn1crypto - """ - - # map signature algorithm name to digest class - SIGNATURE_ALGORITHM_TO_DIGEST_CLASS = { - 'sha256': SHA256, - 'sha384': SHA384, - 'sha512': SHA512, - } - - WILDCARD_CERTID = None - - def __init__(self, **kwargs): - super(SnowflakeOCSPAsn1Crypto, self).__init__(**kwargs) - self.WILDCARD_CERTID = self.encode_cert_id_key(ret_wildcard_hkey()) - - def encode_cert_id_key(self, hkey): - issuer_name_hash, issuer_key_hash, serial_number = hkey - issuer_name_hash = OctetString.load(issuer_name_hash) - issuer_key_hash = OctetString.load(issuer_key_hash) - serial_number = Integer.load(serial_number) - cert_id = CertId({ - 'hash_algorithm': DigestAlgorithm({ - 'algorithm': u'sha1', - 'parameters': None}), - 'issuer_name_hash': issuer_name_hash, - 'issuer_key_hash': issuer_key_hash, - 'serial_number': serial_number, - }) - return cert_id - - def decode_cert_id_key(self, cert_id): - return (cert_id['issuer_name_hash'].dump(), - cert_id['issuer_key_hash'].dump(), - cert_id['serial_number'].dump()) - - def decode_cert_id_base64(self, cert_id_base64): - return CertId.load(b64decode(cert_id_base64)) - - def encode_cert_id_base64(self, hkey): - return b64encode(self.encode_cert_id_key(hkey).dump()).decode('ascii') - - def read_cert_bundle(self, ca_bundle_file, storage=None): - """ - Reads a certificate file including certificates in PEM format - """ - if storage is None: - storage = SnowflakeOCSP.ROOT_CERTIFICATES_DICT - logger.debug('reading certificate bundle: %s', ca_bundle_file) - all_certs = open(ca_bundle_file, 'rb').read() - - # don't lock storage - from asn1crypto import pem - pem_certs = pem.unarmor(all_certs, multiple=True) - for type_name, _, der_bytes in pem_certs: - if type_name == 'CERTIFICATE': - crt = Certificate.load(der_bytes) - storage[crt.subject.sha256] = crt - - def create_ocsp_request(self, issuer, subject): - """ - Create CertId and OCSPRequest - """ - cert_id = CertId({ - 'hash_algorithm': DigestAlgorithm({ - 'algorithm': u'sha1', - 'parameters': None}), - 'issuer_name_hash': OctetString(subject.issuer.sha1), - 'issuer_key_hash': OctetString(issuer.public_key.sha1), - 'serial_number': 
subject.serial_number, - }) - ocsp_request = OCSPRequest({ - 'tbs_request': TBSRequest({ - 'version': Version(0), - 'request_list': Requests([ - Request({ - 'req_cert': cert_id, - })]), - }), - }) - return cert_id, ocsp_request - - def extract_ocsp_url(self, cert): - urls = cert.ocsp_urls - ocsp_url = urls[0] if urls else None - return ocsp_url - - def decode_ocsp_request(self, ocsp_request): - return ocsp_request.dump() - - def decode_ocsp_request_b64(self, ocsp_request): - data = self.decode_ocsp_request(ocsp_request) # convert to DER - b64data = b64encode(data).decode('ascii') - return b64data - - def extract_good_status(self, single_response): - """ - Extract GOOD status - """ - this_update_native = single_response['this_update'].native - next_update_native = single_response['next_update'].native - - return this_update_native, next_update_native - - def extract_revoked_status(self, single_response): - """ - Extract REVOKED status - """ - revoked_info = single_response['cert_status'] - revocation_time = revoked_info.native['revocation_time'] - revocation_reason = revoked_info.native['revocation_reason'] - return revocation_time, revocation_reason - - def check_cert_time_validity(self, cur_time, ocsp_cert): - - val_start = ocsp_cert['tbs_certificate']['validity']['not_before'].native - val_end = ocsp_cert['tbs_certificate']['validity']['not_after'].native - - if cur_time > val_end or \ - cur_time < val_start: - debug_msg = "Certificate attached to OCSP response is invalid. OCSP response " \ - "current time - {0} certificate not before time - {1} certificate " \ - "not after time - {2}. Consider running curl -o ocsp.der {3}". \ - format(cur_time, - val_start, - val_end, - super(SnowflakeOCSPAsn1Crypto, self).debug_ocsp_failure_url) - - return False, debug_msg - else: - return True, None - - """ - is_valid_time - checks various components of the OCSP Response - for expiry. - :param cert_id - certificate id corresponding to OCSP Response - :param ocsp_response - :return True/False depending on time validity within the response - """ - def is_valid_time(self, cert_id, ocsp_response): - res = OCSPResponse.load(ocsp_response) - - if res['response_status'].native != 'successful': - raise RevocationCheckError( - msg="Invalid Status: {0}".format(res['response_status'].native), - errno=ER_INVALID_OCSP_RESPONSE) - - basic_ocsp_response = res.basic_ocsp_response - if basic_ocsp_response['certs'].native: - logger.debug("Certificate is attached in Basic OCSP Response") - ocsp_cert = basic_ocsp_response['certs'][0] - logger.debug("Verifying the attached certificate is signed by " - "the issuer") - logger.debug( - "Valid Not After: %s", - ocsp_cert['tbs_certificate']['validity']['not_after'].native) - - cur_time = datetime.now(timezone.utc) - - """ - Note: - We purposefully do not verify certificate signature here. - The OCSP Response is extracted from the OCSP Response Cache - which is expected to have OCSP Responses with verified - attached signature. Moreover this OCSP Response is eventually - going to be processed by the driver before being consumed by - the driver. - This step ensures that the OCSP Response cache does not have - any invalid entries. 
- """ - cert_valid, debug_msg = self.check_cert_time_validity(cur_time, ocsp_cert) - if not cert_valid: - logger.debug(debug_msg) - return False - - tbs_response_data = basic_ocsp_response['tbs_response_data'] - - single_response = tbs_response_data['responses'][0] - cert_status = single_response['cert_status'].name - - try: - if cert_status == 'good': - self._process_good_status(single_response, cert_id, ocsp_response) - except Exception as ex: - logger.debug("Failed to validate ocsp response %s", ex) - return False - - return True - - def process_ocsp_response(self, issuer, cert_id, ocsp_response): - try: - res = OCSPResponse.load(ocsp_response) - if self.test_mode is not None: - ocsp_load_failure = getenv("SF_TEST_OCSP_FORCE_BAD_OCSP_RESPONSE") - if ocsp_load_failure is not None: - raise RevocationCheckError("Force fail") - except Exception: - raise RevocationCheckError( - msg='Invalid OCSP Response', - errno=ER_INVALID_OCSP_RESPONSE - ) - - if res['response_status'].native != 'successful': - raise RevocationCheckError( - msg="Invalid Status: {0}".format(res['response_status'].native), - errno=ER_INVALID_OCSP_RESPONSE) - - basic_ocsp_response = res.basic_ocsp_response - if basic_ocsp_response['certs'].native: - logger.debug("Certificate is attached in Basic OCSP Response") - ocsp_cert = basic_ocsp_response['certs'][0] - logger.debug("Verifying the attached certificate is signed by " - "the issuer") - logger.debug( - "Valid Not After: %s", - ocsp_cert['tbs_certificate']['validity']['not_after'].native) - - cur_time = datetime.now(timezone.utc) - - """ - Signature verification should happen before any kind of - validation - """ - self.verify_signature( - ocsp_cert.hash_algo, - ocsp_cert.signature, - issuer, - ocsp_cert['tbs_certificate']) - - cert_valid, debug_msg = self.check_cert_time_validity(cur_time, ocsp_cert) - - if not cert_valid: - raise RevocationCheckError( - msg=debug_msg, - errno=ER_INVALID_OCSP_RESPONSE_CODE) - - else: - logger.debug("Certificate is NOT attached in Basic OCSP Response. " - "Using issuer's certificate") - ocsp_cert = issuer - - tbs_response_data = basic_ocsp_response['tbs_response_data'] - - logger.debug("Verifying the OCSP response is signed by the issuer.") - self.verify_signature( - basic_ocsp_response['signature_algorithm'].hash_algo, - basic_ocsp_response['signature'].native, - ocsp_cert, - tbs_response_data) - - single_response = tbs_response_data['responses'][0] - cert_status = single_response['cert_status'].name - if self.test_mode is not None: - test_cert_status = getenv("SF_TEST_OCSP_CERT_STATUS") - if test_cert_status == 'revoked': - cert_status = 'revoked' - elif test_cert_status == 'unknown': - cert_status = 'unknown' - elif test_cert_status == 'good': - cert_status = 'good' - - try: - if cert_status == 'good': - self._process_good_status(single_response, cert_id, ocsp_response) - SnowflakeOCSP.OCSP_CACHE.update_cache(self, cert_id, ocsp_response) - elif cert_status == 'revoked': - self._process_revoked_status(single_response, cert_id) - elif cert_status == 'unknown': - self._process_unknown_status(cert_id) - else: - debug_msg = "Unknown revocation status was returned." 
\ - "OCSP response may be malformed: {0}.".\ - format(cert_status) - raise RevocationCheckError( - msg=debug_msg, - errno=ER_INVALID_OCSP_RESPONSE_CODE - ) - except RevocationCheckError as op_er: - debug_msg = "{0} Consider running curl -o ocsp.der {1}".\ - format(op_er.msg, - self.debug_ocsp_failure_url) - raise RevocationCheckError(msg=debug_msg, errno=op_er.errno) - - def verify_signature(self, signature_algorithm, signature, cert, data): - pubkey = cert.public_key.unwrap().dump() - rsakey = RSA.importKey(pubkey) - signer = PKCS1_v1_5.new(rsakey) - if signature_algorithm in SnowflakeOCSPAsn1Crypto.SIGNATURE_ALGORITHM_TO_DIGEST_CLASS: - digest = \ - SnowflakeOCSPAsn1Crypto.SIGNATURE_ALGORITHM_TO_DIGEST_CLASS[ - signature_algorithm].new() - else: - # the last resort. should not happen. - digest = SHA1.new() - digest.update(data.dump()) - if not signer.verify(digest, signature): - raise RevocationCheckError( - msg="Failed to verify the signature", - errno=ER_INVALID_OCSP_RESPONSE) - - def extract_certificate_chain(self, connection): - """ - Gets certificate chain and extract the key info from OpenSSL connection - """ - from OpenSSL.crypto import dump_certificate, FILETYPE_ASN1 - cert_map = OrderedDict() - logger.debug( - "# of certificates: %s", - len(connection.get_peer_cert_chain())) - - for cert_openssl in connection.get_peer_cert_chain(): - cert_der = dump_certificate(FILETYPE_ASN1, cert_openssl) - cert = Certificate.load(cert_der) - logger.debug( - u'subject: %s, issuer: %s', - cert.subject.native, cert.issuer.native) - cert_map[cert.subject.sha256] = cert - - return self.create_pair_issuer_subject(cert_map) - - def create_pair_issuer_subject(self, cert_map): - """ - Creates pairs of issuer and subject certificates - """ - issuer_subject = [] - for subject_der in cert_map: - subject = cert_map[subject_der] - if subject.ocsp_no_check_value or \ - subject.ca and not subject.ocsp_urls: - # Root certificate will not be validated - # but it is used to validate the subject certificate - continue - issuer_hash = subject.issuer.sha256 - if issuer_hash not in cert_map: - # IF NO ROOT certificate is attached in the certificate chain - # read it from the local disk - self._lazy_read_ca_bundle() - logger.debug('not found issuer_der: %s', subject.issuer.native) - if issuer_hash not in SnowflakeOCSP.ROOT_CERTIFICATES_DICT: - raise RevocationCheckError( - msg="CA certificate is NOT found in the root " - "certificate list. Make sure you use the latest " - "Python Connector package and the URL is valid.") - issuer = SnowflakeOCSP.ROOT_CERTIFICATES_DICT[issuer_hash] - else: - issuer = cert_map[issuer_hash] - - issuer_subject.append((issuer, subject)) - return issuer_subject - - def subject_name(self, subject): - return subject.subject.native diff --git a/ocsp_pyasn1.py b/ocsp_pyasn1.py deleted file mode 100644 index 72cf23021..000000000 --- a/ocsp_pyasn1.py +++ /dev/null @@ -1,603 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. 
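The `verify_signature` method above (in the deleted `ocsp_asn1crypto.py`) picks a `Cryptodome` digest class by algorithm name, falls back to SHA-1 as a last resort, and then checks the PKCS#1 v1.5 signature over the DER-encoded payload. A self-contained round trip of that flow, using a throwaway key in place of the certificate's public key (demo only; assumes the `pycryptodomex` package that provides the `Cryptodome` namespace):

```
# Minimal round trip of the Cryptodome PKCS#1 v1.5 flow used by
# verify_signature. Key generation is only for the demo; the deleted
# code verifies against keys extracted from certificates.
from Cryptodome.Hash import SHA1, SHA256, SHA384, SHA512
from Cryptodome.PublicKey import RSA
from Cryptodome.Signature import PKCS1_v1_5

DIGESTS = {'sha1': SHA1, 'sha256': SHA256, 'sha384': SHA384, 'sha512': SHA512}

key = RSA.generate(2048)
data = b'tbs-response-data'

signature = PKCS1_v1_5.new(key).sign(DIGESTS['sha256'].new(data))

# Verification side: unknown algorithm names fall back to SHA1,
# matching the "last resort" branch in the deleted code.
algo = 'sha256'
digest = DIGESTS.get(algo, SHA1).new(data)
print('signature valid:', PKCS1_v1_5.new(key.publickey()).verify(digest, signature))
```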
-# - -import hashlib -import pytz -from base64 import b64encode, b64decode -from collections import OrderedDict -from datetime import datetime -from logging import getLogger -from threading import Lock -from os import getenv - -import pyasn1 -from Cryptodome.Hash import SHA256, SHA384, SHA1, SHA512 -from Cryptodome.PublicKey import RSA -from Cryptodome.Signature import PKCS1_v1_5 -from OpenSSL.crypto import ( - FILETYPE_PEM, - FILETYPE_ASN1, - load_certificate, dump_certificate) -from pyasn1.codec.der import decoder as der_decoder -from pyasn1.codec.der import encoder as der_encoder -from pyasn1.codec.native.encoder import encode as nat_encoder -from pyasn1.type import (univ, tag) -from pyasn1_modules import (rfc2459, rfc2437, rfc2560) - -from snowflake.connector.ocsp_snowflake import SnowflakeOCSP -from .compat import (PY2) -from .errorcode import (ER_INVALID_OCSP_RESPONSE, ER_INVALID_OCSP_RESPONSE_CODE) -from .errors import (RevocationCheckError) -from .rfc6960 import ( - OCSPRequest, - OCSPResponse, - TBSRequest, - CertID, - Request, - OCSPResponseStatus, - BasicOCSPResponse, - Version) - -from snowflake.connector.ssd_internal_keys import ret_wildcard_hkey - -logger = getLogger(__name__) - - -class SnowflakeOCSPPyasn1(SnowflakeOCSP): - """ - OCSP checks by pyasn1 - """ - - PYASN1_VERSION_LOCK = Lock() - PYASN1_VERSION = None - - # Signature Hash Algorithm - sha1WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.5') - sha256WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.11') - sha384WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.12') - sha512WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.13') - - SIGNATURE_HASH_ALGO_TO_DIGEST_CLASS = { - sha1WithRSAEncryption: SHA1, - sha256WithRSAEncryption: SHA256, - sha384WithRSAEncryption: SHA384, - sha512WithRSAEncryption: SHA512, - } - - WILDCARD_CERTID = None - - @staticmethod - def _get_pyasn1_version(): - with SnowflakeOCSPPyasn1.PYASN1_VERSION_LOCK: - if SnowflakeOCSPPyasn1.PYASN1_VERSION is not None: - return SnowflakeOCSPPyasn1.PYASN1_VERSION - - v = pyasn1.__version__ - vv = [int(x, 10) for x in v.split('.')] - vv.reverse() - SnowflakeOCSPPyasn1.PYASN1_VERSION = sum( - x * (1000 ** i) for i, x in enumerate(vv)) - return SnowflakeOCSPPyasn1.PYASN1_VERSION - - def __init__(self, **kwargs): - super(SnowflakeOCSPPyasn1, self).__init__(**kwargs) - self.WILDCARD_CERTID = self.encode_cert_id_key(ret_wildcard_hkey()) - - def encode_cert_id_key(self, hkey): - issuer_name_hash, issuer_key_hash, serial_number = hkey - issuer_name_hash, _ = der_decoder.decode(issuer_name_hash) - issuer_key_hash, _ = der_decoder.decode(issuer_key_hash) - serial_number, _ = der_decoder.decode(serial_number) - cert_id = CertID() - cert_id.setComponentByName( - 'hashAlgorithm', - rfc2459.AlgorithmIdentifier().setComponentByName( - 'algorithm', rfc2437.id_sha1)) - cert_id.setComponentByName('issuerNameHash', issuer_name_hash) - cert_id.setComponentByName('issuerKeyHash', issuer_key_hash) - cert_id.setComponentByName('serialNumber', serial_number) - return cert_id - - def decode_cert_id_key(self, cert_id): - return ( - der_encoder.encode(cert_id.getComponentByName('issuerNameHash')), - der_encoder.encode(cert_id.getComponentByName('issuerKeyHash')), - der_encoder.encode(cert_id.getComponentByName('serialNumber'))) - - def encode_cert_id_base64(self, hkey): - return b64encode(der_encoder.encode( - self.encode_cert_id_key(hkey))).decode('ascii') - - def decode_cert_id_base64(self, cert_id_base64): - cert_id, _ = 
der_decoder.decode(b64decode(cert_id_base64), CertID()) - return cert_id - - def read_cert_bundle(self, ca_bundle_file, storage=None): - """ - Reads a certificate file including certificates in PEM format - """ - if storage is None: - storage = SnowflakeOCSP.ROOT_CERTIFICATES_DICT - logger.debug('reading certificate bundle: %s', ca_bundle_file) - all_certs = open(ca_bundle_file, 'rb').read() - - state = 0 - contents = [] - for line in all_certs.split(b'\n'): - if state == 0 and line.startswith(b'-----BEGIN CERTIFICATE-----'): - state = 1 - contents.append(line) - elif state == 1: - contents.append(line) - if line.startswith(b'-----END CERTIFICATE-----'): - cert_openssl = load_certificate( - FILETYPE_PEM, - b'\n'.join(contents)) - cert = self._convert_openssl_to_pyasn1_certificate( - cert_openssl) - storage[self._get_subject_hash(cert)] = cert - state = 0 - contents = [] - - def _convert_openssl_to_pyasn1_certificate(self, cert_openssl): - cert_der = dump_certificate(FILETYPE_ASN1, cert_openssl) - cert = der_decoder.decode( - cert_der, asn1Spec=rfc2459.Certificate())[0] - return cert - - def _convert_pyasn1_to_openssl_certificate(self, cert): - cert_der = der_encoder.encode(cert) - cert_openssl = load_certificate(FILETYPE_ASN1, cert_der) - return cert_openssl - - def _get_name_hash(self, cert): - sha1_hash = hashlib.sha1() - sha1_hash.update(der_encoder.encode(self._get_subject(cert))) - return sha1_hash.hexdigest() - - def _get_key_hash(self, cert): - sha1_hash = hashlib.sha1() - h = SnowflakeOCSPPyasn1.bit_string_to_bytearray( - cert.getComponentByName('tbsCertificate').getComponentByName( - 'subjectPublicKeyInfo').getComponentByName('subjectPublicKey')) - sha1_hash.update(h) - return sha1_hash.hexdigest() - - def create_ocsp_request(self, issuer, subject): - """ - Create CertID and OCSPRequest - """ - hashAlgorithm = rfc2459.AlgorithmIdentifier() - hashAlgorithm.setComponentByName("algorithm", rfc2437.id_sha1) - hashAlgorithm.setComponentByName( - "parameters", univ.Any(hexValue='0500')) - - cert_id = CertID() - cert_id.setComponentByName( - 'hashAlgorithm', hashAlgorithm) - cert_id.setComponentByName( - 'issuerNameHash', - univ.OctetString(hexValue=self._get_name_hash(issuer))) - cert_id.setComponentByName( - 'issuerKeyHash', - univ.OctetString(hexValue=self._get_key_hash(issuer))) - cert_id.setComponentByName( - 'serialNumber', - subject.getComponentByName( - 'tbsCertificate').getComponentByName('serialNumber')) - - request = Request() - request.setComponentByName('reqCert', cert_id) - - request_list = univ.SequenceOf(componentType=Request()) - request_list.setComponentByPosition(0, request) - - tbs_request = TBSRequest() - tbs_request.setComponentByName('requestList', request_list) - tbs_request.setComponentByName('version', Version(0).subtype( - explicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 0))) - - ocsp_request = OCSPRequest() - ocsp_request.setComponentByName('tbsRequest', tbs_request) - - return cert_id, ocsp_request - - def extract_certificate_chain(self, connection): - """ - Gets certificate chain and extract the key info from OpenSSL connection - """ - cert_map = OrderedDict() - logger.debug( - "# of certificates: %s", - len(connection.get_peer_cert_chain())) - - for cert_openssl in connection.get_peer_cert_chain(): - cert_der = dump_certificate(FILETYPE_ASN1, cert_openssl) - cert = der_decoder.decode( - cert_der, asn1Spec=rfc2459.Certificate())[0] - subject_sha256 = self._get_subject_hash(cert) - logger.debug( - u'subject: %s, issuer: %s', - 
nat_encoder(self._get_subject(cert)), - nat_encoder(self._get_issuer(cert))) - cert_map[subject_sha256] = cert - - return self.create_pair_issuer_subject(cert_map) - - def _get_subject(self, cert): - return cert.getComponentByName( - 'tbsCertificate').getComponentByName('subject') - - def _get_issuer(self, cert): - return cert.getComponentByName( - 'tbsCertificate').getComponentByName('issuer') - - def _get_subject_hash(self, cert): - sha256_hash = hashlib.sha256() - sha256_hash.update( - der_encoder.encode(self._get_subject(cert))) - return sha256_hash.digest() - - def _get_issuer_hash(self, cert): - sha256_hash = hashlib.sha256() - sha256_hash.update( - der_encoder.encode(self._get_issuer(cert))) - return sha256_hash.digest() - - def create_pair_issuer_subject(self, cert_map): - """ - Creates pairs of issuer and subject certificates - """ - issuer_subject = [] - for subject_der in cert_map: - cert = cert_map[subject_der] - - nocheck, is_ca, ocsp_urls = self._extract_extensions(cert) - if nocheck or is_ca and not ocsp_urls: - # Root certificate will not be validated - # but it is used to validate the subject certificate - continue - issuer_hash = self._get_issuer_hash(cert) - if issuer_hash not in cert_map: - # IF NO ROOT certificate is attached in the certificate chain - # read it from the local disk - self._lazy_read_ca_bundle() - logger.debug( - 'not found issuer_der: %s', self._get_issuer_hash(cert)) - if issuer_hash not in SnowflakeOCSP.ROOT_CERTIFICATES_DICT: - raise RevocationCheckError( - msg="CA certificate is NOT found in the root " - "certificate list. Make sure you use the latest " - "Python Connector package and the URL is valid.") - issuer = SnowflakeOCSP.ROOT_CERTIFICATES_DICT[issuer_hash] - else: - issuer = cert_map[issuer_hash] - - issuer_subject.append((issuer, cert)) - return issuer_subject - - def _extract_extensions(self, cert): - extensions = cert.getComponentByName( - 'tbsCertificate').getComponentByName('extensions') - is_ca = False - ocsp_urls = [] - nocheck = False - for e in extensions: - oid = e.getComponentByName('extnID') - if oid == rfc2459.id_ce_basicConstraints: - constraints = der_decoder.decode( - e.getComponentByName('extnValue'), - asn1Spec=rfc2459.BasicConstraints())[0] - is_ca = constraints.getComponentByPosition(0) - elif oid == rfc2459.id_pe_authorityInfoAccess: - auth_info = der_decoder.decode( - e.getComponentByName('extnValue'), - asn1Spec=rfc2459.AuthorityInfoAccessSyntax())[0] - for a in auth_info: - if a.getComponentByName('accessMethod') == \ - rfc2560.id_pkix_ocsp: - url = nat_encoder( - a.getComponentByName( - 'accessLocation').getComponentByName( - 'uniformResourceIdentifier')) - ocsp_urls.append(url) - elif oid == rfc2560.id_pkix_ocsp_nocheck: - nocheck = True - - return nocheck, is_ca, ocsp_urls - - def subject_name(self, cert): - return nat_encoder(self._get_subject(cert)) - - def extract_ocsp_url(self, cert): - _, _, ocsp_urls = self._extract_extensions(cert) - return ocsp_urls[0] if ocsp_urls else None - - def decode_ocsp_request(self, ocsp_request): - return der_encoder.encode(ocsp_request) - - def decode_ocsp_request_b64(self, ocsp_request): - data = self.decode_ocsp_request(ocsp_request) - b64data = b64encode(data).decode('ascii') - return b64data - - def extract_good_status(self, single_response): - """ - Extract GOOD status - """ - this_update_native = \ - self._convert_generalized_time_to_datetime( - single_response.getComponentByName('thisUpdate')) - next_update_native = \ - self._convert_generalized_time_to_datetime( - 
single_response.getComponentByName('nextUpdate')) - return this_update_native, next_update_native - - def extract_revoked_status(self, single_response): - """ - Extract REVOKED status - """ - cert_status = single_response.getComponentByName('certStatus') - revoked = cert_status.getComponentByName('revoked') - revocation_time = \ - self._convert_generalized_time_to_datetime( - revoked.getComponentByName('revocationTime')) - revocation_reason = revoked.getComponentByName('revocationReason') - try: - revocation_reason_str = str(revocation_reason) - except: - revocation_reason_str = 'n/a' - return revocation_time, revocation_reason_str - - def _convert_generalized_time_to_datetime(self, gentime): - return datetime.strptime(str(gentime), '%Y%m%d%H%M%SZ') - - def check_cert_time_validity(self, cur_time, tbs_certificate): - cert_validity = tbs_certificate.getComponentByName('validity') - cert_not_after = cert_validity.getComponentByName('notAfter') - val_end = cert_not_after.getComponentByName('utcTime').asDateTime - cert_not_before = cert_validity.getComponentByName('notBefore') - val_start = cert_not_before.getComponentByName('utcTime').asDateTime - - if cur_time > val_end or cur_time < val_start: - debug_msg = "Certificate attached to OCSP Response is invalid. " \ - "OCSP response current time - {0} certificate not " \ - "before time - {1} certificate not after time - {2}. ". \ - format(cur_time, val_start, val_end) - return False, debug_msg - else: - return True, None - - """ - is_valid_time - checks various components of the OCSP Response - for expiry. - :param cert_id - certificate id corresponding to OCSP Response - :param ocsp_response - :return True/False depending on time validity within the response - """ - def is_valid_time(self, cert_id, ocsp_response): - res = der_decoder.decode(ocsp_response, OCSPResponse())[0] - - if res.getComponentByName('responseStatus') != OCSPResponseStatus( - 'successful'): - raise RevocationCheckError( - msg="Invalid Status: {0}".format( - res.getComponentByName('response_status')), - errno=ER_INVALID_OCSP_RESPONSE) - - response_bytes = res.getComponentByName('responseBytes') - basic_ocsp_response = der_decoder.decode( - response_bytes.getComponentByName('response'), - BasicOCSPResponse())[0] - - attached_certs = basic_ocsp_response.getComponentByName('certs') - if self._has_certs_in_ocsp_response(attached_certs): - logger.debug("Certificate is attached in Basic OCSP Response") - cert_der = der_encoder.encode(attached_certs[0]) - cert_openssl = load_certificate(FILETYPE_ASN1, cert_der) - ocsp_cert = self._convert_openssl_to_pyasn1_certificate( - cert_openssl) - - cur_time = datetime.utcnow().replace(tzinfo=pytz.utc) - tbs_certificate = ocsp_cert.getComponentByName('tbsCertificate') - - """ - Note: - We purposefully do not verify certificate signature here. - The OCSP Response is extracted from the OCSP Response Cache - which is expected to have OCSP Responses with verified - attached signature. Moreover this OCSP Response is eventually - going to be processed by the driver before being consumed by - the driver. - This step ensures that the OCSP Response cache does not have - any invalid entries. 
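The cache-validation path that follows reduces to a window comparison against the response's thisUpdate/nextUpdate timestamps. A hedged sketch of that rule; the helper name and the tolerance value are illustrative, not the connector's actual constants:

```
from datetime import datetime, timedelta, timezone

def within_validity_window(this_update, next_update,
                           tolerance=timedelta(minutes=5)):
    # A cached OCSP response is usable only while "now" falls inside
    # [thisUpdate - tolerance, nextUpdate + tolerance].
    now = datetime.now(timezone.utc)
    return this_update - tolerance <= now <= next_update + tolerance
```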
- """ - - cert_valid, debug_msg = self.check_cert_time_validity(cur_time, - tbs_certificate) - if not cert_valid: - logger.debug(debug_msg) - return False - - tbs_response_data = basic_ocsp_response.getComponentByName( - 'tbsResponseData') - single_response = tbs_response_data.getComponentByName('responses')[0] - cert_status = single_response.getComponentByName('certStatus') - try: - if cert_status.getName() == 'good': - self._process_good_status(single_response, cert_id, ocsp_response) - except Exception as ex: - logger.debug("Failed to validate ocsp response %s", ex) - return False - - return True - - def process_ocsp_response(self, issuer, cert_id, ocsp_response): - try: - res = der_decoder.decode(ocsp_response, OCSPResponse())[0] - if self.test_mode is not None: - ocsp_load_failure = getenv("SF_TEST_OCSP_FORCE_BAD_OCSP_RESPONSE") - if ocsp_load_failure is not None: - raise RevocationCheckError("Force fail") - except Exception: - raise RevocationCheckError( - msg='Invalid OCSP Response', - errno=ER_INVALID_OCSP_RESPONSE - ) - - if res.getComponentByName('responseStatus') != OCSPResponseStatus( - 'successful'): - raise RevocationCheckError( - msg="Invalid Status: {0}".format( - res.getComponentByName('response_status')), - errno=ER_INVALID_OCSP_RESPONSE) - - response_bytes = res.getComponentByName('responseBytes') - basic_ocsp_response = der_decoder.decode( - response_bytes.getComponentByName('response'), - BasicOCSPResponse())[0] - - attached_certs = basic_ocsp_response.getComponentByName('certs') - if self._has_certs_in_ocsp_response(attached_certs): - logger.debug("Certificate is attached in Basic OCSP Response") - cert_der = der_encoder.encode(attached_certs[0]) - cert_openssl = load_certificate(FILETYPE_ASN1, cert_der) - ocsp_cert = self._convert_openssl_to_pyasn1_certificate(cert_openssl) - - cur_time = datetime.utcnow().replace(tzinfo=pytz.utc) - tbs_certificate = ocsp_cert.getComponentByName('tbsCertificate') - - """ - Signature verification should happen before any kind of - validation - """ - - self.verify_signature( - ocsp_cert.getComponentByName('signatureAlgorithm'), - ocsp_cert.getComponentByName('signatureValue'), - issuer, - ocsp_cert.getComponentByName('tbsCertificate')) - - cert_valid, debug_msg = self.check_cert_time_validity(cur_time, - tbs_certificate) - if not cert_valid: - raise RevocationCheckError( - msg=debug_msg, - errno=ER_INVALID_OCSP_RESPONSE_CODE - ) - else: - logger.debug("Certificate is NOT attached in Basic OCSP Response. 
" - "Using issuer's certificate") - ocsp_cert = issuer - - tbs_response_data = basic_ocsp_response.getComponentByName( - 'tbsResponseData') - - logger.debug("Verifying the OCSP response is signed by the issuer.") - self.verify_signature( - basic_ocsp_response.getComponentByName('signatureAlgorithm'), - basic_ocsp_response.getComponentByName('signature'), - ocsp_cert, - tbs_response_data - ) - - single_response = tbs_response_data.getComponentByName('responses')[0] - cert_status = single_response.getComponentByName('certStatus') - - if self.test_mode is not None: - test_cert_status = getenv("SF_TEST_OCSP_CERT_STATUS") - if test_cert_status == 'revoked': - cert_status = 'revoked' - elif test_cert_status == 'unknown': - cert_status = 'unknown' - elif test_cert_status == 'good': - cert_status = 'good' - - try: - if cert_status.getName() == 'good': - self._process_good_status(single_response, cert_id, ocsp_response) - SnowflakeOCSP.OCSP_CACHE.update_cache(self, cert_id, ocsp_response) - elif cert_status.getName() == 'revoked': - self._process_revoked_status(single_response, cert_id) - elif cert_status.getName() == 'unknown': - self._process_unknown_status(cert_id) - else: - debug_msg = "Unknown revocation status was returned. " \ - "OCSP response may be malformed: {0}. ".format(cert_status) - raise RevocationCheckError( - msg=debug_msg, - errno=ER_INVALID_OCSP_RESPONSE_CODE) - except RevocationCheckError as op_er: - if not self.debug_ocsp_failure_url: - debug_msg = op_er.msg - else: - debug_msg = "{0} Consider running curl -o ocsp.der {1}".\ - format(op_er.msg, - self.debug_ocsp_failure_url) - raise RevocationCheckError( - msg=debug_msg, - errno=op_er.errno) - - def verify_signature(self, signature_algorithm, signature, cert, data): - """ - Verifies the signature - """ - sig = SnowflakeOCSPPyasn1.bit_string_to_bytearray(signature) - if PY2: - sig = str(sig) - else: - sig = sig.decode('latin-1').encode('latin-1') - - pubkey = SnowflakeOCSPPyasn1.bit_string_to_bytearray( - cert.getComponentByName( - 'tbsCertificate').getComponentByName( - 'subjectPublicKeyInfo').getComponentByName('subjectPublicKey')) - if PY2: - pubkey = str(pubkey) - else: - pubkey = pubkey.decode('latin-1').encode('latin-1') - - rsakey = RSA.importKey(pubkey) - signer = PKCS1_v1_5.new(rsakey) - - algorithm = signature_algorithm[0] - if algorithm in SnowflakeOCSPPyasn1.SIGNATURE_HASH_ALGO_TO_DIGEST_CLASS: - digest = SnowflakeOCSPPyasn1.SIGNATURE_HASH_ALGO_TO_DIGEST_CLASS[ - algorithm].new() - else: - digest = SHA1.new() - - data = der_encoder.encode(data) - digest.update(data) - if not signer.verify(digest, sig): - raise RevocationCheckError( - msg="Failed to verify the signature", - errno=ER_INVALID_OCSP_RESPONSE) - - def _has_certs_in_ocsp_response(self, certs): - """ - Check if the certificate is attached to OCSP response - """ - if SnowflakeOCSPPyasn1._get_pyasn1_version() <= 3000: - return certs is not None - else: - # behavior changed. 
- return certs is not None and certs.hasValue() and certs[ - 0].hasValue() - - @staticmethod - def bit_string_to_bytearray(bit_string): - """ - Converts Bitstring to bytearray - """ - ret = [] - for idx in range(int(len(bit_string) / 8)): - v = 0 - for idx0, bit in enumerate(bit_string[idx * 8:idx * 8 + 8]): - v = v | (bit << (7 - idx0)) - ret.append(v) - return bytearray(ret) diff --git a/parameters.appveyor.py.enc b/parameters.appveyor.py.enc deleted file mode 100644 index 4e7ddfd60..000000000 Binary files a/parameters.appveyor.py.enc and /dev/null differ diff --git a/parameters.py.enc b/parameters.py.enc deleted file mode 100644 index 91c289842..000000000 Binary files a/parameters.py.enc and /dev/null differ diff --git a/parameters_az.py.enc b/parameters_az.py.enc deleted file mode 100644 index 744b476ac..000000000 Binary files a/parameters_az.py.enc and /dev/null differ diff --git a/proxy.py b/proxy.py deleted file mode 100644 index d7cc0f3cc..000000000 --- a/proxy.py +++ /dev/null @@ -1,46 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. -# - -import os - -from .compat import (TO_UNICODE) - - -def set_proxies(proxy_host, proxy_port, proxy_user=None, proxy_password=None): - """ - Set proxy dict for requests - """ - PREFIX_HTTP = 'http://' - PREFIX_HTTPS = 'https://' - proxies = None - if proxy_host and proxy_port: - if proxy_host.startswith(PREFIX_HTTP): - proxy_host = proxy_host[len(PREFIX_HTTP):] - elif proxy_host.startswith(PREFIX_HTTPS): - proxy_host = proxy_host[len(PREFIX_HTTPS):] - if proxy_user or proxy_password: - proxy_auth = u'{proxy_user}:{proxy_password}@'.format( - proxy_user=proxy_user if proxy_user is not None else '', - proxy_password=proxy_password if proxy_password is not - None else '' - ) - else: - proxy_auth = u'' - proxies = { - u'http': u'http://{proxy_auth}{proxy_host}:{proxy_port}'.format( - proxy_host=proxy_host, - proxy_port=TO_UNICODE(proxy_port), - proxy_auth=proxy_auth, - ), - u'https': u'http://{proxy_auth}{proxy_host}:{proxy_port}'.format( - proxy_host=proxy_host, - proxy_port=TO_UNICODE(proxy_port), - proxy_auth=proxy_auth, - ), - } - os.environ['HTTP_PROXY'] = proxies[u'http'] - os.environ['HTTPS_PROXY'] = proxies[u'https'] - return proxies diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 000000000..ad1f850c3 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,27 @@ +[build-system] +requires = [ + # The minimum setuptools version is specific to the PEP 517 backend, + # and may be stricter than the version required in `setup.py` + "setuptools>=40.6.0", + "wheel", + "cython", + # Must be kept in sync with the `setup_requirements` in `setup.cfg` + "pyarrow>=6.0.0,<6.1.0", +] + +[tool.cibuildwheel] +test-skip = "*" +manylinux-x86_64-image = "manylinux2010" +environment = {AUDITWHEEL_PLAT="manylinux2014_$(uname -m)"} +build-verbosity = 1 + +[tool.cibuildwheel.linux] +archs = ["x86_64", "aarch64"] + +[tool.cibuildwheel.macos] +archs = ["x86_64", "arm64"] +# Don't repair macOS wheels +repair-wheel-command = "" + +[tool.cibuildwheel.windows] +archs = ["AMD64"] diff --git a/remote_storage_util.py b/remote_storage_util.py deleted file mode 100644 index 7db1209d8..000000000 --- a/remote_storage_util.py +++ /dev/null @@ -1,274 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. 
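The deleted proxy.py above routes both URL schemes through a single http:// proxy URL and mirrors it into HTTP_PROXY/HTTPS_PROXY. A sketch of the resulting requests-style dict; the host, port, and credentials here are made up:

```
proxy_url = "http://{auth}{host}:{port}".format(
    auth="proxyuser:proxypass@",  # "" when no credentials are set
    host="proxy.example.com",     # hypothetical proxy host
    port=8080,
)
proxies = {"http": proxy_url, "https": proxy_url}
# requests.get(url, proxies=proxies) would then send both http and
# https traffic through the same http:// proxy, as set_proxies() did.
```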
-# - -from __future__ import division - -import os -import shutil -import time -from collections import namedtuple -from logging import getLogger - -from .azure_util import SnowflakeAzureUtil -from .constants import (SHA256_DIGEST, ResultStatus) -from .encryption_util import (SnowflakeEncryptionUtil) -from .s3_util import SnowflakeS3Util - -DEFAULT_CONCURRENCY = 1 -DEFAULT_MAX_RETRY = 5 - -""" -Encryption Material -""" -SnowflakeFileEncryptionMaterial = namedtuple( - "SnowflakeS3FileEncryptionMaterial", [ - "query_stage_master_key", # query stage master key - "query_id", # query id - "smk_id" # SMK id - ] -) - - -class NeedRenewTokenError(Exception): - pass - - -class SnowflakeRemoteStorageUtil(object): - - @staticmethod - def getStorageType(type): - if (type == u'S3'): - return SnowflakeS3Util - elif (type == u'AZURE'): - return SnowflakeAzureUtil - - @staticmethod - def create_client(stage_info, use_accelerate_endpoint=False): - util_class = SnowflakeRemoteStorageUtil.getStorageType( - stage_info[u'locationType']) - return util_class.create_client( - stage_info, - use_accelerate_endpoint=use_accelerate_endpoint) - - @staticmethod - def upload_one_file_to_s3(meta): - """ - Uploads a file to S3 - :param meta: a file meta - """ - logger = getLogger(__name__) - encryption_metadata = None - - if u'encryption_material' in meta: - (encryption_metadata, - data_file) = SnowflakeEncryptionUtil.encrypt_file( - meta[u'encryption_material'], - meta[u'real_src_file_name'], tmp_dir=meta[u'tmp_dir']) - - logger.debug( - u'encrypted data file=%s, size=%s', data_file, - os.path.getsize(data_file)) - else: - logger.debug(u'not encrypted data file') - data_file = meta[u'real_src_file_name'] - util_class = SnowflakeRemoteStorageUtil.getStorageType( - meta[u'stage_info'][u'locationType']) - if not meta.get(u'overwrite'): - file_header = util_class.get_file_header( - meta, meta[u'dst_file_name']) - if meta[u'result_status'] == ResultStatus.RENEW_TOKEN: - # need renew token - return - elif file_header and meta[ - u'result_status'] == ResultStatus.UPLOADED and \ - not meta.get(u'overwrite'): - logger.debug( - u'file already exists, checking digest: ' - u'location="%s", file_name="%s"', - meta[u'stage_info'][u'location'], - meta[u'dst_file_name']) - sfc_digest = file_header.digest - if sfc_digest == meta[SHA256_DIGEST]: - logger.debug(u'file digest matched: digest=%s', - sfc_digest) - meta[u'dst_file_size'] = 0 - meta[u'error_details'] = \ - (u'File with the same destination name ' - u'and checksum already exists') - meta[u'result_status'] = ResultStatus.SKIPPED - return - else: - logger.debug( - u"file digest didn't match: digest_s3=%s, " - u"digest_local=%s", - sfc_digest, meta[SHA256_DIGEST]) - - logger.debug(u'putting a file: %s, %s', - meta[u'stage_info'][u'location'], meta[u'dst_file_name']) - - max_concurrency = meta[u'parallel'] - last_err = None - max_retry = DEFAULT_MAX_RETRY - for retry in range(max_retry): - util_class.upload_file( - data_file, - meta, - encryption_metadata, - max_concurrency - ) - - if (meta[u'result_status'] == ResultStatus.UPLOADED): - return - elif (meta[u'result_status'] == ResultStatus.RENEW_TOKEN): - return - elif (meta[u'result_status'] == ResultStatus.NEED_RETRY): - last_err = meta[u'last_error'] - logger.debug( - 'Failed to upload a file: %s, err: %s. 
Retrying with ' - 'max concurrency: %s', - data_file, last_err, max_concurrency) - if 'no_sleeping_time' not in meta: - sleeping_time = min(2 ** retry, 16) - logger.debug(u"sleeping: %s", sleeping_time) - time.sleep(sleeping_time) - elif (meta[ - u'result_status'] == ResultStatus.NEED_RETRY_WITH_LOWER_CONCURRENCY): - last_err = meta[u'last_error'] - max_concurrency = meta[u'parallel'] - int( - retry * meta[u'parallel'] / max_retry) - max_concurrency = max(DEFAULT_CONCURRENCY, max_concurrency) - meta['last_max_concurrency'] = max_concurrency - - logger.debug( - 'Failed to upload a file: %s, err: %s. Retrying with ' - 'max concurrency: %s', - data_file, last_err, max_concurrency) - if 'no_sleeping_time' not in meta: - sleeping_time = min(2 ** retry, 16) - logger.debug(u"sleeping: %s", sleeping_time) - time.sleep(sleeping_time) - else: - if last_err: - raise last_err - else: - raise Exception( - "Unknown Error in uploading a file: %s", - data_file) - - @staticmethod - def download_one_file(meta): - """ - Downloads a file from S3 - :param meta: file meta - """ - logger = getLogger(__name__) - full_dst_file_name = os.path.join( - meta[u'local_location'], - os.path.basename(meta[u'dst_file_name'])) - full_dst_file_name = os.path.realpath(full_dst_file_name) - # TODO: validate full_dst_file_name is under the writable directory - base_dir = os.path.dirname(full_dst_file_name) - if not os.path.exists(base_dir): - os.makedirs(base_dir) - - util_class = SnowflakeRemoteStorageUtil.getStorageType( - meta[u'stage_info'][u'locationType']) - file_header = util_class.get_file_header(meta, meta[u'src_file_name']) - meta[u'src_file_size'] = file_header.content_length - - full_dst_file_name = os.path.join( - meta[u'local_location'], - os.path.basename(meta[u'dst_file_name'])) - full_dst_file_name = os.path.realpath(full_dst_file_name) - - max_concurrency = meta[u'parallel'] - last_err = None - max_retry = DEFAULT_MAX_RETRY - for retry in range(max_retry): - util_class._native_download_file(meta, full_dst_file_name, - max_concurrency) - if (meta[u'result_status'] == ResultStatus.DOWNLOADED): - if u'encryption_material' in meta: - logger.debug( - u'encrypted data file=%s', full_dst_file_name) - tmp_dst_file_name = SnowflakeEncryptionUtil.decrypt_file( - file_header.encryption_metadata, - meta[u'encryption_material'], - full_dst_file_name, tmp_dir=meta[u'tmp_dir']) - shutil.copyfile(tmp_dst_file_name, full_dst_file_name) - os.unlink(tmp_dst_file_name) - else: - logger.debug(u'not encrypted data file=%s', - full_dst_file_name) - - statinfo = os.stat(full_dst_file_name) - meta[u'dst_file_size'] = statinfo.st_size - return - elif (meta[u'result_status'] == ResultStatus.RENEW_TOKEN): - return - elif (meta[ - u'result_status'] == ResultStatus.NEED_RETRY_WITH_LOWER_CONCURRENCY): - max_concurrency = meta[u'parallel'] - int( - retry * meta[u'parallel'] / max_retry) - max_concurrency = max(DEFAULT_CONCURRENCY, max_concurrency) - meta['last_max_concurrency'] = max_concurrency - last_err = meta[u'last_error'] - logger.debug( - 'Failed to download a file: %s, err: %s. Retrying with ' - 'max concurrency: %s', - full_dst_file_name, last_err, max_concurrency) - if 'no_sleeping_time' not in meta: - sleeping_time = min(2 ** retry, 16) - logger.debug(u"sleeping: %s", sleeping_time) - time.sleep(sleeping_time) - elif (meta[u'result_status'] == ResultStatus.NEED_RETRY): - last_err = meta[u'last_error'] - logger.debug( - 'Failed to download a file: %s, err: %s. 
Retrying with ' - 'max concurrency: %s', - full_dst_file_name, last_err, max_concurrency) - if 'no_sleeping_time' not in meta: - sleeping_time = min(2 ** retry, 16) - logger.debug(u"sleeping: %s", sleeping_time) - time.sleep(sleeping_time) - else: - if last_err: - raise last_err - else: - raise Exception( - "Unknown Error in downloading a file: %s", - full_dst_file_name) - - @staticmethod - def upload_one_file_with_retry(meta): - """ - Uploads one file to S3 with retry - :param meta: a file meta - """ - logger = getLogger(__name__) - - util_class = SnowflakeRemoteStorageUtil.getStorageType( - meta[u'stage_info'][u'locationType']) - for _ in range(10): - # retry - SnowflakeRemoteStorageUtil.upload_one_file_to_s3(meta) - if meta[u'result_status'] == ResultStatus.UPLOADED: - for _ in range(10): - util_class.get_file_header( - meta, meta[u'dst_file_name']) - if meta[u'result_status'] == ResultStatus.NOT_FOUND_FILE: - time.sleep(1) # wait 1 second - logger.debug('not found. double checking...') - continue - break - else: - # not found. retry with the outer loop - logger.debug('not found. gave up. reuploading...') - continue - break - else: - # could not upload a file even after retry - meta[u'result_status'] = ResultStatus.ERROR diff --git a/rfc6960.py b/rfc6960.py deleted file mode 100644 index 4e3f81705..000000000 --- a/rfc6960.py +++ /dev/null @@ -1,179 +0,0 @@ -# -*- coding: utf-8 -*- -# -# X.509 Internet Public Key Infrastructure Online Certificate Status -# Protocol - OCSP -# -# ASN.1 source from -# https://tools.ietf.org/html/rfc6960 -# -from pyasn1.type import (univ, namedtype, tag, namedval, useful) -from pyasn1_modules import rfc2459 - - -class Version(univ.Integer): - namedValues = namedval.NamedValues( - ('v1', 0) - ) - - -class CertID(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('hashAlgorithm', rfc2459.AlgorithmIdentifier()), - namedtype.NamedType('issuerNameHash', univ.OctetString()), - namedtype.NamedType('issuerKeyHash', univ.OctetString()), - namedtype.NamedType('serialNumber', rfc2459.CertificateSerialNumber())) - - -class Request(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('reqCert', CertID()), - namedtype.OptionalNamedType( - 'singleRequestExtensions', - rfc2459.Extensions().subtype( - explicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 0)))) - - -class TBSRequest(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.DefaultedNamedType( - 'version', Version(0).subtype( - explicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 0))), - namedtype.OptionalNamedType( - 'requestorName', - rfc2459.GeneralName().subtype( - explicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 1))), - namedtype.NamedType('requestList', - univ.SequenceOf(componentType=Request())), - namedtype.OptionalNamedType( - 'requestExtensions', - rfc2459.Extensions().subtype( - explicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 2)))) - - -class Certs(univ.SequenceOf): - componentType = rfc2459.Certificate() - - -class Signature(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('signatureAlgorithm', - rfc2459.AlgorithmIdentifier()), - namedtype.NamedType('signature', univ.BitString()), - namedtype.NamedType( - 'certs', Certs().subtype( - explicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 0)))) - - -class OCSPRequest(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('tbsRequest', TBSRequest()), - 
namedtype.OptionalNamedType( - 'optionalSignature', Signature().subtype( - explicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 0)))) - - -# OCSP Response -class OCSPResponseStatus(univ.Enumerated): - namedValues = namedval.NamedValues( - ('successful', 0), - ('malformedRequest', 1), - ('internalError', 2), - ('tryLater', 3), - # ('not-used', 4), - ('sigRequired', 5), - ('unauthorized', 6) - ) - - -class ResponseBytes(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('responseType', univ.ObjectIdentifier()), - namedtype.NamedType('response', univ.OctetString()) - ) - - -class OCSPResponse(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('responseStatus', OCSPResponseStatus()), - namedtype.OptionalNamedType('responseBytes', ResponseBytes().subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0)))) - - -KeyHash = univ.OctetString -UnknownInfo = univ.Null - - -class RevokedInfo(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('revocationTime', useful.GeneralizedTime()), - namedtype.OptionalNamedType( - 'revocationReason', - rfc2459.CRLReason().subtype( - explicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 0)))) - - -class CertStatus(univ.Choice): - componentType = namedtype.NamedTypes( - namedtype.NamedType('good', univ.Null().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), - namedtype.NamedType('revoked', RevokedInfo().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), - namedtype.NamedType('unknown', UnknownInfo().subtype( - implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2)))) - - -class SingleResponse(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('certID', CertID()), - namedtype.NamedType('certStatus', CertStatus()), - namedtype.NamedType('thisUpdate', useful.GeneralizedTime()), - namedtype.OptionalNamedType( - 'nextUpdate', - useful.GeneralizedTime().subtype( - explicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 0))), - namedtype.OptionalNamedType( - 'singleExtensions', - rfc2459.Extensions().subtype( - explicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 1)))) - - -class ResponderID(univ.Choice): - componentType = namedtype.NamedTypes( - namedtype.NamedType('byName', rfc2459.Name().subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), - namedtype.NamedType('byKey', KeyHash().subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2)))) - - -class ResponseData(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.DefaultedNamedType('version', Version(0).subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), - namedtype.NamedType('responderID', ResponderID()), - namedtype.NamedType('producedAt', useful.GeneralizedTime()), - namedtype.NamedType('responses', - univ.SequenceOf(componentType=SingleResponse())), - namedtype.OptionalNamedType( - 'responseExtensions', - rfc2459.Extensions().subtype( - explicitTag=tag.Tag( - tag.tagClassContext, tag.tagFormatSimple, 1)))) - - -class BasicOCSPResponse(univ.Sequence): - componentType = namedtype.NamedTypes( - namedtype.NamedType('tbsResponseData', ResponseData()), - namedtype.NamedType('signatureAlgorithm', - rfc2459.AlgorithmIdentifier()), - namedtype.NamedType('signature', univ.BitString()), - namedtype.OptionalNamedType('certs', Certs().subtype( - explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 
0)))) diff --git a/s3_util.py b/s3_util.py deleted file mode 100644 index 75d42a588..000000000 --- a/s3_util.py +++ /dev/null @@ -1,288 +0,0 @@ -from __future__ import division - -import logging -import os -from collections import namedtuple -from logging import getLogger - -import OpenSSL -import boto3 -import botocore.exceptions -from boto3.exceptions import RetriesExceededError, S3UploadFailedError -from boto3.s3.transfer import TransferConfig -from botocore.client import Config - -from .compat import TO_UNICODE -from .constants import ( - SHA256_DIGEST, ResultStatus, FileHeader, - HTTP_HEADER_CONTENT_TYPE, - HTTP_HEADER_VALUE_OCTET_STREAM) -from .encryption_util import (EncryptionMetadata) - -SFC_DIGEST = u'sfc-digest' - -AMZ_MATDESC = u"x-amz-matdesc" -AMZ_KEY = u"x-amz-key" -AMZ_IV = u"x-amz-iv" -ERRORNO_WSAECONNABORTED = 10053 # network connection was aborted - -EXPIRED_TOKEN = u'ExpiredToken' -ADDRESSING_STYLE = u'virtual' # explicit force to use virtual addressing style - -""" -S3 Location: S3 bucket name + path -""" -S3Location = namedtuple( - "S3Location", [ - "bucket_name", # S3 bucket name - "s3path" # S3 path name - - ]) - - -class SnowflakeS3Util: - """ - S3 Utility class - """ - # magic number, given from error message. - DATA_SIZE_THRESHOLD = 5242880 - - @staticmethod - def create_client(stage_info, use_accelerate_endpoint=False): - """ - Creates a client object with a stage credential - :param stage_credentials: a stage credential - :param use_accelerate_endpoint: is accelerate endpoint? - :return: client - """ - logger = getLogger(__name__) - stage_credentials = stage_info[u'creds'] - security_token = stage_credentials.get(u'AWS_TOKEN', None) - logger.debug(u"AWS_ID: %s", stage_credentials[u'AWS_ID']) - - config = Config( - signature_version=u's3v4', - s3={ - 'use_accelerate_endpoint': use_accelerate_endpoint, - 'addressing_style': ADDRESSING_STYLE - }) - client = boto3.resource( - u's3', - region_name=stage_info['region'], - aws_access_key_id=stage_credentials[u'AWS_ID'], - aws_secret_access_key=stage_credentials[u'AWS_KEY'], - aws_session_token=security_token, - config=config, - ) - return client - - @staticmethod - def extract_bucket_name_and_path(stage_location): - stage_location = os.path.expanduser(stage_location) - bucket_name = stage_location - s3path = u'' - - # split stage location as bucket name and path - if u'/' in stage_location: - bucket_name = stage_location[0:stage_location.index(u'/')] - s3path = stage_location[stage_location.index(u'/') + 1:] - if s3path and not s3path.endswith(u'/'): - s3path += u'/' - - return S3Location( - bucket_name=bucket_name, - s3path=s3path) - - @staticmethod - def _get_s3_object(meta, filename): - logger = getLogger(__name__) - client = meta[u'client'] - s3location = SnowflakeS3Util.extract_bucket_name_and_path( - meta[u'stage_info'][u'location']) - s3path = s3location.s3path + filename.lstrip('/') - - if logger.getEffectiveLevel() == logging.DEBUG: - tmp_meta = {} - for k, v in meta.items(): - if k != 'stage_credentials': - tmp_meta[k] = v - logger.debug( - u"s3location.bucket_name: %s, " - u"s3location.s3path: %s, " - u"s3fullpath: %s, " - u'meta: %s', - s3location.bucket_name, - s3location.s3path, - s3path, tmp_meta) - return client.Object(s3location.bucket_name, s3path) - - @staticmethod - def get_file_header(meta, filename): - """ - Gets S3 file object - :param meta: file meta object - :return: S3 object if no error, otherwise None. Check meta[ - u'result_status'] for status. 
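extract_bucket_name_and_path above treats everything before the first slash as the bucket name and normalizes the remainder to end with '/'. A self-contained sketch of that splitting rule (the expanduser call is omitted and the function name is illustrative):

```
from collections import namedtuple

S3Location = namedtuple("S3Location", ["bucket_name", "s3path"])

def split_stage_location(stage_location):
    # Everything before the first "/" is the bucket name; the rest is
    # the key prefix, forced to end with "/" when non-empty.
    bucket_name, _, s3path = stage_location.partition("/")
    if s3path and not s3path.endswith("/"):
        s3path += "/"
    return S3Location(bucket_name, s3path)

assert split_stage_location("mybucket/stage/files") == \
    S3Location("mybucket", "stage/files/")
assert split_stage_location("mybucket") == S3Location("mybucket", "")
```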
- """ - - logger = getLogger(__name__) - akey = SnowflakeS3Util._get_s3_object(meta, filename) - try: - # HTTP HEAD request - akey.load() - except botocore.exceptions.ClientError as e: - if e.response[u'Error'][u'Code'] == EXPIRED_TOKEN: - logger.debug(u"AWS Token expired. Renew and retry") - meta[u'result_status'] = ResultStatus.RENEW_TOKEN - return None - elif e.response[u'Error'][u'Code'] == u'404': - logger.debug(u'not found. bucket: %s, path: %s', - akey.bucket_name, akey.key) - meta[u'result_status'] = ResultStatus.NOT_FOUND_FILE - return FileHeader( - digest=None, - content_length=None, - encryption_metadata=None, - ) - elif e.response[u'Error'][u'Code'] == u'400': - logger.debug(u'Bad request, token needs to be renewed: %s. ' - u'bucket: %s, path: %s', - e.response[u'Error'][u'Message'], - akey.bucket_name, akey.key) - meta[u'result_status'] = ResultStatus.RENEW_TOKEN - return None - logger.debug( - u"Failed to get metadata for %s, %s: %s", - akey.bucket_name, akey.key, e) - meta[u'result_status'] = ResultStatus.ERROR - return None - - meta[u'result_status'] = ResultStatus.UPLOADED - encryption_metadata = EncryptionMetadata( - key=akey.metadata.get(AMZ_KEY), - iv=akey.metadata.get(AMZ_IV), - matdesc=akey.metadata.get(AMZ_MATDESC), - ) if akey.metadata.get(AMZ_KEY) else None - - return FileHeader( - digest=akey.metadata.get(SFC_DIGEST), - content_length=akey.content_length, - encryption_metadata=encryption_metadata - ) - - @staticmethod - def upload_file(data_file, meta, encryption_metadata, max_concurrency): - logger = getLogger(__name__) - try: - s3_metadata = { - HTTP_HEADER_CONTENT_TYPE: HTTP_HEADER_VALUE_OCTET_STREAM, - SFC_DIGEST: meta[SHA256_DIGEST], - } - if (encryption_metadata): - s3_metadata.update({ - AMZ_IV: encryption_metadata.iv, - AMZ_KEY: encryption_metadata.key, - AMZ_MATDESC: encryption_metadata.matdesc, - }) - s3location = SnowflakeS3Util.extract_bucket_name_and_path( - meta[u'stage_info'][u'location']) - s3path = s3location.s3path + meta[u'dst_file_name'].lstrip('/') - - akey = meta[u'client'].Object(s3location.bucket_name, s3path) - akey.upload_file( - data_file, - Callback=meta[u'put_callback']( - data_file, - os.path.getsize(data_file), - output_stream=meta[u'put_callback_output_stream'], - show_progress_bar=meta[u'show_progress_bar']) if - meta[u'put_callback'] else None, - ExtraArgs={ - u'Metadata': s3_metadata, - }, - Config=TransferConfig( - multipart_threshold=SnowflakeS3Util.DATA_SIZE_THRESHOLD, - max_concurrency=max_concurrency, - num_download_attempts=10, - ) - ) - - logger.debug(u'DONE putting a file') - meta[u'dst_file_size'] = meta[u'upload_size'] - meta[u'result_status'] = ResultStatus.UPLOADED - except botocore.exceptions.ClientError as err: - if err.response[u'Error'][u'Code'] == EXPIRED_TOKEN: - logger.debug(u"AWS Token expired. Renew and retry") - meta[u'result_status'] = ResultStatus.RENEW_TOKEN - return - logger.debug( - u"Failed to upload a file: %s, err: %s", - data_file, err, exc_info=True) - raise err - except S3UploadFailedError as err: - if EXPIRED_TOKEN in TO_UNICODE(err): - # Since AWS token expiration error can be encapsulated in - # S3UploadFailedError, the text match is required to - # identify the case. - logger.debug( - 'Failed to upload a file: %s, err: %s. 
Renewing ' - 'AWS Token and Retrying', - data_file, err) - meta[u'result_status'] = ResultStatus.RENEW_TOKEN - return - - meta[u'last_error'] = err - meta[u'result_status'] = ResultStatus.NEED_RETRY - except OpenSSL.SSL.SysCallError as err: - meta[u'last_error'] = err - if err.args[0] == ERRORNO_WSAECONNABORTED: - # connection was disconnected by S3 - # because of too many connections. retry with - # less concurrency to mitigate it - meta[ - u'result_status'] = ResultStatus.NEED_RETRY_WITH_LOWER_CONCURRENCY - else: - meta[u'result_status'] = ResultStatus.NEED_RETRY - - @staticmethod - def _native_download_file(meta, full_dst_file_name, max_concurrency): - logger = getLogger(__name__) - try: - akey = SnowflakeS3Util._get_s3_object(meta, meta[u'src_file_name']) - akey.download_file( - full_dst_file_name, - Callback=meta[u'get_callback']( - meta[u'src_file_name'], - meta[u'src_file_size'], - output_stream=meta[u'get_callback_output_stream'], - show_progress_bar=meta[u'show_progress_bar']) if - meta[u'get_callback'] else None, - Config=TransferConfig( - multipart_threshold=SnowflakeS3Util.DATA_SIZE_THRESHOLD, - max_concurrency=max_concurrency, - num_download_attempts=10, - ) - ) - meta[u'result_status'] = ResultStatus.DOWNLOADED - except botocore.exceptions.ClientError as err: - if err.response[u'Error'][u'Code'] == EXPIRED_TOKEN: - meta[u'result_status'] = ResultStatus.RENEW_TOKEN - else: - logger.debug( - u"Failed to download a file: %s, err: %s", - full_dst_file_name, err, exc_info=True) - raise err - except RetriesExceededError as err: - meta[u'result_status'] = ResultStatus.NEED_RETRY - meta[u'last_error'] = err - except OpenSSL.SSL.SysCallError as err: - meta[u'last_error'] = err - if err.args[0] == ERRORNO_WSAECONNABORTED: - # connection was disconnected by S3 - # because of too many connections. retry with - # less concurrency to mitigate it - - meta[ - u'result_status'] = ResultStatus.NEED_RETRY_WITH_LOWER_CONCURRENCY - else: - meta[u'result_status'] = ResultStatus.NEED_RETRY diff --git a/scripts/install.bat b/scripts/install.bat deleted file mode 100644 index ede83a8bf..000000000 --- a/scripts/install.bat +++ /dev/null @@ -1,14 +0,0 @@ -nuget install secure-file -ExcludeVersion -secure-file\tools\secure-file -decrypt parameters.appveyor.py.enc -secret %my_secret% -out parameters.py -copy parameters.py test - -"%PYTHON%/python.exe" -m venv env -call env\Scripts\activate -# https://github.com/pypa/pip/issues/6566 -python -m pip install --upgrade pip==18.1 -pip install pendulum -pip install pyarrow -pip install numpy -pip install pytest pytest-cov pytest-rerunfailures -pip install . 
-pip list --format=columns diff --git a/scripts/install.sh b/scripts/install.sh deleted file mode 100755 index 6f2476d75..000000000 --- a/scripts/install.sh +++ /dev/null @@ -1,42 +0,0 @@ -#!/bin/bash -e -# -# Install Snowflake Python Connector -# -set -o pipefail - -if [ "$TRAVIS_OS_NAME" == "osx" ]; then - brew update - brew install openssl readline sqlite3 xz zlib - brew outdated pyenv || brew upgrade pyenv - brew install pyenv-virtualenv - pyenv install ${PYTHON_VERSION} - export PYENV_VERSION=$PYTHON - export PATH="${HOME}/.pyenv/shims:${PATH}" - if [[ $PYTHON_VERSION == "2.7"* ]]; then - pip install -U virtualenv - python -m virtualenv venv - else - python3 -m venv venv - fi -else - sudo apt-get update - pip install -U virtualenv - python -m virtualenv venv -fi -if [[ -n "$SNOWFLAKE_AZURE" ]]; then - openssl aes-256-cbc -k "$super_azure_secret_password" -in parameters_az.py.enc -out test/parameters.py -d -else - openssl aes-256-cbc -k "$super_secret_password" -in parameters.py.enc -out test/parameters.py -d -fi - -source ./venv/bin/activate -pip install pandas -pip install numpy -pip install pendulum -pip install pyarrow -pip install pytest pytest-cov pytest-rerunfailures -if [[ "$TRAVIS_PYTHON_VERSION" == "2.7" ]] || [[ $PYTHON_VERSION == "2.7"* ]]; then - pip install mock -fi -pip install . -pip list --format=columns diff --git a/scripts/run_appveyor.bat b/scripts/run_appveyor.bat deleted file mode 100644 index 4a69709fe..000000000 --- a/scripts/run_appveyor.bat +++ /dev/null @@ -1,2 +0,0 @@ -call env\Scripts\activate -py.test -vvv --cov=snowflake.connector test diff --git a/scripts/run_travis.sh b/scripts/run_travis.sh deleted file mode 100755 index 23791f073..000000000 --- a/scripts/run_travis.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/bash -e -# -# Run Travis Tests -# -set -o pipefail -if [ "$TRAVIS_OS_NAME" == "osx" ]; then - TIMEOUT_CMD=("gtimeout" "-s" "SIGUSR1" "3600s") -else - TIMEOUT_CMD=("timeout" "-s" "SIGUSR1" "3600s") -fi -source ./venv/bin/activate -ret=0 -${TIMEOUT_CMD[@]} py.test -vvv --cov=snowflake.connector test || ret=$? - -# TIMEOUT or SUCCESS -[ $ret != 124 -a $ret != 0 ] && exit 1 || exit 0 diff --git a/secret_detector.py b/secret_detector.py deleted file mode 100644 index b315123d2..000000000 --- a/secret_detector.py +++ /dev/null @@ -1,63 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. -# -""" -Detects and Masks Secrets. 
Based on SecretDetector.java in the JDBC Driver -""" - -import re - - -class SecretDetector(object): - - AWS_KEY_PATTERN = re.compile(r"(aws_key_id|aws_secret_key|access_key_id|secret_access_key)\s*=\s*'([^']+)'", flags=re.IGNORECASE) - AWS_TOKEN_PATTERN = re.compile(r'(accessToken|tempToken|keySecret)"\s*:\s*"([a-z0-9/+]{32,}={0,2})"', flags=re.IGNORECASE) - SAS_TOKEN_PATTERN = re.compile(r'(sig|signature|AWSAccessKeyId|password|passcode)=(?P[a-z0-9%/+]{16,})', flags=re.IGNORECASE) - PRIVATE_KEY_PATTERN = re.compile(r'-----BEGIN PRIVATE KEY-----\\n([a-z0-9/+=\\n]{32,})\\n-----END PRIVATE KEY-----', flags=re.MULTILINE | re.IGNORECASE) - PRIVATE_KEY_DATA_PATTERN = re.compile(r'"privateKeyData": "([a-z0-9/+=\\n]{10,})"', flags=re.MULTILINE | re.IGNORECASE) - - @staticmethod - def mask_aws_keys(text): - return SecretDetector.AWS_KEY_PATTERN.sub(r"\1='**********'", text) - - @staticmethod - def mask_sas_tokens(text): - return SecretDetector.SAS_TOKEN_PATTERN.sub(r'\1=**********', text) - - @staticmethod - def mask_aws_tokens(text): - return SecretDetector.AWS_TOKEN_PATTERN.sub(r'\1":"XXXX"', text) - - @staticmethod - def mask_private_key(text): - return SecretDetector.PRIVATE_KEY_PATTERN.sub("-----BEGIN PRIVATE KEY-----\\\\nXXXX\\\\n-----END PRIVATE KEY-----", text) - - @staticmethod - def mask_private_key_data(text): - return SecretDetector.PRIVATE_KEY_DATA_PATTERN.sub('"privateKeyData": "XXXX"', text) - - @staticmethod - def mask_secrets(text): - """ - Masks any secrets. This is the method that should be used by outside classes - - :param text: a string which may contain a secret - :return: the masked string - """ - if text is None: - return None - - masked_text = SecretDetector.mask_private_key_data( - SecretDetector.mask_private_key( - SecretDetector.mask_aws_tokens( - SecretDetector.mask_sas_tokens( - SecretDetector.mask_aws_keys( - text - ) - ) - ) - ) - ) - return masked_text diff --git a/setup.cfg b/setup.cfg index 5660a4bfb..9c7690dba 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,5 +1,94 @@ -[bdist_wheel] -universal = 1 +[metadata] +name = snowflake-connector-python +description = Snowflake Connector for Python +long_description = file: DESCRIPTION.md +long_description_content_type = text/markdown +url = https://www.snowflake.com/ +author = Snowflake, Inc +author_email = ecosystem-team-dl@snowflake.com +license = Apache-2.0 +license_files = LICENSE.txt, NOTICE +classifiers = + Development Status :: 5 - Production/Stable + Environment :: Console + Environment :: Other Environment + Intended Audience :: Developers + Intended Audience :: Education + Intended Audience :: Information Technology + Intended Audience :: System Administrators + License :: OSI Approved :: Apache Software License + Operating System :: OS Independent + Programming Language :: Python :: 3 + Programming Language :: Python :: 3 :: Only + Programming Language :: Python :: 3.7 + Programming Language :: Python :: 3.8 + Programming Language :: Python :: 3.9 + Programming Language :: Python :: 3.10 + Programming Language :: SQL + Topic :: Database + Topic :: Scientific/Engineering :: Information Analysis + Topic :: Software Development + Topic :: Software Development :: Libraries + Topic :: Software Development :: Libraries :: Application Frameworks + Topic :: Software Development :: Libraries :: Python Modules +keywords = Snowflake db database cloud analytics warehouse +project_urls = + Documentation=https://docs.snowflake.com/en/user-guide/python-connector.html + 
Source=https://github.com/snowflakedb/snowflake-connector-python + Issues=https://github.com/snowflakedb/snowflake-connector-python/issues + Changelog=https://github.com/snowflakedb/snowflake-connector-python/blob/master/DESCRIPTION.md -[flake8] -ignore=E126,E127 +[options] +python_requires = >=3.7 +packages = find_namespace: +install_requires = + asn1crypto>0.24.0,<2.0.0 + certifi>=2017.4.17 + cffi>=1.9,<2.0.0 + charset-normalizer~=2.0.0 + cryptography>=3.1.0,<37.0.0 + idna>=2.5,<4 + oscrypto<2.0.0 + pyOpenSSL>=16.2.0,<23.0.0 + pycryptodomex!=3.5.0,>=3.2,<4.0.0 + pyjwt<3.0.0 + pytz + requests<3.0.0 + setuptools>34.0.0 +include_package_data = True +namespace_packages = snowflake +package_dir = + =src +zip_safe = False + +[options.packages.find] +where = src +exclude = snowflake.connector.cpp* +include = snowflake.* + +[options.entry_points] +console_scripts = + snowflake-dump-ocsp-response = snowflake.connector.tool.dump_ocsp_response:main + snowflake-dump-ocsp-response-cache = snowflake.connector.tool.dump_ocsp_response_cache:main + snowflake-dump-certs = snowflake.connector.tool.dump_certs:main + snowflake-export-certs = snowflake.connector.tool.export_certs:main + +[options.extras_require] +development = + Cython + coverage + more-itertools + numpy<1.23.0 + pendulum!=2.1.1 + pexpect + pytest<7.2.0 + pytest-cov + pytest-rerunfailures + pytest-timeout + pytest-xdist + pytzdata +pandas = + pandas>=1.0.0,<1.5.0 + pyarrow>=6.0.0,<6.1.0 +secure-local-storage = + keyring!=16.1.0,<24.0.0 diff --git a/setup.py b/setup.py index c6cbb0eed..8fb28c059 100644 --- a/setup.py +++ b/setup.py @@ -1,145 +1,175 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. +# Copyright (c) 2012-2019 Snowflake Computing Inc. All rights reserved. 
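The new setup.cfg advertises three optional extras (development, pandas, secure-local-storage). One way to double-check which extras an installed build exposes, using only the standard library; this assumes the package is installed and Python 3.8+ for importlib.metadata:

```
from importlib.metadata import metadata

md = metadata("snowflake-connector-python")
print(md.get_all("Provides-Extra"))
# expected to include: development, pandas, secure-local-storage
```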
# -from codecs import open -from os import path + import os import sys -from sys import platform +import warnings from shutil import copy -import glob -from setuptools import setup, Extension +from setuptools import Extension, setup -THIS_DIR = path.dirname(path.realpath(__file__)) +CONNECTOR_SRC_DIR = os.path.join("src", "snowflake", "connector") +VERSION = (1, 1, 1, None) # Default try: - from generated_version import VERSION -except: - from version import VERSION -version = '.'.join([str(v) for v in VERSION if v is not None]) + with open( + os.path.join(CONNECTOR_SRC_DIR, "generated_version.py"), encoding="utf-8" + ) as f: + exec(f.read()) +except Exception: + with open(os.path.join(CONNECTOR_SRC_DIR, "version.py"), encoding="utf-8") as f: + exec(f.read()) +version = ".".join([str(v) for v in VERSION if v is not None]) -with open(path.join(THIS_DIR, 'DESCRIPTION.rst'), encoding='utf-8') as f: - long_description = f.read() +# Parse command line flags +# This list defines the options definitions in a set +options_def = { + "--debug", +} -# Parse command line flags -options = {k: 'OFF' for k in ['--opt', '--debug']} -for flag in options.keys(): +# Options is the final parsed command line options +options = {e.lstrip("-"): False for e in options_def} + +for flag in options_def: if flag in sys.argv: - options[flag] = 'ON' + options[flag.lstrip("-")] = True sys.argv.remove(flag) extensions = None cmd_class = {} -isBuildExtEnabled = (os.getenv('ENABLE_EXT_MODULES', 'false')).lower() - -if isBuildExtEnabled == 'true': - from Cython.Distutils import build_ext - from Cython.Build import cythonize - import os +try: + import numpy import pyarrow + from Cython.Build import cythonize + from Cython.Distutils import build_ext + + _ABLE_TO_COMPILE_EXTENSIONS = True +except ImportError: + warnings.warn("Cannot compile native C code, because of a missing build dependency") + _ABLE_TO_COMPILE_EXTENSIONS = False + +if _ABLE_TO_COMPILE_EXTENSIONS: extensions = cythonize( [ - Extension(name='snowflake.connector.arrow_iterator', sources=['arrow_iterator.pyx']), - Extension(name='snowflake.connector.arrow_result', sources=['arrow_result.pyx']) + Extension( + name="snowflake.connector.arrow_iterator", + sources=[os.path.join(CONNECTOR_SRC_DIR, "arrow_iterator.pyx")], + ), ], - build_dir=os.path.join('build', 'cython')) + ) class MyBuildExt(build_ext): + # list of libraries that will be bundled with python connector, + # this list should be carefully examined when pyarrow lib is + # upgraded + arrow_libs_to_copy = { + "linux": ["libarrow.so.600", "libarrow_python.so.600"], + "darwin": ["libarrow.600.dylib", "libarrow_python.600.dylib"], + "win32": ["arrow.dll", "arrow_python.dll"], + } + + arrow_libs_to_link = { + "linux": ["libarrow.so.600", "libarrow_python.so.600"], + "darwin": ["libarrow.600.dylib", "libarrow_python.600.dylib"], + "win32": ["arrow.lib", "arrow_python.lib"], + } + def build_extension(self, ext): + if options["debug"]: + ext.extra_compile_args.append("-g") + ext.extra_link_args.append("-g") current_dir = os.getcwd() - if ext.name == 'snowflake.connector.arrow_iterator': - self._copy_arrow_lib() - - ext.sources += ['cpp/ArrowIterator/CArrowIterator.cpp', - 'cpp/ArrowIterator/CArrowChunkIterator.cpp', - 'cpp/ArrowIterator/CArrowTableIterator.cpp', - 'cpp/ArrowIterator/SnowflakeType.cpp', - 'cpp/ArrowIterator/BinaryConverter.cpp', - 'cpp/ArrowIterator/BooleanConverter.cpp', - 'cpp/ArrowIterator/DecimalConverter.cpp', - 'cpp/ArrowIterator/DateConverter.cpp', - 
'cpp/ArrowIterator/FloatConverter.cpp', - 'cpp/ArrowIterator/IntConverter.cpp', - 'cpp/ArrowIterator/StringConverter.cpp', - 'cpp/ArrowIterator/TimeConverter.cpp', - 'cpp/ArrowIterator/TimeStampConverter.cpp', - 'cpp/ArrowIterator/Python/Common.cpp', - 'cpp/ArrowIterator/Python/Helpers.cpp', - 'cpp/ArrowIterator/Util/time.cpp', - 'cpp/Logging/logging.cpp'] - ext.include_dirs.append('cpp/ArrowIterator/') - ext.include_dirs.append('cpp/Logging') - ext.include_dirs.append(pyarrow.get_include()) - - ext.extra_compile_args.append('-std=c++11') - - ext.library_dirs.append(os.path.join(current_dir, self.build_lib, 'snowflake', 'connector')) + if ext.name == "snowflake.connector.arrow_iterator": + if not os.environ.get("SF_NO_COPY_ARROW_LIB", False): + self._copy_arrow_lib() + CPP_SRC_DIR = os.path.join(CONNECTOR_SRC_DIR, "cpp") + ARROW_ITERATOR_SRC_DIR = os.path.join(CPP_SRC_DIR, "ArrowIterator") + LOGGING_SRC_DIR = os.path.join(CPP_SRC_DIR, "Logging") + + ext.sources += [ + os.path.join(ARROW_ITERATOR_SRC_DIR, "CArrowIterator.cpp"), + os.path.join(ARROW_ITERATOR_SRC_DIR, "CArrowChunkIterator.cpp"), + os.path.join(ARROW_ITERATOR_SRC_DIR, "CArrowTableIterator.cpp"), + os.path.join(ARROW_ITERATOR_SRC_DIR, "SnowflakeType.cpp"), + os.path.join(ARROW_ITERATOR_SRC_DIR, "BinaryConverter.cpp"), + os.path.join(ARROW_ITERATOR_SRC_DIR, "BooleanConverter.cpp"), + os.path.join(ARROW_ITERATOR_SRC_DIR, "DecimalConverter.cpp"), + os.path.join(ARROW_ITERATOR_SRC_DIR, "DateConverter.cpp"), + os.path.join(ARROW_ITERATOR_SRC_DIR, "FloatConverter.cpp"), + os.path.join(ARROW_ITERATOR_SRC_DIR, "IntConverter.cpp"), + os.path.join(ARROW_ITERATOR_SRC_DIR, "StringConverter.cpp"), + os.path.join(ARROW_ITERATOR_SRC_DIR, "TimeConverter.cpp"), + os.path.join(ARROW_ITERATOR_SRC_DIR, "TimeStampConverter.cpp"), + os.path.join(ARROW_ITERATOR_SRC_DIR, "Python", "Common.cpp"), + os.path.join(ARROW_ITERATOR_SRC_DIR, "Python", "Helpers.cpp"), + os.path.join(ARROW_ITERATOR_SRC_DIR, "Util", "time.cpp"), + LOGGING_SRC_DIR + "/logging.cpp", + ] + ext.include_dirs.append(ARROW_ITERATOR_SRC_DIR) + ext.include_dirs.append(LOGGING_SRC_DIR) + + if sys.platform == "win32": + ext.include_dirs.append(pyarrow.get_include()) + ext.include_dirs.append(numpy.get_include()) + elif sys.platform == "linux" or sys.platform == "darwin": + ext.extra_compile_args.append("-isystem" + pyarrow.get_include()) + ext.extra_compile_args.append("-isystem" + numpy.get_include()) + if "std=" not in os.environ.get("CXXFLAGS", ""): + ext.extra_compile_args.append("-std=c++11") + ext.extra_compile_args.append("-D_GLIBCXX_USE_CXX11_ABI=0") + + ext.library_dirs.append( + os.path.join(current_dir, self.build_lib, "snowflake", "connector") + ) ext.extra_link_args += self._get_arrow_lib_as_linker_input() - if self._is_unix(): - ext.extra_link_args += ['-Wl,-rpath,$ORIGIN'] + # sys.platform for linux used to return with version suffix, (i.e. linux2, linux3) + # After version 3.3, it will always be just 'linux' + # https://docs.python.org/3/library/sys.html#sys.platform + if sys.platform == "linux": + ext.extra_link_args += ["-Wl,-rpath,$ORIGIN"] + elif sys.platform == "darwin": + # rpath,$ORIGIN only work on linux, did not work on darwin. 
use @loader_path instead + # fyi, https://medium.com/@donblas/fun-with-rpath-otool-and-install-name-tool-e3e41ae86172 + ext.extra_link_args += ["-rpath", "@loader_path"] build_ext.build_extension(self, ext) - def _is_unix(self): - return platform.startswith('linux') or platform == 'darwin' - def _get_arrow_lib_dir(self): + if "SF_ARROW_LIBDIR" in os.environ: + return os.environ["SF_ARROW_LIBDIR"] return pyarrow.get_library_dirs()[0] def _copy_arrow_lib(self): - arrow_lib = self._get_libs_to_copy() + libs_to_bundle = self.arrow_libs_to_copy[sys.platform] - for lib in arrow_lib: - lib_pattern = self._get_pyarrow_lib_pattern(lib) - source = glob.glob(lib_pattern)[0] - copy(source, os.path.join(self.build_lib, 'snowflake', 'connector')) + for lib in libs_to_bundle: + source = f"{self._get_arrow_lib_dir()}/{lib}" + build_dir = os.path.join(self.build_lib, "snowflake", "connector") + copy(source, build_dir) def _get_arrow_lib_as_linker_input(self): - arrow_lib = pyarrow.get_libraries() - link_lib = [] - for lib in arrow_lib: - lib_pattern = self._get_pyarrow_lib_pattern(lib) - source = glob.glob(lib_pattern)[0] - link_lib.append(source) - - return link_lib - - def _get_libs_to_copy(self): - if self._is_unix(): - return pyarrow.get_libraries() + \ - ['arrow_flight', 'arrow_boost_regex', 'arrow_boost_system', 'arrow_boost_filesystem'] - elif platform == 'win32': - return pyarrow.get_libraries() + ['arrow_flight'] - else: - raise RuntimeError('Building on platform {} is not supported yet.'.format(platform)) - - def _get_pyarrow_lib_pattern(self, lib_name): - if platform.startswith('linux'): - return '{}/lib{}.so*'.format(self._get_arrow_lib_dir(), lib_name) - elif platform == 'darwin': - return '{}/lib{}*dylib'.format(self._get_arrow_lib_dir(), lib_name) - elif platform == 'win32': - return '{}\\{}.lib'.format(self._get_arrow_lib_dir(), lib_name) - else: - raise RuntimeError('Building on platform {} is not supported yet.'.format(platform)) - - cmd_class = { - "build_ext": MyBuildExt - } + link_lib = self.arrow_libs_to_link[sys.platform] + ret = [] + + for lib in link_lib: + source = f"{self._get_arrow_lib_dir()}/{lib}" + assert os.path.exists(source) + ret.append(source) + + return ret + + cmd_class = {"build_ext": MyBuildExt} setup( - name='snowflake-connector-python', version=version, - description=u"Snowflake Connector for Python", ext_modules=extensions, cmdclass=cmd_class, long_description=long_description, diff --git a/sfbinaryformat.py b/sfbinaryformat.py deleted file mode 100644 index 699be281c..000000000 --- a/sfbinaryformat.py +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. -# -from base64 import (b16encode, standard_b64encode, b16decode) - -from .errors import InternalError - -# Converts a Snowflake binary value into a "bytes" object. -binary_to_python = b16decode - - -def binary_to_snowflake(binary_value): - """ - Encodes a "bytes" object for passing to Snowflake. - """ - result = b16encode(binary_value) - - if isinstance(binary_value, bytearray): - return bytearray(result) - return result - - -class SnowflakeBinaryFormat(object): - """ - Formats binary values ("bytes" objects) in hex or base64. 
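As a point of reference for the module being deleted here, the HEX/BASE64 behavior of `SnowflakeBinaryFormat` is small enough to sketch standalone. This mirrors the removed code rather than any current connector API:

```
from base64 import b16decode, b16encode, standard_b64encode

# Decoding a Snowflake binary value is plain base16 (hex).
assert b16decode("DEADBEEF") == b"\xde\xad\xbe\xef"


def format_binary(binary_value: bytes, name: str = "HEX") -> str:
    # Same dispatch as the removed SnowflakeBinaryFormat class.
    encoders = {"HEX": b16encode, "BASE64": standard_b64encode}
    try:
        return encoders[name.upper()](binary_value).decode("ascii")
    except KeyError:
        raise ValueError(f"Unrecognized binary format {name}")


print(format_binary(b"\x01\x02"))            # 0102
print(format_binary(b"\x01\x02", "BASE64"))  # AQI=
```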
- """ - - def __init__(self, name): - name = name.upper() - if name == u'HEX': - self._encode = b16encode - elif name == u'BASE64': - self._encode = standard_b64encode - else: - raise InternalError( - u'Unrecognized binary format {}'.format(name)) - - def format(self, binary_value): - """ - Formats a "bytes" object, returning a string. - """ - return self._encode(binary_value).decode('ascii') diff --git a/src/snowflake/connector/__init__.py b/src/snowflake/connector/__init__.py new file mode 100644 index 000000000..a04774c43 --- /dev/null +++ b/src/snowflake/connector/__init__.py @@ -0,0 +1,93 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# +# Python Db API v2 +# +from __future__ import annotations + +apilevel = "2.0" +threadsafety = 2 +paramstyle = "pyformat" + +import logging +from logging import NullHandler + +from .connection import SnowflakeConnection +from .cursor import DictCursor +from .dbapi import ( + BINARY, + DATETIME, + NUMBER, + ROWID, + STRING, + Binary, + Date, + DateFromTicks, + Json, + Time, + TimeFromTicks, + Timestamp, + TimestampFromTicks, +) +from .errors import ( + DatabaseError, + DataError, + Error, + IntegrityError, + InterfaceError, + InternalError, + NotSupportedError, + OperationalError, + ProgrammingError, + _Warning, +) +from .version import VERSION + +logging.getLogger(__name__).addHandler(NullHandler()) + + +def Connect(**kwargs) -> SnowflakeConnection: + return SnowflakeConnection(**kwargs) + + +connect = Connect + +SNOWFLAKE_CONNECTOR_VERSION = ".".join(str(v) for v in VERSION[0:3]) +__version__ = SNOWFLAKE_CONNECTOR_VERSION + +__all__ = [ + "SnowflakeConnection", + # Error handling + "Error", + "_Warning", + "InterfaceError", + "DatabaseError", + "NotSupportedError", + "DataError", + "IntegrityError", + "ProgrammingError", + "OperationalError", + "InternalError", + # Extended cursor + "DictCursor", + # DBAPI PEP 249 required exports + "connect", + "apilevel", + "threadsafety", + "paramstyle", + "Date", + "Time", + "Timestamp", + "Binary", + "DateFromTicks", + "TimeFromTicks", + "TimestampFromTicks", + "STRING", + "BINARY", + "NUMBER", + "DATETIME", + "ROWID", + # Extended data type (experimental) + "Json", +] diff --git a/src/snowflake/connector/arrow_context.py b/src/snowflake/connector/arrow_context.py new file mode 100644 index 000000000..20895d253 --- /dev/null +++ b/src/snowflake/connector/arrow_context.py @@ -0,0 +1,140 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +import decimal +import time +from datetime import datetime, timedelta +from logging import getLogger + +import pytz + +from .constants import PARAMETER_TIMEZONE +from .converter import _generate_tzinfo_from_tzoffset + +try: + import numpy +except ImportError: + numpy = None + + +try: + import tzlocal +except ImportError: + tzlocal = None + +ZERO_EPOCH = datetime.utcfromtimestamp(0) + +logger = getLogger(__name__) + + +class ArrowConverterContext: + """Python helper functions for arrow conversions. + + Windows timestamp functions are necessary because Windows cannot handle -ve timestamps. + Putting the OS check into the non-windows function would probably take up more CPU cycles then + just deciding this at compile time. 
+ """ + + def __init__( + self, + session_parameters: dict[str, str | int | bool] | None = None, + ): + if session_parameters is None: + session_parameters = {} + self._timezone = ( + None + if PARAMETER_TIMEZONE not in session_parameters + else session_parameters[PARAMETER_TIMEZONE] + ) + + @property + def timezone(self): + return self._timezone + + @timezone.setter + def timezone(self, tz): + self._timezone = tz + + def _get_session_tz(self): + """Get the session timezone or use the local computer's timezone.""" + try: + tz = "UTC" if not self.timezone else self.timezone + return pytz.timezone(tz) + except pytz.exceptions.UnknownTimeZoneError: + logger.warning("converting to tzinfo failed") + if tzlocal is not None: + return tzlocal.get_localzone() + else: + try: + return datetime.timezone.utc + except AttributeError: + return pytz.timezone("UTC") + + def TIMESTAMP_TZ_to_python( + self, epoch: int, microseconds: int, tz: int + ) -> datetime: + tzinfo = _generate_tzinfo_from_tzoffset(tz - 1440) + return datetime.fromtimestamp(epoch, tz=tzinfo) + timedelta( + microseconds=microseconds + ) + + def TIMESTAMP_TZ_to_python_windows( + self, epoch: int, microseconds: int, tz: int + ) -> datetime: + tzinfo = _generate_tzinfo_from_tzoffset(tz - 1440) + t = ZERO_EPOCH + timedelta(seconds=epoch, microseconds=microseconds) + if pytz.utc != tzinfo: + t += tzinfo.utcoffset(t) + return t.replace(tzinfo=tzinfo) + + def TIMESTAMP_NTZ_to_python(self, epoch: int, microseconds: int) -> datetime: + return datetime.utcfromtimestamp(epoch) + timedelta(microseconds=microseconds) + + def TIMESTAMP_NTZ_to_python_windows( + self, epoch: int, microseconds: int + ) -> datetime: + return ZERO_EPOCH + timedelta(seconds=epoch, microseconds=microseconds) + + def TIMESTAMP_LTZ_to_python(self, epoch: int, microseconds: int) -> datetime: + tzinfo = self._get_session_tz() + return datetime.fromtimestamp(epoch, tz=tzinfo) + timedelta( + microseconds=microseconds + ) + + def TIMESTAMP_LTZ_to_python_windows( + self, epoch: int, microseconds: int + ) -> datetime: + try: + tzinfo = self._get_session_tz() + ts = ZERO_EPOCH + timedelta(seconds=epoch, microseconds=microseconds) + return pytz.utc.localize(ts, is_dst=False).astimezone(tzinfo) + except OverflowError: + logger.debug( + "OverflowError in converting from epoch time to " + "timestamp_ltz: %s(ms). Falling back to use struct_time." + ) + return time.localtime(microseconds) + + def REAL_to_numpy_float64(self, py_double): + return numpy.float64(py_double) + + def FIXED_to_numpy_int64(self, py_long): + return numpy.int64(py_long) + + def FIXED_to_numpy_float64(self, py_long, scale): + return numpy.float64(decimal.Decimal(py_long).scaleb(-scale)) + + def DATE_to_numpy_datetime64(self, py_days): + return numpy.datetime64(py_days, "D") + + def TIMESTAMP_NTZ_ONE_FIELD_to_numpy_datetime64(self, value, scale): + nanoseconds = int(decimal.Decimal(value).scaleb(9 - scale)) + return numpy.datetime64(nanoseconds, "ns") + + def TIMESTAMP_NTZ_TWO_FIELD_to_numpy_datetime64(self, epoch, fraction): + nanoseconds = int(decimal.Decimal(epoch).scaleb(9) + decimal.Decimal(fraction)) + return numpy.datetime64(nanoseconds, "ns") diff --git a/src/snowflake/connector/arrow_iterator.pyx b/src/snowflake/connector/arrow_iterator.pyx new file mode 100644 index 000000000..2a3aec5e4 --- /dev/null +++ b/src/snowflake/connector/arrow_iterator.pyx @@ -0,0 +1,214 @@ +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
+# + +# distutils: language = c++ +# cython: language_level=3 + +from cpython.ref cimport PyObject +from cython.operator cimport dereference +from libcpp.memory cimport shared_ptr +from libcpp.vector cimport vector +from pyarrow.includes.common cimport CResult, CStatus, GetResultValue +from pyarrow.includes.libarrow cimport ( + CBuffer, + CInputStream, + CIpcReadOptions, + CRecordBatch, + CRecordBatchReader, + CRecordBatchStreamReader, + FileInterface, + FileMode, + PyReadableFile, + Readable, + Seekable, +) + +from .constants import IterUnit +from .errorcode import ( + ER_FAILED_TO_CONVERT_ROW_TO_PYTHON_TYPE, + ER_FAILED_TO_READ_ARROW_STREAM, +) +from .errors import Error, InterfaceError, OperationalError +from .snow_logging import getSnowLogger + +snow_logger = getSnowLogger(__name__) + + +cdef extern from "cpp/ArrowIterator/CArrowIterator.hpp" namespace "sf": + cdef cppclass ReturnVal: + PyObject * successObj; + + PyObject * exception; + + cdef cppclass CArrowIterator: + shared_ptr[ReturnVal] next(); + + +cdef extern from "cpp/ArrowIterator/CArrowChunkIterator.hpp" namespace "sf": + cdef cppclass CArrowChunkIterator(CArrowIterator): + CArrowChunkIterator( + PyObject* context, + vector[shared_ptr[CRecordBatch]]* batches, + PyObject* use_numpy, + ) except + + + cdef cppclass DictCArrowChunkIterator(CArrowChunkIterator): + DictCArrowChunkIterator( + PyObject* context, + vector[shared_ptr[CRecordBatch]]* batches, + PyObject* use_numpy + ) except + + + +cdef extern from "cpp/ArrowIterator/CArrowTableIterator.hpp" namespace "sf": + cdef cppclass CArrowTableIterator(CArrowIterator): + CArrowTableIterator( + PyObject* context, + vector[shared_ptr[CRecordBatch]]* batches, + bint number_to_decimal, + ) except + + + +cdef class EmptyPyArrowIterator: + + def __iter__(self): + return self + + def __next__(self): + raise StopIteration + + def init(self, str iter_unit, bint number_to_decimal): + pass + + +cdef class PyArrowIterator(EmptyPyArrowIterator): + cdef object context + cdef CArrowIterator* cIterator + cdef str unit + cdef shared_ptr[ReturnVal] cret + cdef vector[shared_ptr[CRecordBatch]] batches + cdef object use_dict_result + cdef object cursor + + # this is the flag indicating whether fetch data as numpy datatypes or not. The flag + # is passed from the constructor of SnowflakeConnection class. Note, only FIXED, REAL + # and TIMESTAMP_NTZ will be converted into numpy data types, all other sql types will + # still be converted into native python types. 
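The numpy conversion mentioned in the comment above is driven by the `*_to_numpy_*` helpers on `ArrowConverterContext` shown earlier: FIXED and TIMESTAMP_NTZ values arrive as scaled integers and are rescaled with `decimal.Decimal.scaleb`. For instance:

```
import decimal

import numpy

# FIXED value 12345 with scale 2 represents 123.45.
print(numpy.float64(decimal.Decimal(12345).scaleb(-2)))  # 123.45

# TIMESTAMP_NTZ as a single scaled field: here milliseconds since the
# epoch, rescaled to the nanoseconds numpy.datetime64 expects.
value, scale = 1_600_000_000_123, 3
nanoseconds = int(decimal.Decimal(value).scaleb(9 - scale))
print(numpy.datetime64(nanoseconds, "ns"))  # 2020-09-13T12:26:40.123000000
```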
+ # https://docs.snowflake.com/en/user-guide/sqlalchemy.html#numpy-data-type-support + cdef object use_numpy + cdef object number_to_decimal + + def __cinit__( + self, + object cursor, + object py_inputstream, + object arrow_context, + object use_dict_result, + object numpy, + object number_to_decimal, + ): + cdef shared_ptr[CInputStream] input_stream + cdef shared_ptr[CRecordBatch] record_batch + cdef CStatus ret + input_stream.reset(new PyReadableFile(py_inputstream)) + cdef CResult[shared_ptr[CRecordBatchReader]] readerRet = CRecordBatchStreamReader.Open( + input_stream, + CIpcReadOptions.Defaults() + ) + if not readerRet.ok(): + Error.errorhandler_wrapper( + cursor.connection if cursor is not None else None, + cursor, + OperationalError, + { + 'msg': f'Failed to open arrow stream: {readerRet.status().message()}', + 'errno': ER_FAILED_TO_READ_ARROW_STREAM + }) + + cdef shared_ptr[CRecordBatchReader] reader = dereference(readerRet) + + while True: + ret = reader.get().ReadNext(&record_batch) + if not ret.ok(): + Error.errorhandler_wrapper( + cursor.connection if cursor is not None else None, + cursor, + OperationalError, + { + 'msg': f'Failed to read next arrow batch: {ret.message()}', + 'errno': ER_FAILED_TO_READ_ARROW_STREAM + } + ) + + if record_batch.get() is NULL: + break + + self.batches.push_back(record_batch) + + snow_logger.debug(msg=f"Batches read: {self.batches.size()}", path_name=__file__, func_name="__cinit__") + + self.context = arrow_context + self.cIterator = NULL + self.unit = '' + self.use_dict_result = use_dict_result + self.cursor = cursor + self.use_numpy = numpy + self.number_to_decimal = number_to_decimal + + def __dealloc__(self): + del self.cIterator + + def __iter__(self): + return self + + def __next__(self): + if self.cIterator is NULL: + self.init_row_unit() + self.cret = self.cIterator.next() + + if not self.cret.get().successObj: + Error.errorhandler_wrapper( + self.cursor.connection if self.cursor is not None else None, + self.cursor, + InterfaceError, + { + 'msg': f'Failed to convert current row, cause: {self.cret.get().exception}', + 'errno': ER_FAILED_TO_CONVERT_ROW_TO_PYTHON_TYPE + } + ) + # it looks like this line can help us get into python and detect the global variable immediately + # however, this log will not show up for unclear reason + ret = self.cret.get().successObj + + if ret is None: + raise StopIteration + else: + return ret + + def init(self, str iter_unit): + if iter_unit == IterUnit.ROW_UNIT.value: + self.init_row_unit() + elif iter_unit == IterUnit.TABLE_UNIT.value: + self.init_table_unit() + self.unit = iter_unit + + def init_row_unit(self) -> None: + self.cIterator = new CArrowChunkIterator( + self.context, + &self.batches, + self.use_numpy + ) \ + if not self.use_dict_result \ + else new DictCArrowChunkIterator( + self.context, + &self.batches, + self.use_numpy + ) + + def init_table_unit(self) -> None: + self.cIterator = new CArrowTableIterator( + self.context, + &self.batches, + self.number_to_decimal, + ) diff --git a/src/snowflake/connector/auth.py b/src/snowflake/connector/auth.py new file mode 100644 index 000000000..b91cf358a --- /dev/null +++ b/src/snowflake/connector/auth.py @@ -0,0 +1,732 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
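The `__cinit__` loop above is the C++ analogue of reading an Arrow IPC stream until `ReadNext` yields a null batch. The same round trip can be sketched with pyarrow's public Python API; the in-memory stream here is only a stand-in for a downloaded chunk so the example is self-contained:

```
import pyarrow as pa
import pyarrow.ipc as ipc

# Build a tiny IPC stream in memory to stand in for a result chunk.
batch = pa.RecordBatch.from_arrays([pa.array([1, 2, 3])], names=["n"])
sink = pa.BufferOutputStream()
with ipc.new_stream(sink, batch.schema) as writer:
    writer.write_batch(batch)

# Equivalent of the Cython loop: pull record batches until exhausted.
reader = ipc.open_stream(sink.getvalue())
batches = list(reader)

# Table-unit iteration (init_table_unit above) stitches every batch
# into a single Table instead of yielding row by row.
table = pa.Table.from_batches(batches)
print(table.num_rows)  # 3
```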
+# + +from __future__ import annotations + +import codecs +import copy +import json +import logging +import tempfile +import time +import uuid +from datetime import datetime +from os import getenv, makedirs, mkdir, path, remove, removedirs, rmdir +from os.path import expanduser +from threading import Lock, Thread + +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives.serialization import ( + Encoding, + NoEncryption, + PrivateFormat, + load_der_private_key, + load_pem_private_key, +) + +from .auth_keypair import AuthByKeyPair +from .auth_usrpwdmfa import AuthByUsrPwdMfa +from .compat import IS_LINUX, IS_MACOS, IS_WINDOWS, urlencode +from .constants import ( + HTTP_HEADER_ACCEPT, + HTTP_HEADER_CONTENT_TYPE, + HTTP_HEADER_SERVICE_NAME, + HTTP_HEADER_USER_AGENT, + PARAMETER_CLIENT_REQUEST_MFA_TOKEN, + PARAMETER_CLIENT_STORE_TEMPORARY_CREDENTIAL, +) +from .description import ( + COMPILER, + IMPLEMENTATION, + OPERATING_SYSTEM, + PLATFORM, + PYTHON_VERSION, +) +from .errorcode import ER_FAILED_TO_CONNECT_TO_DB +from .errors import ( + BadGatewayError, + DatabaseError, + Error, + ForbiddenError, + ProgrammingError, + ServiceUnavailableError, +) +from .network import ( + ACCEPT_TYPE_APPLICATION_SNOWFLAKE, + CONTENT_TYPE_APPLICATION_JSON, + ID_TOKEN_INVALID_LOGIN_REQUEST_GS_CODE, + KEY_PAIR_AUTHENTICATOR, + PYTHON_CONNECTOR_USER_AGENT, + ReauthenticationRequest, +) +from .options import installed_keyring, keyring +from .sqlstate import SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED +from .version import VERSION + +logger = logging.getLogger(__name__) + + +# Cache directory +CACHE_ROOT_DIR = ( + getenv("SF_TEMPORARY_CREDENTIAL_CACHE_DIR") + or expanduser("~") + or tempfile.gettempdir() +) +if IS_WINDOWS: + CACHE_DIR = path.join(CACHE_ROOT_DIR, "AppData", "Local", "Snowflake", "Caches") +elif IS_MACOS: + CACHE_DIR = path.join(CACHE_ROOT_DIR, "Library", "Caches", "Snowflake") +else: + CACHE_DIR = path.join(CACHE_ROOT_DIR, ".cache", "snowflake") + +if not path.exists(CACHE_DIR): + try: + makedirs(CACHE_DIR, mode=0o700) + except Exception as ex: + logger.debug("cannot create a cache directory: [%s], err=[%s]", CACHE_DIR, ex) + CACHE_DIR = None +logger.debug("cache directory: %s", CACHE_DIR) + +# temporary credential cache +TEMPORARY_CREDENTIAL = {} + +TEMPORARY_CREDENTIAL_LOCK = Lock() + +# temporary credential cache file name +TEMPORARY_CREDENTIAL_FILE = "temporary_credential.json" +TEMPORARY_CREDENTIAL_FILE = ( + path.join(CACHE_DIR, TEMPORARY_CREDENTIAL_FILE) if CACHE_DIR else "" +) + +# temporary credential cache lock directory name +TEMPORARY_CREDENTIAL_FILE_LOCK = TEMPORARY_CREDENTIAL_FILE + ".lck" + +# keyring +KEYRING_SERVICE_NAME = "net.snowflake.temporary_token" +KEYRING_USER = "temp_token" +KEYRING_DRIVER_NAME = "SNOWFLAKE-PYTHON-DRIVER" + +ID_TOKEN = "ID_TOKEN" +MFA_TOKEN = "MFATOKEN" + + +class Auth: + """Snowflake Authenticator.""" + + def __init__(self, rest): + self._rest = rest + + @staticmethod + def base_auth_data( + user, + account, + application, + internal_application_name, + internal_application_version, + ocsp_mode, + login_timeout, + network_timeout=None, + ): + return { + "data": { + "CLIENT_APP_ID": internal_application_name, + "CLIENT_APP_VERSION": internal_application_version, + "SVN_REVISION": VERSION[3], + "ACCOUNT_NAME": account, + "LOGIN_NAME": user, + "CLIENT_ENVIRONMENT": { + "APPLICATION": application, + "OS": OPERATING_SYSTEM, + "OS_VERSION": PLATFORM, + "PYTHON_VERSION": PYTHON_VERSION, + "PYTHON_RUNTIME": IMPLEMENTATION, + 
"PYTHON_COMPILER": COMPILER, + "OCSP_MODE": ocsp_mode.name, + "TRACING": logger.getEffectiveLevel(), + "LOGIN_TIMEOUT": login_timeout, + "NETWORK_TIMEOUT": network_timeout, + }, + }, + } + + def authenticate( + self, + auth_instance, + account, + user, + database=None, + schema=None, + warehouse=None, + role=None, + passcode=None, + passcode_in_password=False, + mfa_callback=None, + password_callback=None, + session_parameters=None, + timeout=120, + ) -> dict[str, str | int | bool]: + logger.debug("authenticate") + + if session_parameters is None: + session_parameters = {} + + request_id = str(uuid.uuid4()) + headers = { + HTTP_HEADER_CONTENT_TYPE: CONTENT_TYPE_APPLICATION_JSON, + HTTP_HEADER_ACCEPT: ACCEPT_TYPE_APPLICATION_SNOWFLAKE, + HTTP_HEADER_USER_AGENT: PYTHON_CONNECTOR_USER_AGENT, + } + if HTTP_HEADER_SERVICE_NAME in session_parameters: + headers[HTTP_HEADER_SERVICE_NAME] = session_parameters[ + HTTP_HEADER_SERVICE_NAME + ] + url = "/session/v1/login-request" + + body_template = Auth.base_auth_data( + user, + account, + self._rest._connection.application, + self._rest._connection._internal_application_name, + self._rest._connection._internal_application_version, + self._rest._connection._ocsp_mode(), + self._rest._connection._login_timeout, + self._rest._connection._network_timeout, + ) + + body = copy.deepcopy(body_template) + # updating request body + logger.debug("assertion content: %s", auth_instance.assertion_content) + auth_instance.update_body(body) + + logger.debug( + "account=%s, user=%s, database=%s, schema=%s, " + "warehouse=%s, role=%s, request_id=%s", + account, + user, + database, + schema, + warehouse, + role, + request_id, + ) + url_parameters = {"request_id": request_id} + if database is not None: + url_parameters["databaseName"] = database + if schema is not None: + url_parameters["schemaName"] = schema + if warehouse is not None: + url_parameters["warehouse"] = warehouse + if role is not None: + url_parameters["roleName"] = role + + url = url + "?" + urlencode(url_parameters) + + # first auth request + if passcode_in_password: + body["data"]["EXT_AUTHN_DUO_METHOD"] = "passcode" + elif passcode: + body["data"]["EXT_AUTHN_DUO_METHOD"] = "passcode" + body["data"]["PASSCODE"] = passcode + + if session_parameters: + body["data"]["SESSION_PARAMETERS"] = session_parameters + + logger.debug( + "body['data']: %s", + {k: v for (k, v) in body["data"].items() if k != "PASSWORD"}, + ) + + # accommodate any authenticator specific timeout requirements here. + # login_timeout comes from user configuration. + # Between login timeout and auth specific + # timeout use whichever value is smaller + if hasattr(auth_instance, "get_timeout"): + logger.debug( + f"Authenticator, {type(auth_instance).__name__}, implements get_timeout" + ) + auth_timeout = min( + self._rest._connection.login_timeout, auth_instance.get_timeout() + ) + else: + auth_timeout = self._rest._connection.login_timeout + logger.debug(f"Timeout set to {auth_timeout}") + + try: + ret = self._rest._post_request( + url, + headers, + json.dumps(body), + timeout=auth_timeout, + socket_timeout=auth_timeout, + ) + except ForbiddenError as err: + # HTTP 403 + raise err.__class__( + msg=( + "Failed to connect to DB. " + "Verify the account name is correct: {host}:{port}. 
" + "{message}" + ).format( + host=self._rest._host, port=self._rest._port, message=str(err) + ), + errno=ER_FAILED_TO_CONNECT_TO_DB, + sqlstate=SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED, + ) + except (ServiceUnavailableError, BadGatewayError) as err: + # HTTP 502/504 + raise err.__class__( + msg=( + "Failed to connect to DB. " + "Service is unavailable: {host}:{port}. " + "{message}" + ).format( + host=self._rest._host, port=self._rest._port, message=str(err) + ), + errno=ER_FAILED_TO_CONNECT_TO_DB, + sqlstate=SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED, + ) + + # waiting for MFA authentication + if ret["data"].get("nextAction") in ( + "EXT_AUTHN_DUO_ALL", + "EXT_AUTHN_DUO_PUSH_N_PASSCODE", + ): + body["inFlightCtx"] = ret["data"]["inFlightCtx"] + body["data"]["EXT_AUTHN_DUO_METHOD"] = "push" + self.ret = {"message": "Timeout", "data": {}} + + def post_request_wrapper(self, url, headers, body): + # get the MFA response + self.ret = self._rest._post_request( + url, headers, body, timeout=self._rest._connection.login_timeout + ) + + # send new request to wait until MFA is approved + t = Thread( + target=post_request_wrapper, args=[self, url, headers, json.dumps(body)] + ) + t.daemon = True + t.start() + if callable(mfa_callback): + c = mfa_callback() + while not self.ret or self.ret.get("message") == "Timeout": + next(c) + else: + t.join(timeout=timeout) + + ret = self.ret + if ret and ret["data"].get("nextAction") == "EXT_AUTHN_SUCCESS": + body = copy.deepcopy(body_template) + body["inFlightCtx"] = ret["data"]["inFlightCtx"] + # final request to get tokens + ret = self._rest._post_request( + url, + headers, + json.dumps(body), + timeout=self._rest._connection.login_timeout, + socket_timeout=self._rest._connection.login_timeout, + ) + elif not ret or not ret["data"].get("token"): + # not token is returned. + Error.errorhandler_wrapper( + self._rest._connection, + None, + DatabaseError, + { + "msg": ( + "Failed to connect to DB. MFA " + "authentication failed: {" + "host}:{port}. {message}" + ).format( + host=self._rest._host, + port=self._rest._port, + message=ret["message"], + ), + "errno": ER_FAILED_TO_CONNECT_TO_DB, + "sqlstate": SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED, + }, + ) + return session_parameters # required for unit test + + elif ret["data"].get("nextAction") == "PWD_CHANGE": + if callable(password_callback): + body = copy.deepcopy(body_template) + body["inFlightCtx"] = ret["data"]["inFlightCtx"] + body["data"]["LOGIN_NAME"] = user + body["data"]["PASSWORD"] = ( + auth_instance.password + if hasattr(auth_instance, "password") + else None + ) + body["data"]["CHOSEN_NEW_PASSWORD"] = password_callback() + # New Password input + ret = self._rest._post_request( + url, + headers, + json.dumps(body), + timeout=self._rest._connection.login_timeout, + socket_timeout=self._rest._connection.login_timeout, + ) + + logger.debug("completed authentication") + if not ret["success"]: + errno = ret.get("code", ER_FAILED_TO_CONNECT_TO_DB) + if errno == ID_TOKEN_INVALID_LOGIN_REQUEST_GS_CODE: + # clear stored id_token if failed to connect because of id_token + # raise an exception for reauth without id_token + self._rest.id_token = None + delete_temporary_credential(self._rest._host, user, ID_TOKEN) + raise ReauthenticationRequest( + ProgrammingError( + msg=ret["message"], + errno=int(errno), + sqlstate=SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED, + ) + ) + + if type(auth_instance) is AuthByKeyPair: + logger.debug( + "JWT Token authentication failed. " + "Token expires at: %s. 
" + "Current Time: %s", + str(auth_instance._jwt_token_exp), + str(datetime.utcnow()), + ) + if type(auth_instance) is AuthByUsrPwdMfa: + delete_temporary_credential(self._rest._host, user, MFA_TOKEN) + Error.errorhandler_wrapper( + self._rest._connection, + None, + DatabaseError, + { + "msg": ( + "Failed to connect to DB: {host}:{port}. " "{message}" + ).format( + host=self._rest._host, + port=self._rest._port, + message=ret["message"], + ), + "errno": ER_FAILED_TO_CONNECT_TO_DB, + "sqlstate": SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED, + }, + ) + else: + logger.debug( + "token = %s", "******" if ret["data"]["token"] is not None else "NULL" + ) + logger.debug( + "master_token = %s", + "******" if ret["data"]["masterToken"] is not None else "NULL", + ) + logger.debug( + "id_token = %s", + "******" if ret["data"].get("idToken") is not None else "NULL", + ) + logger.debug( + "mfa_token = %s", + "******" if ret["data"].get("mfaToken") is not None else "NULL", + ) + self._rest.update_tokens( + ret["data"]["token"], + ret["data"]["masterToken"], + master_validity_in_seconds=ret["data"].get("masterValidityInSeconds"), + id_token=ret["data"].get("idToken"), + mfa_token=ret["data"].get("mfaToken"), + ) + self.write_temporary_credentials( + self._rest._host, user, session_parameters, ret + ) + if "sessionId" in ret["data"]: + self._rest._connection._session_id = ret["data"]["sessionId"] + if "sessionInfo" in ret["data"]: + session_info = ret["data"]["sessionInfo"] + self._rest._connection._database = session_info.get("databaseName") + self._rest._connection._schema = session_info.get("schemaName") + self._rest._connection._warehouse = session_info.get("warehouseName") + self._rest._connection._role = session_info.get("roleName") + if "parameters" in ret["data"]: + session_parameters.update( + {p["name"]: p["value"] for p in ret["data"]["parameters"]} + ) + self._rest._connection._update_parameters(session_parameters) + return session_parameters + + def _read_temporary_credential(self, host, user, cred_type): + cred = None + if IS_MACOS or IS_WINDOWS: + if not installed_keyring: + logger.debug( + "Dependency 'keyring' is not installed, cannot cache id token. You might experience " + "multiple authentication pop ups while using ExternalBrowser Authenticator. 
To avoid " + "this please install keyring module using the following command : pip install " + "snowflake-connector-python[secure-local-storage]" + ) + return + try: + cred = keyring.get_password( + build_temporary_credential_name(host, user, cred_type), user.upper() + ) + except keyring.errors.KeyringError as ke: + logger.error( + "Could not retrieve {} from secure storage : {}".format( + cred_type, str(ke) + ) + ) + elif IS_LINUX: + read_temporary_credential_file() + cred = TEMPORARY_CREDENTIAL.get(host.upper(), {}).get( + build_temporary_credential_name(host, user, cred_type) + ) + else: + logger.debug("OS not supported for Local Secure Storage") + return cred + + def read_temporary_credentials(self, host, user, session_parameters): + if session_parameters.get(PARAMETER_CLIENT_STORE_TEMPORARY_CREDENTIAL, False): + self._rest.id_token = self._read_temporary_credential(host, user, ID_TOKEN) + + if session_parameters.get(PARAMETER_CLIENT_REQUEST_MFA_TOKEN, False): + self._rest.mfa_token = self._read_temporary_credential( + host, user, MFA_TOKEN + ) + + def _write_temporary_credential(self, host, user, cred_type, cred): + if not cred: + logger.debug( + "no credential is given when try to store temporary credential" + ) + return + if IS_MACOS or IS_WINDOWS: + if not installed_keyring: + logger.debug( + "Dependency 'keyring' is not installed, cannot cache id token. You might experience " + "multiple authentication pop ups while using ExternalBrowser Authenticator. To avoid " + "this please install keyring module using the following command : pip install " + "snowflake-connector-python[secure-local-storage]" + ) + return + try: + keyring.set_password( + build_temporary_credential_name(host, user, cred_type), + user.upper(), + cred, + ) + except keyring.errors.KeyringError as ke: + logger.error("Could not store id_token to keyring, %s", str(ke)) + elif IS_LINUX: + write_temporary_credential_file( + host, build_temporary_credential_name(host, user, cred_type), cred + ) + else: + logger.debug("OS not supported for Local Secure Storage") + + def write_temporary_credentials(self, host, user, session_parameters, response): + if self._rest._connection.consent_cache_id_token and session_parameters.get( + PARAMETER_CLIENT_STORE_TEMPORARY_CREDENTIAL, False + ): + self._write_temporary_credential( + host, user, ID_TOKEN, response["data"].get("idToken") + ) + + if session_parameters.get(PARAMETER_CLIENT_REQUEST_MFA_TOKEN, False): + self._write_temporary_credential( + host, user, MFA_TOKEN, response["data"].get("mfaToken") + ) + return + + +def flush_temporary_credentials(): + """Flush temporary credentials in memory into disk. Need to hold TEMPORARY_CREDENTIAL_LOCK.""" + global TEMPORARY_CREDENTIAL + global TEMPORARY_CREDENTIAL_FILE + for _ in range(10): + if lock_temporary_credential_file(): + break + time.sleep(1) + else: + logger.debug( + "The lock file still persists after the maximum wait time." 
+ "Will ignore it and write temporary credential file: %s", + TEMPORARY_CREDENTIAL_FILE, + ) + try: + with open( + TEMPORARY_CREDENTIAL_FILE, "w", encoding="utf-8", errors="ignore" + ) as f: + json.dump(TEMPORARY_CREDENTIAL, f) + except Exception as ex: + logger.debug( + "Failed to write a credential file: " "file=[%s], err=[%s]", + TEMPORARY_CREDENTIAL_FILE, + ex, + ) + finally: + unlock_temporary_credential_file() + + +def write_temporary_credential_file(host, cred_name, cred): + """Writes temporary credential file when OS is Linux.""" + if not CACHE_DIR: + # no cache is enabled + return + global TEMPORARY_CREDENTIAL + global TEMPORARY_CREDENTIAL_LOCK + with TEMPORARY_CREDENTIAL_LOCK: + # update the cache + host_data = TEMPORARY_CREDENTIAL.get(host.upper(), {}) + host_data[cred_name.upper()] = cred + TEMPORARY_CREDENTIAL[host.upper()] = host_data + flush_temporary_credentials() + + +def read_temporary_credential_file(): + """Reads temporary credential file when OS is Linux.""" + if not CACHE_DIR: + # no cache is enabled + return + + global TEMPORARY_CREDENTIAL + global TEMPORARY_CREDENTIAL_LOCK + global TEMPORARY_CREDENTIAL_FILE + with TEMPORARY_CREDENTIAL_LOCK: + for _ in range(10): + if lock_temporary_credential_file(): + break + time.sleep(1) + else: + logger.debug( + "The lock file still persists. Will ignore and " + "write the temporary credential file: %s", + TEMPORARY_CREDENTIAL_FILE, + ) + try: + with codecs.open( + TEMPORARY_CREDENTIAL_FILE, "r", encoding="utf-8", errors="ignore" + ) as f: + TEMPORARY_CREDENTIAL = json.load(f) + return TEMPORARY_CREDENTIAL + except Exception as ex: + logger.debug( + "Failed to read a credential file. The file may not" + "exists: file=[%s], err=[%s]", + TEMPORARY_CREDENTIAL_FILE, + ex, + ) + finally: + unlock_temporary_credential_file() + return None + + +def lock_temporary_credential_file(): + global TEMPORARY_CREDENTIAL_FILE_LOCK + try: + mkdir(TEMPORARY_CREDENTIAL_FILE_LOCK) + return True + except OSError: + logger.debug( + "Temporary cache file lock already exists. 
Other " + "process may be updating the temporary " + ) + return False + + +def unlock_temporary_credential_file(): + global TEMPORARY_CREDENTIAL_FILE_LOCK + try: + rmdir(TEMPORARY_CREDENTIAL_FILE_LOCK) + return True + except OSError: + logger.debug("Temporary cache file lock no longer exists.") + return False + + +def delete_temporary_credential(host, user, cred_type): + if (IS_MACOS or IS_WINDOWS) and installed_keyring: + try: + keyring.delete_password( + build_temporary_credential_name(host, user, cred_type), user.upper() + ) + except Exception as ex: + logger.error("Failed to delete credential in the keyring: err=[%s]", ex) + elif IS_LINUX: + temporary_credential_file_delete_password(host, user, cred_type) + + +def temporary_credential_file_delete_password(host, user, cred_type): + """Remove credential from temporary credential file when OS is Linux.""" + if not CACHE_DIR: + # no cache is enabled + return + global TEMPORARY_CREDENTIAL + global TEMPORARY_CREDENTIAL_LOCK + with TEMPORARY_CREDENTIAL_LOCK: + # update the cache + host_data = TEMPORARY_CREDENTIAL.get(host.upper(), {}) + host_data.pop(build_temporary_credential_name(host, user, cred_type), None) + if not host_data: + TEMPORARY_CREDENTIAL.pop(host.upper(), None) + else: + TEMPORARY_CREDENTIAL[host.upper()] = host_data + flush_temporary_credentials() + + +def delete_temporary_credential_file(): + """Deletes temporary credential file and its lock file.""" + global TEMPORARY_CREDENTIAL_FILE + try: + remove(TEMPORARY_CREDENTIAL_FILE) + except Exception as ex: + logger.debug( + "Failed to delete a credential file: " "file=[%s], err=[%s]", + TEMPORARY_CREDENTIAL_FILE, + ex, + ) + try: + removedirs(TEMPORARY_CREDENTIAL_FILE_LOCK) + except Exception as ex: + logger.debug("Failed to delete credential lock file: err=[%s]", ex) + + +def build_temporary_credential_name(host, user, cred_type): + return "{host}:{user}:{driver}:{cred}".format( + host=host.upper(), user=user.upper(), driver=KEYRING_DRIVER_NAME, cred=cred_type + ) + + +def get_token_from_private_key( + user: str, account: str, privatekey_path: str, key_password: str | None +) -> str: + encoded_password = key_password.encode() if key_password is not None else None + with open(privatekey_path, "rb") as key: + p_key = load_pem_private_key( + key.read(), password=encoded_password, backend=default_backend() + ) + + private_key = p_key.private_bytes( + encoding=Encoding.DER, + format=PrivateFormat.PKCS8, + encryption_algorithm=NoEncryption(), + ) + auth_instance = AuthByKeyPair(private_key, 1440 * 60) # token valid for 24 hours + return auth_instance.authenticate( + KEY_PAIR_AUTHENTICATOR, None, account, user, key_password + ) + + +def get_public_key_fingerprint(private_key_file: str, password: str) -> str: + """Helper function to generate the public key fingerprint from the private key file""" + with open(private_key_file, "rb") as key: + p_key = load_pem_private_key( + key.read(), password=password.encode(), backend=default_backend() + ) + private_key = p_key.private_bytes( + encoding=Encoding.DER, + format=PrivateFormat.PKCS8, + encryption_algorithm=NoEncryption(), + ) + private_key = load_der_private_key( + data=private_key, password=None, backend=default_backend() + ) + return AuthByKeyPair.calculate_public_key_fingerprint(private_key) diff --git a/src/snowflake/connector/auth_by_plugin.py b/src/snowflake/connector/auth_by_plugin.py new file mode 100644 index 000000000..9213ddafc --- /dev/null +++ b/src/snowflake/connector/auth_by_plugin.py @@ -0,0 +1,128 @@ +#!/usr/bin/env 
python
+#
+# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved.
+#
+
+from __future__ import annotations
+
+import logging
+import time
+from os import getenv
+
+from .errorcode import ER_FAILED_TO_CONNECT_TO_DB
+from .errors import DatabaseError, Error, OperationalError
+from .sqlstate import SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED
+from .time_util import DecorrelateJitterBackoff
+
+logger = logging.getLogger(__name__)
+
+DEFAULT_MAX_CON_RETRY_ATTEMPTS = 1
+
+
+class AuthRetryCtx:
+    def __init__(self) -> None:
+        self._current_retry_count = 0
+        self._max_retry_attempts = int(
+            getenv("MAX_CON_RETRY_ATTEMPTS", DEFAULT_MAX_CON_RETRY_ATTEMPTS)
+        )
+        self._backoff = DecorrelateJitterBackoff(1, 16)
+        self._current_sleep_time = 1
+
+    def get_current_retry_count(self) -> int:
+        return self._current_retry_count
+
+    def increment_retry(self) -> None:
+        self._current_retry_count += 1
+
+    def should_retry(self) -> bool:
+        """Decides whether to retry the connection.
+
+        The default max retry count is 1 because the Python requests
+        module already tries twice by default. Unlike JWT, where the
+        token must be refreshed every 10 seconds, general authenticators
+        wait up to 60 seconds per attempt before the connection times
+        out, totaling a 240 second wait time for a non-JWT authenticator,
+        which is more than enough. This can of course be changed using
+        the MAX_CON_RETRY_ATTEMPTS env variable.
+        """
+        return self._current_retry_count < self._max_retry_attempts
+
+    def next_sleep_duration(self) -> int:
+        self._current_sleep_time = self._backoff.next_sleep(
+            self._current_retry_count, self._current_sleep_time
+        )
+        logger.debug(f"Sleeping for {self._current_sleep_time} seconds")
+        return self._current_sleep_time
+
+    def reset(self):
+        self._current_retry_count = 0
+        self._current_sleep_time = 1
+
+
+class AuthByPlugin:
+    """External Authenticator interface."""
+
+    def __init__(self) -> None:
+        self._retry_ctx = AuthRetryCtx()
+
+    @property
+    def assertion_content(self):
+        raise NotImplementedError
+
+    def update_body(self, body):
+        raise NotImplementedError
+
+    def authenticate(self, authenticator, service_name, account, user, password):
+        raise NotImplementedError
+
+    def handle_failure(self, ret):
+        """Handles a failure when connecting to Snowflake."""
+        Error.errorhandler_wrapper(
+            self._rest._connection,
+            None,
+            DatabaseError,
+            {
+                "msg": ("Failed to connect to DB: {host}:{port}, " "{message}").format(
+                    host=self._rest._host,
+                    port=self._rest._port,
+                    message=ret["message"],
+                ),
+                "errno": int(ret.get("code", -1)),
+                "sqlstate": SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED,
+            },
+        )
+
+    def handle_timeout(
+        self,
+        authenticator: str,
+        service_name: str | None,
+        account: str,
+        user: str,
+        password: str,
+    ) -> None:
+        """Default timeout handler.
+
+        This will trigger if the authenticator hasn't implemented one.
+        By default we retry on timeouts and use jitter to compute the
+        time to sleep before retrying. The sleep time ranges between
+        1 and 16 seconds.
+        """
+
+        del authenticator, service_name, account, user, password
+        logger.debug("Default timeout handler invoked for authenticator")
+        if not self._retry_ctx.should_retry():
+            self._retry_ctx.reset()
+            raise OperationalError(
+                msg=f"Could not connect to Snowflake backend after {self._retry_ctx.get_current_retry_count()} attempt(s). "
+                "Aborting",
+                errno=ER_FAILED_TO_CONNECT_TO_DB,
+            )
+        else:
+            logger.debug(
+                f"Hit connection timeout, attempt number {self._retry_ctx.get_current_retry_count()}."
+                " Will retry in a bit..."
+            )
+            self._retry_ctx.increment_retry()
+            time.sleep(self._retry_ctx.next_sleep_duration())
diff --git a/src/snowflake/connector/auth_default.py b/src/snowflake/connector/auth_default.py
new file mode 100644
index 000000000..1df8057c7
--- /dev/null
+++ b/src/snowflake/connector/auth_default.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved.
+#
+
+from __future__ import annotations
+
+from .auth_by_plugin import AuthByPlugin
+
+
+class AuthByDefault(AuthByPlugin):
+    """Default username and password authenticator."""
+
+    @property
+    def assertion_content(self):
+        return "*********"
+
+    def __init__(self, password):
+        """Initializes an instance with a password."""
+        super().__init__()
+        self._password = password
+
+    def authenticate(self, authenticator, service_name, account, user, password):
+        """NOOP."""
+        pass
+
+    def update_body(self, body):
+        """Sets the password if available."""
+        if self._password:
+            body["data"]["PASSWORD"] = self._password
diff --git a/src/snowflake/connector/auth_idtoken.py b/src/snowflake/connector/auth_idtoken.py
new file mode 100644
index 000000000..ed3d8339c
--- /dev/null
+++ b/src/snowflake/connector/auth_idtoken.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved.
+#
+
+from __future__ import annotations
+
+from .auth_by_plugin import AuthByPlugin
+from .network import ID_TOKEN_AUTHENTICATOR
+
+
+class AuthByIdToken(AuthByPlugin):
+    """Internal id token based authentication.
+
+    Works by accepting an id_token and using it to authenticate. Only
+    used when users authenticate via EXTERNAL_BROWSER_AUTHENTICATOR.
+    """
+
+    @property
+    def assertion_content(self):
+        return self._id_token
+
+    def __init__(self, id_token):
+        """Initializes an instance with an id token."""
+        super().__init__()
+        self._id_token = id_token
+
+    def authenticate(self, authenticator, service_name, account, user, password):
+        """Nothing to do here."""
+        pass
+
+    def update_body(self, body):
+        """The id token flow needs the authenticator and token attributes set."""
+        body["data"]["AUTHENTICATOR"] = ID_TOKEN_AUTHENTICATOR
+        body["data"]["TOKEN"] = self._id_token
diff --git a/src/snowflake/connector/auth_keypair.py b/src/snowflake/connector/auth_keypair.py
new file mode 100644
index 000000000..3ff65fa04
--- /dev/null
+++ b/src/snowflake/connector/auth_keypair.py
@@ -0,0 +1,191 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved.
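`DecorrelateJitterBackoff`, which paces the retries in `AuthRetryCtx` above, lives in `time_util` and is not part of this diff. Assuming it follows the usual decorrelated-jitter formulation (`sleep = min(cap, random(base, previous_sleep * 3))`), the retry pacing behaves roughly like this sketch; the class here is a hypothetical stand-in, not the connector's implementation:

```
import random


class DecorrelateJitterBackoff:
    # Stand-in for snowflake.connector.time_util's helper; the real
    # implementation may differ in detail.
    def __init__(self, base: int, cap: int) -> None:
        self._base = base
        self._cap = cap

    def next_sleep(self, _cnt: int, sleep: int) -> int:
        # Grow from the previous sleep with jitter, clamped to the cap.
        return min(self._cap, random.randint(self._base, sleep * 3))


backoff = DecorrelateJitterBackoff(1, 16)
sleep = 1
for attempt in range(1, 5):
    sleep = backoff.next_sleep(attempt, sleep)
    print(f"attempt {attempt}: sleeping {sleep}s")
```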
+# + +from __future__ import annotations + +import base64 +import hashlib +import os +from datetime import datetime, timedelta +from logging import getLogger + +import jwt +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives.asymmetric.rsa import RSAPrivateKey +from cryptography.hazmat.primitives.serialization import ( + Encoding, + PublicFormat, + load_der_private_key, +) + +from .auth_by_plugin import AuthByPlugin +from .errorcode import ( + ER_CONNECTION_TIMEOUT, + ER_FAILED_TO_CONNECT_TO_DB, + ER_INVALID_PRIVATE_KEY, +) +from .errors import OperationalError, ProgrammingError +from .network import KEY_PAIR_AUTHENTICATOR + +logger = getLogger(__name__) + + +class AuthByKeyPair(AuthByPlugin): + """Key pair based authentication.""" + + ALGORITHM = "RS256" + ISSUER = "iss" + SUBJECT = "sub" + EXPIRE_TIME = "exp" + ISSUE_TIME = "iat" + LIFETIME = 60 + DEFAULT_JWT_RETRY_ATTEMPTS = 10 + DEFAULT_JWT_CNXN_WAIT_TIME = 10 + + def __init__(self, private_key, lifetime_in_seconds: int = LIFETIME): + """Inits AuthByKeyPair class with private key. + + Args: + private_key: a byte array of der formats of private key + lifetime_in_seconds: number of seconds the JWT token will be valid + """ + super().__init__() + self._private_key = private_key + self._jwt_token = "" + self._jwt_token_exp = 0 + self._lifetime = timedelta( + seconds=int(os.getenv("JWT_LIFETIME_IN_SECONDS", lifetime_in_seconds)) + ) + self._jwt_retry_attempts = int( + os.getenv( + "JWT_CNXN_RETRY_ATTEMPTS", AuthByKeyPair.DEFAULT_JWT_RETRY_ATTEMPTS + ) + ) + self._jwt_cnxn_wait_time = timedelta( + seconds=int( + os.getenv( + "JWT_CNXN_WAIT_TIME", AuthByKeyPair.DEFAULT_JWT_CNXN_WAIT_TIME + ) + ) + ) + self._current_retry_count = 0 + + def authenticate( + self, + authenticator: str, + service_name: str | None, + account: str, + user: str, + password: str | None, + ) -> str: + if ".global" in account: + account = account.partition("-")[0] + else: + account = account.partition(".")[0] + account = account.upper() + user = user.upper() + + now = datetime.utcnow() + + try: + private_key = load_der_private_key( + data=self._private_key, password=None, backend=default_backend() + ) + except Exception as e: + raise ProgrammingError( + msg="Failed to load private key: {}\nPlease provide a valid unencrypted rsa private " + "key in DER format as bytes object".format(str(e)), + errno=ER_INVALID_PRIVATE_KEY, + ) + + if not isinstance(private_key, RSAPrivateKey): + raise ProgrammingError( + msg="Private key type ({}) not supported.\nPlease provide a valid rsa private " + "key in DER format as bytes object".format( + private_key.__class__.__name__ + ), + errno=ER_INVALID_PRIVATE_KEY, + ) + + public_key_fp = self.calculate_public_key_fingerprint(private_key) + + self._jwt_token_exp = now + self._lifetime + payload = { + self.ISSUER: f"{account}.{user}.{public_key_fp}", + self.SUBJECT: f"{account}.{user}", + self.ISSUE_TIME: now, + self.EXPIRE_TIME: self._jwt_token_exp, + } + + _jwt_token = jwt.encode(payload, private_key, algorithm=self.ALGORITHM) + + # jwt.encode() returns bytes in pyjwt 1.x and a string + # in pyjwt 2.x + if isinstance(_jwt_token, bytes): + self._jwt_token = _jwt_token.decode("utf-8") + else: + self._jwt_token = _jwt_token + + return self._jwt_token + + @staticmethod + def calculate_public_key_fingerprint(private_key): + # get public key bytes + public_key_der = private_key.public_key().public_bytes( + Encoding.DER, PublicFormat.SubjectPublicKeyInfo + ) + + # take sha256 on raw bytes and then do 
base64 encode + sha256hash = hashlib.sha256() + sha256hash.update(public_key_der) + + public_key_fp = "SHA256:" + base64.b64encode(sha256hash.digest()).decode( + "utf-8" + ) + logger.debug("Public key fingerprint is %s", public_key_fp) + + return public_key_fp + + def update_body(self, body): + body["data"]["AUTHENTICATOR"] = KEY_PAIR_AUTHENTICATOR + body["data"]["TOKEN"] = self._jwt_token + + def assertion_content(self): + return self._jwt_token + + def should_retry(self, count: int) -> bool: + return count < self._jwt_retry_attempts + + def get_timeout(self) -> int: + return self._jwt_cnxn_wait_time.seconds + + def handle_timeout( + self, + authenticator: str, + service_name: str | None, + account: str, + user: str, + password: str | None, + ) -> None: + if self._retry_ctx.get_current_retry_count() > self._jwt_retry_attempts: + logger.debug("Exhausted max login attempts. Aborting connection") + self._retry_ctx.reset() + raise OperationalError( + msg=f"Could not connect to Snowflake backend after {self._retry_ctx.get_current_retry_count()} attempt(s)." + "Aborting", + errno=ER_FAILED_TO_CONNECT_TO_DB, + ) + else: + logger.debug( + f"Hit JWT timeout, attempt {self._retry_ctx.get_current_retry_count()}. Retrying..." + ) + self._retry_ctx.increment_retry() + + self.authenticate(authenticator, service_name, account, user, password) + + def can_handle_exception(self, op: OperationalError) -> bool: + if op.errno is ER_CONNECTION_TIMEOUT: + return True + return False diff --git a/src/snowflake/connector/auth_oauth.py b/src/snowflake/connector/auth_oauth.py new file mode 100644 index 000000000..73e85461c --- /dev/null +++ b/src/snowflake/connector/auth_oauth.py @@ -0,0 +1,38 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +from .auth_by_plugin import AuthByPlugin +from .network import OAUTH_AUTHENTICATOR + + +class AuthByOAuth(AuthByPlugin): + """OAuth Based Authentication. + + Works by accepting an OAuth token and using that to authenticate. + """ + + @property + def assertion_content(self): + """Returns the token.""" + return self._oauth_token + + def __init__(self, oauth_token): + """Initializes an instance with an OAuth Token.""" + super().__init__() + self._oauth_token = oauth_token + + def authenticate(self, authenticator, service_name, account, user, password): + """Nothing to do here, token should be obtained outside of the driver.""" + pass + + def update_body(self, body): + """Update some information required by OAuth. + + OAuth needs the authenticator and token attributes set, as well as loginname, which is set already in auth.py. + """ + body["data"]["AUTHENTICATOR"] = OAUTH_AUTHENTICATOR + body["data"]["TOKEN"] = self._oauth_token diff --git a/src/snowflake/connector/auth_okta.py b/src/snowflake/connector/auth_okta.py new file mode 100644 index 000000000..2e42705be --- /dev/null +++ b/src/snowflake/connector/auth_okta.py @@ -0,0 +1,270 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
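Taken together, the key-pair flow shown above boils down to two steps: fingerprint the public key (SHA256 over the DER-encoded SubjectPublicKeyInfo, base64-encoded), then sign a short-lived RS256 JWT whose issuer embeds that fingerprint. A self-contained sketch; the throwaway key and the MYACCOUNT/MYUSER identifiers are placeholders:

```
import base64
import hashlib
from datetime import datetime, timedelta

import jwt  # pyjwt
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives.serialization import Encoding, PublicFormat

private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048)

# Fingerprint: SHA256 of the DER-encoded public key, base64-encoded.
der = private_key.public_key().public_bytes(
    Encoding.DER, PublicFormat.SubjectPublicKeyInfo
)
fingerprint = "SHA256:" + base64.b64encode(hashlib.sha256(der).digest()).decode()

account, user = "MYACCOUNT", "MYUSER"  # placeholder identifiers
now = datetime.utcnow()
payload = {
    "iss": f"{account}.{user}.{fingerprint}",
    "sub": f"{account}.{user}",
    "iat": now,
    "exp": now + timedelta(seconds=60),
}
token = jwt.encode(payload, private_key, algorithm="RS256")
# pyjwt 1.x returns bytes, 2.x returns str; mirror the connector's handling.
print(token.decode() if isinstance(token, bytes) else token)
```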
+#
+
+from __future__ import annotations
+
+import json
+import logging
+
+from .auth import Auth
+from .auth_by_plugin import AuthByPlugin
+from .compat import unescape, urlencode, urlsplit
+from .constants import (
+    HTTP_HEADER_ACCEPT,
+    HTTP_HEADER_CONTENT_TYPE,
+    HTTP_HEADER_SERVICE_NAME,
+    HTTP_HEADER_USER_AGENT,
+)
+from .errorcode import ER_IDP_CONNECTION_ERROR, ER_INCORRECT_DESTINATION
+from .errors import DatabaseError, Error
+from .network import CONTENT_TYPE_APPLICATION_JSON, PYTHON_CONNECTOR_USER_AGENT
+from .sqlstate import SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED
+
+logger = logging.getLogger(__name__)
+
+
+def _is_prefix_equal(url1, url2):
+    """Checks if URL prefixes are identical.
+
+    The scheme, hostname and port number are compared. If the port number
+    is not specified and the scheme is https, the port number is assumed
+    to be 443.
+    """
+    parsed_url1 = urlsplit(url1)
+    parsed_url2 = urlsplit(url2)
+
+    port1 = parsed_url1.port
+    if not port1 and parsed_url1.scheme == "https":
+        port1 = "443"
+    port2 = parsed_url2.port
+    if not port2 and parsed_url2.scheme == "https":
+        port2 = "443"
+
+    return (
+        parsed_url1.hostname == parsed_url2.hostname
+        and port1 == port2
+        and parsed_url1.scheme == parsed_url2.scheme
+    )
+
+
+def _get_post_back_url_from_html(html):
+    """Gets the post back URL.
+
+    Since the HTML is not well formed, minidom cannot be used to convert to
+    DOM. The first discovered form is assumed to be the form to post back
+    and the URL is taken from action attributes.
+    """
+    logger.debug(html)
+
+ idx = html.find(" 1 else ( - 443 if self._protocol == u'https' else 80)
+        port_got = (
+            netloc[1] if len(netloc) > 1 else (443 if self._protocol == "https" else 80)
+        )
-        return ret.scheme == self._protocol \
-            and host_got == self._host and port_got == self._port
+        return (
+            ret.scheme == self._protocol
+            and host_got == self._host
+            and port_got == self._port
+        )

     def _process_receive_saml_token(self, data, socket_client):
         if not self._process_get(data) and not self._process_post(data):
@@ -200,24 +228,25 @@ def _process_receive_saml_token(self, data, socket_client):
             "Content-Type: text/html",
         ]
         if self._origin:
-            data = {'consent': self._consent_cache_id_token}
+            data = {"consent": self._consent_cache_id_token}
             msg = json.dumps(data)
-            content.append("Access-Control-Allow-Origin: {0}".format(
-                self._origin))
+            content.append(f"Access-Control-Allow-Origin: {self._origin}")
             content.append("Vary: Accept-Encoding, Origin")
         else:
             msg = """
SAML Response for Snowflake
-Your identity was confirmed and propagated to Snowflake {0}.
+Your identity was confirmed and propagated to Snowflake {}.
You can close this window now and go back where you started from.
-""".format(self._application) - content.append("Content-Length: {0}".format(len(msg))) +""".format( + self._application + ) + content.append(f"Content-Length: {len(msg)}") content.append("") content.append(msg) - socket_client.sendall('\r\n'.join(content).encode('utf-8')) + socket_client.sendall("\r\n".join(content).encode("utf-8")) def _check_post_requested(self, data): request_line = None @@ -231,12 +260,26 @@ def _check_post_requested(self, data): elif line.startswith("Origin:"): origin_line = line - if not request_line or not header_line or not origin_line \ - or request_line.split(':')[1].strip() != 'POST': + if ( + not request_line + or not header_line + or not origin_line + or request_line.split(":")[1].strip() != "POST" + ): return None, None - return (header_line.split(':')[1].strip(), - ':'.join(origin_line.split(':')[1:]).strip()) + return ( + header_line.split(":")[1].strip(), + ":".join(origin_line.split(":")[1:]).strip(), + ) + + def _process_get_url(self, url: str) -> None: + parsed = parse_qs(urlparse(url).query) + if "token" not in parsed: + return + if not parsed["token"][0]: + return + self._token = parsed["token"][0] def _process_get(self, data): for line in data: @@ -248,7 +291,7 @@ def _process_get(self, data): self._get_user_agent(data) _, url, _ = target_line.split() - self._token = parse_qs(urlparse(url).query)['token'][0] + self._process_get_url(url) return True def _process_post(self, data): @@ -256,37 +299,36 @@ def _process_post(self, data): if line.startswith("POST "): break else: - self.handle_failure({ - u'code': ER_IDP_CONNECTION_ERROR, - u'message': u"Invalid HTTP request from web browser. Idp " - u"authentication could have failed." - }) + self.handle_failure( + { + "code": ER_IDP_CONNECTION_ERROR, + "message": "Invalid HTTP request from web browser. Idp " + "authentication could have failed.", + } + ) return False self._get_user_agent(data) try: # parse the response as JSON payload = json.loads(data[-1]) - self._token = payload.get('token') - self._consent_cache_id_token = payload.get('consent', True) + self._token = payload.get("token") + self._consent_cache_id_token = payload.get("consent", True) except Exception: # key=value form. 
- self._token = parse_qs(data[-1])['token'][0] + self._token = parse_qs(data[-1])["token"][0] return True def _get_user_agent(self, data): for line in data: - if line.lower().startswith('user-agent'): + if line.lower().startswith("user-agent"): logger.debug(line) break else: logger.debug("No User-Agent") - def _get_sso_url( - self, authenticator, service_name, account, callback_port, user): - """ - Gets SSO URL from Snowflake - """ + def _get_sso_url(self, authenticator, service_name, account, callback_port, user): + """Gets SSO URL from Snowflake.""" headers = { HTTP_HEADER_CONTENT_TYPE: CONTENT_TYPE_APPLICATION_JSON, HTTP_HEADER_ACCEPT: CONTENT_TYPE_APPLICATION_JSON, @@ -295,27 +337,33 @@ def _get_sso_url( if service_name: headers[HTTP_HEADER_SERVICE_NAME] = service_name - url = u"/session/authenticator-request" + url = "/session/authenticator-request" body = Auth.base_auth_data( - user, account, + user, + account, self._rest._connection.application, self._rest._connection._internal_application_name, self._rest._connection._internal_application_version, - self._rest._connection._ocsp_mode()) - - body[u'data'][u'AUTHENTICATOR'] = authenticator - body[u'data'][u"BROWSER_MODE_REDIRECT_PORT"] = str(callback_port) - logger.debug(u'account=%s, authenticator=%s, user=%s', - account, authenticator, user) + self._rest._connection._ocsp_mode(), + self._rest._connection._login_timeout, + self._rest._connection._network_timeout, + ) + + body["data"]["AUTHENTICATOR"] = authenticator + body["data"]["BROWSER_MODE_REDIRECT_PORT"] = str(callback_port) + logger.debug( + "account=%s, authenticator=%s, user=%s", account, authenticator, user + ) ret = self._rest._post_request( url, headers, json.dumps(body), timeout=self._rest._connection.login_timeout, - socket_timeout=self._rest._connection.login_timeout) - if not ret[u'success']: + socket_timeout=self._rest._connection.login_timeout, + ) + if not ret["success"]: self.handle_failure(ret) - data = ret[u'data'] - sso_url = data[u'ssoUrl'] - self._proof_key = data[u'proofKey'] + data = ret["data"] + sso_url = data["ssoUrl"] + self._proof_key = data["proofKey"] return sso_url diff --git a/src/snowflake/connector/azure_storage_client.py b/src/snowflake/connector/azure_storage_client.py new file mode 100644 index 000000000..8985e1206 --- /dev/null +++ b/src/snowflake/connector/azure_storage_client.py @@ -0,0 +1,251 @@ +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
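Condensed, the browser callback handling above accepts the SAML token three ways: as a `token` query parameter on the GET redirect, as a JSON POST body, or as a form-encoded POST body. A minimal sketch of that precedence using only the standard library:

```
from __future__ import annotations

import json
from urllib.parse import parse_qs, urlparse


def token_from_get(url: str) -> str | None:
    # GET /?token=... -- missing or empty values are ignored.
    params = parse_qs(urlparse(url).query)
    token = params.get("token", [""])[0]
    return token or None


def token_from_post(last_line: str) -> str | None:
    try:
        # JSON payload, e.g. {"token": "...", "consent": true}
        return json.loads(last_line).get("token")
    except json.JSONDecodeError:
        # Fall back to the key=value form.
        return parse_qs(last_line)["token"][0]


print(token_from_get("http://localhost:8001/?token=abc"))  # abc
print(token_from_post('{"token": "xyz"}'))                 # xyz
print(token_from_post("token=formtok&consent=true"))       # formtok
```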
+#
+
+from __future__ import annotations
+
+import json
+import os
+import xml.etree.ElementTree as ET
+from datetime import datetime
+from logging import getLogger
+from random import choice
+from string import hexdigits
+from typing import TYPE_CHECKING, Any, NamedTuple
+
+from .compat import quote
+from .constants import FileHeader, ResultStatus
+from .encryption_util import EncryptionMetadata
+from .storage_client import SnowflakeStorageClient
+from .vendored import requests
+
+if TYPE_CHECKING:  # pragma: no cover
+    from .file_transfer_agent import SnowflakeFileMeta, StorageCredential
+
+logger = getLogger(__name__)
+
+
+class AzureLocation(NamedTuple):
+    container_name: str
+    path: str
+
+
+TOKEN_EXPIRATION_ERR_MESSAGE = (
+    "Signature not valid in the specified time frame",
+    "Server failed to authenticate the request.",
+)
+SFCDIGEST = "x-ms-meta-sfcdigest"
+ENCRYPTION_DATA = "x-ms-meta-encryptiondata"
+MATDESC = "x-ms-meta-matdesc"
+
+
+class SnowflakeAzureRestClient(SnowflakeStorageClient):
+    def __init__(
+        self,
+        meta: SnowflakeFileMeta,
+        credentials: StorageCredential | None,
+        chunk_size: int,
+        stage_info: dict[str, Any],
+        use_s3_regional_url: bool = False,
+    ) -> None:
+        super().__init__(meta, stage_info, chunk_size, credentials=credentials)
+        end_point: str = stage_info["endPoint"]
+        if end_point.startswith("blob."):
+            end_point = end_point[len("blob.") :]
+        self.endpoint = end_point
+        self.storage_account: str = stage_info["storageAccount"]
+        self.azure_location = self.extract_container_name_and_path(
+            stage_info["location"]
+        )
+        self.block_ids: list[str] = []
+
+    @staticmethod
+    def extract_container_name_and_path(stage_location: str) -> AzureLocation:
+        stage_location = os.path.expanduser(stage_location)
+        container_name = stage_location
+        path = ""
+
+        # split stage location as container name and path
+        if "/" in stage_location:
+            container_name, _, path = stage_location.partition("/")
+            if path and not path.endswith("/"):
+                path += "/"
+
+        return AzureLocation(container_name=container_name, path=path)
+
+    def _has_expired_token(self, response: requests.Response) -> bool:
+        return response.status_code == 403 and any(
+            message in response.reason for message in TOKEN_EXPIRATION_ERR_MESSAGE
+        )
+
+    def _send_request_with_authentication_and_retry(
+        self,
+        verb: str,
+        url: str,
+        retry_id: int | str,
+        headers: dict[str, Any] | None = None,
+        data: bytes | None = None,
+    ) -> requests.Response:
+        if not headers:
+            headers = {}
+
+        def generate_authenticated_url_and_rest_args() -> tuple[bytes, dict[str, Any]]:
+            curtime = datetime.utcnow()
+            # Date header value; strftime takes %-style format directives
+            timestamp = curtime.strftime("%Y-%m-%d")
+            sas_token = self.credentials.creds["AZURE_SAS_TOKEN"]
+            if sas_token and sas_token.startswith("?"):
+                sas_token = sas_token[1:]
+            if "?" in url:
+                _url = url + "&" + sas_token
+            else:
+                _url = url + "?"
+ sas_token + headers["Date"] = timestamp + rest_args = {"headers": headers} + if data: + rest_args["data"] = data + return _url, rest_args + + return self._send_request_with_retry( + verb, generate_authenticated_url_and_rest_args, retry_id + ) + + def get_file_header(self, filename: str) -> FileHeader | None: + """Gets Azure file properties.""" + container_name = quote(self.azure_location.container_name) + path = quote(self.azure_location.path) + quote(filename) + meta = self.meta + # HTTP HEAD request + url = f"https://{self.storage_account}.blob.{self.endpoint}/{container_name}/{path}" + retry_id = "HEAD" + self.retry_count[retry_id] = 0 + r = self._send_request_with_authentication_and_retry("HEAD", url, retry_id) + if r.status_code == 200: + meta.result_status = ResultStatus.UPLOADED + encryption_data = json.loads(r.headers.get(ENCRYPTION_DATA)) + encryption_metadata = ( + None + if not encryption_data + else EncryptionMetadata( + key=encryption_data["WrappedContentKey"]["EncryptedKey"], + iv=encryption_data["ContentEncryptionIV"], + matdesc=r.headers.get(MATDESC), + ) + ) + return FileHeader( + digest=r.headers.get("x-ms-meta-sfcdigest"), + content_length=int(r.headers.get("Content-Length")), + encryption_metadata=encryption_metadata, + ) + elif r.status_code == 404: + meta.result_status = ResultStatus.NOT_FOUND_FILE + return FileHeader( + digest=None, content_length=None, encryption_metadata=None + ) + else: + r.raise_for_status() + + def _prepare_file_metadata(self) -> dict[str, str | None]: + azure_metadata = { + SFCDIGEST: self.meta.sha256_digest, + } + encryption_metadata = self.encryption_metadata + if encryption_metadata: + azure_metadata.update( + { + ENCRYPTION_DATA: json.dumps( + { + "EncryptionMode": "FullBlob", + "WrappedContentKey": { + "KeyId": "symmKey1", + "EncryptedKey": encryption_metadata.key, + "Algorithm": "AES_CBC_256", + }, + "EncryptionAgent": { + "Protocol": "1.0", + "EncryptionAlgorithm": "AES_CBC_128", + }, + "ContentEncryptionIV": encryption_metadata.iv, + "KeyWrappingMetadata": {"EncryptionLibrary": "Java 5.3.0"}, + } + ), + MATDESC: encryption_metadata.matdesc, + } + ) + return azure_metadata + + def _initiate_multipart_upload(self) -> None: + self.block_ids = [ + "".join(choice(hexdigits) for _ in range(20)) + for _ in range(self.num_of_chunks) + ] + + def _upload_chunk(self, chunk_id: int, chunk: bytes) -> None: + container_name = quote(self.azure_location.container_name) + path = quote(self.azure_location.path + self.meta.dst_file_name.lstrip("/")) + + if self.num_of_chunks > 1: + block_id = self.block_ids[chunk_id] + url = ( + f"https://{self.storage_account}.blob.{self.endpoint}/{container_name}/{path}?comp=block" + f"&blockid={block_id}" + ) + headers = {"Content-Length": str(len(chunk))} + r = self._send_request_with_authentication_and_retry( + "PUT", url, chunk_id, headers=headers, data=chunk + ) + else: + # single request + azure_metadata = self._prepare_file_metadata() + url = f"https://{self.storage_account}.blob.{self.endpoint}/{container_name}/{path}" + headers = { + "x-ms-blob-type": "BlockBlob", + "Content-Encoding": "utf-8", + } + headers.update(azure_metadata) + r = self._send_request_with_authentication_and_retry( + "PUT", url, chunk_id, headers=headers, data=chunk + ) + r.raise_for_status() # expect status code 201 + + def _complete_multipart_upload(self) -> None: + container_name = quote(self.azure_location.container_name) + path = quote(self.azure_location.path + self.meta.dst_file_name.lstrip("/")) + url = ( + 
f"https://{self.storage_account}.blob.{self.endpoint}/{container_name}/{path}?comp" + f"=blocklist" + ) + root = ET.Element("BlockList") + for block_id in self.block_ids: + part = ET.Element("Latest") + part.text = block_id + root.append(part) + headers = {"x-ms-blob-content-encoding": "utf-8"} + azure_metadata = self._prepare_file_metadata() + headers.update(azure_metadata) + retry_id = "COMPLETE" + self.retry_count[retry_id] = 0 + r = self._send_request_with_authentication_and_retry( + "PUT", url, "COMPLETE", headers=headers, data=ET.tostring(root) + ) + r.raise_for_status() # expects status code 201 + + def download_chunk(self, chunk_id: int) -> None: + container_name = quote(self.azure_location.container_name) + path = quote(self.azure_location.path + self.meta.src_file_name.lstrip("/")) + url = f"https://{self.storage_account}.blob.{self.endpoint}/{container_name}/{path}" + if self.num_of_chunks > 1: + chunk_size = self.chunk_size + if chunk_id < self.num_of_chunks - 1: + _range = f"{chunk_id * chunk_size}-{(chunk_id+1)*chunk_size-1}" + else: + _range = f"{chunk_id * chunk_size}-" + headers = {"Range": f"bytes={_range}"} + r = self._send_request_with_authentication_and_retry( + "GET", url, chunk_id, headers=headers + ) # expect 206 + else: + # single request + r = self._send_request_with_authentication_and_retry("GET", url, chunk_id) + if r.status_code in (200, 206): + self.write_downloaded_chunk(chunk_id, r.content) + r.raise_for_status() diff --git a/src/snowflake/connector/bind_upload_agent.py b/src/snowflake/connector/bind_upload_agent.py new file mode 100644 index 000000000..2ccc2f17b --- /dev/null +++ b/src/snowflake/connector/bind_upload_agent.py @@ -0,0 +1,76 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +import uuid +from io import BytesIO +from logging import getLogger +from typing import TYPE_CHECKING + +from .errors import BindUploadError, Error + +if TYPE_CHECKING: # pragma: no cover + from .cursor import SnowflakeCursor + +logger = getLogger(__name__) + + +class BindUploadAgent: + _STAGE_NAME = "SYSTEMBIND" + _CREATE_STAGE_STMT = ( + f"create or replace temporary stage {_STAGE_NAME} " + "file_format=(type=csv field_optionally_enclosed_by='\"')" + ) + + def __init__( + self, + cursor: SnowflakeCursor, + rows: list[bytes], + stream_buffer_size: int = 1024 * 1024 * 10, + ): + """Construct an agent that uploads binding parameters as CSV files to a temporary stage. + + Args: + cursor: The cursor object. + rows: Rows of binding parameters in CSV format. + stream_buffer_size: Size of each file, default to 10MB. 
+ """ + self.cursor = cursor + self.rows = rows + self._stream_buffer_size = stream_buffer_size + self.stage_path = f"@{self._STAGE_NAME}/{uuid.uuid4().hex}" + + def _create_stage(self): + self.cursor.execute(self._CREATE_STAGE_STMT) + + def upload(self): + try: + self._create_stage() + except Error as err: + self.cursor.connection._session_parameters[ + "CLIENT_STAGE_ARRAY_BINDING_THRESHOLD" + ] = 0 + logger.debug("Failed to create stage for binding.") + raise BindUploadError from err + + row_idx = 0 + while row_idx < len(self.rows): + f = BytesIO() + size = 0 + while True: + f.write(self.rows[row_idx]) + size += len(self.rows[row_idx]) + row_idx += 1 + if row_idx >= len(self.rows) or size >= self._stream_buffer_size: + break + try: + self.cursor.execute( + f"PUT file://{row_idx}.csv {self.stage_path}", file_stream=f + ) + except Error as err: + logger.debug("Failed to upload the bindings file to stage.") + raise BindUploadError from err + f.close() diff --git a/src/snowflake/connector/compat.py b/src/snowflake/connector/compat.py new file mode 100644 index 000000000..252f116cb --- /dev/null +++ b/src/snowflake/connector/compat.py @@ -0,0 +1,129 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +import collections.abc +import decimal +import html +import http.client +import os +import platform +import queue +import urllib.parse +import urllib.request +from typing import Any + +from snowflake.connector.constants import UTF8 + +IS_LINUX = platform.system() == "Linux" +IS_WINDOWS = platform.system() == "Windows" +IS_MACOS = platform.system() == "Darwin" + +NUM_DATA_TYPES: tuple[type, ...] = () +try: + import numpy + + NUM_DATA_TYPES = ( + numpy.int8, + numpy.int16, + numpy.int32, + numpy.int64, + numpy.float16, + numpy.float32, + numpy.float64, + numpy.uint8, + numpy.uint16, + numpy.uint32, + numpy.uint64, + numpy.bool_, + ) +except (ImportError, AttributeError): + numpy = None + +GET_CWD = os.getcwd +BASE_EXCEPTION_CLASS = Exception +TO_UNICODE = str +ITERATOR = collections.abc.Iterator +MAPPING = collections.abc.Mapping + +urlsplit = urllib.parse.urlsplit +urlunsplit = urllib.parse.urlunsplit +parse_qs = urllib.parse.parse_qs +urlparse = urllib.parse.urlparse + +NUM_DATA_TYPES += (int, float, decimal.Decimal) + + +def PKCS5_UNPAD(v): + return v[0 : -v[-1]] + + +def PKCS5_OFFSET(v): + return v[-1] + + +def IS_BINARY(v): + return isinstance(v, (bytes, bytearray)) + + +METHOD_NOT_ALLOWED = http.client.METHOD_NOT_ALLOWED +BAD_GATEWAY = http.client.BAD_GATEWAY +BAD_REQUEST = http.client.BAD_REQUEST +REQUEST_TIMEOUT = http.client.REQUEST_TIMEOUT +SERVICE_UNAVAILABLE = http.client.SERVICE_UNAVAILABLE +GATEWAY_TIMEOUT = http.client.GATEWAY_TIMEOUT +FORBIDDEN = http.client.FORBIDDEN +UNAUTHORIZED = http.client.UNAUTHORIZED +INTERNAL_SERVER_ERROR = http.client.INTERNAL_SERVER_ERROR +IncompleteRead = http.client.IncompleteRead +OK = http.client.OK +BadStatusLine = http.client.BadStatusLine + +urlencode = urllib.parse.urlencode +unquote = urllib.parse.unquote +quote = urllib.parse.quote +unescape = html.unescape + +EmptyQueue = queue.Empty +Queue = queue.Queue + + +def IS_BYTES(v: Any) -> bool: + return isinstance(v, bytes) + + +def IS_UNICODE(v: Any) -> bool: + return isinstance(v, str) + + +def IS_NUMERIC(v: Any) -> bool: + return isinstance(v, NUM_DATA_TYPES) + + +IS_STR = IS_UNICODE + + +def PKCS5_PAD(value: bytes, block_size: int) -> bytes: + return b"".join( + [ + value, + (block_size - len(value) % 
block_size) + * chr(block_size - len(value) % block_size).encode(UTF8), + ] + ) + + +def PRINT(msg: str) -> None: + print(msg) + + +def INPUT(prompt: str) -> str: + return input(prompt) + + +def quote_url_piece(piece: str) -> str: + """Helper function to urlencode a string and turn it into bytes.""" + return quote(piece) diff --git a/src/snowflake/connector/connection.py b/src/snowflake/connector/connection.py new file mode 100644 index 000000000..6a67db5f9 --- /dev/null +++ b/src/snowflake/connector/connection.py @@ -0,0 +1,1495 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +import copy +import logging +import os +import re +import sys +import uuid +import warnings +import weakref +from difflib import get_close_matches +from functools import partial +from io import StringIO +from logging import getLogger +from threading import Lock +from time import strptime +from typing import Any, Callable, Generator, Iterable, NamedTuple, Sequence + +from . import errors, proxy +from .auth import Auth +from .auth_default import AuthByDefault +from .auth_idtoken import AuthByIdToken +from .auth_keypair import AuthByKeyPair +from .auth_oauth import AuthByOAuth +from .auth_okta import AuthByOkta +from .auth_usrpwdmfa import AuthByUsrPwdMfa +from .auth_webbrowser import AuthByWebBrowser +from .bind_upload_agent import BindUploadError +from .compat import IS_LINUX, IS_WINDOWS, quote, urlencode +from .constants import ( + ENV_VAR_PARTNER, + PARAMETER_AUTOCOMMIT, + PARAMETER_CLIENT_PREFETCH_THREADS, + PARAMETER_CLIENT_REQUEST_MFA_TOKEN, + PARAMETER_CLIENT_SESSION_KEEP_ALIVE, + PARAMETER_CLIENT_SESSION_KEEP_ALIVE_HEARTBEAT_FREQUENCY, + PARAMETER_CLIENT_STORE_TEMPORARY_CREDENTIAL, + PARAMETER_CLIENT_TELEMETRY_ENABLED, + PARAMETER_CLIENT_TELEMETRY_OOB_ENABLED, + PARAMETER_CLIENT_VALIDATE_DEFAULT_PARAMETERS, + PARAMETER_ENABLE_STAGE_S3_PRIVATELINK_FOR_US_EAST_1, + PARAMETER_SERVICE_NAME, + PARAMETER_TIMEZONE, + OCSPMode, + QueryStatus, +) +from .converter import SnowflakeConverter +from .cursor import LOG_MAX_QUERY_LENGTH, SnowflakeCursor +from .description import ( + CLIENT_NAME, + CLIENT_VERSION, + PLATFORM, + PYTHON_VERSION, + SNOWFLAKE_CONNECTOR_VERSION, +) +from .errorcode import ( + ER_CONNECTION_IS_CLOSED, + ER_FAILED_PROCESSING_PYFORMAT, + ER_FAILED_PROCESSING_QMARK, + ER_FAILED_TO_CONNECT_TO_DB, + ER_INVALID_VALUE, + ER_NO_ACCOUNT_NAME, + ER_NO_NUMPY, + ER_NO_PASSWORD, + ER_NO_USER, + ER_NOT_IMPLICITY_SNOWFLAKE_DATATYPE, +) +from .errors import DatabaseError, Error, OperationalError, ProgrammingError +from .network import ( + DEFAULT_AUTHENTICATOR, + EXTERNAL_BROWSER_AUTHENTICATOR, + KEY_PAIR_AUTHENTICATOR, + OAUTH_AUTHENTICATOR, + REQUEST_ID, + USR_PWD_MFA_AUTHENTICATOR, + ReauthenticationRequest, + SnowflakeRestful, +) +from .sqlstate import SQLSTATE_CONNECTION_NOT_EXISTS, SQLSTATE_FEATURE_NOT_SUPPORTED +from .telemetry import TelemetryClient +from .telemetry_oob import TelemetryService +from .time_util import HeartBeatTimer, get_time_millis +from .util_text import construct_hostname, parse_account, split_statements + +DEFAULT_CLIENT_PREFETCH_THREADS = 4 +MAX_CLIENT_PREFETCH_THREADS = 10 + + +def DefaultConverterClass(): + if IS_WINDOWS: + from .converter_issue23517 import SnowflakeConverterIssue23517 + + return SnowflakeConverterIssue23517 + else: + from .converter import SnowflakeConverter + + return SnowflakeConverter + + +SUPPORTED_PARAMSTYLES = { + "qmark", + "numeric", + "format", + "pyformat", +} 
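+# For illustration, assuming a cursor ``cur`` obtained from a connection, the
+# same filter is written as follows under each supported paramstyle; qmark and
+# numeric are bound server side, while format and pyformat are rendered client
+# side before the query is sent:
+#
+#   qmark:    cur.execute("SELECT * FROM t WHERE c = ?", (1,))
+#   numeric:  cur.execute("SELECT * FROM t WHERE c = :1", (1,))
+#   format:   cur.execute("SELECT * FROM t WHERE c = %s", (1,))
+#   pyformat: cur.execute("SELECT * FROM t WHERE c = %(v)s", {"v": 1})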
+# Default configs, tuple of default variable and accepted types
+DEFAULT_CONFIGURATION: dict[str, tuple[Any, type | tuple[type, ...]]] = {
+    "dsn": (None, (type(None), str)),  # standard
+    "user": ("", str),  # standard
+    "password": ("", str),  # standard
+    "host": ("127.0.0.1", str),  # standard
+    "port": (8080, (int, str)),  # standard
+    "database": (None, (type(None), str)),  # standard
+    "proxy_host": (None, (type(None), str)),  # snowflake
+    "proxy_port": (None, (type(None), str)),  # snowflake
+    "proxy_user": (None, (type(None), str)),  # snowflake
+    "proxy_password": (None, (type(None), str)),  # snowflake
+    "protocol": ("http", str),  # snowflake
+    "warehouse": (None, (type(None), str)),  # snowflake
+    "region": (None, (type(None), str)),  # snowflake
+    "account": (None, (type(None), str)),  # snowflake
+    "schema": (None, (type(None), str)),  # snowflake
+    "role": (None, (type(None), str)),  # snowflake
+    "session_id": (None, (type(None), str)),  # snowflake
+    "login_timeout": (120, int),  # login timeout
+    "network_timeout": (
+        None,
+        (type(None), int),
+    ),  # network timeout (infinite by default)
+    "passcode_in_password": (False, bool),  # Snowflake MFA
+    "passcode": (None, (type(None), str)),  # Snowflake MFA
+    "private_key": (None, (type(None), str)),
+    "token": (None, (type(None), str)),  # OAuth or JWT Token
+    "authenticator": (DEFAULT_AUTHENTICATOR, (type(None), str)),
+    "mfa_callback": (None, (type(None), Callable)),
+    "password_callback": (None, (type(None), Callable)),
+    "application": (CLIENT_NAME, (type(None), str)),
+    "internal_application_name": (CLIENT_NAME, (type(None), str)),
+    "internal_application_version": (CLIENT_VERSION, (type(None), str)),
+    "insecure_mode": (False, bool),  # Error security fix requirement
+    "ocsp_fail_open": (True, bool),  # fail open on ocsp issues, default true
+    "inject_client_pause": (0, int),  # snowflake internal
+    "session_parameters": (None, (type(None), dict)),  # snowflake session parameters
+    "autocommit": (None, (type(None), bool)),  # snowflake
+    "client_session_keep_alive": (None, (type(None), bool)),  # snowflake
+    "client_session_keep_alive_heartbeat_frequency": (
+        None,
+        (type(None), int),
+    ),  # snowflake
+    "client_prefetch_threads": (4, int),  # snowflake
+    "numpy": (False, bool),  # snowflake
+    "ocsp_response_cache_filename": (None, (type(None), str)),  # snowflake internal
+    "converter_class": (DefaultConverterClass(), SnowflakeConverter),
+    "validate_default_parameters": (False, bool),  # snowflake
+    "probe_connection": (False, bool),  # snowflake
+    "paramstyle": (None, (type(None), str)),  # standard/snowflake
+    "timezone": (None, (type(None), str)),  # snowflake
+    "consent_cache_id_token": (True, bool),  # snowflake
+    "service_name": (None, (type(None), str)),  # snowflake
+    "support_negative_year": (True, bool),  # snowflake
+    "log_max_query_length": (LOG_MAX_QUERY_LENGTH, int),  # snowflake
+    "disable_request_pooling": (False, bool),  # snowflake
+    # enable temporary credential file for Linux, default false. Mac/Win will overlook this.
+    "client_store_temporary_credential": (False, bool),
+    "client_request_mfa_token": (False, bool),
+    "use_openssl_only": (
+        False,
+        bool,
+    ),  # only use openssl instead of python only crypto modules
+    # whether to convert Arrow number values to decimal instead of doubles
+    "arrow_number_to_decimal": (False, bool),
+    "enable_stage_s3_privatelink_for_us_east_1": (
+        False,
+        bool,
+    ),  # only use regional url when the param is set
+    # Allows cursors to be re-iterable
+    "reuse_results": (False, bool),
+    # parameter protecting behavior change of SNOW-501058
+    "interpolate_empty_sequences": (False, bool),
+}
+
+APPLICATION_RE = re.compile(r"[\w\d_]+")
+
+# adding the exception class to Connection class
+for m in [method for method in dir(errors) if callable(getattr(errors, method))]:
+    setattr(sys.modules[__name__], m, getattr(errors, m))
+
+# Workaround for https://bugs.python.org/issue7980
+strptime("20150102030405", "%Y%m%d%H%M%S")
+
+logger = getLogger(__name__)
+
+
+class TypeAndBinding(NamedTuple):
+    """Stores the type name and the Snowflake binding."""
+
+    type: str
+    binding: str | None
+
+
+class SnowflakeConnection:
+    """Implementation of the connection object for the Snowflake Database.
+
+    Use connect(..) to get the object.
+
+    Attributes:
+        insecure_mode: Whether or not the connection is in insecure mode. Insecure mode means that the connection
+            validates the TLS certificate but doesn't check revocation status.
+        ocsp_fail_open: Whether or not the connection is in fail open mode. In fail open mode, revoked certificates
+            still block the connection, while any other certificate error or OCSP responder failure is disregarded
+            in favor of connectivity.
+        session_id: The session ID of the connection.
+        user: The user name used in the connection.
+        host: The host name the connection attempts to connect to.
+        port: The port to communicate with on the host.
+        region: Region name if not the default Snowflake Database deployment.
+        proxy_host: The host name of the proxy server.
+        proxy_port: Port on the proxy server to communicate with.
+        proxy_user: User name to log in with on the proxy server.
+        proxy_password: Password to be used to authenticate with the proxy server.
+        account: Account name to be used to authenticate with Snowflake.
+        database: Database to use on Snowflake.
+        schema: Schema in use on Snowflake.
+        warehouse: Warehouse to be used on Snowflake.
+        role: Role in use on Snowflake.
+        login_timeout: Login timeout in seconds. Used while authenticating.
+        network_timeout: Network timeout. Used for general purpose.
+        client_session_keep_alive_heartbeat_frequency: Heartbeat frequency to keep connection alive in seconds.
+        client_prefetch_threads: Number of threads to download the result set.
+        rest: Snowflake REST API object. Internal use only. May be removed in a later release.
+        application: Application name to communicate with Snowflake as. By default, this is "PythonConnector".
+        errorhandler: Handler used with errors. By default, an exception will be raised on error.
+        converter_class: Handler used to convert data to Python native objects.
+        validate_default_parameters: Validate database, schema, role and warehouse used on Snowflake.
+        is_pyformat: Whether the current argument binding is pyformat or format.
+        consent_cache_id_token: Consented cache ID token.
+        use_openssl_only: Use OpenSSL instead of pure Python libraries for signature verification and encryption.
+        enable_stage_s3_privatelink_for_us_east_1: When true, clients use the regional S3 URL to upload files.
+ """ + + OCSP_ENV_LOCK = Lock() + + def __init__(self, **kwargs): + self._lock_sequence_counter = Lock() + self.sequence_counter = 0 + self._errorhandler = Error.default_errorhandler + self._lock_converter = Lock() + self.messages = [] + self._async_sfqids = set() + self._done_async_sfqids = set() + self.telemetry_enabled = False + self._session_parameters: dict[str, str | int | bool] = {} + logger.info( + "Snowflake Connector for Python Version: %s, " + "Python Version: %s, Platform: %s", + SNOWFLAKE_CONNECTOR_VERSION, + PYTHON_VERSION, + PLATFORM, + ) + + self._rest = None + for name, (value, _) in DEFAULT_CONFIGURATION.items(): + setattr(self, "_" + name, value) + + self.heartbeat_thread = None + + if "application" not in kwargs: + if ENV_VAR_PARTNER in os.environ.keys(): + kwargs["application"] = os.environ[ENV_VAR_PARTNER] + elif "streamlit" in sys.modules: + kwargs["application"] = "streamlit" + + self.converter = None + self.__set_error_attributes() + self.connect(**kwargs) + self._telemetry = TelemetryClient(self._rest) + + def __del__(self): # pragma: no cover + try: + self.close(retry=False) + except Exception: + pass + + @property + def insecure_mode(self) -> bool: + return self._insecure_mode + + @property + def ocsp_fail_open(self) -> bool: + return self._ocsp_fail_open + + def _ocsp_mode(self) -> OCSPMode: + """OCSP mode. INSECURE, FAIL_OPEN or FAIL_CLOSED.""" + if self.insecure_mode: + return OCSPMode.INSECURE + elif self.ocsp_fail_open: + return OCSPMode.FAIL_OPEN + else: + return OCSPMode.FAIL_CLOSED + + @property + def session_id(self): + return self._session_id + + @property + def user(self): + return self._user + + @property + def host(self): + return self._host + + @property + def port(self): + return self._port + + @property + def region(self): + warnings.warn( + "Region has been deprecated and will be removed in the near future", + PendingDeprecationWarning, + ) + return self._region + + @property + def proxy_host(self): + return self._proxy_host + + @property + def proxy_port(self): + return self._proxy_port + + @property + def proxy_user(self): + return self._proxy_user + + @property + def proxy_password(self): + return self._proxy_password + + @property + def account(self): + return self._account + + @property + def database(self): + return self._database + + @property + def schema(self): + return self._schema + + @property + def warehouse(self): + return self._warehouse + + @property + def role(self): + return self._role + + @property + def login_timeout(self): + return int(self._login_timeout) if self._login_timeout is not None else None + + @property + def network_timeout(self): + return int(self._network_timeout) if self._network_timeout is not None else None + + @property + def client_session_keep_alive(self): + return self._client_session_keep_alive + + @client_session_keep_alive.setter + def client_session_keep_alive(self, value): + self._client_session_keep_alive = value + + @property + def client_session_keep_alive_heartbeat_frequency(self): + return self._client_session_keep_alive_heartbeat_frequency + + @client_session_keep_alive_heartbeat_frequency.setter + def client_session_keep_alive_heartbeat_frequency(self, value): + self._client_session_keep_alive_heartbeat_frequency = value + self._validate_client_session_keep_alive_heartbeat_frequency() + + @property + def client_prefetch_threads(self): + return ( + self._client_prefetch_threads + if self._client_prefetch_threads + else DEFAULT_CLIENT_PREFETCH_THREADS + ) + + @client_prefetch_threads.setter + 
def client_prefetch_threads(self, value):
+        self._client_prefetch_threads = value
+        self._validate_client_prefetch_threads()
+
+    @property
+    def rest(self):
+        return self._rest
+
+    @property
+    def application(self):
+        return self._application
+
+    @property
+    def errorhandler(self):
+        return self._errorhandler
+
+    @errorhandler.setter
+    def errorhandler(self, value):
+        if value is None:
+            raise ProgrammingError("None errorhandler is specified")
+        self._errorhandler = value
+
+    @property
+    def converter_class(self):
+        return self._converter_class
+
+    @property
+    def validate_default_parameters(self):
+        return self._validate_default_parameters
+
+    @property
+    def is_pyformat(self):
+        return self._paramstyle in ("pyformat", "format")
+
+    @property
+    def consent_cache_id_token(self):
+        return self._consent_cache_id_token
+
+    @property
+    def telemetry_enabled(self):
+        return self._telemetry_enabled
+
+    @telemetry_enabled.setter
+    def telemetry_enabled(self, value):
+        self._telemetry_enabled = True if value else False
+
+    @property
+    def service_name(self):
+        return self._service_name
+
+    @service_name.setter
+    def service_name(self, value):
+        self._service_name = value
+
+    @property
+    def log_max_query_length(self):
+        return self._log_max_query_length
+
+    @property
+    def disable_request_pooling(self):
+        return self._disable_request_pooling
+
+    @disable_request_pooling.setter
+    def disable_request_pooling(self, value):
+        self._disable_request_pooling = True if value else False
+
+    @property
+    def use_openssl_only(self):
+        return self._use_openssl_only
+
+    @property
+    def arrow_number_to_decimal(self):
+        return self._arrow_number_to_decimal
+
+    @property
+    def enable_stage_s3_privatelink_for_us_east_1(self):
+        return self._enable_stage_s3_privatelink_for_us_east_1
+
+    @enable_stage_s3_privatelink_for_us_east_1.setter
+    def enable_stage_s3_privatelink_for_us_east_1(self, value):
+        self._enable_stage_s3_privatelink_for_us_east_1 = True if value else False
+
+    @arrow_number_to_decimal.setter
+    def arrow_number_to_decimal(self, value: bool):
+        # the setter must reuse the property name, otherwise it binds to a
+        # separate attribute and the property stays read-only
+        self._arrow_number_to_decimal = value
+
+    def connect(self, **kwargs):
+        """Establishes connection to Snowflake."""
+        logger.debug("connect")
+        if len(kwargs) > 0:
+            self.__config(**kwargs)
+            TelemetryService.get_instance().update_context(kwargs)
+
+        self.__open_connection()
+
+    def close(self, retry=True):
+        """Closes the connection."""
+        try:
+            if not self.rest:
+                logger.debug("Rest object has been destroyed, cannot close session")
+                return
+
+            # will hang if the application doesn't close the connection and
+            # CLIENT_SESSION_KEEP_ALIVE is set, because the heartbeat runs on
+            # a separate thread.
+            self._cancel_heartbeat()
+
+            # close telemetry first, since it needs rest to send remaining data
+            logger.info("closed")
+            self._telemetry.close(send_on_close=retry)
+            if self._all_async_queries_finished():
+                logger.info("No async queries seem to be running, deleting session")
+                self.rest.delete_session(retry=retry)
+            else:
+                logger.info(
+                    "There are {} async queries still running, not deleting session".format(
+                        len(self._async_sfqids)
+                    )
+                )
+            self.rest.close()
+            self._rest = None
+            del self.messages[:]
+            logger.debug("Session is closed")
+        except Exception as e:
+            logger.debug(
+                "Exception encountered in closing connection. ignoring...: %s", e
+            )
+
+    def is_closed(self):
+        """Checks whether the connection has been closed."""
+        return self.rest is None
+
+    def autocommit(self, mode):
+        """Sets autocommit mode to True or False.
Defaults to True.""" + if not self.rest: + Error.errorhandler_wrapper( + self, + None, + DatabaseError, + { + "msg": "Connection is closed", + "errno": ER_CONNECTION_IS_CLOSED, + "sqlstate": SQLSTATE_CONNECTION_NOT_EXISTS, + }, + ) + if not isinstance(mode, bool): + Error.errorhandler_wrapper( + self, + None, + ProgrammingError, + { + "msg": f"Invalid parameter: {mode}", + "errno": ER_INVALID_VALUE, + }, + ) + try: + self.cursor().execute(f"ALTER SESSION SET autocommit={mode}") + except Error as e: + if e.sqlstate == SQLSTATE_FEATURE_NOT_SUPPORTED: + logger.debug( + "Autocommit feature is not enabled for this " "connection. Ignored" + ) + + def commit(self): + """Commits the current transaction.""" + self.cursor().execute("COMMIT") + + def rollback(self): + """Rolls back the current transaction.""" + self.cursor().execute("ROLLBACK") + + def cursor( + self, cursor_class: type[SnowflakeCursor] = SnowflakeCursor + ) -> SnowflakeCursor: + """Creates a cursor object. Each statement will be executed in a new cursor object.""" + logger.debug("cursor") + if not self.rest: + Error.errorhandler_wrapper( + self, + None, + DatabaseError, + { + "msg": "Connection is closed", + "errno": ER_CONNECTION_IS_CLOSED, + "sqlstate": SQLSTATE_CONNECTION_NOT_EXISTS, + }, + ) + return cursor_class(self) + + def execute_string( + self, + sql_text: str, + remove_comments: bool = False, + return_cursors: bool = True, + cursor_class: SnowflakeCursor = SnowflakeCursor, + **kwargs, + ) -> Iterable[SnowflakeCursor]: + """Executes a SQL text including multiple statements. This is a non-standard convenience method.""" + stream = StringIO(sql_text) + stream_generator = self.execute_stream( + stream, remove_comments=remove_comments, cursor_class=cursor_class, **kwargs + ) + ret = list(stream_generator) + return ret if return_cursors else list() + + def execute_stream( + self, + stream: StringIO, + remove_comments: bool = False, + cursor_class: SnowflakeCursor = SnowflakeCursor, + **kwargs, + ) -> Generator[SnowflakeCursor, None, None]: + """Executes a stream of SQL statements. 
This is a non-standard convenience method."""
+        split_statements_list = split_statements(
+            stream, remove_comments=remove_comments
+        )
+        # Note: split_statements_list is a list of tuples of sql statements and whether they are put/get
+        non_empty_statements = [e for e in split_statements_list if e[0]]
+        for sql, is_put_or_get in non_empty_statements:
+            cur = self.cursor(cursor_class=cursor_class)
+            cur.execute(sql, _is_put_get=is_put_or_get, **kwargs)
+            yield cur
+
+    def __set_error_attributes(self):
+        for m in [
+            method for method in dir(errors) if callable(getattr(errors, method))
+        ]:
+            # If name starts with _ then ignore that
+            name = m if not m.startswith("_") else m[1:]
+            setattr(self, name, getattr(errors, m))
+
+    @staticmethod
+    def setup_ocsp_privatelink(app, hostname):
+        SnowflakeConnection.OCSP_ENV_LOCK.acquire()
+        ocsp_cache_server = f"http://ocsp.{hostname}/ocsp_response_cache.json"
+        os.environ["SF_OCSP_RESPONSE_CACHE_SERVER_URL"] = ocsp_cache_server
+        logger.debug("OCSP Cache Server is updated: %s", ocsp_cache_server)
+        SnowflakeConnection.OCSP_ENV_LOCK.release()
+
+    def __open_connection(self):
+        """Opens a new network connection."""
+        self.converter = self._converter_class(
+            use_numpy=self._numpy, support_negative_year=self._support_negative_year
+        )
+
+        proxy.set_proxies(
+            self.proxy_host, self.proxy_port, self.proxy_user, self.proxy_password
+        )
+
+        self._rest = SnowflakeRestful(
+            host=self.host,
+            port=self.port,
+            protocol=self._protocol,
+            inject_client_pause=self._inject_client_pause,
+            connection=self,
+        )
+        logger.debug("REST API object was created: %s:%s", self.host, self.port)
+
+        if "SF_OCSP_RESPONSE_CACHE_SERVER_URL" in os.environ:
+            logger.debug(
+                "Custom OCSP Cache Server URL found in environment - %s",
+                os.environ["SF_OCSP_RESPONSE_CACHE_SERVER_URL"],
+            )
+
+        if self.host.endswith(".privatelink.snowflakecomputing.com"):
+            SnowflakeConnection.setup_ocsp_privatelink(self.application, self.host)
+        else:
+            if "SF_OCSP_RESPONSE_CACHE_SERVER_URL" in os.environ:
+                del os.environ["SF_OCSP_RESPONSE_CACHE_SERVER_URL"]
+
+        if self._authenticator == DEFAULT_AUTHENTICATOR:
+            auth_instance = AuthByDefault(self._password)
+        elif self._authenticator == EXTERNAL_BROWSER_AUTHENTICATOR:
+            auth_instance = AuthByWebBrowser(
+                self.rest,
+                self.application,
+                protocol=self._protocol,
+                host=self.host,
+                port=self.port,
+            )
+        elif self._authenticator == KEY_PAIR_AUTHENTICATOR:
+            auth_instance = AuthByKeyPair(self._private_key)
+        elif self._authenticator == OAUTH_AUTHENTICATOR:
+            auth_instance = AuthByOAuth(self._token)
+        elif self._authenticator == USR_PWD_MFA_AUTHENTICATOR:
+            auth_instance = AuthByUsrPwdMfa(self._password)
+        else:
+            # okta URL, e.g., https://.okta.com/
+            auth_instance = AuthByOkta(self.rest, self.application)
+
+        if self._session_parameters is None:
+            self._session_parameters = {}
+        if self._autocommit is not None:
+            self._session_parameters[PARAMETER_AUTOCOMMIT] = self._autocommit
+
+        if self._timezone is not None:
+            self._session_parameters[PARAMETER_TIMEZONE] = self._timezone
+
+        if self._validate_default_parameters:
+            # Snowflake will validate the requested database, schema, and warehouse
+            self._session_parameters[
+                PARAMETER_CLIENT_VALIDATE_DEFAULT_PARAMETERS
+            ] = True
+
+        if self.client_session_keep_alive is not None:
+            self._session_parameters[
+                PARAMETER_CLIENT_SESSION_KEEP_ALIVE
+            ] = self._client_session_keep_alive
+
+        if self.client_session_keep_alive_heartbeat_frequency is not None:
+            self._session_parameters[
PARAMETER_CLIENT_SESSION_KEEP_ALIVE_HEARTBEAT_FREQUENCY + ] = self._validate_client_session_keep_alive_heartbeat_frequency() + + if self.client_prefetch_threads: + self._session_parameters[ + PARAMETER_CLIENT_PREFETCH_THREADS + ] = self._validate_client_prefetch_threads() + + if self._authenticator == EXTERNAL_BROWSER_AUTHENTICATOR: + # enable storing temporary credential in a file + self._session_parameters[PARAMETER_CLIENT_STORE_TEMPORARY_CREDENTIAL] = ( + self._client_store_temporary_credential if IS_LINUX else True + ) + + if self._authenticator == USR_PWD_MFA_AUTHENTICATOR: + self._session_parameters[PARAMETER_CLIENT_REQUEST_MFA_TOKEN] = ( + self._client_request_mfa_token if IS_LINUX else True + ) + + auth = Auth(self.rest) + auth.read_temporary_credentials(self.host, self.user, self._session_parameters) + self._authenticate(auth_instance) + + self._password = None # ensure password won't persist + + if self.client_session_keep_alive: + # This will be called after the heartbeat frequency has actually been set. + # By this point it should have been decided if the heartbeat has to be enabled + # and what would the heartbeat frequency be + self._add_heartbeat() + + def __preprocess_auth_instance(self, auth_instance): + if type(auth_instance) is AuthByWebBrowser: + if self._rest.id_token is not None: + return AuthByIdToken(self._rest.id_token) + if type(auth_instance) is AuthByUsrPwdMfa: + if self._rest.mfa_token is not None: + auth_instance.set_mfa_token(self._rest.mfa_token) + return auth_instance + + def __config(self, **kwargs): + """Sets up parameters in the connection object.""" + logger.debug("__config") + # Handle special cases first + if "sequence_counter" in kwargs: + self.sequence_counter = kwargs["sequence_counter"] + if "application" in kwargs: + value = kwargs["application"] + if not APPLICATION_RE.match(value): + msg = f"Invalid application name: {value}" + raise ProgrammingError(msg=msg, errno=0) + else: + self._application = value + if "validate_default_parameters" in kwargs: + self._validate_default_parameters = kwargs["validate_default_parameters"] + # Handle rest of arguments + skip_list = ["validate_default_parameters", "sequence_counter", "application"] + for name, value in filter(lambda e: e[0] not in skip_list, kwargs.items()): + if self.validate_default_parameters: + if name not in DEFAULT_CONFIGURATION.keys(): + close_matches = get_close_matches( + name, DEFAULT_CONFIGURATION.keys(), n=1, cutoff=0.8 + ) + guess = close_matches[0] if len(close_matches) > 0 else None + warnings.warn( + "'{}' is an unknown connection parameter{}".format( + name, f", did you mean '{guess}'?" if guess else "" + ) + ) + elif not isinstance(value, DEFAULT_CONFIGURATION[name][1]): + accepted_types = DEFAULT_CONFIGURATION[name][1] + warnings.warn( + "'{}' connection parameter should be of type '{}', but is a '{}'".format( + name, + str(tuple(e.__name__ for e in accepted_types)).replace( + "'", "" + ) + if isinstance(accepted_types, tuple) + else accepted_types.__name__, + type(value).__name__, + ) + ) + setattr(self, "_" + name, value) + + if self._numpy: + try: + import numpy # noqa: F401 + except ModuleNotFoundError: # pragma: no cover + Error.errorhandler_wrapper( + self, + None, + ProgrammingError, + { + "msg": "Numpy module is not installed. 
Cannot fetch data as numpy", + "errno": ER_NO_NUMPY, + }, + ) + + if self._paramstyle is None: + import snowflake.connector + + self._paramstyle = snowflake.connector.paramstyle + elif self._paramstyle not in SUPPORTED_PARAMSTYLES: + raise ProgrammingError( + msg="Invalid paramstyle is specified", errno=ER_INVALID_VALUE + ) + + if "account" in kwargs: + if "host" not in kwargs: + self._host = construct_hostname(kwargs.get("region"), self._account) + if "port" not in kwargs: + self._port = "443" + if "protocol" not in kwargs: + self._protocol = "https" + + if self._authenticator: + # Only upper self._authenticator if it is a non-okta link + auth_tmp = self._authenticator.upper() + if auth_tmp in [ # Non-okta authenticators + DEFAULT_AUTHENTICATOR, + EXTERNAL_BROWSER_AUTHENTICATOR, + KEY_PAIR_AUTHENTICATOR, + OAUTH_AUTHENTICATOR, + USR_PWD_MFA_AUTHENTICATOR, + ]: + self._authenticator = auth_tmp + + if not self.user and self._authenticator != OAUTH_AUTHENTICATOR: + # OAuth Authentication does not require a username + Error.errorhandler_wrapper( + self, + None, + ProgrammingError, + {"msg": "User is empty", "errno": ER_NO_USER}, + ) + + if self._private_key: + self._authenticator = KEY_PAIR_AUTHENTICATOR + + if self._authenticator not in [ + # when self._authenticator would be in this list it is always upper'd before + EXTERNAL_BROWSER_AUTHENTICATOR, + OAUTH_AUTHENTICATOR, + KEY_PAIR_AUTHENTICATOR, + ]: + # authentication is done by the browser if the authenticator + # is externalbrowser + if not self._password: + Error.errorhandler_wrapper( + self, + None, + ProgrammingError, + {"msg": "Password is empty", "errno": ER_NO_PASSWORD}, + ) + + if not self._account: + Error.errorhandler_wrapper( + self, + None, + ProgrammingError, + {"msg": "Account must be specified", "errno": ER_NO_ACCOUNT_NAME}, + ) + if "." in self._account: + self._account = parse_account(self._account) + + if self.ocsp_fail_open: + logger.info( + "This connection is in OCSP Fail Open Mode. " + "TLS Certificates would be checked for validity " + "and revocation status. Any other Certificate " + "Revocation related exceptions or OCSP Responder " + "failures would be disregarded in favor of " + "connectivity." + ) + + if self.insecure_mode: + logger.info( + "THIS CONNECTION IS IN INSECURE MODE. IT " + "MEANS THE CERTIFICATE WILL BE VALIDATED BUT THE " + "CERTIFICATE REVOCATION STATUS WILL NOT BE " + "CHECKED." 
+ ) + + if "SF_USE_OPENSSL_ONLY" not in os.environ: + logger.info("Setting use_openssl_only mode to %s", self.use_openssl_only) + os.environ["SF_USE_OPENSSL_ONLY"] = str(self.use_openssl_only) + elif ( + os.environ.get("SF_USE_OPENSSL_ONLY", "False") == "True" + ) != self.use_openssl_only: + logger.warning( + "Mode use_openssl_only is already set to: %s, ignoring set request to: %s", + os.environ["SF_USE_OPENSSL_ONLY"], + self.use_openssl_only, + ) + self._use_openssl_only = os.environ["SF_USE_OPENSSL_ONLY"] == "True" + + def cmd_query( + self, + sql: str, + sequence_counter: int, + request_id: uuid.UUID, + binding_params: None | tuple | dict[str, dict[str, str]] = None, + binding_stage: str | None = None, + is_file_transfer: bool = False, + statement_params: dict[str, str] | None = None, + is_internal: bool = False, + describe_only: bool = False, + _no_results: bool = False, + _update_current_object: bool = True, + _no_retry: bool = False, + ): + """Executes a query with a sequence counter.""" + logger.debug("_cmd_query") + data = { + "sqlText": sql, + "asyncExec": _no_results, + "sequenceId": sequence_counter, + "querySubmissionTime": get_time_millis(), + } + if statement_params is not None: + data["parameters"] = statement_params + if is_internal: + data["isInternal"] = is_internal + if describe_only: + data["describeOnly"] = describe_only + if binding_stage is not None: + # binding stage for bulk array binding + data["bindStage"] = binding_stage + if binding_params is not None: + # binding parameters. This is for qmarks paramstyle. + data["bindings"] = binding_params + + client = "sfsql_file_transfer" if is_file_transfer else "sfsql" + + if logger.getEffectiveLevel() <= logging.DEBUG: + logger.debug( + "sql=[%s], sequence_id=[%s], is_file_transfer=[%s]", + self._format_query_for_log(data["sqlText"]), + data["sequenceId"], + is_file_transfer, + ) + + url_parameters = {REQUEST_ID: request_id} + + ret = self.rest.request( + "/queries/v1/query-request?" + urlencode(url_parameters), + data, + client=client, + _no_results=_no_results, + _include_retry_params=True, + _no_retry=_no_retry, + ) + + if ret is None: + ret = {"data": {}} + if ret.get("data") is None: + ret["data"] = {} + if _update_current_object: + data = ret["data"] + if "finalDatabaseName" in data: + self._database = data["finalDatabaseName"] + if "finalSchemaName" in data: + self._schema = data["finalSchemaName"] + if "finalWarehouseName" in data: + self._warehouse = data["finalWarehouseName"] + if "finalRoleName" in data: + self._role = data["finalRoleName"] + + return ret + + def _reauthenticate_by_webbrowser(self): + auth_instance = AuthByWebBrowser( + self.rest, + self.application, + protocol=self._protocol, + host=self.host, + port=self.port, + ) + self._authenticate(auth_instance) + return {"success": True} + + def _authenticate(self, auth_instance): + # make some changes if needed before real __authenticate + try: + self.__authenticate(self.__preprocess_auth_instance(auth_instance)) + except ReauthenticationRequest as ex: + # cached id_token expiration error, we have cleaned id_token and try to authenticate again + logger.debug("ID token expired. 
Reauthenticating...: %s", ex) + self.__authenticate(self.__preprocess_auth_instance(auth_instance)) + + def __authenticate(self, auth_instance): + auth_instance.authenticate( + authenticator=self._authenticator, + service_name=self.service_name, + account=self.account, + user=self.user, + password=self._password, + ) + self._consent_cache_id_token = getattr( + auth_instance, "consent_cache_id_token", True + ) + + auth = Auth(self.rest) + try: + auth.authenticate( + auth_instance=auth_instance, + account=self.account, + user=self.user, + database=self.database, + schema=self.schema, + warehouse=self.warehouse, + role=self.role, + passcode=self._passcode, + passcode_in_password=self._passcode_in_password, + mfa_callback=self._mfa_callback, + password_callback=self._password_callback, + session_parameters=self._session_parameters, + ) + except OperationalError: + logger.debug( + "Operational Error raised at authentication" + f"for authenticator: {type(auth_instance).__name__}" + ) + + while True: + try: + auth_instance.handle_timeout( + authenticator=self._authenticator, + service_name=self.service_name, + account=self.account, + user=self.user, + password=self._password, + ) + auth.authenticate( + auth_instance=auth_instance, + account=self.account, + user=self.user, + database=self.database, + schema=self.schema, + warehouse=self.warehouse, + role=self.role, + passcode=self._passcode, + passcode_in_password=self._passcode_in_password, + mfa_callback=self._mfa_callback, + password_callback=self._password_callback, + session_parameters=self._session_parameters, + ) + except OperationalError as auth_op: + if auth_op.errno == ER_FAILED_TO_CONNECT_TO_DB: + raise auth_op + logger.debug("Continuing authenticator specific timeout handling") + continue + break + + def _write_params_to_byte_rows( + self, params: list[tuple[Any | tuple]] + ) -> list[bytes]: + """Write csv-format rows of binding values as list of bytes string. + + Args: + params: Binding parameters to bulk array insertion query with qmark/numeric format. + cursor: SnowflakeCursor. + + Returns: + List of bytes string corresponding to rows + + """ + res = [] + try: + for row in params: + temp = map(self.converter.to_csv_bindings, row) + res.append((",".join(temp) + "\n").encode("utf-8")) + except (ProgrammingError, AttributeError) as exc: + raise BindUploadError from exc + return res + + def _get_snowflake_type_and_binding( + self, + cursor: SnowflakeCursor | None, + v: tuple[str, Any] | Any, + ) -> TypeAndBinding: + if isinstance(v, tuple): + if len(v) != 2: + Error.errorhandler_wrapper( + self, + cursor, + ProgrammingError, + { + "msg": "Binding parameters must be a list " + "where one element is a single value or " + "a pair of Snowflake datatype and a value", + "errno": ER_FAILED_PROCESSING_QMARK, + }, + ) + snowflake_type, v = v + else: + snowflake_type = self.converter.snowflake_type(v) + if snowflake_type is None: + Error.errorhandler_wrapper( + self, + cursor, + ProgrammingError, + { + "msg": "Python data type [{}] cannot be " + "automatically mapped to Snowflake data " + "type. 
Specify the snowflake data type " + "explicitly.".format(v.__class__.__name__.lower()), + "errno": ER_NOT_IMPLICITY_SNOWFLAKE_DATATYPE, + }, + ) + return TypeAndBinding( + snowflake_type, + self.converter.to_snowflake_bindings(snowflake_type, v), + ) + + # TODO we could probably rework this to not make dicts like this: {'1': 'value', '2': '13'} + def _process_params_qmarks( + self, + params: Sequence | None, + cursor: SnowflakeCursor | None = None, + ) -> dict[str, dict[str, str]] | None: + if not params: + return None + processed_params = {} + + get_type_and_binding = partial(self._get_snowflake_type_and_binding, cursor) + + for idx, v in enumerate(params): + if isinstance(v, list): + snowflake_type = self.converter.snowflake_type(v) + all_param_data = list(map(get_type_and_binding, v)) + first_type = all_param_data[0].type + # if all elements have the same snowflake type, update snowflake_type + if all(param_data.type == first_type for param_data in all_param_data): + snowflake_type = first_type + processed_params[str(idx + 1)] = { + "type": snowflake_type, + "value": [param_data.binding for param_data in all_param_data], + } + else: + snowflake_type, snowflake_binding = get_type_and_binding(v) + processed_params[str(idx + 1)] = { + "type": snowflake_type, + "value": snowflake_binding, + } + if logger.getEffectiveLevel() <= logging.DEBUG: + for k, v in processed_params.items(): + logger.debug("idx: %s, type: %s", k, v.get("type")) + return processed_params + + def _process_params_pyformat( + self, + params: Any | Sequence[Any] | dict[Any, Any] | None, + cursor: SnowflakeCursor | None = None, + ) -> tuple[Any] | dict[str, Any] | None: + """Process parameters for client-side parameter binding. + + Args: + params: Either a sequence, or a dictionary of parameters, if anything else + is given then it will be put into a list and processed that way. + cursor: The SnowflakeCursor used to report errors if necessary. + """ + if params is None: + if self._interpolate_empty_sequences: + return None + return {} + if isinstance(params, dict): + return self._process_params_dict(params) + + # TODO: remove this, callers should send in what's in the signature + if not isinstance(params, (tuple, list)): + params = [ + params, + ] + + try: + res = map(self._process_single_param, params) + ret = tuple(res) + logger.debug(f"parameters: {ret}") + return ret + except Exception as e: + Error.errorhandler_wrapper( + self, + cursor, + ProgrammingError, + { + "msg": f"Failed processing pyformat-parameters; {e}", + "errno": ER_FAILED_PROCESSING_PYFORMAT, + }, + ) + + def _process_params_dict( + self, params: dict[Any, Any], cursor: SnowflakeCursor | None = None + ) -> dict: + try: + res = {k: self._process_single_param(v) for k, v in params.items()} + logger.debug(f"parameters: {res}") + return res + except Exception as e: + Error.errorhandler_wrapper( + self, + cursor, + ProgrammingError, + { + "msg": f"Failed processing pyformat-parameters: {e}", + "errno": ER_FAILED_PROCESSING_PYFORMAT, + }, + ) + + def _process_single_param(self, param: Any) -> Any: + """Process a single parameter to Snowflake understandable form. + + This is a convenience function to replace repeated multiple calls with a single + function call. + + It calls the following underlying functions in this order: + 1. self.converter.to_snowflake + 2. self.converter.escape + 3. 
self.converter.quote
+        """
+        to_snowflake = self.converter.to_snowflake
+        escape = self.converter.escape
+        _quote = self.converter.quote
+        return _quote(escape(to_snowflake(param)))
+
+    def _cancel_query(self, sql, request_id):
+        """Cancels the query with the exact SQL query and requestId."""
+        logger.debug("_cancel_query sql=[%s], request_id=[%s]", sql, request_id)
+        url_parameters = {REQUEST_ID: str(uuid.uuid4())}
+
+        return self.rest.request(
+            "/queries/v1/abort-request?" + urlencode(url_parameters),
+            {
+                "sqlText": sql,
+                REQUEST_ID: str(request_id),
+            },
+        )
+
+    def _next_sequence_counter(self):
+        """Gets next sequence counter. Used internally."""
+        with self._lock_sequence_counter:
+            self.sequence_counter += 1
+            logger.debug("sequence counter: %s", self.sequence_counter)
+            return self.sequence_counter
+
+    def _log_telemetry(self, telemetry_data):
+        """Logs data to telemetry."""
+        if self.telemetry_enabled:
+            self._telemetry.try_add_log_to_batch(telemetry_data)
+
+    def _add_heartbeat(self):
+        """Add an hourly heartbeat query in order to keep connection alive."""
+        if not self.heartbeat_thread:
+            self._validate_client_session_keep_alive_heartbeat_frequency()
+            heartbeat_wref = weakref.WeakMethod(self._heartbeat_tick)
+
+            def beat_if_possible() -> None:
+                heartbeat_fn = heartbeat_wref()
+                if heartbeat_fn:
+                    heartbeat_fn()
+
+            self.heartbeat_thread = HeartBeatTimer(
+                self.client_session_keep_alive_heartbeat_frequency,
+                beat_if_possible,
+            )
+            self.heartbeat_thread.start()
+            logger.debug("started heartbeat")
+
+    def _cancel_heartbeat(self):
+        """Cancel a heartbeat thread."""
+        if self.heartbeat_thread:
+            self.heartbeat_thread.cancel()
+            self.heartbeat_thread.join()
+            self.heartbeat_thread = None
+            logger.debug("stopped heartbeat")
+
+    def _heartbeat_tick(self):
+        """Execute a heartbeat if connection isn't closed yet."""
+        if not self.is_closed():
+            logger.debug("heartbeating!")
+            self.rest._heartbeat()
+
+    def _validate_client_session_keep_alive_heartbeat_frequency(self) -> int:
+        """Validate and return heartbeat frequency in seconds."""
+        real_max = int(self.rest.master_validity_in_seconds / 4)
+        real_min = int(real_max / 4)
+        if self.client_session_keep_alive_heartbeat_frequency is None:
+            # This is an unlikely scenario but covering it just in case.
+ self._client_session_keep_alive_heartbeat_frequency = real_min + elif self.client_session_keep_alive_heartbeat_frequency > real_max: + self._client_session_keep_alive_heartbeat_frequency = real_max + elif self.client_session_keep_alive_heartbeat_frequency < real_min: + self._client_session_keep_alive_heartbeat_frequency = real_min + + # ensure the type is integer + self._client_session_keep_alive_heartbeat_frequency = int( + self.client_session_keep_alive_heartbeat_frequency + ) + return self.client_session_keep_alive_heartbeat_frequency + + def _validate_client_prefetch_threads(self): + if self.client_prefetch_threads <= 0: + self._client_prefetch_threads = 1 + elif self.client_prefetch_threads > MAX_CLIENT_PREFETCH_THREADS: + self._client_prefetch_threads = MAX_CLIENT_PREFETCH_THREADS + self._client_prefetch_threads = int(self.client_prefetch_threads) + return self.client_prefetch_threads + + def _update_parameters( + self, + parameters: dict[str, str | int | bool], + ) -> None: + """Update session parameters.""" + with self._lock_converter: + self.converter.set_parameters(parameters) + for name, value in parameters.items(): + self._session_parameters[name] = value + if PARAMETER_CLIENT_TELEMETRY_ENABLED == name: + self.telemetry_enabled = value + elif PARAMETER_CLIENT_TELEMETRY_OOB_ENABLED == name: + if value: + TelemetryService.get_instance().enable() + else: + TelemetryService.get_instance().disable() + elif PARAMETER_CLIENT_SESSION_KEEP_ALIVE == name: + # Only set if the local config is None. + # Always give preference to user config. + if self.client_session_keep_alive is None: + self.client_session_keep_alive = value + elif ( + PARAMETER_CLIENT_SESSION_KEEP_ALIVE_HEARTBEAT_FREQUENCY == name + and self.client_session_keep_alive_heartbeat_frequency is None + ): + # Only set if local value hasn't been set already. + self.client_session_keep_alive_heartbeat_frequency = value + elif PARAMETER_SERVICE_NAME == name: + self.service_name = value + elif PARAMETER_CLIENT_PREFETCH_THREADS == name: + self.client_prefetch_threads = value + elif PARAMETER_ENABLE_STAGE_S3_PRIVATELINK_FOR_US_EAST_1 == name: + self.enable_stage_s3_privatelink_for_us_east_1 = value + + def _format_query_for_log(self, query): + ret = " ".join(line.strip() for line in query.split("\n")) + return ( + ret + if len(ret) < self.log_max_query_length + else ret[0 : self.log_max_query_length] + "..." + ) + + def __enter__(self): + """Context manager.""" + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + """Context manager with commit or rollback teardown.""" + if not self._session_parameters.get("AUTOCOMMIT", False): + # Either AUTOCOMMIT is turned off, or is not set so we default to old behavior + if exc_tb is None: + self.commit() + else: + self.rollback() + self.close() + + def _get_query_status(self, sf_qid: str) -> tuple[QueryStatus, dict[str, Any]]: + """Retrieves the status of query with sf_qid and returns it with the raw response. + + This is the underlying function used by the public get_status functions. + + Args: + sf_qid: Snowflake query id of interest. + + Raises: + ValueError: if sf_qid is not a valid UUID string. 
+ """ + try: + uuid.UUID(sf_qid) + except ValueError: + raise ValueError(f"Invalid UUID: '{sf_qid}'") + logger.debug(f"get_query_status sf_qid='{sf_qid}'") + + status = "NO_DATA" + if self.is_closed(): + return QueryStatus.DISCONNECTED, {"data": {"queries": []}} + status_resp = self.rest.request( + "/monitoring/queries/" + quote(sf_qid), method="get", client="rest" + ) + if "queries" not in status_resp["data"]: + return QueryStatus.FAILED_WITH_ERROR, status_resp + queries = status_resp["data"]["queries"] + if len(queries) > 0: + status = queries[0]["status"] + status_ret = QueryStatus[status] + # If query was started by us and it has finished let's cache this info + if sf_qid in self._async_sfqids and not self.is_still_running(status_ret): + self._async_sfqids.remove(sf_qid) + self._done_async_sfqids.add(sf_qid) + return status_ret, status_resp + + def get_query_status(self, sf_qid: str) -> QueryStatus: + """Retrieves the status of query with sf_qid. + + Query status is returned as a QueryStatus. + + Args: + sf_qid: Snowflake query id of interest. + + Raises: + ValueError: if sf_qid is not a valid UUID string. + """ + status, _ = self._get_query_status(sf_qid) + return status + + def get_query_status_throw_if_error(self, sf_qid: str) -> QueryStatus: + """Retrieves the status of query with sf_qid as a QueryStatus and raises an exception if the query terminated with an error. + + Query status is returned as a QueryStatus. + + Args: + sf_qid: Snowflake query id of interest. + + Raises: + ValueError: if sf_qid is not a valid UUID string. + """ + status, status_resp = self._get_query_status(sf_qid) + queries = status_resp["data"]["queries"] + if self.is_an_error(status): + if sf_qid in self._async_sfqids: + self._async_sfqids.remove(sf_qid) + message = status_resp.get("message") + if message is None: + message = "" + code = status_resp.get("code") + if code is None: + code = -1 + sql_state = None + if "data" in status_resp: + message += ( + queries[0].get("errorMessage", "") if len(queries) > 0 else "" + ) + sql_state = status_resp["data"].get("sqlState") + Error.errorhandler_wrapper( + self, + None, + ProgrammingError, + { + "msg": message, + "errno": int(code), + "sqlstate": sql_state, + "sfqid": sf_qid, + }, + ) + return status + + @staticmethod + def is_still_running(status: QueryStatus) -> bool: + """Checks whether given status is currently running.""" + return status in ( + QueryStatus.RUNNING, + QueryStatus.QUEUED, + QueryStatus.RESUMING_WAREHOUSE, + QueryStatus.QUEUED_REPARING_WAREHOUSE, + QueryStatus.BLOCKED, + QueryStatus.NO_DATA, + ) + + @staticmethod + def is_an_error(status: QueryStatus) -> bool: + """Checks whether given status means that there has been an error.""" + return status in ( + QueryStatus.ABORTING, + QueryStatus.FAILED_WITH_ERROR, + QueryStatus.ABORTED, + QueryStatus.FAILED_WITH_INCIDENT, + QueryStatus.DISCONNECTED, + ) + + def _all_async_queries_finished(self) -> bool: + """Checks whether all async queries started by this Connection have finished executing.""" + queries = copy.copy( + self._async_sfqids + ) # get_query_status might update _async_sfqids, let's copy the list + finished_async_queries = ( + not self.is_still_running(self.get_query_status(q)) for q in queries + ) + return all(finished_async_queries) diff --git a/src/snowflake/connector/constants.py b/src/snowflake/connector/constants.py new file mode 100644 index 000000000..328cbd762 --- /dev/null +++ b/src/snowflake/connector/constants.py @@ -0,0 +1,262 @@ +#!/usr/bin/env python +# +# Copyright (c) 
2012-2021 Snowflake Computing Inc. All rights reserved. +# +from __future__ import annotations + +from collections import defaultdict +from enum import Enum, auto, unique +from typing import Any, DefaultDict, NamedTuple + +DBAPI_TYPE_STRING = 0 +DBAPI_TYPE_BINARY = 1 +DBAPI_TYPE_NUMBER = 2 +DBAPI_TYPE_TIMESTAMP = 3 + + +class FieldType(NamedTuple): + name: str + dbapi_type: list[int] + + +FIELD_TYPES: list[FieldType] = [ + FieldType(name="FIXED", dbapi_type=[DBAPI_TYPE_NUMBER]), + FieldType(name="REAL", dbapi_type=[DBAPI_TYPE_NUMBER]), + FieldType(name="TEXT", dbapi_type=[DBAPI_TYPE_STRING]), + FieldType(name="DATE", dbapi_type=[DBAPI_TYPE_TIMESTAMP]), + FieldType(name="TIMESTAMP", dbapi_type=[DBAPI_TYPE_TIMESTAMP]), + FieldType(name="VARIANT", dbapi_type=[DBAPI_TYPE_BINARY]), + FieldType(name="TIMESTAMP_LTZ", dbapi_type=[DBAPI_TYPE_TIMESTAMP]), + FieldType(name="TIMESTAMP_TZ", dbapi_type=[DBAPI_TYPE_TIMESTAMP]), + FieldType(name="TIMESTAMP_NTZ", dbapi_type=[DBAPI_TYPE_TIMESTAMP]), + FieldType(name="OBJECT", dbapi_type=[DBAPI_TYPE_BINARY]), + FieldType(name="ARRAY", dbapi_type=[DBAPI_TYPE_BINARY]), + FieldType(name="BINARY", dbapi_type=[DBAPI_TYPE_BINARY]), + FieldType(name="TIME", dbapi_type=[DBAPI_TYPE_TIMESTAMP]), + FieldType(name="BOOLEAN", dbapi_type=[]), + FieldType(name="GEOGRAPHY", dbapi_type=[DBAPI_TYPE_STRING]), +] + +FIELD_NAME_TO_ID: DefaultDict[Any, int] = defaultdict(int) +FIELD_ID_TO_NAME: DefaultDict[int, str] = defaultdict(str) + +__binary_types: list[int] = [] +__binary_type_names: list[str] = [] +__string_types: list[int] = [] +__string_type_names: list[str] = [] +__number_types: list[int] = [] +__number_type_names: list[str] = [] +__timestamp_types: list[int] = [] +__timestamp_type_names: list[str] = [] + +for idx, field_type in enumerate(FIELD_TYPES): + FIELD_ID_TO_NAME[idx] = field_type.name + FIELD_NAME_TO_ID[field_type.name] = idx + + dbapi_types = field_type.dbapi_type + for dbapi_type in dbapi_types: + if dbapi_type == DBAPI_TYPE_BINARY: + __binary_types.append(idx) + __binary_type_names.append(field_type.name) + elif dbapi_type == DBAPI_TYPE_TIMESTAMP: + __timestamp_types.append(idx) + __timestamp_type_names.append(field_type.name) + elif dbapi_type == DBAPI_TYPE_NUMBER: + __number_types.append(idx) + __number_type_names.append(field_type.name) + elif dbapi_type == DBAPI_TYPE_STRING: + __string_types.append(idx) + __string_type_names.append(field_type.name) + + +def get_binary_types() -> list[int]: + return __binary_types + + +def is_binary_type_name(type_name: str) -> bool: + return type_name in __binary_type_names + + +def get_string_types() -> list[int]: + return __string_types + + +def is_string_type_name(type_name) -> bool: + return type_name in __string_type_names + + +def get_number_types() -> list[int]: + return __number_types + + +def is_number_type_name(type_name) -> bool: + return type_name in __number_type_names + + +def get_timestamp_types() -> list[int]: + return __timestamp_types + + +def is_timestamp_type_name(type_name) -> bool: + return type_name in __timestamp_type_names + + +def is_date_type_name(type_name) -> bool: + return type_name == "DATE" + + +# Log format +LOG_FORMAT = ( + "%(asctime)s - %(filename)s:%(lineno)d - " + "%(funcName)s() - %(levelname)s - %(message)s" +) + +# String literals +UTF8 = "utf-8" +SHA256_DIGEST = "sha256_digest" + +# PUT/GET related +S3_FS = "S3" +AZURE_FS = "AZURE" +GCS_FS = "GCS" +LOCAL_FS = "LOCAL_FS" +CMD_TYPE_UPLOAD = "UPLOAD" +CMD_TYPE_DOWNLOAD = "DOWNLOAD" +FILE_PROTOCOL = "file://" + + +@unique +class 
ResultStatus(Enum): + ERROR = "ERROR" + SUCCEEDED = "SUCCEEDED" + UPLOADED = "UPLOADED" + DOWNLOADED = "DOWNLOADED" + COLLISION = "COLLISION" + SKIPPED = "SKIPPED" + RENEW_TOKEN = "RENEW_TOKEN" + RENEW_PRESIGNED_URL = "RENEW_PRESIGNED_URL" + NOT_FOUND_FILE = "NOT_FOUND_FILE" + NEED_RETRY = "NEED_RETRY" + NEED_RETRY_WITH_LOWER_CONCURRENCY = "NEED_RETRY_WITH_LOWER_CONCURRENCY" + + +class SnowflakeS3FileEncryptionMaterial(NamedTuple): + query_id: str + query_stage_master_key: str + smk_id: int + + +class MaterialDescriptor(NamedTuple): + smk_id: int + query_id: str + key_size: int + + +class EncryptionMetadata(NamedTuple): + key: str + iv: str + matdesc: str + + +class FileHeader(NamedTuple): + digest: str | None + content_length: int | None + encryption_metadata: EncryptionMetadata | None + + +PARAMETER_AUTOCOMMIT = "AUTOCOMMIT" +PARAMETER_CLIENT_SESSION_KEEP_ALIVE_HEARTBEAT_FREQUENCY = ( + "CLIENT_SESSION_KEEP_ALIVE_HEARTBEAT_FREQUENCY" +) +PARAMETER_CLIENT_SESSION_KEEP_ALIVE = "CLIENT_SESSION_KEEP_ALIVE" +PARAMETER_CLIENT_PREFETCH_THREADS = "CLIENT_PREFETCH_THREADS" +PARAMETER_CLIENT_TELEMETRY_ENABLED = "CLIENT_TELEMETRY_ENABLED" +PARAMETER_CLIENT_TELEMETRY_OOB_ENABLED = "CLIENT_OUT_OF_BAND_TELEMETRY_ENABLED" +PARAMETER_CLIENT_STORE_TEMPORARY_CREDENTIAL = "CLIENT_STORE_TEMPORARY_CREDENTIAL" +PARAMETER_CLIENT_REQUEST_MFA_TOKEN = "CLIENT_REQUEST_MFA_TOKEN" +PARAMETER_CLIENT_USE_SECURE_STORAGE_FOR_TEMPORARY_CREDENTIAL = ( + "CLIENT_USE_SECURE_STORAGE_FOR_TEMPORARY_CREDENTAIL" +) +PARAMETER_TIMEZONE = "TIMEZONE" +PARAMETER_SERVICE_NAME = "SERVICE_NAME" +PARAMETER_CLIENT_VALIDATE_DEFAULT_PARAMETERS = "CLIENT_VALIDATE_DEFAULT_PARAMETERS" +PARAMETER_PYTHON_CONNECTOR_QUERY_RESULT_FORMAT = "PYTHON_CONNECTOR_QUERY_RESULT_FORMAT" +PARAMETER_ENABLE_STAGE_S3_PRIVATELINK_FOR_US_EAST_1 = ( + "ENABLE_STAGE_S3_PRIVATELINK_FOR_US_EAST_1" +) + +HTTP_HEADER_CONTENT_TYPE = "Content-Type" +HTTP_HEADER_CONTENT_ENCODING = "Content-Encoding" +HTTP_HEADER_ACCEPT_ENCODING = "Accept-Encoding" +HTTP_HEADER_ACCEPT = "accept" +HTTP_HEADER_USER_AGENT = "User-Agent" +HTTP_HEADER_SERVICE_NAME = "X-Snowflake-Service" + +HTTP_HEADER_VALUE_OCTET_STREAM = "application/octet-stream" + + +@unique +class OCSPMode(Enum): + """OCSP Mode enumerator for all the available modes. + + OCSP mode descriptions: + FAIL_CLOSED: If the client or driver does not receive a valid OCSP CA response for any reason, + the connection fails. + FAIL_OPEN: A response indicating a revoked certificate results in a failed connection. A response with any + other certificate errors or statuses allows the connection to occur, but denotes the message in the logs + at the WARNING level with the relevant details in JSON format. + INSECURE: The connection will occur anyway. + """ + + FAIL_CLOSED = "FAIL_CLOSED" + FAIL_OPEN = "FAIL_OPEN" + INSECURE = "INSECURE" + + +@unique +class FileTransferType(Enum): + """This enum keeps track of the possible file transfer types.""" + + PUT = auto() + GET = auto() + + +@unique +class QueryStatus(Enum): + RUNNING = 0 + ABORTING = 1 + SUCCESS = 2 + FAILED_WITH_ERROR = 3 + ABORTED = 4 + QUEUED = 5 + FAILED_WITH_INCIDENT = 6 + DISCONNECTED = 7 + RESUMING_WAREHOUSE = 8 + # purposeful typo. 
Is present in QueryDTO.java + QUEUED_REPARING_WAREHOUSE = 9 + RESTARTED = 10 + BLOCKED = 11 + NO_DATA = 12 + + +# Size constants +kilobyte = 1024 +megabyte = kilobyte * 1024 +gigabyte = megabyte * 1024 + + +# ArrowResultChunk constants the unit in this iterator +# EMPTY_UNIT: default +# ROW_UNIT: fetch row by row if the user call `fetchone()` +# TABLE_UNIT: fetch one arrow table if the user call `fetch_pandas()` +@unique +class IterUnit(Enum): + ROW_UNIT = "row" + TABLE_UNIT = "table" + + +S3_CHUNK_SIZE = 8388608 # boto3 default +AZURE_CHUNK_SIZE = 4 * megabyte + +# TODO: all env variables definitions should be here +ENV_VAR_PARTNER = "SF_PARTNER" diff --git a/src/snowflake/connector/converter.py b/src/snowflake/connector/converter.py new file mode 100644 index 000000000..a7116fee1 --- /dev/null +++ b/src/snowflake/connector/converter.py @@ -0,0 +1,724 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +import binascii +import decimal +import time +from datetime import date, datetime +from datetime import time as dt_t +from datetime import timedelta, tzinfo +from functools import partial +from logging import getLogger +from math import ceil +from typing import Any, Callable + +import pytz + +from .compat import IS_BINARY, IS_NUMERIC +from .errorcode import ER_NOT_SUPPORT_DATA_TYPE +from .errors import ProgrammingError +from .sfbinaryformat import binary_to_python, binary_to_snowflake +from .sfdatetime import sfdatetime_total_seconds_from_timedelta + +try: + import numpy +except ImportError: + numpy = None +try: + import tzlocal +except ImportError: + tzlocal = None + +BITS_FOR_TIMEZONE = 14 +ZERO_TIMEDELTA = timedelta(seconds=0) +ZERO_EPOCH_DATE = date(1970, 1, 1) +ZERO_EPOCH = datetime.utcfromtimestamp(0) +ZERO_FILL = "000000000" + +logger = getLogger(__name__) + +PYTHON_TO_SNOWFLAKE_TYPE = { + "int": "FIXED", + "long": "FIXED", + "decimal": "FIXED", + "float": "REAL", + "str": "TEXT", + "unicode": "TEXT", + "bytes": "BINARY", + "bytearray": "BINARY", + "bool": "BOOLEAN", + "bool_": "BOOLEAN", + "nonetype": "ANY", + "datetime": "TIMESTAMP_NTZ", + "sfdatetime": "TIMESTAMP_NTZ", + "date": "DATE", + "time": "TIME", + "struct_time": "TIMESTAMP_NTZ", + "timedelta": "TIME", + "list": "TEXT", + "tuple": "TEXT", + "int8": "FIXED", + "int16": "FIXED", + "int32": "FIXED", + "int64": "FIXED", + "uint8": "FIXED", + "uint16": "FIXED", + "uint32": "FIXED", + "uint64": "FIXED", + "float16": "REAL", + "float32": "REAL", + "float64": "REAL", + "datetime64": "TIMESTAMP_NTZ", + "quoted_name": "TEXT", +} + +# Type alias +SnowflakeConverterType = Callable[[Any], Any] + + +def convert_datetime_to_epoch(dt: datetime) -> float: + """Converts datetime to epoch time in seconds. + + If Python > 3.3, you may use timestamp() method. 
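+
+    For example, a naive datetime is interpreted as UTC:
+
+        >>> convert_datetime_to_epoch(datetime(1970, 1, 1, 0, 1))
+        60.0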
+ """ + if dt.tzinfo is not None: + dt0 = dt.astimezone(pytz.UTC).replace(tzinfo=None) + else: + dt0 = dt + return (dt0 - ZERO_EPOCH).total_seconds() + + +def _convert_datetime_to_epoch_nanoseconds(dt: datetime) -> str: + return f"{convert_datetime_to_epoch(dt):f}".replace(".", "") + "000" + + +def _convert_date_to_epoch_milliseconds(dt: datetime) -> str: + return f"{(dt - ZERO_EPOCH_DATE).total_seconds():.3f}".replace(".", "") + + +def _convert_time_to_epoch_nanoseconds(tm: dt_t) -> str: + return ( + str(tm.hour * 3600 + tm.minute * 60 + tm.second) + + f"{tm.microsecond:06d}" + + "000" + ) + + +def _extract_timestamp(value: str, ctx: dict) -> tuple[float, int]: + """Extracts timestamp from a raw data.""" + scale = ctx["scale"] + microseconds = float(value[0: -scale + 6]) if scale > 6 else float(value) + fraction_of_nanoseconds = _adjust_fraction_of_nanoseconds( + value, ctx["max_fraction"], scale + ) + + return microseconds, fraction_of_nanoseconds + + +def _adjust_fraction_of_nanoseconds(value: str, max_fraction: int, scale: int) -> int: + if scale == 0: + return 0 + if value[0] != "-": + return int(value[-scale:] + ZERO_FILL[: 9 - scale]) + + frac = int(value[-scale:]) + if frac == 0: + return 0 + else: + return int(str(max_fraction - frac) + ZERO_FILL[: 9 - scale]) + + +def _generate_tzinfo_from_tzoffset(tzoffset_minutes: int) -> tzinfo: + """Generates tzinfo object from tzoffset.""" + return pytz.FixedOffset(tzoffset_minutes) + + +class SnowflakeConverter: + def __init__(self, **kwargs): + self._parameters: dict[str, str | int | bool] = {} + self._use_numpy = kwargs.get("use_numpy", False) and numpy is not None + + logger.debug("use_numpy: %s", self._use_numpy) + + def set_parameters(self, new_parameters: dict) -> None: + self._parameters = new_parameters + + def set_parameter(self, param: Any, value: Any) -> None: + self._parameters[param] = value + + def get_parameters(self) -> dict[str, str | int | bool]: + return self._parameters + + def get_parameter(self, param: str) -> str | int | bool | None: + return self._parameters.get(param) + + def to_python_method(self, type_name, column) -> SnowflakeConverterType: + """FROM Snowflake to Python Objects""" + ctx = column.copy() + if ctx.get("scale") is not None: + ctx["max_fraction"] = int(10 ** ctx["scale"]) + ctx["zero_fill"] = "0" * (9 - ctx["scale"]) + converters = [f"_{type_name}_to_python"] + if self._use_numpy: + converters.insert(0, f"_{type_name}_numpy_to_python") + for conv in converters: + try: + return getattr(self, conv)(ctx) + except AttributeError: + pass + logger.warning("No column converter found for type: %s", type_name) + return None # Skip conversion + + def _FIXED_to_python(self, ctx): + return int if ctx["scale"] == 0 else decimal.Decimal + + def _FIXED_numpy_to_python(self, ctx): + if ctx["scale"]: + return numpy.float64 + else: + + def conv(value): + try: + return numpy.int64(value) + except OverflowError: + return int(value) + + return conv + + def _REAL_to_python(self, _): + return float + + def _REAL_numpy_to_python(self, _): + return numpy.float64 + + def _TEXT_to_python(self, _): + return None # skip conv + + def _BINARY_to_python(self, _): + return binary_to_python + + def _DATE_to_python(self, _): + """Converts DATE to date.""" + + def conv(value: str) -> date: + try: + return datetime.utcfromtimestamp(int(value) * 86400).date() + except (OSError, ValueError) as e: + logger.debug("Failed to convert: %s", e) + ts = ZERO_EPOCH + timedelta(seconds=int(value) * (24 * 60 * 60)) + return date(ts.year, ts.month, 
ts.day) + + return conv + + def _DATE_numpy_to_python(self, _): + """Converts DATE to datetime. + + No timezone is attached. + """ + return lambda x: numpy.datetime64(int(x), "D") + + def _TIMESTAMP_TZ_to_python(self, ctx): + """Converts TIMESTAMP TZ to datetime. + + The timezone offset is piggybacked. + """ + scale = ctx["scale"] + + def conv(encoded_value: str) -> datetime: + value, tz = encoded_value.split() + tzinfo = _generate_tzinfo_from_tzoffset(int(tz) - 1440) + return SnowflakeConverter.create_timestamp_from_string( + value=value, scale=scale, tz=tzinfo + ) + + return conv + + def _get_session_tz(self): + """Gets the session timezone or use the local computer's timezone.""" + try: + tz = self.get_parameter("TIMEZONE") + if not tz: + tz = "UTC" + return pytz.timezone(tz) + except pytz.exceptions.UnknownTimeZoneError: + logger.warning("converting to tzinfo failed") + if tzlocal is not None: + return tzlocal.get_localzone() + else: + return datetime.timezone.utc + + def _pre_TIMESTAMP_LTZ_to_python(self, value, ctx) -> datetime: + """Converts TIMESTAMP LTZ to datetime. + + This takes consideration of the session parameter TIMEZONE if available. If not, tzlocal is used. + """ + microseconds, fraction_of_nanoseconds = _extract_timestamp(value, ctx) + tzinfo_value = self._get_session_tz() + + try: + t0 = ZERO_EPOCH + timedelta(seconds=microseconds) + t = pytz.utc.localize(t0, is_dst=False).astimezone(tzinfo_value) + return t, fraction_of_nanoseconds + except OverflowError: + logger.debug( + "OverflowError in converting from epoch time to " + "timestamp_ltz: %s(ms). Falling back to use struct_time." + ) + return time.localtime(microseconds), fraction_of_nanoseconds + + def _TIMESTAMP_LTZ_to_python(self, ctx): + tzinfo = self._get_session_tz() + scale = ctx["scale"] + + return partial( + SnowflakeConverter.create_timestamp_from_string, scale=scale, tz=tzinfo + ) + + _TIMESTAMP_to_python = _TIMESTAMP_LTZ_to_python + + def _TIMESTAMP_NTZ_to_python(self, ctx): + """TIMESTAMP NTZ to datetime with no timezone info is attached.""" + scale = ctx["scale"] + + return partial(SnowflakeConverter.create_timestamp_from_string, scale=scale) + + def _TIMESTAMP_NTZ_numpy_to_python(self, ctx): + """TIMESTAMP NTZ to datetime64 with no timezone info is attached.""" + + def conv(value: str) -> numpy.datetime64: + nanoseconds = int(decimal.Decimal(value).scaleb(9)) + return numpy.datetime64(nanoseconds, "ns") + + return conv + + def _TIME_to_python(self, ctx): + """TIME to formatted string, SnowflakeDateTime, or datetime.time with no timezone attached.""" + scale = ctx["scale"] + + def conv0(value): + return datetime.utcfromtimestamp(float(value)).time() + + def conv(value: str) -> dt_t: + microseconds = float(value[0 : -scale + 6]) + return datetime.utcfromtimestamp(microseconds).time() + + return conv if scale > 6 else conv0 + + def _VARIANT_to_python(self, _): + return None # skip conv + + _OBJECT_to_python = _VARIANT_to_python + + _ARRAY_to_python = _VARIANT_to_python + + def _BOOLEAN_to_python(self, ctx): + return lambda value: value in ("1", "TRUE") + + def snowflake_type(self, value): + """Returns Snowflake data type for the value. This is used for qmark parameter style.""" + type_name = value.__class__.__name__.lower() + return PYTHON_TO_SNOWFLAKE_TYPE.get(type_name) + + def to_snowflake_bindings(self, snowflake_type, value): + """Converts Python data to snowflake data for qmark and numeric parameter style. + + The output is bound in a query in the server side. 
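+
+        For example, dates are sent as epoch milliseconds and booleans as
+        lowercase strings:
+
+            >>> conv = SnowflakeConverter()
+            >>> conv.to_snowflake_bindings("DATE", date(1970, 1, 2))
+            '86400000'
+            >>> conv.to_snowflake_bindings("BOOLEAN", True)
+            'true'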
+ """ + type_name = value.__class__.__name__.lower() + return getattr(self, f"_{type_name}_to_snowflake_bindings")( + snowflake_type, value + ) + + def _str_to_snowflake_bindings(self, _, value: str) -> str: + # NOTE: str type is always taken as a text data and never binary + return str(value) + + _int_to_snowflake_bindings = _str_to_snowflake_bindings + _long_to_snowflake_bindings = _str_to_snowflake_bindings + _float_to_snowflake_bindings = _str_to_snowflake_bindings + _unicode_to_snowflake_bindings = _str_to_snowflake_bindings + _decimal_to_snowflake_bindings = _str_to_snowflake_bindings + + def _bytes_to_snowflake_bindings(self, _, value: bytes) -> str: + return binascii.hexlify(value).decode("utf-8") + + _bytearray_to_snowflake_bindings = _bytes_to_snowflake_bindings + + def _bool_to_snowflake_bindings(self, _, value: bool) -> str: + return str(value).lower() + + def _nonetype_to_snowflake_bindings(self, *_) -> None: + return None + + def _date_to_snowflake_bindings(self, _, value: date) -> str: + # milliseconds + return _convert_date_to_epoch_milliseconds(value) + + def _time_to_snowflake_bindings(self, _, value: dt_t) -> str: + # nanoseconds + return _convert_time_to_epoch_nanoseconds(value) + + def _datetime_to_snowflake_bindings( + self, snowflake_type: str, value: datetime + ) -> str: + snowflake_type = snowflake_type.upper() + if snowflake_type == "TIMESTAMP_LTZ": + _, t = self._derive_offset_timestamp(value) + return _convert_datetime_to_epoch_nanoseconds(t) + elif snowflake_type == "TIMESTAMP_NTZ": + # nanoseconds + return _convert_datetime_to_epoch_nanoseconds(value) + elif snowflake_type == "TIMESTAMP_TZ": + offset, t = self._derive_offset_timestamp(value, is_utc=True) + return _convert_datetime_to_epoch_nanoseconds(t) + " {:04d}".format( + int(offset) + ) + else: + raise ProgrammingError( + msg="Binding datetime object with Snowflake data type {} is " + "not supported.".format(snowflake_type), + errno=ER_NOT_SUPPORT_DATA_TYPE, + ) + + def _derive_offset_timestamp( + self, value: datetime, is_utc: bool = False + ) -> tuple[float, datetime]: + """Derives TZ offset and timestamp from the datetime objects.""" + tzinfo = value.tzinfo + if tzinfo is None: + # If no tzinfo is attached, use local timezone. + tzinfo = self._get_session_tz() if not is_utc else pytz.UTC + t = pytz.utc.localize(value, is_dst=False).astimezone(tzinfo) + else: + # if tzinfo is attached, just covert to epoch time + # as the server expects it in UTC anyway + t = value + offset = tzinfo.utcoffset(t.replace(tzinfo=None)).total_seconds() / 60 + 1440 + return offset, t + + def _struct_time_to_snowflake_bindings( + self, snowflake_type: str, value: time.struct_time + ) -> str: + return self._datetime_to_snowflake_bindings( + snowflake_type, datetime.fromtimestamp(time.mktime(value)) + ) + + def _timedelta_to_snowflake_bindings( + self, snowflake_type: str, value: timedelta + ) -> str: + snowflake_type = snowflake_type.upper() + if snowflake_type != "TIME": + raise ProgrammingError( + msg="Binding timedelta object with Snowflake data type {} is " + "not supported.".format(snowflake_type), + errno=ER_NOT_SUPPORT_DATA_TYPE, + ) + (hours, r) = divmod(value.seconds, 3600) + (mins, secs) = divmod(r, 60) + hours += value.days * 24 + return ( + str(hours * 3600 + mins * 60 + secs) + f"{value.microseconds:06d}" + "000" + ) + + def to_snowflake(self, value: Any) -> Any: + """Converts Python data to Snowflake data for pyformat/format style. + + The output is bound in a query in the client side. 
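+
+        For example:
+
+            >>> SnowflakeConverter().to_snowflake(date(2021, 1, 31))
+            '2021-01-31'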
+ """ + type_name = value.__class__.__name__.lower() + return getattr(self, f"_{type_name}_to_snowflake")(value) + + def _int_to_snowflake(self, value: int) -> int: + return int(value) + + def _long_to_snowflake(self, value): + return long(value) + + def _float_to_snowflake(self, value: float) -> float: + return float(value) + + def _str_to_snowflake(self, value: str) -> str: + return str(value) + + _unicode_to_snowflake = _str_to_snowflake + + def _bytes_to_snowflake(self, value: bytes) -> bytes: + return binary_to_snowflake(value) + + _bytearray_to_snowflake = _bytes_to_snowflake + + def _bool_to_snowflake(self, value: bool) -> bool: + return value + + def _bool__to_snowflake(self, value) -> bool: + return bool(value) + + def _nonetype_to_snowflake(self, _): + return None + + def _total_seconds_from_timedelta(self, td: timedelta) -> int: + return sfdatetime_total_seconds_from_timedelta(td) + + def _datetime_to_snowflake(self, value: datetime) -> str: + tzinfo_value = value.tzinfo + if tzinfo_value: + if pytz.utc != tzinfo_value: + try: + td = tzinfo_value.utcoffset(value) + except pytz.exceptions.AmbiguousTimeError: + td = tzinfo_value.utcoffset(value, is_dst=False) + else: + td = ZERO_TIMEDELTA + sign = "+" if td >= ZERO_TIMEDELTA else "-" + td_secs = sfdatetime_total_seconds_from_timedelta(td) + h, m = divmod(abs(td_secs // 60), 60) + if value.microsecond: + return ( + "{year:d}-{month:02d}-{day:02d} " + "{hour:02d}:{minute:02d}:{second:02d}." + "{microsecond:06d}{sign}{tzh:02d}:{tzm:02d}" + ).format( + year=value.year, + month=value.month, + day=value.day, + hour=value.hour, + minute=value.minute, + second=value.second, + microsecond=value.microsecond, + sign=sign, + tzh=h, + tzm=m, + ) + return ( + "{year:d}-{month:02d}-{day:02d} " + "{hour:02d}:{minute:02d}:{second:02d}" + "{sign}{tzh:02d}:{tzm:02d}" + ).format( + year=value.year, + month=value.month, + day=value.day, + hour=value.hour, + minute=value.minute, + second=value.second, + sign=sign, + tzh=h, + tzm=m, + ) + else: + if value.microsecond: + return ( + "{year:d}-{month:02d}-{day:02d} " + "{hour:02d}:{minute:02d}:{second:02d}." + "{microsecond:06d}" + ).format( + year=value.year, + month=value.month, + day=value.day, + hour=value.hour, + minute=value.minute, + second=value.second, + microsecond=value.microsecond, + ) + return ( + "{year:d}-{month:02d}-{day:02d} " "{hour:02d}:{minute:02d}:{second:02d}" + ).format( + year=value.year, + month=value.month, + day=value.day, + hour=value.hour, + minute=value.minute, + second=value.second, + ) + + def _date_to_snowflake(self, value: date) -> str: + """Converts Date object to Snowflake object.""" + return "{year:d}-{month:02d}-{day:02d}".format( + year=value.year, month=value.month, day=value.day + ) + + def _time_to_snowflake(self, value: dt_t) -> str: + if value.microsecond: + return value.strftime("%H:%M:%S.%%06d") % value.microsecond + return value.strftime("%H:%M:%S") + + def _struct_time_to_snowflake(self, value: time.struct_time) -> str: + tzinfo_value = _generate_tzinfo_from_tzoffset(time.timezone // 60) + t = datetime.fromtimestamp(time.mktime(value)) + if pytz.utc != tzinfo_value: + t += tzinfo_value.utcoffset(t) + t = t.replace(tzinfo=tzinfo_value) + return self._datetime_to_snowflake(t) + + def _timedelta_to_snowflake(self, value: timedelta) -> str: + (hours, r) = divmod(value.seconds, 3600) + (mins, secs) = divmod(r, 60) + hours += value.days * 24 + if value.microseconds: + return ("{hour:02d}:{minute:02d}:{second:02d}." 
"{microsecond:06d}").format( + hour=hours, minute=mins, second=secs, microsecond=value.microseconds + ) + return "{hour:02d}:{minute:02d}:{second:02d}".format( + hour=hours, minute=mins, second=secs + ) + + def _decimal_to_snowflake(self, value: decimal.Decimal) -> str | None: + if isinstance(value, decimal.Decimal): + return str(value) + + return None + + def _list_to_snowflake(self, value: list) -> list: + return [ + SnowflakeConverter.quote(v0) + for v0 in [SnowflakeConverter.escape(v) for v in value] + ] + + _tuple_to_snowflake = _list_to_snowflake + + def __numpy_to_snowflake(self, value): + return value + + _int8_to_snowflake = __numpy_to_snowflake + _int16_to_snowflake = __numpy_to_snowflake + _int32_to_snowflake = __numpy_to_snowflake + _int64_to_snowflake = __numpy_to_snowflake + _uint8_to_snowflake = __numpy_to_snowflake + _uint16_to_snowflake = __numpy_to_snowflake + _uint32_to_snowflake = __numpy_to_snowflake + _uint64_to_snowflake = __numpy_to_snowflake + _float16_to_snowflake = __numpy_to_snowflake + _float32_to_snowflake = __numpy_to_snowflake + _float64_to_snowflake = __numpy_to_snowflake + + def _datetime64_to_snowflake(self, value) -> str: + return str(value) + "+00:00" + + def _quoted_name_to_snowflake(self, value) -> str: + return str(value) + + def __getattr__(self, item): + if item.endswith("_to_snowflake"): + raise ProgrammingError( + msg="Binding data in type ({}) is not supported.".format( + item[1 : item.find("_to_snowflake")] + ), + errno=ER_NOT_SUPPORT_DATA_TYPE, + ) + elif item.endswith("to_snowflake_bindings"): + raise ProgrammingError( + msg="Binding data in type ({}) is not supported.".format( + item[1 : item.find("_to_snowflake_bindings")] + ), + errno=ER_NOT_SUPPORT_DATA_TYPE, + ) + raise AttributeError(f"No method is available: {item}") + + def to_csv_bindings(self, value: tuple[str, Any] | Any) -> str | None: + """Convert value to a string representation in CSV-escaped format to INSERT INTO.""" + if isinstance(value, tuple) and len(value) == 2: + _type, val = value + if _type in ["TIMESTAMP_TZ", "TIME"]: + # unspecified timezone is considered utc + if getattr(val, "tzinfo", 1) is None: + val = self.to_snowflake(pytz.utc.localize(val)) + else: + val = self.to_snowflake(val) + else: + val = self.to_snowflake_bindings(_type, val) + else: + if isinstance(value, (dt_t, timedelta)): + val = self.to_snowflake(value) + else: + _type = self.snowflake_type(value) + val = self.to_snowflake_bindings(_type, value) + return self.escape_for_csv(val) + + @staticmethod + def escape(value): + if isinstance(value, list): + return value + if value is None or IS_NUMERIC(value) or IS_BINARY(value): + return value + res = value + res = res.replace("\\", "\\\\") + res = res.replace("\n", "\\n") + res = res.replace("\r", "\\r") + res = res.replace("\047", "\134\047") # single quotes + return res + + @staticmethod + def quote(value) -> str: + if isinstance(value, list): + return ",".join(value) + if value is None: + return "NULL" + elif isinstance(value, bool): + return "TRUE" if value else "FALSE" + elif IS_NUMERIC(value): + return str(repr(value)) + elif IS_BINARY(value): + # Binary literal syntax + return "X'{}'".format(value.decode("ascii")) + + return f"'{value}'" + + @staticmethod + def escape_for_csv(value: str) -> str: + if value is None: # NULL + return "" + elif not value: # Empty string + return '""' + if ( + value.find('"') >= 0 + or value.find("\n") >= 0 + or value.find(",") >= 0 + or value.find("\\") >= 0 + ): + # replace single quote with double quotes + value = 
value.replace('"', '""')
+            return f'"{value}"'
+        else:
+            return value
+
+    @staticmethod
+    def get_seconds_microseconds(
+        value: str,
+        scale: int,
+    ) -> tuple[int, int]:
+        """Calculate the second and microsecond parts of a timestamp given as a string.
+
+        The trick is that we always want to do floor division, but if the timestamp
+        is negative then it is given as its inverse. So -0.000_000_009
+        (which is 1969-12-31 23:59:59.999999991) should round down to 6
+        fraction digits, as Python doesn't support sub-microseconds.
+        Ultimately, for the aforementioned example, we should return the two integers 0 and -000_001.
+        """
+        negative = value[0] == "-"
+        lhs, _, rhs = value.partition(".")
+        seconds = int(lhs)
+        microseconds = int(rhs) if rhs else 0
+        if scale < 6:
+            microseconds *= 10 ** (6 - scale)
+        elif scale > 6:
+            if negative:
+                microseconds = ceil(microseconds / 10 ** (scale - 6))
+            else:
+                microseconds = microseconds // 10 ** (scale - 6)
+        if negative:
+            microseconds = -microseconds
+        return seconds, microseconds
+
+    @staticmethod
+    def create_timestamp_from_string(
+        value: str,
+        scale: int,
+        tz: tzinfo | None = None,
+    ) -> datetime:
+        seconds, fraction = SnowflakeConverter.get_seconds_microseconds(value=value, scale=scale)
+        if not tz:
+            return datetime.utcfromtimestamp(seconds) + timedelta(microseconds=fraction)
+        return datetime.fromtimestamp(seconds, tz=tz) + timedelta(microseconds=fraction)
diff --git a/src/snowflake/connector/converter_issue23517.py b/src/snowflake/connector/converter_issue23517.py
new file mode 100644
index 000000000..97c9b40ea
--- /dev/null
+++ b/src/snowflake/connector/converter_issue23517.py
@@ -0,0 +1,80 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved.
+#
+
+from __future__ import annotations
+
+from datetime import datetime, time, timedelta, tzinfo
+from functools import partial
+from logging import getLogger
+
+import pytz
+
+from .converter import ZERO_EPOCH, SnowflakeConverter, _generate_tzinfo_from_tzoffset
+
+logger = getLogger(__name__)
+
+
+class SnowflakeConverterIssue23517(SnowflakeConverter):
+    """Converter for Python 3.5.0 or any Python on Windows.
+
+    This is to address http://bugs.python.org/issue23517
+    """
+
+    def __init__(self, **kwargs):
+        super().__init__(**kwargs)
+        logger.debug("initialized")
+
+    def _TIMESTAMP_TZ_to_python(self, ctx):
+        scale = ctx["scale"]
+
+        def conv(encoded_value: str) -> datetime:
+            value, tz = encoded_value.split()
+            tzinfo = _generate_tzinfo_from_tzoffset(int(tz) - 1440)
+            return SnowflakeConverterIssue23517.create_timestamp_from_string(
+                value=value, scale=scale, tz=tzinfo
+            )
+
+        return conv
+
+    def _TIMESTAMP_LTZ_to_python(self, ctx):
+        tzinfo = self._get_session_tz()
+        scale = ctx["scale"]
+
+        def conv(value: str) -> datetime:
+            ts = SnowflakeConverterIssue23517.create_timestamp_from_string(value=value, scale=scale)
+            return pytz.utc.localize(ts, is_dst=False).astimezone(tzinfo)
+        return conv
+
+    def _TIMESTAMP_NTZ_to_python(self, ctx):
+        scale = ctx['scale']
+        return partial(SnowflakeConverterIssue23517.create_timestamp_from_string, scale=scale)
+
+    def _TIME_to_python(self, ctx):
+        """Converts TIME to formatted string, SnowflakeDateTime, or datetime.time.
+
+        No timezone is attached.
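+
+        Computed from ZERO_EPOCH plus a timedelta rather than
+        datetime.utcfromtimestamp, sidestepping bpo-23517 on Windows.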
+ """ + scale = ctx["scale"] + + def conv0(value: str) -> time: + return (ZERO_EPOCH + timedelta(seconds=(float(value)))).time() + + def conv(value: str) -> time: + microseconds = float(value[0 : -scale + 6]) + return (ZERO_EPOCH + timedelta(seconds=(microseconds))).time() + + return conv if scale > 6 else conv0 + + @staticmethod + def create_timestamp_from_string( + value: str, + scale: int, + tz: tzinfo | None = None, + ) -> datetime: + """Windows does not support negative timestamps, so we need to do that part in Python.""" + seconds, fraction = SnowflakeConverter.get_seconds_microseconds(value=value, scale=scale) + if not tz: + return datetime.utcfromtimestamp(0) + timedelta(seconds=seconds, microseconds=fraction) + return datetime.fromtimestamp(0, tz=tz) + timedelta(seconds=seconds, microseconds=fraction) diff --git a/converter_null.py b/src/snowflake/connector/converter_null.py similarity index 58% rename from converter_null.py rename to src/snowflake/connector/converter_null.py index 6d619559b..dac47ee93 100644 --- a/converter_null.py +++ b/src/snowflake/connector/converter_null.py @@ -1,15 +1,16 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. # +from __future__ import annotations + from .converter import SnowflakeConverter class SnowflakeNoConverterToPython(SnowflakeConverter): def __init__(self, **kwargs): - super(SnowflakeNoConverterToPython, self).__init__(**kwargs) + super().__init__(**kwargs) def to_python_method(self, type_name, column): return None diff --git a/converter_snowsql.py b/src/snowflake/connector/converter_snowsql.py similarity index 55% rename from converter_snowsql.py rename to src/snowflake/connector/converter_snowsql.py index d9b8e3056..c60515a0e 100644 --- a/converter_snowsql.py +++ b/src/snowflake/connector/converter_snowsql.py @@ -1,70 +1,64 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
# +from __future__ import annotations + import time -from datetime import timedelta, datetime, date +from datetime import date, datetime, timedelta from logging import getLogger import pytz -from .compat import TO_UNICODE, IS_WINDOWS -from .constants import (is_timestamp_type_name, is_date_type_name) +from .compat import IS_WINDOWS +from .constants import is_date_type_name, is_timestamp_type_name from .converter import ( - SnowflakeConverter, ZERO_EPOCH, - _extract_timestamp, + SnowflakeConverter, _adjust_fraction_of_nanoseconds, - _generate_tzinfo_from_tzoffset) -from .sfbinaryformat import (binary_to_python, SnowflakeBinaryFormat) -from .sfdatetime import ( - SnowflakeDateTimeFormat, - SnowflakeDateFormat, - SnowflakeDateTime) + _extract_timestamp, + _generate_tzinfo_from_tzoffset, +) +from .sfbinaryformat import SnowflakeBinaryFormat, binary_to_python +from .sfdatetime import SnowflakeDateFormat, SnowflakeDateTime, SnowflakeDateTimeFormat logger = getLogger(__name__) def format_sftimestamp(ctx, value, franction_of_nanoseconds): sf_datetime = SnowflakeDateTime( - datetime=value, - nanosecond=franction_of_nanoseconds, - scale=ctx.get('scale')) - return ctx['fmt'].format(sf_datetime) if ctx.get('fmt') else \ - TO_UNICODE(sf_datetime) + datetime=value, nanosecond=franction_of_nanoseconds, scale=ctx.get("scale") + ) + return ctx["fmt"].format(sf_datetime) if ctx.get("fmt") else str(sf_datetime) class SnowflakeConverterSnowSQL(SnowflakeConverter): - """ - Snowflake Converter for SnowSQL. + """Snowflake Converter for SnowSQL. Format data instead of just converting the values into native Python objects. """ def __init__(self, **kwargs): - super(SnowflakeConverterSnowSQL, self).__init__(**kwargs) - self._support_negative_year = kwargs.get('support_negative_year', True) + super().__init__(**kwargs) + self._support_negative_year = kwargs.get("support_negative_year", True) def _get_format(self, type_name): - """ - Gets the format - """ + """Gets the format.""" fmt = None - if type_name == u'DATE': - fmt = self._parameters.get(u'DATE_OUTPUT_FORMAT') + if type_name == "DATE": + fmt = self._parameters.get("DATE_OUTPUT_FORMAT") if not fmt: - fmt = u'YYYY-MM-DD' - elif type_name == u'TIME': - fmt = self._parameters.get(u'TIME_OUTPUT_FORMAT') - elif type_name + u'_OUTPUT_FORMAT' in self._parameters: - fmt = self._parameters[type_name + u'_OUTPUT_FORMAT'] + fmt = "YYYY-MM-DD" + elif type_name == "TIME": + fmt = self._parameters.get("TIME_OUTPUT_FORMAT") + elif type_name + "_OUTPUT_FORMAT" in self._parameters: + fmt = self._parameters[type_name + "_OUTPUT_FORMAT"] if not fmt: - fmt = self._parameters[u'TIMESTAMP_OUTPUT_FORMAT'] - elif type_name == u'BINARY': - fmt = self._parameters.get(u'BINARY_OUTPUT_FORMAT') + fmt = self._parameters["TIMESTAMP_OUTPUT_FORMAT"] + elif type_name == "BINARY": + fmt = self._parameters.get("BINARY_OUTPUT_FORMAT") return fmt # @@ -72,27 +66,29 @@ def _get_format(self, type_name): # def to_python_method(self, type_name, column): ctx = column.copy() - if ctx.get('scale') is not None: - ctx['max_fraction'] = int(10 ** ctx['scale']) - ctx['zero_fill'] = '0' * (9 - ctx['scale']) + if ctx.get("scale") is not None: + ctx["max_fraction"] = int(10 ** ctx["scale"]) + ctx["zero_fill"] = "0" * (9 - ctx["scale"]) fmt = None if is_date_type_name(type_name): datetime_class = time.struct_time if not IS_WINDOWS else date fmt = SnowflakeDateFormat( self._get_format(type_name), support_negative_year=self._support_negative_year, - datetime_class=datetime_class) + datetime_class=datetime_class, + 
) elif is_timestamp_type_name(type_name): fmt = SnowflakeDateTimeFormat( self._get_format(type_name), data_type=type_name, support_negative_year=self._support_negative_year, - datetime_class=SnowflakeDateTime) - elif type_name == u'BINARY': + datetime_class=SnowflakeDateTime, + ) + elif type_name == "BINARY": fmt = SnowflakeBinaryFormat(self._get_format(type_name)) - logger.debug('Type: %s, Format: %s', type_name, fmt) - ctx['fmt'] = fmt - converters = [u'_{type_name}_to_python'.format(type_name=type_name)] + logger.debug("Type: %s, Format: %s", type_name, fmt) + ctx["fmt"] = fmt + converters = [f"_{type_name}_to_python"] for conv in converters: try: return getattr(self, conv)(ctx) @@ -102,54 +98,43 @@ def to_python_method(self, type_name, column): return None # Skip conversion def _BOOLEAN_to_python(self, ctx): - """ - No conversion for SnowSQL - """ - return lambda value: "True" if value in (u'1', u"True") else u"False" + """No conversion for SnowSQL.""" + return lambda value: "True" if value in ("1", "True") else "False" def _FIXED_to_python(self, ctx): - """ - No conversion for SnowSQL - """ + """No conversion for SnowSQL.""" return None def _REAL_to_python(self, ctx): - """ - No conversion for SnowSQL - """ + """No conversion for SnowSQL.""" return None def _BINARY_to_python(self, ctx): - """ - BINARY to a string formatted by BINARY_OUTPUT_FORMAT - """ - return lambda value: ctx['fmt'].format(binary_to_python(value)) + """BINARY to a string formatted by BINARY_OUTPUT_FORMAT.""" + return lambda value: ctx["fmt"].format(binary_to_python(value)) def _DATE_to_python(self, ctx): - """ - DATE to struct_time/date + """Converts DATE to struct_time/date. No timezone is attached. """ def conv(value): - return ctx['fmt'].format(time.gmtime(int(value) * (24 * 60 * 60))) + return ctx["fmt"].format(time.gmtime(int(value) * (24 * 60 * 60))) def conv_windows(value): ts = ZERO_EPOCH + timedelta(seconds=int(value) * (24 * 60 * 60)) - return ctx['fmt'].format(date(ts.year, ts.month, ts.day)) + return ctx["fmt"].format(date(ts.year, ts.month, ts.day)) return conv if not IS_WINDOWS else conv_windows def _TIMESTAMP_TZ_to_python(self, ctx): - """ - TIMESTAMP TZ to datetime + """Converts TIMESTAMP TZ to datetime. The timezone offset is piggybacked. 
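+
+        The encoded value is the epoch value followed by the timezone offset
+        in minutes, biased by +1440.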
""" - - scale = ctx['scale'] - max_fraction = ctx.get('max_fraction') + scale = ctx["scale"] + max_fraction = ctx.get("max_fraction") def conv0(encoded_value): value, tz = encoded_value.split() @@ -158,33 +143,33 @@ def conv0(encoded_value): try: t = datetime.fromtimestamp(microseconds, tz=tzinfo) except OSError as e: - logger.debug( - "OSError occurred but falling back to datetime: %s", e) + logger.debug("OSError occurred but falling back to datetime: %s", e) t = ZERO_EPOCH + timedelta(seconds=microseconds) if pytz.utc != tzinfo: t += tzinfo.utcoffset(t) t = t.replace(tzinfo=tzinfo) fraction_of_nanoseconds = _adjust_fraction_of_nanoseconds( - value, max_fraction, scale) + value, max_fraction, scale + ) return format_sftimestamp(ctx, t, fraction_of_nanoseconds) def conv(encoded_value): value, tz = encoded_value.split() - microseconds = float(value[0:-scale + 6]) + microseconds = float(value[0 : -scale + 6]) tzinfo = _generate_tzinfo_from_tzoffset(int(tz) - 1440) try: t = datetime.fromtimestamp(microseconds, tz=tzinfo) - except OSError as e: - logger.debug( - "OSError occurred but falling back to datetime: %s", e) + except (OSError, ValueError) as e: + logger.debug("OSError occurred but falling back to datetime: %s", e) t = ZERO_EPOCH + timedelta(seconds=microseconds) if pytz.utc != tzinfo: t += tzinfo.utcoffset(t) t = t.replace(tzinfo=tzinfo) fraction_of_nanoseconds = _adjust_fraction_of_nanoseconds( - value, max_fraction, scale) + value, max_fraction, scale + ) return format_sftimestamp(ctx, t, fraction_of_nanoseconds) @@ -192,27 +177,23 @@ def conv(encoded_value): def _TIMESTAMP_LTZ_to_python(self, ctx): def conv(value): - t, fraction_of_nanoseconds = self._pre_TIMESTAMP_LTZ_to_python( - value, ctx) + t, fraction_of_nanoseconds = self._pre_TIMESTAMP_LTZ_to_python(value, ctx) return format_sftimestamp(ctx, t, fraction_of_nanoseconds) return conv def _TIMESTAMP_NTZ_to_python(self, ctx): - """ - TIMESTAMP NTZ to Snowflake Formatted String + """Converts TIMESTAMP NTZ to Snowflake Formatted String. No timezone info is attached. """ def conv(value): - microseconds, fraction_of_nanoseconds = \ - _extract_timestamp(value, ctx) + microseconds, fraction_of_nanoseconds = _extract_timestamp(value, ctx) try: t = time.gmtime(microseconds) - except OSError as e: - logger.debug( - "OSError occurred but falling back to datetime: %s", e) + except (OSError, ValueError) as e: + logger.debug("OSError occurred but falling back to datetime: %s", e) t = ZERO_EPOCH + timedelta(seconds=(microseconds)) return format_sftimestamp(ctx, t, fraction_of_nanoseconds) diff --git a/cpp/ArrowIterator/BinaryConverter.cpp b/src/snowflake/connector/cpp/ArrowIterator/BinaryConverter.cpp similarity index 72% rename from cpp/ArrowIterator/BinaryConverter.cpp rename to src/snowflake/connector/cpp/ArrowIterator/BinaryConverter.cpp index 93d3bc6fe..76e3edcec 100644 --- a/cpp/ArrowIterator/BinaryConverter.cpp +++ b/src/snowflake/connector/cpp/ArrowIterator/BinaryConverter.cpp @@ -1,11 +1,13 @@ -/* - * Copyright (c) 2013-2019 Snowflake Computing - */ +// +// Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
+// + #include "BinaryConverter.hpp" +#include namespace sf { -Logger BinaryConverter::logger("snowflake.connector.BinaryConverter"); +Logger* BinaryConverter::logger = new Logger("snowflake.connector.BinaryConverter"); BinaryConverter::BinaryConverter(std::shared_ptr array) : m_array(std::dynamic_pointer_cast(array)) diff --git a/cpp/ArrowIterator/BinaryConverter.hpp b/src/snowflake/connector/cpp/ArrowIterator/BinaryConverter.hpp similarity index 77% rename from cpp/ArrowIterator/BinaryConverter.hpp rename to src/snowflake/connector/cpp/ArrowIterator/BinaryConverter.hpp index 0d4d8d715..5141180cb 100644 --- a/cpp/ArrowIterator/BinaryConverter.hpp +++ b/src/snowflake/connector/cpp/ArrowIterator/BinaryConverter.hpp @@ -1,11 +1,13 @@ -/* - * Copyright (c) 2013-2019 Snowflake Computing - */ +// +// Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +// + #ifndef PC_BINARYCONVERTER_HPP #define PC_BINARYCONVERTER_HPP #include "IColumnConverter.hpp" #include "logging.hpp" +#include namespace sf { @@ -20,7 +22,7 @@ class BinaryConverter : public IColumnConverter private: std::shared_ptr m_array; - static Logger logger; + static Logger* logger; }; } // namespace sf diff --git a/cpp/ArrowIterator/BooleanConverter.cpp b/src/snowflake/connector/cpp/ArrowIterator/BooleanConverter.cpp similarity index 82% rename from cpp/ArrowIterator/BooleanConverter.cpp rename to src/snowflake/connector/cpp/ArrowIterator/BooleanConverter.cpp index 90ca98e03..f6e6e79e6 100644 --- a/cpp/ArrowIterator/BooleanConverter.cpp +++ b/src/snowflake/connector/cpp/ArrowIterator/BooleanConverter.cpp @@ -1,7 +1,9 @@ -/* - * Copyright (c) 2013-2019 Snowflake Computing - */ +// +// Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +// + #include "BooleanConverter.hpp" +#include namespace sf { diff --git a/cpp/ArrowIterator/BooleanConverter.hpp b/src/snowflake/connector/cpp/ArrowIterator/BooleanConverter.hpp similarity index 80% rename from cpp/ArrowIterator/BooleanConverter.hpp rename to src/snowflake/connector/cpp/ArrowIterator/BooleanConverter.hpp index dbbe64774..b36be893f 100644 --- a/cpp/ArrowIterator/BooleanConverter.hpp +++ b/src/snowflake/connector/cpp/ArrowIterator/BooleanConverter.hpp @@ -1,10 +1,12 @@ -/* - * Copyright (c) 2013-2019 Snowflake Computing - */ +// +// Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +// + #ifndef PC_BOOLEANCONVERTER_HPP #define PC_BOOLEANCONVERTER_HPP #include "IColumnConverter.hpp" +#include namespace sf { diff --git a/cpp/ArrowIterator/CArrowChunkIterator.cpp b/src/snowflake/connector/cpp/ArrowIterator/CArrowChunkIterator.cpp similarity index 55% rename from cpp/ArrowIterator/CArrowChunkIterator.cpp rename to src/snowflake/connector/cpp/ArrowIterator/CArrowChunkIterator.cpp index 0256f27ed..c708bc5d9 100644 --- a/cpp/ArrowIterator/CArrowChunkIterator.cpp +++ b/src/snowflake/connector/cpp/ArrowIterator/CArrowChunkIterator.cpp @@ -1,6 +1,7 @@ -/* - * Copyright (c) 2013-2019 Snowflake Computing - */ +// +// Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
+// + #include "CArrowChunkIterator.hpp" #include "SnowflakeType.hpp" #include "IntConverter.hpp" @@ -12,47 +13,53 @@ #include "DateConverter.hpp" #include "TimeStampConverter.hpp" #include "TimeConverter.hpp" +#include #include +#include + +#define SF_CHECK_PYTHON_ERR() \ + if (py::checkPyError())\ + {\ + PyObject *type, * val, *traceback;\ + PyErr_Fetch(&type, &val, &traceback);\ + PyErr_Clear();\ + m_currentPyException.reset(val);\ +\ + Py_XDECREF(type);\ + Py_XDECREF(traceback);\ +\ + return std::make_shared(nullptr, m_currentPyException.get());\ + } -namespace sf -{ - -CArrowChunkIterator::CArrowChunkIterator(PyObject* context) -: m_latestReturnedRow(nullptr), m_context(context) -{ -} -void CArrowChunkIterator::addRecordBatch(PyObject* rb) +namespace sf { - // may add some specific behaviors for this iterator - CArrowIterator::addRecordBatch(rb); -} -void CArrowChunkIterator::reset() +CArrowChunkIterator::CArrowChunkIterator(PyObject* context, std::vector> *batches, + PyObject* use_numpy) +: CArrowIterator(batches), m_latestReturnedRow(nullptr), m_context(context) { - m_batchCount = m_cRecordBatches.size(); - m_columnCount = m_batchCount > 0 ? m_cRecordBatches[0]->num_columns() : 0; + m_batchCount = m_cRecordBatches->size(); + m_columnCount = m_batchCount > 0 ? (*m_cRecordBatches)[0]->num_columns() : 0; m_currentBatchIndex = -1; m_rowIndexInBatch = -1; m_rowCountInBatch = 0; m_latestReturnedRow.reset(); + m_useNumpy = PyObject_IsTrue(use_numpy); - logger.info("Arrow chunk info: batchCount %d, columnCount %d", m_batchCount, - m_columnCount); + logger->debug(__FILE__, __func__, __LINE__, "Arrow chunk info: batchCount %d, columnCount %d, use_numpy: %d", m_batchCount, + m_columnCount, m_useNumpy); } -PyObject* CArrowChunkIterator::next() +std::shared_ptr CArrowChunkIterator::next() { m_rowIndexInBatch++; if (m_rowIndexInBatch < m_rowCountInBatch) { - this->currentRowAsTuple(); - if (py::checkPyError()) - { - return nullptr; - } - return m_latestReturnedRow.get(); + this->createRowPyObject(); + SF_CHECK_PYTHON_ERR() + return std::make_shared(m_latestReturnedRow.get(), nullptr); } else { @@ -60,35 +67,31 @@ PyObject* CArrowChunkIterator::next() if (m_currentBatchIndex < m_batchCount) { m_rowIndexInBatch = 0; - m_rowCountInBatch = m_cRecordBatches[m_currentBatchIndex]->num_rows(); + m_rowCountInBatch = (*m_cRecordBatches)[m_currentBatchIndex]->num_rows(); this->initColumnConverters(); - if (py::checkPyError()) - { - return nullptr; - } + SF_CHECK_PYTHON_ERR() - logger.info("Current batch index: %d, rows in current batch: %d", + logger->debug(__FILE__, __func__, __LINE__, "Current batch index: %d, rows in current batch: %d", m_currentBatchIndex, m_rowCountInBatch); - this->currentRowAsTuple(); - if (py::checkPyError()) - { - return nullptr; - } - return m_latestReturnedRow.get(); + this->createRowPyObject(); + SF_CHECK_PYTHON_ERR() + + return std::make_shared(m_latestReturnedRow.get(), nullptr); } } /** It looks like no one will decrease the ref of this Py_None, so we don't - * increament the ref count here */ - return Py_None; + * increment the ref count here */ + return std::make_shared(Py_None, nullptr); } -void CArrowChunkIterator::currentRowAsTuple() +void CArrowChunkIterator::createRowPyObject() { m_latestReturnedRow.reset(PyTuple_New(m_columnCount)); for (int i = 0; i < m_columnCount; i++) { + // PyTuple_SET_ITEM steals a reference to the PyObject returned by toPyObject below PyTuple_SET_ITEM( m_latestReturnedRow.get(), i, m_currentBatchConverters[i]->toPyObject(m_rowIndexInBatch)); @@ 
-100,14 +103,14 @@ void CArrowChunkIterator::initColumnConverters() { m_currentBatchConverters.clear(); std::shared_ptr currentBatch = - m_cRecordBatches[m_currentBatchIndex]; - std::shared_ptr schema = currentBatch->schema(); + (*m_cRecordBatches)[m_currentBatchIndex]; + m_currentSchema = currentBatch->schema(); for (int i = 0; i < currentBatch->num_columns(); i++) { std::shared_ptr columnArray = currentBatch->column(i); - std::shared_ptr dt = schema->field(i)->type(); + std::shared_ptr dt = m_currentSchema->field(i)->type(); std::shared_ptr metaData = - schema->field(i)->metadata(); + m_currentSchema->field(i)->metadata(); SnowflakeType::Type st = SnowflakeType::snowflakeTypeFromString( metaData->value(metaData->FindKey("logicalType"))); @@ -125,69 +128,47 @@ void CArrowChunkIterator::initColumnConverters() switch (dt->id()) { - case arrow::Type::type::INT8: - { - if (scale > 0) - { - m_currentBatchConverters.push_back(std::make_shared< - sf::DecimalFromIntConverter>( - columnArray, precision, scale)); - break; - } - - m_currentBatchConverters.push_back( - std::make_shared>( - columnArray)); - break; - } - - case arrow::Type::type::INT16: - { - if (scale > 0) - { - m_currentBatchConverters.push_back(std::make_shared< - sf::DecimalFromIntConverter>( - columnArray, precision, scale)); - break; - } - - m_currentBatchConverters.push_back( - std::make_shared>( - columnArray)); - break; +#define _SF_INIT_FIXED_CONVERTER(ARROW_TYPE, ARROW_ARRAY_TYPE) \ + case arrow::Type::type::ARROW_TYPE: \ + {\ + if (scale > 0)\ + {\ + if (m_useNumpy)\ + {\ + m_currentBatchConverters.push_back(std::make_shared<\ + sf::NumpyDecimalConverter>(\ + columnArray, precision, scale, m_context));\ + }\ + else\ + {\ + m_currentBatchConverters.push_back(std::make_shared<\ + sf::DecimalFromIntConverter>(\ + columnArray, precision, scale));\ + }\ + }\ + else\ + {\ + if (m_useNumpy)\ + {\ + m_currentBatchConverters.push_back(\ + std::make_shared>(\ + columnArray, m_context));\ + }\ + else\ + {\ + m_currentBatchConverters.push_back(\ + std::make_shared>(\ + columnArray));\ + }\ + }\ + break;\ } - case arrow::Type::type::INT32: - { - if (scale > 0) - { - m_currentBatchConverters.push_back(std::make_shared< - sf::DecimalFromIntConverter>( - columnArray, precision, scale)); - break; - } - - m_currentBatchConverters.push_back( - std::make_shared>( - columnArray)); - break; - } - - case arrow::Type::type::INT64: - { - if (scale > 0) - { - m_currentBatchConverters.push_back(std::make_shared< - sf::DecimalFromIntConverter>( - columnArray, precision, scale)); - break; - } - - m_currentBatchConverters.push_back( - std::make_shared>( - columnArray)); - break; - } + _SF_INIT_FIXED_CONVERTER(INT8, Int8) + _SF_INIT_FIXED_CONVERTER(INT16, Int16) + _SF_INIT_FIXED_CONVERTER(INT32, Int32) + _SF_INIT_FIXED_CONVERTER(INT64, Int64) +#undef _SF_INIT_FIXED_CONVERTER case arrow::Type::type::DECIMAL: { @@ -203,7 +184,7 @@ void CArrowChunkIterator::initColumnConverters() "[Snowflake Exception] unknown arrow internal data type(%d) " "for FIXED data", dt->id()); - logger.error(errorInfo.c_str()); + logger->error(__FILE__, __func__, __LINE__, errorInfo.c_str()); PyErr_SetString(PyExc_Exception, errorInfo.c_str()); return; } @@ -216,6 +197,7 @@ void CArrowChunkIterator::initColumnConverters() case SnowflakeType::Type::OBJECT: case SnowflakeType::Type::VARIANT: case SnowflakeType::Type::TEXT: + case SnowflakeType::Type::ARRAY: { m_currentBatchConverters.push_back( std::make_shared(columnArray)); @@ -231,15 +213,31 @@ void 
CArrowChunkIterator::initColumnConverters() case SnowflakeType::Type::REAL: { - m_currentBatchConverters.push_back( - std::make_shared(columnArray)); + if (m_useNumpy) + { + m_currentBatchConverters.push_back( + std::make_shared(columnArray, m_context)); + } + else + { + m_currentBatchConverters.push_back( + std::make_shared(columnArray)); + } break; } case SnowflakeType::Type::DATE: { - m_currentBatchConverters.push_back( - std::make_shared(columnArray)); + if (m_useNumpy) + { + m_currentBatchConverters.push_back( + std::make_shared(columnArray, m_context)); + } + else + { + m_currentBatchConverters.push_back( + std::make_shared(columnArray)); + } break; } @@ -279,7 +277,7 @@ void CArrowChunkIterator::initColumnConverters() "[Snowflake Exception] unknown arrow internal data type(%d) " "for TIME data", dt->id()); - logger.error(errorInfo.c_str()); + logger->error(__FILE__, __func__, __LINE__, errorInfo.c_str()); PyErr_SetString(PyExc_Exception, errorInfo.c_str()); return; } @@ -296,17 +294,35 @@ void CArrowChunkIterator::initColumnConverters() { case arrow::Type::type::INT64: { - m_currentBatchConverters.push_back( - std::make_shared( - columnArray, scale, m_context)); + if (m_useNumpy) + { + m_currentBatchConverters.push_back( + std::make_shared( + columnArray, scale, m_context)); + } + else + { + m_currentBatchConverters.push_back( + std::make_shared( + columnArray, scale, m_context)); + } break; } case arrow::Type::type::STRUCT: { - m_currentBatchConverters.push_back( - std::make_shared( - columnArray, scale, m_context)); + if (m_useNumpy) + { + m_currentBatchConverters.push_back( + std::make_shared( + columnArray, scale, m_context)); + } + else + { + m_currentBatchConverters.push_back( + std::make_shared( + columnArray, scale, m_context)); + } break; } @@ -316,7 +332,7 @@ void CArrowChunkIterator::initColumnConverters() "[Snowflake Exception] unknown arrow internal data type(%d) " "for TIMESTAMP_NTZ data", dt->id()); - logger.error(errorInfo.c_str()); + logger->error(__FILE__, __func__, __LINE__, errorInfo.c_str()); PyErr_SetString(PyExc_Exception, errorInfo.c_str()); return; } @@ -353,7 +369,7 @@ void CArrowChunkIterator::initColumnConverters() "[Snowflake Exception] unknown arrow internal data type(%d) " "for TIMESTAMP_LTZ data", dt->id()); - logger.error(errorInfo.c_str()); + logger->error(__FILE__, __func__, __LINE__, errorInfo.c_str()); PyErr_SetString(PyExc_Exception, errorInfo.c_str()); return; } @@ -394,7 +410,7 @@ void CArrowChunkIterator::initColumnConverters() "[Snowflake Exception] unknown arrow internal data type(%d) " "for TIMESTAMP_TZ data", dt->id()); - logger.error(errorInfo.c_str()); + logger->error(__FILE__, __func__, __LINE__, errorInfo.c_str()); PyErr_SetString(PyExc_Exception, errorInfo.c_str()); return; } @@ -406,9 +422,9 @@ void CArrowChunkIterator::initColumnConverters() default: { std::string errorInfo = Logger::formatString( - "[Snowflake Exception] unknown snowflake data type : %d", - metaData->value(metaData->FindKey("logicalType"))); - logger.error(errorInfo.c_str()); + "[Snowflake Exception] unknown snowflake data type : %s", + metaData->value(metaData->FindKey("logicalType")).c_str()); + logger->error(__FILE__, __func__, __LINE__, errorInfo.c_str()); PyErr_SetString(PyExc_Exception, errorInfo.c_str()); return; } @@ -416,4 +432,29 @@ void CArrowChunkIterator::initColumnConverters() } } +DictCArrowChunkIterator::DictCArrowChunkIterator(PyObject* context, + std::vector> * batches, + PyObject* use_numpy) +: CArrowChunkIterator(context, batches, use_numpy) +{ 
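+  // Intentionally left empty: batch handling lives in CArrowChunkIterator;
+  // only createRowPyObject() is overridden to emit dict rows instead of tuples.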
+} + +void DictCArrowChunkIterator::createRowPyObject() +{ + m_latestReturnedRow.reset(PyDict_New()); + for (int i = 0; i < m_currentSchema->num_fields(); i++) + { + py::UniqueRef value(m_currentBatchConverters[i]->toPyObject(m_rowIndexInBatch)); + if (!value.empty()) + { + // PyDict_SetItemString doesn't steal a reference to value.get(). + PyDict_SetItemString( + m_latestReturnedRow.get(), + m_currentSchema->field(i)->name().c_str(), + value.get()); + } + } + return; +} + } // namespace sf diff --git a/cpp/ArrowIterator/CArrowChunkIterator.hpp b/src/snowflake/connector/cpp/ArrowIterator/CArrowChunkIterator.hpp similarity index 59% rename from cpp/ArrowIterator/CArrowChunkIterator.hpp rename to src/snowflake/connector/cpp/ArrowIterator/CArrowChunkIterator.hpp index 4540febaa..9c94f290a 100644 --- a/cpp/ArrowIterator/CArrowChunkIterator.hpp +++ b/src/snowflake/connector/cpp/ArrowIterator/CArrowChunkIterator.hpp @@ -1,12 +1,15 @@ -/* - * Copyright (c) 2013-2019 Snowflake Computing - */ +// +// Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +// + #ifndef PC_ARROWCHUNKITERATOR_HPP #define PC_ARROWCHUNKITERATOR_HPP #include "CArrowIterator.hpp" #include "IColumnConverter.hpp" #include "Python/Common.hpp" +#include +#include namespace sf { @@ -22,25 +25,35 @@ class CArrowChunkIterator : public CArrowIterator /** * Constructor */ - CArrowChunkIterator(PyObject* context); + CArrowChunkIterator(PyObject* context, std::vector> * batches, PyObject *use_numpy); /** * Desctructor */ - ~CArrowChunkIterator() = default; + virtual ~CArrowChunkIterator() = default; /** - * Add Arrow RecordBach to current chunk - * @param rb recordbatch to be added + * @return a python tuple object which contains all data in current row */ - void addRecordBatch(PyObject* rb) override; + std::shared_ptr next() override; +protected: /** - * @return a python tuple object which contains all data in current row + * @return python object of tuple which is tuple of all row values */ - PyObject* next() override; + virtual void createRowPyObject(); + + /** pointer to the latest returned python tuple(row) result */ + py::UniqueRef m_latestReturnedRow; + + /** list of column converters*/ + std::vector> m_currentBatchConverters; + + /** row index inside current record batch (start from 0) */ + int m_rowIndexInBatch; - void reset() override; + /** schema of current record batch */ + std::shared_ptr m_currentSchema; private: /** number of columns */ @@ -52,28 +65,35 @@ class CArrowChunkIterator : public CArrowIterator /** current index that iterator points to */ int m_currentBatchIndex; - /** row index inside current record batch (start from 0) */ - int m_rowIndexInBatch; - /** total number of rows inside current record batch */ int64_t m_rowCountInBatch; - /** pointer to the latest returned python tuple(row) result */ - py::UniqueRef m_latestReturnedRow; - - /** list of column converters*/ - std::vector> m_currentBatchConverters; + /** pointer to the current python exception object */ + py::UniqueRef m_currentPyException; /** arrow format convert context for the current session */ PyObject* m_context; - /** - * @return python object of tuple which is tuple of all row values - */ - void currentRowAsTuple(); + /** true if return numpy int64 float64 datetime*/ + bool m_useNumpy; void initColumnConverters(); }; + +class DictCArrowChunkIterator : public CArrowChunkIterator +{ +public: + DictCArrowChunkIterator(PyObject* context, std::vector> * batches, PyObject *use_numpy); + + ~DictCArrowChunkIterator() = default; + 
+private: + + void createRowPyObject() override; + +}; + + } #endif // PC_ARROWCHUNKITERATOR_HPP diff --git a/src/snowflake/connector/cpp/ArrowIterator/CArrowIterator.cpp b/src/snowflake/connector/cpp/ArrowIterator/CArrowIterator.cpp new file mode 100644 index 000000000..b1ea110e7 --- /dev/null +++ b/src/snowflake/connector/cpp/ArrowIterator/CArrowIterator.cpp @@ -0,0 +1,19 @@ +// +// Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +// + +#include "CArrowIterator.hpp" +#include + +namespace sf +{ + +Logger* CArrowIterator::logger = new Logger("snowflake.connector.CArrowIterator"); + +CArrowIterator::CArrowIterator(std::vector>* batches) : + m_cRecordBatches(batches) +{ + logger->debug(__FILE__, __func__, __LINE__, "Arrow BatchSize: %d", batches->size()); +} + +} diff --git a/src/snowflake/connector/cpp/ArrowIterator/CArrowIterator.hpp b/src/snowflake/connector/cpp/ArrowIterator/CArrowIterator.hpp new file mode 100644 index 000000000..e7e38bbaf --- /dev/null +++ b/src/snowflake/connector/cpp/ArrowIterator/CArrowIterator.hpp @@ -0,0 +1,76 @@ +// +// Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +// + +#ifndef PC_ARROWITERATOR_HPP +#define PC_ARROWITERATOR_HPP + +#include "Python/Common.hpp" +#include "logging.hpp" +#include +#include +#include + +#define SF_CHECK_ARROW_RC(arrow_status, format_string, ...) \ if (!arrow_status.ok()) \ { \ std::string errorInfo = Logger::formatString(format_string, ##__VA_ARGS__); \ logger->error(__FILE__, __func__, __LINE__, errorInfo.c_str()); \ PyErr_SetString(PyExc_Exception, errorInfo.c_str()); \ return; \ } + +#define SF_CHECK_ARROW_RC_AND_RETURN(arrow_status, ret_val, format_string, ...) \ if (!arrow_status.ok()) \ { \ std::string errorInfo = Logger::formatString(format_string, ##__VA_ARGS__); \ logger->error(__FILE__, __func__, __LINE__, errorInfo.c_str()); \ PyErr_SetString(PyExc_Exception, errorInfo.c_str()); \ return ret_val; \ } + +namespace sf +{ + +/** + * A simple struct to carry return data back to Cython. + * successObj will be nullptr if the call failed, in which case exception is populated + */ +class ReturnVal +{ +public: + ReturnVal(PyObject * obj, PyObject *except) : + successObj(obj), exception(except) + { + } + + PyObject * successObj; + + PyObject * exception; +}; + +/** + * Arrow base iterator implementation in C++. + */ + +class CArrowIterator +{ +public: + CArrowIterator(std::vector> * batches); + + virtual ~CArrowIterator() = default; + + /** + * @return a python object which might be current row or an Arrow Table + */ + virtual std::shared_ptr next() = 0; + +protected: + /** list of all record batches in current chunk */ + std::vector> *m_cRecordBatches; + + static Logger* logger; +}; +} + +#endif // PC_ARROWITERATOR_HPP diff --git a/src/snowflake/connector/cpp/ArrowIterator/CArrowTableIterator.cpp b/src/snowflake/connector/cpp/ArrowIterator/CArrowTableIterator.cpp new file mode 100644 index 000000000..3f14be3f9 --- /dev/null +++ b/src/snowflake/connector/cpp/ArrowIterator/CArrowTableIterator.cpp @@ -0,0 +1,1000 @@ +// +// Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +// + +#include "CArrowTableIterator.hpp" +#include "SnowflakeType.hpp" +#include "Python/Common.hpp" +#include "Util/time.hpp" +#include +#include +#include + +namespace sf +{ + +/** + * This function makes sure the arrow table can be successfully converted to a pandas dataframe + * using arrow's to_pandas method.
Since some Snowflake arrow columns are not supported, this method + * can map those to supported ones. + * Specifically, + * All Snowflake fixed number columns with scale > 0 (except decimal) will be converted to Arrow float64/double columns + * All Snowflake time columns will be converted to Arrow Time columns with unit = second, milli, or micro. + * All Snowflake timestamp columns will be converted to Arrow timestamp columns + * Specifically, + * timestampntz will be converted to Arrow timestamp with UTC + * timestampltz will be converted to Arrow timestamp with session time zone + * timestamptz will be converted to Arrow timestamp with UTC + * Since Arrow timestamps use int64_t internally, they may be out of range for very small or very large timestamps + */ +void CArrowTableIterator::reconstructRecordBatches() +{ + // Type conversion, the code needs to be optimized + for (unsigned int batchIdx = 0; batchIdx < m_cRecordBatches->size(); batchIdx++) + { + std::shared_ptr currentBatch = (*m_cRecordBatches)[batchIdx]; + std::shared_ptr schema = currentBatch->schema(); + // These copies will be used when rebuilding the RecordBatch becomes necessary + bool needsRebuild = false; + std::vector> futureFields; + std::vector> futureColumns; + + for (int colIdx = 0; colIdx < currentBatch->num_columns(); colIdx++) + { + std::shared_ptr columnArray = currentBatch->column(colIdx); + std::shared_ptr field = schema->field(colIdx); + std::shared_ptr dt = field->type(); + std::shared_ptr metaData = field->metadata(); + SnowflakeType::Type st = SnowflakeType::snowflakeTypeFromString( + metaData->value(metaData->FindKey("logicalType"))); + + // reconstruct columnArray in place + switch (st) + { + case SnowflakeType::Type::FIXED: + { + int scale = metaData + ? std::stoi(metaData->value(metaData->FindKey("scale"))) + : 0; + if (scale > 0 && dt->id() != arrow::Type::type::DECIMAL) + { + logger->debug( + __FILE__, + __func__, + __LINE__, + "Convert fixed number column to double column, column scale %d, column type id: %d", + scale, + dt->id() + ); + convertScaledFixedNumberColumn( + batchIdx, + colIdx, + field, + columnArray, + scale, + futureFields, + futureColumns, + needsRebuild + ); + } + break; + } + + case SnowflakeType::Type::ANY: + case SnowflakeType::Type::ARRAY: + case SnowflakeType::Type::BOOLEAN: + case SnowflakeType::Type::CHAR: + case SnowflakeType::Type::OBJECT: + case SnowflakeType::Type::BINARY: + case SnowflakeType::Type::VARIANT: + case SnowflakeType::Type::TEXT: + case SnowflakeType::Type::REAL: + case SnowflakeType::Type::DATE: + { + // Do not need to convert + break; + } + + case SnowflakeType::Type::TIME: + { + int scale = metaData + ? std::stoi(metaData->value(metaData->FindKey("scale"))) + : 9; + + convertTimeColumn(batchIdx, colIdx, field, columnArray, scale, futureFields, futureColumns, needsRebuild); + break; + } + + case SnowflakeType::Type::TIMESTAMP_NTZ: + { + int scale = metaData + ? std::stoi(metaData->value(metaData->FindKey("scale"))) + : 9; + + convertTimestampColumn(batchIdx, colIdx, field, columnArray, scale, futureFields, futureColumns, needsRebuild); + break; + } + + case SnowflakeType::Type::TIMESTAMP_LTZ: + { + int scale = metaData + ? std::stoi(metaData->value(metaData->FindKey("scale"))) + : 9; + + convertTimestampColumn(batchIdx, colIdx, field, columnArray, scale, futureFields, futureColumns, needsRebuild, m_timezone); + break; + } + + case SnowflakeType::Type::TIMESTAMP_TZ: + { + int scale = metaData + ?
std::stoi(metaData->value(metaData->FindKey("scale"))) + : 9; + int byteLength = + metaData + ? std::stoi(metaData->value(metaData->FindKey("byteLength"))) + : 16; + + convertTimestampTZColumn(batchIdx, colIdx, field, columnArray, scale, byteLength, futureFields, futureColumns, needsRebuild, m_timezone); + break; + } + + default: + { + std::string errorInfo = Logger::formatString( + "[Snowflake Exception] unknown snowflake data type : %s", + metaData->value(metaData->FindKey("logicalType")).c_str()); + logger->error(__FILE__, __func__, __LINE__, errorInfo.c_str()); + PyErr_SetString(PyExc_Exception, errorInfo.c_str()); + return; + } + } + } + + if (needsRebuild) + { + std::shared_ptr futureSchema = arrow::schema(futureFields, schema->metadata()); + (*m_cRecordBatches)[batchIdx] = arrow::RecordBatch::Make(futureSchema, currentBatch->num_rows(), futureColumns); + } + } +} + +CArrowTableIterator::CArrowTableIterator( +PyObject* context, +std::vector>* batches, +const bool number_to_decimal +) +: CArrowIterator(batches), +m_context(context), +m_pyTableObjRef(nullptr), +m_convert_number_to_decimal(number_to_decimal) +{ + py::UniqueRef tz(PyObject_GetAttrString(m_context, "_timezone")); + PyArg_Parse(tz.get(), "s", &m_timezone); +} + +std::shared_ptr CArrowTableIterator::next() +{ + bool firstDone = this->convertRecordBatchesToTable(); + if (firstDone && m_cTable) + { + m_pyTableObjRef.reset(arrow::py::wrap_table(m_cTable)); + return std::make_shared(m_pyTableObjRef.get(), nullptr); + } + else + { + return std::make_shared(Py_None, nullptr); + } +} + +void CArrowTableIterator::replaceColumn( + const unsigned int batchIdx, + const int colIdx, + const std::shared_ptr& newField, + const std::shared_ptr& newColumn, + std::vector>& futureFields, + std::vector>& futureColumns, + bool& needsRebuild) +{ + // replace the targeted column + if (needsRebuild == false) + { + // The first time we modify a batch, we have to make a copy of its field and column vectors + std::shared_ptr currentBatch = (*m_cRecordBatches)[batchIdx]; + futureFields = currentBatch->schema()->fields(); + futureColumns = currentBatch->columns(); + needsRebuild = true; + } + futureFields[colIdx] = newField; + futureColumns[colIdx] = newColumn; +} + +template +double CArrowTableIterator::convertScaledFixedNumberToDouble( + const unsigned int scale, + T originalValue +) +{ + if (scale < 9) + { + // simply divide to convert the scaled value to double + return (double) originalValue / sf::internal::powTenSB4[scale]; + } + else + { + // when scale is large, convert the value to string first and then convert it to double + // otherwise, it may lose precision + std::string valStr = std::to_string(originalValue); + int negative = valStr.at(0) == '-' ?
1:0; + unsigned int digits = valStr.length() - negative; + if (digits <= scale) + { + int numOfZeroes = scale - digits + 1; + valStr.insert(negative, std::string(numOfZeroes, '0')); + } + valStr.insert(valStr.length() - scale, "."); + std::size_t offset = 0; + return std::stod(valStr, &offset); + } +} + +void CArrowTableIterator::convertScaledFixedNumberColumn( + const unsigned int batchIdx, + const int colIdx, + const std::shared_ptr field, + const std::shared_ptr columnArray, + const unsigned int scale, + std::vector>& futureFields, + std::vector>& futureColumns, + bool& needsRebuild +) +{ +// Convert scaled fixed number to either Double, or Decimal based on setting + if (m_convert_number_to_decimal){ + convertScaledFixedNumberColumnToDecimalColumn( + batchIdx, + colIdx, + field, + columnArray, + scale, + futureFields, + futureColumns, + needsRebuild + ); + } else { + convertScaledFixedNumberColumnToDoubleColumn( + batchIdx, + colIdx, + field, + columnArray, + scale, + futureFields, + futureColumns, + needsRebuild + ); + } +} + +void CArrowTableIterator::convertScaledFixedNumberColumnToDecimalColumn( + const unsigned int batchIdx, + const int colIdx, + const std::shared_ptr field, + const std::shared_ptr columnArray, + const unsigned int scale, + std::vector>& futureFields, + std::vector>& futureColumns, + bool& needsRebuild +) +{ + // Convert to decimal columns + const std::shared_ptr field_type = field->type(); + const std::shared_ptr destType = arrow::decimal128(38, scale); + std::shared_ptr doubleField = std::make_shared( + field->name(), destType, field->nullable()); + arrow::Decimal128Builder builder(destType, m_pool); + arrow::Status ret; + for(int64_t rowIdx = 0; rowIdx < columnArray->length(); rowIdx++) + { + if (columnArray->IsValid(rowIdx)) + { + arrow::Decimal128 val; + switch (field_type->id()) + { + case arrow::Type::type::INT8: + { + auto originalVal = std::static_pointer_cast(columnArray)->Value(rowIdx); + val = arrow::Decimal128(originalVal); + break; + } + case arrow::Type::type::INT16: + { + auto originalVal = std::static_pointer_cast(columnArray)->Value(rowIdx); + val = arrow::Decimal128(originalVal); + break; + } + case arrow::Type::type::INT32: + { + auto originalVal = std::static_pointer_cast(columnArray)->Value(rowIdx); + val = arrow::Decimal128(originalVal); + break; + } + case arrow::Type::type::INT64: + { + auto originalVal = std::static_pointer_cast(columnArray)->Value(rowIdx); + val = arrow::Decimal128(originalVal); + break; + } + default: + std::string errorInfo = Logger::formatString( + "[Snowflake Exception] unknown arrow internal data type(%d) " + "for FIXED data", + field_type->id()); + logger->error(__FILE__, __func__, __LINE__, errorInfo.c_str()); + return; + } + ret = builder.Append(val); + } + else + { + ret = builder.AppendNull(); + } + SF_CHECK_ARROW_RC(ret, + "[Snowflake Exception] arrow failed to append Decimal value: internal data type(%d), errorInfo: %s", + field_type->id(), ret.message().c_str()); + } + + std::shared_ptr doubleArray; + ret = builder.Finish(&doubleArray); + SF_CHECK_ARROW_RC(ret, + "[Snowflake Exception] arrow failed to finish Decimal array, errorInfo: %s", + ret.message().c_str()); + + // replace the targeted column + replaceColumn(batchIdx, colIdx, doubleField, doubleArray, futureFields, futureColumns, needsRebuild); +} + +void CArrowTableIterator::convertScaledFixedNumberColumnToDoubleColumn( + const unsigned int batchIdx, + const int colIdx, + const std::shared_ptr field, + const std::shared_ptr columnArray, + const 
unsigned int scale, + std::vector>& futureFields, + std::vector>& futureColumns, + bool& needsRebuild +) +{ + // Convert to arrow double/float64 column + std::shared_ptr doubleField = std::make_shared( + field->name(), arrow::float64(), field->nullable()); + arrow::DoubleBuilder builder(m_pool); + arrow::Status ret; + auto dt = field->type(); + for(int64_t rowIdx = 0; rowIdx < columnArray->length(); rowIdx++) + { + if (columnArray->IsValid(rowIdx)) + { + double val; + switch (dt->id()) + { + case arrow::Type::type::INT8: + { + auto originalVal = std::static_pointer_cast(columnArray)->Value(rowIdx); + val = convertScaledFixedNumberToDouble(scale, originalVal); + break; + } + case arrow::Type::type::INT16: + { + auto originalVal = std::static_pointer_cast(columnArray)->Value(rowIdx); + val = convertScaledFixedNumberToDouble(scale, originalVal); + break; + } + case arrow::Type::type::INT32: + { + auto originalVal = std::static_pointer_cast(columnArray)->Value(rowIdx); + val = convertScaledFixedNumberToDouble(scale, originalVal); + break; + } + case arrow::Type::type::INT64: + { + auto originalVal = std::static_pointer_cast(columnArray)->Value(rowIdx); + val = convertScaledFixedNumberToDouble(scale, originalVal); + break; + } + default: + std::string errorInfo = Logger::formatString( + "[Snowflake Exception] unknown arrow internal data type(%d) " + "for FIXED data", + dt->id()); + logger->error(__FILE__, __func__, __LINE__, errorInfo.c_str()); + return; + } + ret = builder.Append(val); + } + else + { + ret = builder.AppendNull(); + } + SF_CHECK_ARROW_RC(ret, + "[Snowflake Exception] arrow failed to append Double value: internal data type(%d), errorInfo: %s", + dt->id(), ret.message().c_str()); + } + + std::shared_ptr doubleArray; + ret = builder.Finish(&doubleArray); + SF_CHECK_ARROW_RC(ret, + "[Snowflake Exception] arrow failed to finish Double array, errorInfo: %s", + ret.message().c_str()); + + // replace the targeted column + replaceColumn(batchIdx, colIdx, doubleField, doubleArray, futureFields, futureColumns, needsRebuild); +} + +void CArrowTableIterator::convertTimeColumn( + const unsigned int batchIdx, + const int colIdx, + const std::shared_ptr field, + const std::shared_ptr columnArray, + const int scale, + std::vector>& futureFields, + std::vector>& futureColumns, + bool& needsRebuild +) +{ + std::shared_ptr tsField; + std::shared_ptr tsArray; + arrow::Status ret; + auto dt = field->type(); + // Convert to arrow time column + if (scale == 0) + { + auto timeType = arrow::time32(arrow::TimeUnit::SECOND); + tsField = std::make_shared( + field->name(), timeType, field->nullable()); + arrow::Time32Builder builder(timeType, m_pool); + + + for(int64_t rowIdx = 0; rowIdx < columnArray->length(); rowIdx++) + { + if (columnArray->IsValid(rowIdx)) + { + int32_t originalVal = std::static_pointer_cast(columnArray)->Value(rowIdx); + // unit is second + ret = builder.Append(originalVal); + } + else + { + ret = builder.AppendNull(); + } + SF_CHECK_ARROW_RC(ret, + "[Snowflake Exception] arrow failed to append value: internal data type(%d)" + ", errorInfo: %s", + dt->id(), ret.message().c_str()); + } + + ret = builder.Finish(&tsArray); + SF_CHECK_ARROW_RC(ret, + "[Snowflake Exception] arrow failed to finish array, errorInfo: %s", + ret.message().c_str()); + } + else if (scale <= 3) + { + auto timeType = arrow::time32(arrow::TimeUnit::MILLI); + tsField = std::make_shared( + field->name(), timeType, field->nullable()); + arrow::Time32Builder builder(timeType, m_pool); + + arrow::Status ret; + 
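+ // Worked example for this branch: with scale == 1, a stored TIME value of + // 451687 (i.e. 45168.7 seconds) becomes 451687 * powTenSB4[3 - 1] = 45168700 ms.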
for(int64_t rowIdx = 0; rowIdx < columnArray->length(); rowIdx++) + { + if (columnArray->IsValid(rowIdx)) + { + int32_t val = std::static_pointer_cast(columnArray)->Value(rowIdx) + * sf::internal::powTenSB4[3 - scale]; + // unit is millisecond + ret = builder.Append(val); + } + else + { + ret = builder.AppendNull(); + } + SF_CHECK_ARROW_RC(ret, + "[Snowflake Exception] arrow failed to append value: internal data type(%d)" + ", errorInfo: %s", + dt->id(), ret.message().c_str()); + } + + ret = builder.Finish(&tsArray); + SF_CHECK_ARROW_RC(ret, + "[Snowflake Exception] arrow failed to finish array, errorInfo: %s", + ret.message().c_str()); + } + else if (scale <= 6) + { + auto timeType = arrow::time64(arrow::TimeUnit::MICRO); + tsField = std::make_shared( + field->name(), timeType, field->nullable()); + arrow::Time64Builder builder(timeType, m_pool); + + arrow::Status ret; + for(int64_t rowIdx = 0; rowIdx < columnArray->length(); rowIdx++) + { + if (columnArray->IsValid(rowIdx)) + { + int64_t val; + switch (dt->id()) + { + case arrow::Type::type::INT32: + val = std::static_pointer_cast(columnArray)->Value(rowIdx); + break; + case arrow::Type::type::INT64: + val = std::static_pointer_cast(columnArray)->Value(rowIdx); + break; + default: + std::string errorInfo = Logger::formatString( + "[Snowflake Exception] unknown arrow internal data type(%d) " + "for FIXED data", + dt->id()); + logger->error(__FILE__, __func__, __LINE__, errorInfo.c_str()); + return; + } + val *= sf::internal::powTenSB4[6 - scale]; + // unit is microsecond + ret = builder.Append(val); + } + else + { + ret = builder.AppendNull(); + } + SF_CHECK_ARROW_RC(ret, + "[Snowflake Exception] arrow failed to append value: internal data type(%d), errorInfo: %s", + dt->id(), ret.message().c_str()); + } + + ret = builder.Finish(&tsArray); + SF_CHECK_ARROW_RC(ret, + "[Snowflake Exception] arrow failed to finish array, errorInfo: %s", + ret.message().c_str()); + } + else + { + // Note: Python/Pandas Time does not support nanoseconds, + // So truncate the time values to microseconds + auto timeType = arrow::time64(arrow::TimeUnit::MICRO); + tsField = std::make_shared( + field->name(), timeType, field->nullable()); + arrow::Time64Builder builder(timeType, m_pool); + + arrow::Status ret; + for(int64_t rowIdx = 0; rowIdx < columnArray->length(); rowIdx++) + { + if (columnArray->IsValid(rowIdx)) + { + int64_t val; + switch (dt->id()) + { + case arrow::Type::type::INT32: + val = std::static_pointer_cast(columnArray)->Value(rowIdx); + break; + case arrow::Type::type::INT64: + val = std::static_pointer_cast(columnArray)->Value(rowIdx); + break; + default: + std::string errorInfo = Logger::formatString( + "[Snowflake Exception] unknown arrow internal data type(%d) " + "for FIXED data", + dt->id()); + logger->error(__FILE__, __func__, __LINE__, errorInfo.c_str()); + return; + } + val /= sf::internal::powTenSB4[scale - 6]; + // unit is microsecond + ret = builder.Append(val); + } + else + { + ret = builder.AppendNull(); + } + SF_CHECK_ARROW_RC(ret, + "[Snowflake Exception] arrow failed to append value: internal data type(%d), errorInfo: %s", + dt->id(), ret.message().c_str()); + } + + ret = builder.Finish(&tsArray); + SF_CHECK_ARROW_RC(ret, + "[Snowflake Exception] arrow failed to finish array, errorInfo: %s", + ret.message().c_str()); + } + + // replace the targeted column + replaceColumn(batchIdx, colIdx, tsField, tsArray, futureFields, futureColumns, needsRebuild); +} + +void CArrowTableIterator::convertTimestampColumn( + const unsigned int 
batchIdx, + const int colIdx, + const std::shared_ptr field, + const std::shared_ptr columnArray, + const int scale, + std::vector>& futureFields, + std::vector>& futureColumns, + bool& needsRebuild, + const std::string timezone +) +{ + std::shared_ptr tsField; + std::shared_ptr tsArray; + arrow::Status ret; + std::shared_ptr timeType; + auto dt = field->type(); + // Convert to arrow time column + if (scale == 0) + { + if (!timezone.empty()) + { + timeType = arrow::timestamp(arrow::TimeUnit::SECOND, timezone); + } + else + { + timeType = arrow::timestamp(arrow::TimeUnit::SECOND); + } + tsField = std::make_shared( + field->name(), timeType, field->nullable()); + arrow::TimestampBuilder builder(timeType, m_pool); + + + for(int64_t rowIdx = 0; rowIdx < columnArray->length(); rowIdx++) + { + if (columnArray->IsValid(rowIdx)) + { + int64_t originalVal = std::static_pointer_cast(columnArray)->Value(rowIdx); + // unit is second + ret = builder.Append(originalVal); + } + else + { + ret = builder.AppendNull(); + } + SF_CHECK_ARROW_RC(ret, + "[Snowflake Exception] arrow failed to append value: internal data type(%d), errorInfo: %s", + dt->id(), ret.message().c_str()); + } + + ret = builder.Finish(&tsArray); SF_CHECK_ARROW_RC(ret, + "[Snowflake Exception] arrow failed to finish array, errorInfo: %s", + ret.message().c_str()); + } + else if (scale <= 3) + { + if (!timezone.empty()) + { + timeType = arrow::timestamp(arrow::TimeUnit::MILLI, timezone); + } + else + { + timeType = arrow::timestamp(arrow::TimeUnit::MILLI); + } + tsField = std::make_shared( + field->name(), timeType, field->nullable()); + arrow::TimestampBuilder builder(timeType, m_pool); + + arrow::Status ret; + for(int64_t rowIdx = 0; rowIdx < columnArray->length(); rowIdx++) + { + if (columnArray->IsValid(rowIdx)) + { + int64_t val = std::static_pointer_cast(columnArray)->Value(rowIdx) + * sf::internal::powTenSB4[3 - scale]; + // unit is millisecond + ret = builder.Append(val); + } + else + { + ret = builder.AppendNull(); + } + SF_CHECK_ARROW_RC(ret, + "[Snowflake Exception] arrow failed to append value: internal data type(%d), errorInfo: %s", + dt->id(), ret.message().c_str()); + } + + ret = builder.Finish(&tsArray); + SF_CHECK_ARROW_RC(ret, + "[Snowflake Exception] arrow failed to finish array, errorInfo: %s", + ret.message().c_str()); + } + else if (scale <= 6) + { + if (!timezone.empty()) + { + timeType = arrow::timestamp(arrow::TimeUnit::MICRO, timezone); + } + else + { + timeType = arrow::timestamp(arrow::TimeUnit::MICRO); + } + tsField = std::make_shared( + field->name(), timeType, field->nullable()); + arrow::TimestampBuilder builder(timeType, m_pool); + + arrow::Status ret; + for(int64_t rowIdx = 0; rowIdx < columnArray->length(); rowIdx++) + { + if (columnArray->IsValid(rowIdx)) + { + int64_t val; + switch (dt->id()) + { + case arrow::Type::type::INT64: + val = std::static_pointer_cast(columnArray)->Value(rowIdx); + break; + default: + std::string errorInfo = Logger::formatString( + "[Snowflake Exception] unknown arrow internal data type(%d) " + "for FIXED data", + dt->id()); + logger->error(__FILE__, __func__, __LINE__, errorInfo.c_str()); + return; + } + val *= sf::internal::powTenSB4[6 - scale]; + // unit is microsecond + ret = builder.Append(val); + } + else + { + ret = builder.AppendNull(); + } + SF_CHECK_ARROW_RC(ret, + "[Snowflake Exception] arrow failed to append value: internal data type(%d), errorInfo: %s", + dt->id(), ret.message().c_str()); + } + + ret = builder.Finish(&tsArray); + SF_CHECK_ARROW_RC(ret, + 
"[Snowflake Exception] arrow failed to finish array, errorInfo: %s", + ret.message().c_str()); + } + else + { + if (!timezone.empty()) + { + timeType = arrow::timestamp(arrow::TimeUnit::NANO, timezone); + } + else + { + timeType = arrow::timestamp(arrow::TimeUnit::NANO); + } + tsField = std::make_shared( + field->name(), timeType, field->nullable()); + arrow::TimestampBuilder builder(timeType, m_pool); + std::shared_ptr structArray; + if (dt->id() == arrow::Type::type::STRUCT) + { + structArray = std::dynamic_pointer_cast(columnArray); + } + arrow::Status ret; + for(int64_t rowIdx = 0; rowIdx < columnArray->length(); rowIdx++) + { + if (columnArray->IsValid(rowIdx)) + { + int64_t val; + switch (dt->id()) + { + case arrow::Type::type::INT64: + val = std::static_pointer_cast(columnArray)->Value(rowIdx); + val *= sf::internal::powTenSB4[9 - scale]; + break; + case arrow::Type::type::STRUCT: + { + int64_t epoch = std::static_pointer_cast( + structArray->GetFieldByName(sf::internal::FIELD_NAME_EPOCH))->Value(rowIdx); + int32_t fraction = std::static_pointer_cast( + structArray->GetFieldByName(sf::internal::FIELD_NAME_FRACTION))->Value(rowIdx); + val = epoch * sf::internal::powTenSB4[9] + fraction; + } + break; + default: + std::string errorInfo = Logger::formatString( + "[Snowflake Exception] unknown arrow internal data type(%d) " + "for FIXED data", + dt->id()); + logger->error(__FILE__, __func__, __LINE__, errorInfo.c_str()); + return; + } + // unit is nanosecond + ret = builder.Append(val); + } + else + { + ret = builder.AppendNull(); + } + SF_CHECK_ARROW_RC(ret, + "[Snowflake Exception] arrow failed to append value: internal data type(%d), errorInfo: %s", + dt->id(), ret.message().c_str()); + } + + ret = builder.Finish(&tsArray); + SF_CHECK_ARROW_RC(ret, + "[Snowflake Exception] arrow failed to finish array, errorInfo: %s", + ret.message().c_str()); + } + + // replace the targeted column + replaceColumn(batchIdx, colIdx, tsField, tsArray, futureFields, futureColumns, needsRebuild); +} + +void CArrowTableIterator::convertTimestampTZColumn( + const unsigned int batchIdx, + const int colIdx, + const std::shared_ptr field, + const std::shared_ptr columnArray, + const int scale, + const int byteLength, + std::vector>& futureFields, + std::vector>& futureColumns, + bool& needsRebuild, + const std::string timezone +) +{ + std::shared_ptr tsField; + std::shared_ptr tsArray; + std::shared_ptr timeType; + auto dt = field->type(); + // Convert to arrow time column + std::shared_ptr structArray; + structArray = std::dynamic_pointer_cast(columnArray); + auto epochArray = std::static_pointer_cast( + structArray->GetFieldByName(sf::internal::FIELD_NAME_EPOCH)); + auto fractionArray = std::static_pointer_cast( + structArray->GetFieldByName(sf::internal::FIELD_NAME_FRACTION)); + + if (scale == 0) + { + if (!timezone.empty()) + { + timeType = arrow::timestamp(arrow::TimeUnit::SECOND, timezone); + } + else + { + timeType = arrow::timestamp(arrow::TimeUnit::SECOND); + } + tsField = std::make_shared( + field->name(), timeType, field->nullable()); + } + else if (scale <= 3) + { + if (!timezone.empty()) + { + timeType = arrow::timestamp(arrow::TimeUnit::MILLI, timezone); + } + else + { + timeType = arrow::timestamp(arrow::TimeUnit::MILLI); + } + tsField = std::make_shared( + field->name(), timeType, field->nullable()); + } + else if (scale <= 6) + { + if (!timezone.empty()) + { + timeType = arrow::timestamp(arrow::TimeUnit::MICRO, timezone); + } + else + { + timeType = arrow::timestamp(arrow::TimeUnit::MICRO); + 
} + tsField = std::make_shared( + field->name(), timeType, field->nullable()); + } + else + { + if (!timezone.empty()) + { + timeType = arrow::timestamp(arrow::TimeUnit::NANO, timezone); + } + else + { + timeType = arrow::timestamp(arrow::TimeUnit::NANO); + } + tsField = std::make_shared( + field->name(), timeType, field->nullable()); + } + + arrow::TimestampBuilder builder(timeType, m_pool); + arrow::Status ret; + for(int64_t rowIdx = 0; rowIdx < columnArray->length(); rowIdx++) + { + if (columnArray->IsValid(rowIdx)) + { + if (byteLength == 8) + { + // two fields: epoch and time zone + int64_t epoch = epochArray->Value(rowIdx); + // append value + if (scale == 0) + { + ret = builder.Append(epoch); + } + else if (scale <= 3) + { + ret = builder.Append(epoch * sf::internal::powTenSB4[3-scale]); + } + else if (scale <= 6) + { + ret = builder.Append(epoch * sf::internal::powTenSB4[6-scale]); + } + else + { + ret = builder.Append(epoch * sf::internal::powTenSB4[9 - scale]); + } + } + else if (byteLength == 16) + { + // three fields: epoch, fraction and time zone + int64_t epoch = epochArray->Value(rowIdx); + int32_t fraction = fractionArray->Value(rowIdx); + if (scale == 0) + { + ret = builder.Append(epoch); + } + else if (scale <= 3) + { + ret = builder.Append(epoch * sf::internal::powTenSB4[3-scale] + + fraction / sf::internal::powTenSB4[6]); + } + else if (scale <= 6) + { + ret = builder.Append(epoch * sf::internal::powTenSB4[6] + fraction / sf::internal::powTenSB4[3]); + } + else + { + ret = builder.Append(epoch * sf::internal::powTenSB4[9] + fraction); + } + } + else + { + std::string errorInfo = Logger::formatString( + "[Snowflake Exception] unknown arrow internal data type(%d) " + "for TIMESTAMP_TZ data", + dt->id()); + logger->error(__FILE__, __func__, __LINE__, errorInfo.c_str()); + PyErr_SetString(PyExc_Exception, errorInfo.c_str()); + return; + } + } + else + { + ret = builder.AppendNull(); + } + SF_CHECK_ARROW_RC(ret, + "[Snowflake Exception] arrow failed to append value: internal data type(%d), errorInfo: %s", + dt->id(), ret.message().c_str()); + } + + ret = builder.Finish(&tsArray); + SF_CHECK_ARROW_RC(ret, + "[Snowflake Exception] arrow failed to finish array, errorInfo: %s", + ret.message().c_str()); + + // replace the targeted column + replaceColumn(batchIdx, colIdx, tsField, tsArray, futureFields, futureColumns, needsRebuild); +} + +bool CArrowTableIterator::convertRecordBatchesToTable() +{ + // only do the conversion once, and only if there are record batches + if (!m_cTable && !m_cRecordBatches->empty()) + { + reconstructRecordBatches(); + arrow::Result> ret = arrow::Table::FromRecordBatches(*m_cRecordBatches); + SF_CHECK_ARROW_RC_AND_RETURN(ret, false, + "[Snowflake Exception] arrow failed to build table from batches, errorInfo: %s", + ret.status().message().c_str()); + m_cTable = ret.ValueOrDie(); + + return true; + } + return false; +} + +} // namespace sf diff --git a/src/snowflake/connector/cpp/ArrowIterator/CArrowTableIterator.hpp b/src/snowflake/connector/cpp/ArrowIterator/CArrowTableIterator.hpp new file mode 100644 index 000000000..5680667e5 --- /dev/null +++ b/src/snowflake/connector/cpp/ArrowIterator/CArrowTableIterator.hpp @@ -0,0 +1,189 @@ +// +// Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +// + +#ifndef PC_ARROWTABLEITERATOR_HPP +#define PC_ARROWTABLEITERATOR_HPP + +#include "CArrowIterator.hpp" +#include +#include +#include + +namespace sf +{ + +/** + * Arrow table iterator implementation in C++.
+ * The caller will ask for an Arrow Table to be returned back to Python. + * This conversion is zero-copy: it just aggregates the columns from multiple record batches + * and builds a new table. + */ +class CArrowTableIterator : public CArrowIterator +{ +public: + /** + * Constructor + */ + CArrowTableIterator( + PyObject* context, + std::vector>* batches, + bool number_to_decimal + ); + + /** + * Destructor + */ + ~CArrowTableIterator() = default; + + /** + * @return an arrow table containing all data in all record batches + */ + std::shared_ptr next() override; + +private: + /* arrow table of all record batches in current chunk */ + std::shared_ptr m_cTable; + + /** arrow format convert context for the current session */ + PyObject* m_context; + + /** reference to PyObject */ + py::UniqueRef m_pyTableObjRef; + + /** + * arrow memory buffer to allocate type converted arrays for fetching pandas from arrow + */ + arrow::MemoryPool* m_pool = arrow::default_memory_pool(); + + /** local time zone */ + char* m_timezone; + const bool m_convert_number_to_decimal; + + /** + * Reconstruct record batches with type conversion in place + */ + void reconstructRecordBatches(); + + /** + * Convert all current RecordBatches to Arrow Table + * @return true if the conversion is executed for the first time and succeeds + */ + bool convertRecordBatchesToTable(); + + /** + * replace column with the new column in place + */ + void replaceColumn( + const unsigned int batchIdx, + const int colIdx, + const std::shared_ptr& newField, + const std::shared_ptr& newColumn, + std::vector>& futureFields, + std::vector>& futureColumns, + bool& needsRebuild + ); + + /** + * convert scaled fixed number column to a Decimal or Double column based on setting + */ + void convertScaledFixedNumberColumn( + const unsigned int batchIdx, + const int colIdx, + const std::shared_ptr field, + const std::shared_ptr columnArray, + const unsigned int scale, + std::vector>& futureFields, + std::vector>& futureColumns, + bool& needsRebuild + ); + + /** + * convert scaled fixed number column to Decimal column + */ + void convertScaledFixedNumberColumnToDecimalColumn( + const unsigned int batchIdx, + const int colIdx, + const std::shared_ptr field, + const std::shared_ptr columnArray, + const unsigned int scale, + std::vector>& futureFields, + std::vector>& futureColumns, + bool& needsRebuild + ); + + /** + * convert scaled fixed number column to Double column + */ + void convertScaledFixedNumberColumnToDoubleColumn( + const unsigned int batchIdx, + const int colIdx, + const std::shared_ptr field, + const std::shared_ptr columnArray, + const unsigned int scale, + std::vector>& futureFields, + std::vector>& futureColumns, + bool& needsRebuild + ); + + /** + * convert Snowflake Time column (Arrow int32/int64) to Arrow Time column + * Since Python/Pandas Time does not support nanoseconds, this function truncates values to microseconds if necessary + */ + void convertTimeColumn( + const unsigned int batchIdx, + const int colIdx, + const std::shared_ptr field, + const std::shared_ptr columnArray, + const int scale, + std::vector>& futureFields, + std::vector>& futureColumns, + bool& needsRebuild + ); + + /** + * convert Snowflake TimestampNTZ/TimestampLTZ column to Arrow Timestamp column + */ + void convertTimestampColumn( + const unsigned int batchIdx, + const int colIdx, + const std::shared_ptr field, + const std::shared_ptr columnArray, + const int scale, + std::vector>& futureFields, + std::vector>& futureColumns, + bool& needsRebuild, + const std::string
timezone="" + ); + + /** + * convert Snowflake TimestampTZ column to Arrow Timestamp column in UTC + * Arrow Timestamp does not support time zone info in each value, so this method convert TimestampTZ to Arrow + * timestamp with UTC timezone + */ + void convertTimestampTZColumn( + const unsigned int batchIdx, + const int colIdx, + const std::shared_ptr field, + const std::shared_ptr columnArray, + const int scale, + const int byteLength, + std::vector>& futureFields, + std::vector>& futureColumns, + bool& needsRebuild, + const std::string timezone + ); + + /** + * convert scaled fixed number to double + * if scale is small, then just divide based on the scale; otherwise, convert the value to string first and then + * convert to double to avoid precision loss + */ + template + double convertScaledFixedNumberToDouble( + const unsigned int scale, + T originalValue + ); +}; +} +#endif // PC_ARROWTABLEITERATOR_HPP diff --git a/cpp/ArrowIterator/DateConverter.cpp b/src/snowflake/connector/cpp/ArrowIterator/DateConverter.cpp similarity index 59% rename from cpp/ArrowIterator/DateConverter.cpp rename to src/snowflake/connector/cpp/ArrowIterator/DateConverter.cpp index 590bb4664..2b410ad23 100644 --- a/cpp/ArrowIterator/DateConverter.cpp +++ b/src/snowflake/connector/cpp/ArrowIterator/DateConverter.cpp @@ -1,12 +1,14 @@ -/* - * Copyright (c) 2013-2019 Snowflake Computing - */ +// +// Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +// + #include "DateConverter.hpp" #include "Python/Helpers.hpp" +#include namespace sf { -Logger DateConverter::logger("snowflake.connector.DateConverter"); +Logger* DateConverter::logger = new Logger("snowflake.connector.DateConverter"); py::UniqueRef& DateConverter::initPyDatetimeDate() { @@ -41,4 +43,23 @@ PyObject* DateConverter::toPyObject(int64_t rowIndex) const } } +NumpyDateConverter::NumpyDateConverter(std::shared_ptr array, PyObject * context) +: m_array(std::dynamic_pointer_cast(array)), + m_context(context) +{ +} + +PyObject* NumpyDateConverter::toPyObject(int64_t rowIndex) const +{ + if (m_array->IsValid(rowIndex)) + { + int32_t deltaDays = m_array->Value(rowIndex); + return PyObject_CallMethod(m_context, "DATE_to_numpy_datetime64", "i", deltaDays); + } + else + { + Py_RETURN_NONE; + } +} + } // namespace sf diff --git a/cpp/ArrowIterator/DateConverter.hpp b/src/snowflake/connector/cpp/ArrowIterator/DateConverter.hpp similarity index 59% rename from cpp/ArrowIterator/DateConverter.hpp rename to src/snowflake/connector/cpp/ArrowIterator/DateConverter.hpp index 46108668d..efa8e9fb8 100644 --- a/cpp/ArrowIterator/DateConverter.hpp +++ b/src/snowflake/connector/cpp/ArrowIterator/DateConverter.hpp @@ -1,12 +1,14 @@ -/* - * Copyright (c) 2013-2019 Snowflake Computing - */ +// +// Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
+// + #ifndef PC_DATECONVERTER_HPP #define PC_DATECONVERTER_HPP #include "IColumnConverter.hpp" #include "Python/Common.hpp" #include "logging.hpp" +#include namespace sf { @@ -26,11 +28,24 @@ class DateConverter : public IColumnConverter /** from Python Ordinal to 1970-01-01 */ static constexpr int epochDay = 719163; - static Logger logger; + static Logger* logger; py::UniqueRef& m_pyDatetimeDate; }; +class NumpyDateConverter : public IColumnConverter +{ +public: + explicit NumpyDateConverter(std::shared_ptr array, PyObject * context); + + PyObject* toPyObject(int64_t rowIndex) const override; + +private: + std::shared_ptr m_array; + + PyObject * m_context; +}; + } // namespace sf #endif // PC_DATECONVERTER_HPP diff --git a/cpp/ArrowIterator/DecimalConverter.cpp b/src/snowflake/connector/cpp/ArrowIterator/DecimalConverter.cpp similarity index 86% rename from cpp/ArrowIterator/DecimalConverter.cpp rename to src/snowflake/connector/cpp/ArrowIterator/DecimalConverter.cpp index 282c636cf..6131eeaa4 100644 --- a/cpp/ArrowIterator/DecimalConverter.cpp +++ b/src/snowflake/connector/cpp/ArrowIterator/DecimalConverter.cpp @@ -1,10 +1,12 @@ -/* - * Copyright (c) 2013-2019 Snowflake Computing - */ -#include +// +// Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +// +#include "Python/Common.hpp" #include "DecimalConverter.hpp" #include "Python/Helpers.hpp" +#include +#include namespace sf { @@ -49,7 +51,7 @@ PyObject* DecimalFromDecimalConverter::toPyObject(int64_t rowIndex) const * meet some encoding problem with std::string */ return PyObject_CallFunction(m_pyDecimalConstructor.get(), "s#", formatDecimalString.c_str(), - formatDecimalString.size()); + static_cast(formatDecimalString.size())); } else { diff --git a/cpp/ArrowIterator/DecimalConverter.hpp b/src/snowflake/connector/cpp/ArrowIterator/DecimalConverter.hpp similarity index 66% rename from cpp/ArrowIterator/DecimalConverter.hpp rename to src/snowflake/connector/cpp/ArrowIterator/DecimalConverter.hpp index 8c47be7db..771315dd8 100644 --- a/cpp/ArrowIterator/DecimalConverter.hpp +++ b/src/snowflake/connector/cpp/ArrowIterator/DecimalConverter.hpp @@ -1,11 +1,13 @@ -/* - * Copyright (c) 2013-2019 Snowflake Computing - */ +// +// Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
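+// Note: with PY_SSIZE_T_CLEAN defined (see Python/Common.hpp), the "s#" format +// used by DecimalFromDecimalConverter passes the length argument as Py_ssize_t, +// which is why DecimalConverter.cpp now casts formatDecimalString.size() instead +// of handing a size_t straight to the varargs call.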
+// + #ifndef PC_DECIMALCONVERTER_HPP #define PC_DECIMALCONVERTER_HPP #include "IColumnConverter.hpp" #include "Python/Common.hpp" +#include namespace sf { @@ -78,6 +80,48 @@ PyObject* DecimalFromIntConverter::toPyObject(int64_t rowIndex) const } } + +template +class NumpyDecimalConverter : public IColumnConverter +{ +public: + explicit NumpyDecimalConverter(std::shared_ptr array, + int precision, int scale, PyObject * context) + : m_array(std::dynamic_pointer_cast(array)), + m_precision(precision), + m_scale(scale), + m_context(context) + { + } + + PyObject* toPyObject(int64_t rowIndex) const override; + +private: + std::shared_ptr m_array; + + int m_precision; + + int m_scale; + + PyObject * m_context; +}; + +template +PyObject* NumpyDecimalConverter::toPyObject(int64_t rowIndex) const +{ + if (m_array->IsValid(rowIndex)) + { + int64_t val = m_array->Value(rowIndex); + + return PyObject_CallMethod(m_context, "FIXED_to_numpy_float64", "Li", val, m_scale); + } + else + { + Py_RETURN_NONE; + } +} + + } // namespace sf #endif // PC_DECIMALCONVERTER_HPP diff --git a/src/snowflake/connector/cpp/ArrowIterator/FloatConverter.cpp b/src/snowflake/connector/cpp/ArrowIterator/FloatConverter.cpp new file mode 100644 index 000000000..95d45ecbd --- /dev/null +++ b/src/snowflake/connector/cpp/ArrowIterator/FloatConverter.cpp @@ -0,0 +1,48 @@ +// +// Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +// + +#include "FloatConverter.hpp" +#include + +namespace sf +{ + +/** snowflake float is 64-precision, which refers to double here */ +FloatConverter::FloatConverter(std::shared_ptr array) +: m_array(std::dynamic_pointer_cast(array)) +{ +} + +PyObject* FloatConverter::toPyObject(int64_t rowIndex) const +{ + if (m_array->IsValid(rowIndex)) + { + return PyFloat_FromDouble(m_array->Value(rowIndex)); + } + else + { + Py_RETURN_NONE; + } +} + +NumpyFloat64Converter::NumpyFloat64Converter(std::shared_ptr array, PyObject * context) +: m_array(std::dynamic_pointer_cast(array)), m_context(context) +{ +} + +PyObject* NumpyFloat64Converter::toPyObject(int64_t rowIndex) const +{ + if (m_array->IsValid(rowIndex)) + { + double val = m_array->Value(rowIndex); + + return PyObject_CallMethod(m_context, "REAL_to_numpy_float64", "d", val); + } + else + { + Py_RETURN_NONE; + } +} + +} // namespace sf diff --git a/cpp/ArrowIterator/FloatConverter.hpp b/src/snowflake/connector/cpp/ArrowIterator/FloatConverter.hpp similarity index 50% rename from cpp/ArrowIterator/FloatConverter.hpp rename to src/snowflake/connector/cpp/ArrowIterator/FloatConverter.hpp index 07dff2287..3ef1b374a 100644 --- a/cpp/ArrowIterator/FloatConverter.hpp +++ b/src/snowflake/connector/cpp/ArrowIterator/FloatConverter.hpp @@ -1,10 +1,12 @@ -/* - * Copyright (c) 2013-2019 Snowflake Computing - */ +// +// Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
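+// NumpyFloat64Converter below defers to the context's REAL_to_numpy_float64 +// ("d" marshals the value as a C double), keeping numpy scalar construction on +// the Python side like the other Numpy* converters.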
+// + #ifndef PC_FLOATCONVERTER_HPP #define PC_FLOATCONVERTER_HPP #include "IColumnConverter.hpp" +#include namespace sf { @@ -20,6 +22,19 @@ class FloatConverter : public IColumnConverter std::shared_ptr m_array; }; +class NumpyFloat64Converter : public IColumnConverter +{ +public: + explicit NumpyFloat64Converter(std::shared_ptr array, PyObject * context); + + PyObject* toPyObject(int64_t rowIndex) const override; + +private: + std::shared_ptr m_array; + + PyObject * m_context; +}; + } // namespace sf #endif // PC_FLOATCONVERTER_HPP diff --git a/cpp/ArrowIterator/IColumnConverter.hpp b/src/snowflake/connector/cpp/ArrowIterator/IColumnConverter.hpp similarity index 61% rename from cpp/ArrowIterator/IColumnConverter.hpp rename to src/snowflake/connector/cpp/ArrowIterator/IColumnConverter.hpp index 15dd23c87..7cc7b4b62 100644 --- a/cpp/ArrowIterator/IColumnConverter.hpp +++ b/src/snowflake/connector/cpp/ArrowIterator/IColumnConverter.hpp @@ -1,12 +1,11 @@ -/* - * Copyright (c) 2013-2019 Snowflake Computing - */ +// +// Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +// + #ifndef PC_ICOLUMNCONVERTER_HPP #define PC_ICOLUMNCONVERTER_HPP -#include -#include -#include +#include "Python/Common.hpp" namespace sf { @@ -16,6 +15,7 @@ class IColumnConverter public: IColumnConverter() = default; virtual ~IColumnConverter() = default; + // The caller is responsible for calling DECREF on the returned pointer virtual PyObject* toPyObject(int64_t rowIndex) const = 0; }; } diff --git a/cpp/ArrowIterator/IntConverter.cpp b/src/snowflake/connector/cpp/ArrowIterator/IntConverter.cpp similarity index 66% rename from cpp/ArrowIterator/IntConverter.cpp rename to src/snowflake/connector/cpp/ArrowIterator/IntConverter.cpp index 9b097dc2b..6762342f8 100644 --- a/cpp/ArrowIterator/IntConverter.cpp +++ b/src/snowflake/connector/cpp/ArrowIterator/IntConverter.cpp @@ -1,6 +1,7 @@ -/* - * Copyright (c) 2013-2019 Snowflake Computing - */ +// +// Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +// + #include "IntConverter.hpp" namespace sf diff --git a/cpp/ArrowIterator/IntConverter.hpp b/src/snowflake/connector/cpp/ArrowIterator/IntConverter.hpp similarity index 54% rename from cpp/ArrowIterator/IntConverter.hpp rename to src/snowflake/connector/cpp/ArrowIterator/IntConverter.hpp index 70190dd27..0899fca86 100644 --- a/cpp/ArrowIterator/IntConverter.hpp +++ b/src/snowflake/connector/cpp/ArrowIterator/IntConverter.hpp @@ -1,10 +1,12 @@ -/* - * Copyright (c) 2013-2019 Snowflake Computing - */ +// +// Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
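+// The NumpyIntConverter template below widens whatever Arrow integer array it +// wraps to int64_t and hands the value to the context's FIXED_to_numpy_int64 +// ("L" marshals a C long long), so a single template covers Int8 through Int64 +// arrays.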
+// + #ifndef PC_INTCONVERTER_HPP #define PC_INTCONVERTER_HPP #include "IColumnConverter.hpp" +#include namespace sf { @@ -48,6 +50,38 @@ PyObject* IntConverter::toPyObject(int64_t rowIndex) const } } +template +class NumpyIntConverter : public IColumnConverter +{ +public: + explicit NumpyIntConverter(std::shared_ptr array, PyObject * context) + : m_array(std::dynamic_pointer_cast(array)), + m_context(context) + { + } + + PyObject* toPyObject(int64_t rowIndex) const override; + +private: + std::shared_ptr m_array; + + PyObject * m_context; +}; + +template +PyObject* NumpyIntConverter::toPyObject(int64_t rowIndex) const +{ + if (m_array->IsValid(rowIndex)) + { + int64_t val = m_array->Value(rowIndex); + return PyObject_CallMethod(m_context, "FIXED_to_numpy_int64", "L", val); + } + else + { + Py_RETURN_NONE; + } +} + } // namespace sf #endif // PC_INTCONVERTER_HPP diff --git a/cpp/ArrowIterator/Python/Common.cpp b/src/snowflake/connector/cpp/ArrowIterator/Python/Common.cpp similarity index 63% rename from cpp/ArrowIterator/Python/Common.cpp rename to src/snowflake/connector/cpp/ArrowIterator/Python/Common.cpp index c4c361a40..537cf7fa1 100644 --- a/cpp/ArrowIterator/Python/Common.cpp +++ b/src/snowflake/connector/cpp/ArrowIterator/Python/Common.cpp @@ -1,6 +1,7 @@ -/* - * Copyright (c) 2013-2019 Snowflake Computing - */ +// +// Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +// + #include "Common.hpp" namespace sf diff --git a/cpp/ArrowIterator/Python/Common.hpp b/src/snowflake/connector/cpp/ArrowIterator/Python/Common.hpp similarity index 63% rename from cpp/ArrowIterator/Python/Common.hpp rename to src/snowflake/connector/cpp/ArrowIterator/Python/Common.hpp index b7bd6204f..2220ec628 100644 --- a/cpp/ArrowIterator/Python/Common.hpp +++ b/src/snowflake/connector/cpp/ArrowIterator/Python/Common.hpp @@ -1,10 +1,22 @@ -/* - * Copyright (c) 2013-2019 Snowflake Computing - */ +// +// Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +// + #ifndef PC_PYTHON_COMMON_HPP #define PC_PYTHON_COMMON_HPP +// Support for not having PY_SSIZE_T_CLEAN defined will end in Python 3.10. 
It causes +// argument parsing to not accept integers, leaving only Py_ssize_t as an option +#define PY_SSIZE_T_CLEAN + +// We have to make sure that we import Python.h once for special flags that need to be +// set before importing it #include +#include +#include +#include +#include +#include #include "Util/macros.hpp" namespace sf @@ -34,13 +46,13 @@ class UniqueRef { } - UniqueRef(UniqueRef&& other) : UniqueRef(other.detach()) + explicit UniqueRef(UniqueRef&& other) : UniqueRef(other.release()) { } UniqueRef& operator=(UniqueRef&& other) { - m_pyObj = other.detach(); + reset(other.release()); return *this; } @@ -49,30 +61,25 @@ class UniqueRef reset(); } - void reset() - { - reset(nullptr); - } - - void reset(PyObject* pyObj) + void reset(PyObject* pyObj = nullptr) { Py_XDECREF(m_pyObj); m_pyObj = pyObj; } - PyObject* detach() + PyObject* release() noexcept { PyObject* tmp = m_pyObj; m_pyObj = nullptr; return tmp; } - PyObject* get() const + PyObject* get() const noexcept { return m_pyObj; } - bool empty() const + bool empty() const noexcept { return m_pyObj == nullptr; } @@ -95,37 +102,18 @@ class PyUniqueLock PyUniqueLock(PyUniqueLock&&) = delete; PyUniqueLock& operator=(PyUniqueLock&&) = delete; - PyUniqueLock() : m_isLocked(false) + PyUniqueLock() { - acquire(); + m_state = PyGILState_Ensure(); } ~PyUniqueLock() { - release(); - } - - void acquire() - { - if (!m_isLocked) - { - m_state = PyGILState_Ensure(); - m_isLocked = true; - } - } - - void release() - { - if (m_isLocked) - { - PyGILState_Release(m_state); - m_isLocked = false; - } + PyGILState_Release(m_state); } private: PyGILState_STATE m_state; - bool m_isLocked; }; } // namespace py diff --git a/cpp/ArrowIterator/Python/Helpers.cpp b/src/snowflake/connector/cpp/ArrowIterator/Python/Helpers.cpp similarity index 79% rename from cpp/ArrowIterator/Python/Helpers.cpp rename to src/snowflake/connector/cpp/ArrowIterator/Python/Helpers.cpp index 4519b4fae..5784c6f6c 100644 --- a/cpp/ArrowIterator/Python/Helpers.cpp +++ b/src/snowflake/connector/cpp/ArrowIterator/Python/Helpers.cpp @@ -1,9 +1,10 @@ -/* - * Copyright (c) 2013-2019 Snowflake Computing - */ -#include +// +// Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
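+// The logger calls below now go through the (file, function, line) overload and +// pass moduleName.c_str() / name.c_str() explicitly: feeding a std::string to a +// printf-style "%s" is undefined behavior, so the raw C string is required.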
+// + #include "Helpers.hpp" #include "Common.hpp" +#include namespace sf { @@ -30,7 +31,7 @@ void importPythonModule(const std::string& moduleName, UniqueRef& ref, PyObject* module = PyImport_ImportModule(moduleName.c_str()); if (checkPyError()) { - logger.error("import python module '%s' failed", moduleName); + logger.error(__FILE__, __func__, __LINE__, "import python module '%s' failed", moduleName.c_str()); return; } ref.reset(module); @@ -55,7 +56,7 @@ void importFromModule(const UniqueRef& moduleRef, const std::string& name, PyObject* attr = PyObject_GetAttrString(moduleRef.get(), name.c_str()); if (checkPyError()) { - logger.error("import python attribute '%s' failed", name); + logger.error(__FILE__, __func__, __LINE__, "import python attribute '%s' failed", name.c_str()); return; } ref.reset(attr); diff --git a/cpp/ArrowIterator/Python/Helpers.hpp b/src/snowflake/connector/cpp/ArrowIterator/Python/Helpers.hpp similarity index 73% rename from cpp/ArrowIterator/Python/Helpers.hpp rename to src/snowflake/connector/cpp/ArrowIterator/Python/Helpers.hpp index 229ce5cc4..1f5f99273 100644 --- a/cpp/ArrowIterator/Python/Helpers.hpp +++ b/src/snowflake/connector/cpp/ArrowIterator/Python/Helpers.hpp @@ -1,14 +1,12 @@ -/* - * Copyright (c) 2013-2019 Snowflake Computing - */ +// +// Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +// + #ifndef PC_PYTHON_HELPERS_HPP #define PC_PYTHON_HELPERS_HPP -/** this two header files will be removed when we replace arrow::Status with our - * own status data structure */ -#include -#include #include "logging.hpp" +#include namespace sf { @@ -20,9 +18,6 @@ class UniqueRef; using Logger = ::sf::Logger; -/** All arrow::Status will be replaced by our own data structure in the future - */ - /** * \brief: import a python module * \param moduleName: the name of the python module diff --git a/cpp/ArrowIterator/SnowflakeType.cpp b/src/snowflake/connector/cpp/ArrowIterator/SnowflakeType.cpp similarity index 93% rename from cpp/ArrowIterator/SnowflakeType.cpp rename to src/snowflake/connector/cpp/ArrowIterator/SnowflakeType.cpp index 88ab12012..e089b8e7d 100644 --- a/cpp/ArrowIterator/SnowflakeType.cpp +++ b/src/snowflake/connector/cpp/ArrowIterator/SnowflakeType.cpp @@ -1,6 +1,7 @@ -/* - * Copyright (c) 2013-2019 Snowflake Computing - */ +// +// Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +// + #include "SnowflakeType.hpp" namespace sf diff --git a/cpp/ArrowIterator/SnowflakeType.hpp b/src/snowflake/connector/cpp/ArrowIterator/SnowflakeType.hpp similarity index 90% rename from cpp/ArrowIterator/SnowflakeType.hpp rename to src/snowflake/connector/cpp/ArrowIterator/SnowflakeType.hpp index 9fadcfe4b..dc200d3da 100644 --- a/cpp/ArrowIterator/SnowflakeType.hpp +++ b/src/snowflake/connector/cpp/ArrowIterator/SnowflakeType.hpp @@ -1,6 +1,7 @@ -/* - * Copyright (c) 2013-2019 Snowflake Computing - */ +// +// Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
+// + #ifndef PC_SNOWFLAKETYPE_HPP #define PC_SNOWFLAKETYPE_HPP diff --git a/cpp/ArrowIterator/StringConverter.cpp b/src/snowflake/connector/cpp/ArrowIterator/StringConverter.cpp similarity index 72% rename from cpp/ArrowIterator/StringConverter.cpp rename to src/snowflake/connector/cpp/ArrowIterator/StringConverter.cpp index 565816e06..fef6453a5 100644 --- a/cpp/ArrowIterator/StringConverter.cpp +++ b/src/snowflake/connector/cpp/ArrowIterator/StringConverter.cpp @@ -1,11 +1,13 @@ -/* - * Copyright (c) 2013-2019 Snowflake Computing - */ +// +// Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +// + #include "StringConverter.hpp" +#include namespace sf { -Logger StringConverter::logger("snowflake.connector.StringConverter"); +Logger* StringConverter::logger = new Logger("snowflake.connector.StringConverter"); StringConverter::StringConverter(std::shared_ptr array) : m_array(std::dynamic_pointer_cast(array)) diff --git a/cpp/ArrowIterator/StringConverter.hpp b/src/snowflake/connector/cpp/ArrowIterator/StringConverter.hpp similarity index 77% rename from cpp/ArrowIterator/StringConverter.hpp rename to src/snowflake/connector/cpp/ArrowIterator/StringConverter.hpp index b30704086..cd09f1505 100644 --- a/cpp/ArrowIterator/StringConverter.hpp +++ b/src/snowflake/connector/cpp/ArrowIterator/StringConverter.hpp @@ -1,11 +1,13 @@ -/* - * Copyright (c) 2013-2019 Snowflake Computing - */ +// +// Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +// + #ifndef PC_STRINGCONVERTER_HPP #define PC_STRINGCONVERTER_HPP #include "IColumnConverter.hpp" #include "logging.hpp" +#include namespace sf { @@ -20,7 +22,7 @@ class StringConverter : public IColumnConverter private: std::shared_ptr m_array; - static Logger logger; + static Logger* logger; }; } // namespace sf diff --git a/cpp/ArrowIterator/TimeConverter.cpp b/src/snowflake/connector/cpp/ArrowIterator/TimeConverter.cpp similarity index 66% rename from cpp/ArrowIterator/TimeConverter.cpp rename to src/snowflake/connector/cpp/ArrowIterator/TimeConverter.cpp index 6635f2011..6574b58d0 100644 --- a/cpp/ArrowIterator/TimeConverter.cpp +++ b/src/snowflake/connector/cpp/ArrowIterator/TimeConverter.cpp @@ -1,6 +1,7 @@ -/* - * Copyright (c) 2013-2019 Snowflake Computing - */ +// +// Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +// + #include "TimeConverter.hpp" namespace sf diff --git a/cpp/ArrowIterator/TimeConverter.hpp b/src/snowflake/connector/cpp/ArrowIterator/TimeConverter.hpp similarity index 94% rename from cpp/ArrowIterator/TimeConverter.hpp rename to src/snowflake/connector/cpp/ArrowIterator/TimeConverter.hpp index fc9757849..fdcde0048 100644 --- a/cpp/ArrowIterator/TimeConverter.hpp +++ b/src/snowflake/connector/cpp/ArrowIterator/TimeConverter.hpp @@ -1,6 +1,7 @@ -/* - * Copyright (c) 2013-2019 Snowflake Computing - */ +// +// Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +// + #ifndef PC_TIMECONVERTER_HPP #define PC_TIMECONVERTER_HPP @@ -8,6 +9,7 @@ #include "Python/Common.hpp" #include "Python/Helpers.hpp" #include "Util/time.hpp" +#include namespace sf { diff --git a/src/snowflake/connector/cpp/ArrowIterator/TimeStampConverter.cpp b/src/snowflake/connector/cpp/ArrowIterator/TimeStampConverter.cpp new file mode 100644 index 000000000..0dd9d621c --- /dev/null +++ b/src/snowflake/connector/cpp/ArrowIterator/TimeStampConverter.cpp @@ -0,0 +1,290 @@ +// +// Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
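+// The toType()/FormatArgs helpers below assemble PyObject_CallMethod format +// strings at compile time from the C++ argument types, so int64_t fields are +// passed with whichever of "l" (long) or "L" (long long) matches the platform: +// on an LP64 Linux build FormatArgs2 over two int64_t values yields "ll", while +// on LLP64 Windows it yields "LL".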
+// + +#include "TimeStampConverter.hpp" +#include "Python/Helpers.hpp" +#include "Util/time.hpp" + +#include +#include +#include + +template +constexpr char toType() { + static_assert( + std::is_same::value + || std::is_same::value + || std::is_same::value + || std::is_same::value + || std::is_same::value + , "Unknown type"); + return std::is_same::value ? 'b' + : std::is_same::value ? 'h' + : std::is_same::value ? 'i' + : std::is_same::value ? 'l' + : std::is_same::value ? 'L' + // Should not get here. Error. + : '?'; +} + +template +struct FormatArgs1 { + char format[2]; + constexpr FormatArgs1() + : format{toType(), '\0'} + {} +}; +template +struct FormatArgs2 { + char format[3]; + constexpr FormatArgs2() + : format{toType(), toType(), '\0'} + {} +}; +template +struct FormatArgs3 { + char format[4]; + constexpr FormatArgs3() + : format{toType(), toType(), toType(), '\0'} + {} +}; + +namespace sf +{ +TimeStampBaseConverter::TimeStampBaseConverter(PyObject* context, int32_t scale) +: m_context(context), m_scale(scale) +{ +} + +OneFieldTimeStampNTZConverter::OneFieldTimeStampNTZConverter( + std::shared_ptr array, int32_t scale, PyObject* context) +: TimeStampBaseConverter(context, scale), + m_array(std::dynamic_pointer_cast(array)) +{ +} + +PyObject* OneFieldTimeStampNTZConverter::toPyObject(int64_t rowIndex) const +{ + if (m_array->IsValid(rowIndex)) + { + internal::TimeSpec ts(m_array->Value(rowIndex), m_scale); + + static constexpr FormatArgs2 format; +#ifdef _WIN32 + return PyObject_CallMethod(m_context, "TIMESTAMP_NTZ_to_python_windows", format.format, + ts.seconds, ts.microseconds); +#else + return PyObject_CallMethod(m_context, "TIMESTAMP_NTZ_to_python", format.format, + ts.seconds, ts.microseconds); +#endif + } + else + { + Py_RETURN_NONE; + } +} + +NumpyOneFieldTimeStampNTZConverter::NumpyOneFieldTimeStampNTZConverter( + std::shared_ptr array, int32_t scale, PyObject* context) +: TimeStampBaseConverter(context, scale), + m_array(std::dynamic_pointer_cast(array)) +{ +} + +PyObject* NumpyOneFieldTimeStampNTZConverter::toPyObject(int64_t rowIndex) const +{ + if (m_array->IsValid(rowIndex)) + { + int64_t val = m_array->Value(rowIndex); + return PyObject_CallMethod(m_context, "TIMESTAMP_NTZ_ONE_FIELD_to_numpy_datetime64", "Li", val, m_scale); + } + else + { + Py_RETURN_NONE; + } +} + +TwoFieldTimeStampNTZConverter::TwoFieldTimeStampNTZConverter( + std::shared_ptr array, int32_t scale, PyObject* context) +: TimeStampBaseConverter(context, scale), + m_array(std::dynamic_pointer_cast(array)), + m_epoch(std::dynamic_pointer_cast( + m_array->GetFieldByName(internal::FIELD_NAME_EPOCH))), + m_fraction(std::dynamic_pointer_cast( + m_array->GetFieldByName(internal::FIELD_NAME_FRACTION))) +{ +} + +PyObject* TwoFieldTimeStampNTZConverter::toPyObject(int64_t rowIndex) const +{ + if (m_array->IsValid(rowIndex)) + { + int64_t seconds = m_epoch->Value(rowIndex); + int64_t microseconds = m_fraction->Value(rowIndex) / 1000; + + static constexpr FormatArgs2 format; +#ifdef _WIN32 + return PyObject_CallMethod(m_context, "TIMESTAMP_NTZ_to_python_windows", format.format, + seconds, microseconds); +#else + return PyObject_CallMethod(m_context, "TIMESTAMP_NTZ_to_python", format.format, + seconds, microseconds); +#endif + } + else + { + Py_RETURN_NONE; + } +} + +NumpyTwoFieldTimeStampNTZConverter::NumpyTwoFieldTimeStampNTZConverter( + std::shared_ptr array, int32_t scale, PyObject* context) +: TimeStampBaseConverter(context, scale), + m_array(std::dynamic_pointer_cast(array)), + 
m_epoch(std::dynamic_pointer_cast( + m_array->GetFieldByName(internal::FIELD_NAME_EPOCH))), + m_fraction(std::dynamic_pointer_cast( + m_array->GetFieldByName(internal::FIELD_NAME_FRACTION))) +{ +} + +PyObject* NumpyTwoFieldTimeStampNTZConverter::toPyObject(int64_t rowIndex) const +{ + if (m_array->IsValid(rowIndex)) + { + int64_t epoch = m_epoch->Value(rowIndex); + int32_t frac = m_fraction->Value(rowIndex); + return PyObject_CallMethod(m_context, "TIMESTAMP_NTZ_TWO_FIELD_to_numpy_datetime64", "Li", epoch, frac); + } + else + { + Py_RETURN_NONE; + } +} + + +OneFieldTimeStampLTZConverter::OneFieldTimeStampLTZConverter( + std::shared_ptr array, int32_t scale, PyObject* context) +: TimeStampBaseConverter(context, scale), + m_array(std::dynamic_pointer_cast(array)) +{ +} + +PyObject* OneFieldTimeStampLTZConverter::toPyObject(int64_t rowIndex) const +{ + if (m_array->IsValid(rowIndex)) + { + internal::TimeSpec ts(m_array->Value(rowIndex), m_scale); + + static constexpr FormatArgs2 format; + +#ifdef _WIN32 + return PyObject_CallMethod(m_context, "TIMESTAMP_LTZ_to_python_windows", format.format, + ts.seconds, ts.microseconds); +#else + return PyObject_CallMethod(m_context, "TIMESTAMP_LTZ_to_python", format.format, + ts.seconds, ts.microseconds); +#endif + } + + Py_RETURN_NONE; +} + +TwoFieldTimeStampLTZConverter::TwoFieldTimeStampLTZConverter( + std::shared_ptr array, int32_t scale, PyObject* context) +: TimeStampBaseConverter(context, scale), + m_array(std::dynamic_pointer_cast(array)), + m_epoch(std::dynamic_pointer_cast( + m_array->GetFieldByName(internal::FIELD_NAME_EPOCH))), + m_fraction(std::dynamic_pointer_cast( + m_array->GetFieldByName(internal::FIELD_NAME_FRACTION))) +{ +} + +PyObject* TwoFieldTimeStampLTZConverter::toPyObject(int64_t rowIndex) const +{ + if (m_array->IsValid(rowIndex)) + { + int64_t seconds = m_epoch->Value(rowIndex); + int64_t microseconds = m_fraction->Value(rowIndex) / 1000; + + static constexpr FormatArgs2 format; +#ifdef _WIN32 + return PyObject_CallMethod(m_context, "TIMESTAMP_LTZ_to_python_windows", format.format, + seconds, microseconds); +#else + return PyObject_CallMethod(m_context, "TIMESTAMP_LTZ_to_python", format.format, + seconds, microseconds); +#endif + } + + Py_RETURN_NONE; +} + +TwoFieldTimeStampTZConverter::TwoFieldTimeStampTZConverter( + std::shared_ptr array, int32_t scale, PyObject* context) +: TimeStampBaseConverter(context, scale), + m_array(std::dynamic_pointer_cast(array)), + m_epoch(std::dynamic_pointer_cast( + m_array->GetFieldByName(internal::FIELD_NAME_EPOCH))), + m_timezone(std::dynamic_pointer_cast( + m_array->GetFieldByName(internal::FIELD_NAME_TIME_ZONE))) +{ +} + +PyObject* TwoFieldTimeStampTZConverter::toPyObject(int64_t rowIndex) const +{ + if (m_array->IsValid(rowIndex)) + { + int32_t timezone = m_timezone->Value(rowIndex); + internal::TimeSpec ts(m_epoch->Value(rowIndex), m_scale); + + static constexpr FormatArgs3 format; +#if _WIN32 + return PyObject_CallMethod(m_context, "TIMESTAMP_TZ_to_python_windows", format.format, + ts.seconds, ts.microseconds, timezone); +#else + return PyObject_CallMethod(m_context, "TIMESTAMP_TZ_to_python", format.format, + ts.seconds, ts.microseconds, timezone); +#endif + } + + Py_RETURN_NONE; +} + +ThreeFieldTimeStampTZConverter::ThreeFieldTimeStampTZConverter( + std::shared_ptr array, int32_t scale, PyObject* context) +: TimeStampBaseConverter(context, scale), + m_array(std::dynamic_pointer_cast(array)), + m_epoch(std::dynamic_pointer_cast( + m_array->GetFieldByName(internal::FIELD_NAME_EPOCH))), + 
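+    // (reviewer note, assumption) FIELD_NAME_TIME_ZONE is read below as an
+    // int32 index; decoding it into an actual tzinfo (apparently an offset in
+    // minutes biased by 1440) is left to the Python context's
+    // TIMESTAMP_TZ_to_python* methods.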
m_timezone(std::dynamic_pointer_cast( + m_array->GetFieldByName(internal::FIELD_NAME_TIME_ZONE))), + m_fraction(std::dynamic_pointer_cast( + m_array->GetFieldByName(internal::FIELD_NAME_FRACTION))) +{ +} + +PyObject* ThreeFieldTimeStampTZConverter::toPyObject(int64_t rowIndex) const +{ + if (m_array->IsValid(rowIndex)) + { + int32_t timezone = m_timezone->Value(rowIndex); + int64_t seconds = m_epoch->Value(rowIndex); + int64_t microseconds = m_fraction->Value(rowIndex) / 1000; + + static constexpr FormatArgs3 format; +#ifdef _WIN32 + return PyObject_CallMethod(m_context, "TIMESTAMP_TZ_to_python_windows", format.format, + seconds, microseconds, timezone); +#else + return PyObject_CallMethod(m_context, "TIMESTAMP_TZ_to_python", format.format, + seconds, microseconds, timezone); +#endif + } + + Py_RETURN_NONE; +} + +} // namespace sf diff --git a/cpp/ArrowIterator/TimeStampConverter.hpp b/src/snowflake/connector/cpp/ArrowIterator/TimeStampConverter.hpp similarity index 76% rename from cpp/ArrowIterator/TimeStampConverter.hpp rename to src/snowflake/connector/cpp/ArrowIterator/TimeStampConverter.hpp index de2a50330..75207d2ca 100644 --- a/cpp/ArrowIterator/TimeStampConverter.hpp +++ b/src/snowflake/connector/cpp/ArrowIterator/TimeStampConverter.hpp @@ -1,6 +1,7 @@ -/* - * Copyright (c) 2013-2019 Snowflake Computing - */ +// +// Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +// + #ifndef PC_TIMESTAMPCONVERTER_HPP #define PC_TIMESTAMPCONVERTER_HPP @@ -8,12 +9,13 @@ #include "Python/Common.hpp" #include "Python/Helpers.hpp" #include "Util/time.hpp" +#include namespace sf { // correspond to python datetime.time and datetime.time has only support 6 bit -// precision, which is milisecond +// precision, which is millisecond class TimeStampBaseConverter : public IColumnConverter { @@ -39,6 +41,18 @@ class OneFieldTimeStampNTZConverter : public TimeStampBaseConverter std::shared_ptr m_array; }; +class NumpyOneFieldTimeStampNTZConverter : public TimeStampBaseConverter +{ +public: + explicit NumpyOneFieldTimeStampNTZConverter(std::shared_ptr array, + int32_t scale, PyObject* context); + + PyObject* toPyObject(int64_t rowIndex) const override; + +private: + std::shared_ptr m_array; +}; + class TwoFieldTimeStampNTZConverter : public TimeStampBaseConverter { public: @@ -53,6 +67,21 @@ class TwoFieldTimeStampNTZConverter : public TimeStampBaseConverter std::shared_ptr m_fraction; }; + +class NumpyTwoFieldTimeStampNTZConverter : public TimeStampBaseConverter +{ +public: + explicit NumpyTwoFieldTimeStampNTZConverter(std::shared_ptr array, + int32_t scale, PyObject* context); + + PyObject* toPyObject(int64_t rowIndex) const override; + +private: + std::shared_ptr m_array; + std::shared_ptr m_epoch; + std::shared_ptr m_fraction; +}; + class OneFieldTimeStampLTZConverter : public TimeStampBaseConverter { public: diff --git a/cpp/ArrowIterator/Util/macros.hpp b/src/snowflake/connector/cpp/ArrowIterator/Util/macros.hpp similarity index 82% rename from cpp/ArrowIterator/Util/macros.hpp rename to src/snowflake/connector/cpp/ArrowIterator/Util/macros.hpp index 3bd36cf7c..12aa8f2b4 100644 --- a/cpp/ArrowIterator/Util/macros.hpp +++ b/src/snowflake/connector/cpp/ArrowIterator/Util/macros.hpp @@ -1,6 +1,7 @@ -/* - * Copyright (c) 2013-2019 Snowflake Computing - */ +// +// Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
+// + #ifndef PC_UTIL_MACROS_HPP #define PC_UTIL_MACROS_HPP diff --git a/src/snowflake/connector/cpp/ArrowIterator/Util/time.cpp b/src/snowflake/connector/cpp/ArrowIterator/Util/time.cpp new file mode 100644 index 000000000..e9fee5a4e --- /dev/null +++ b/src/snowflake/connector/cpp/ArrowIterator/Util/time.cpp @@ -0,0 +1,76 @@ +// +// Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +// + +#include "time.hpp" + +namespace sf +{ + +namespace internal +{ + +int32_t getHourFromSeconds(int64_t seconds, int32_t scale) +{ + return seconds / powTenSB4[scale] / SECONDS_PER_HOUR; +} + +int32_t getHourFromSeconds(int32_t seconds, int32_t scale) +{ + return seconds / powTenSB4[scale] / SECONDS_PER_HOUR; +} + +int32_t getMinuteFromSeconds(int64_t seconds, int32_t scale) +{ + return seconds / powTenSB4[scale] % SECONDS_PER_HOUR / SECONDS_PER_MINUTE; +} + +int32_t getMinuteFromSeconds(int32_t seconds, int32_t scale) +{ + return seconds / powTenSB4[scale] % SECONDS_PER_HOUR / SECONDS_PER_MINUTE; +} + +int32_t getSecondFromSeconds(int64_t seconds, int32_t scale) +{ + return seconds / powTenSB4[scale] % SECONDS_PER_MINUTE; +} + +int32_t getSecondFromSeconds(int32_t seconds, int32_t scale) +{ + return seconds / powTenSB4[scale] % SECONDS_PER_MINUTE; +} + +int32_t getMicrosecondFromSeconds(int64_t seconds, int32_t scale) +{ + int32_t microsec = seconds % powTenSB4[scale]; + return scale > PYTHON_DATETIME_TIME_MICROSEC_DIGIT ? microsec /= + powTenSB4[scale - PYTHON_DATETIME_TIME_MICROSEC_DIGIT] : microsec *= + powTenSB4[PYTHON_DATETIME_TIME_MICROSEC_DIGIT - scale]; +} + +TimeSpec::TimeSpec(int64_t units, int32_t scale) { + if (scale == 0) { + seconds = units; + microseconds = 0; + } else if (scale == 6) { + seconds = 0; + microseconds = units; + } else if (scale > 6) { + seconds = 0; + const int divider = internal::powTenSB4[scale - 6]; + if (units < 0) { + units -= divider - 1; + } + microseconds = units / divider; + } else { + seconds = units / internal::powTenSB4[scale]; + int64_t fractions = std::abs(units % internal::powTenSB4[scale]); + microseconds = fractions * internal::powTenSB4[6 - scale]; + if (units < 0) { + microseconds = -microseconds; + } + } +} + +} // namespace internal +} // namespace sf diff --git a/cpp/ArrowIterator/Util/time.hpp b/src/snowflake/connector/cpp/ArrowIterator/Util/time.hpp similarity index 68% rename from cpp/ArrowIterator/Util/time.hpp rename to src/snowflake/connector/cpp/ArrowIterator/Util/time.hpp index c920031e6..620515e82 100644 --- a/cpp/ArrowIterator/Util/time.hpp +++ b/src/snowflake/connector/cpp/ArrowIterator/Util/time.hpp @@ -1,11 +1,12 @@ -/* - * Copyright (c) 2013-2019 Snowflake Computing - */ +// +// Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +// + #ifndef PC_UTIL_TIME_HPP #define PC_UTIL_TIME_HPP -#include #include +#include "Python/Common.hpp" #ifdef _WIN32 #include @@ -45,14 +46,14 @@ constexpr int pow10Int(int n) return n == 0 ? 
1 : 10 * pow10Int(n - 1); } -/** if the fraction's digit is greater than PYTHON_DATETIME_TIME_MICROSEC_DIGIT, - * we cast it to exactly PYTHON_DATETIME_TIME_MICROSEC_DIGIT digit */ -int32_t castToFormattedFraction(int32_t frac, bool isPositive, int32_t scale); - -int32_t getNumberOfDigit(int32_t num); +struct TimeSpec { + int64_t seconds; + int64_t microseconds; + TimeSpec(int64_t units, int32_t scale); +}; -// TODO : I think we can just keep int64_t version, since we can call the -// function with implicit conversion from int32 to int64 +// TODO: I think we can just keep int64_t version, since we can call the +// function with implicit conversion from int32 to int64 int32_t getHourFromSeconds(int64_t seconds, int32_t scale); int32_t getMinuteFromSeconds(int64_t seconds, int32_t scale); @@ -69,15 +70,6 @@ int32_t getSecondFromSeconds(int32_t seconds, int32_t scale); int32_t getMicrosecondFromSeconds(int32_t seconds, int32_t scale); -/** the input epoch is the combination of seconds and fraction - * the output is formated double needed by python connector */ -double getFormattedDoubleFromEpoch(int64_t epoch, int32_t scale); - -/** the input are epoch and fraction - * the output is formated double needed by python connector */ -double getFormattedDoubleFromEpochFraction(int64_t epoch, int32_t frac, - int32_t scale); - } // namespace internal } // namespace sf diff --git a/src/snowflake/connector/cpp/Logging/logging.cpp b/src/snowflake/connector/cpp/Logging/logging.cpp new file mode 100644 index 000000000..e43afe08d --- /dev/null +++ b/src/snowflake/connector/cpp/Logging/logging.cpp @@ -0,0 +1,105 @@ +// +// Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +// + +#include "logging.hpp" +#include "Python/Helpers.hpp" +#include + +namespace sf +{ +std::string Logger::formatString(const char *format, ...) +{ + char msg[1000] = {0}; + va_list args; + va_start(args, format); + vsnprintf(msg, sizeof(msg), format, args); + va_end(args); + + return std::string(msg); +} + +void Logger::setupPyLogger() +{ + py::UniqueRef pyLoggingModule; + py::importPythonModule("snowflake.connector.snow_logging", pyLoggingModule); + PyObject *logger = + PyObject_CallMethod(pyLoggingModule.get(), "getSnowLogger", "s", m_name); + + m_pyLogger.reset(logger); +} + +Logger::Logger(const char *name) +: m_name(name) +{ +} + +void Logger::log(int level, const char *path_name, const char *func_name, int line_num, const char *msg) +{ + if (m_pyLogger.get() == nullptr) + { + setupPyLogger(); + } + + PyObject *logger = m_pyLogger.get(); + py::UniqueRef keywords(PyDict_New()); + py::UniqueRef call_log(PyObject_GetAttrString(logger, "log")); + + // prepare keyword args for snow_logger + PyDict_SetItemString(keywords.get(), "level", Py_BuildValue("i", level)); + PyDict_SetItemString(keywords.get(), "path_name", Py_BuildValue("s", path_name)); + PyDict_SetItemString(keywords.get(), "func_name", Py_BuildValue("s", func_name)); + PyDict_SetItemString(keywords.get(), "line_num", Py_BuildValue("i", line_num)); + PyDict_SetItemString(keywords.get(), "msg", Py_BuildValue("s", msg)); + + // call snow_logging.SnowLogger.log() + PyObject_Call(call_log.get(), Py_BuildValue("()"), keywords.get()); +} + + +void Logger::debug(const char *path_name, const char *func_name, int line_num, const char *format, ...) 
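+// (reviewer note) like the other level-specific wrappers that follow, this
+// formats into a fixed 1000-byte stack buffer, so vsnprintf silently
+// truncates longer messages before they reach the Python-side logger.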
+{ + char msg[1000] = {0}; + va_list args; + va_start(args, format); + vsnprintf(msg, sizeof(msg), format, args); + va_end(args); + + Logger::log(DEBUG, path_name, func_name, line_num, msg); +} + +void Logger::info(const char *path_name, const char *func_name, int line_num, const char *format, ...) +{ + char msg[1000] = {0}; + va_list args; + va_start(args, format); + vsnprintf(msg, sizeof(msg), format, args); + va_end(args); + + Logger::log(INFO, path_name, func_name, line_num, msg); +} + +void Logger::warn(const char *path_name, const char *func_name, int line_num, const char *format, ...) +{ + char msg[1000] = {0}; + va_list args; + va_start(args, format); + vsnprintf(msg, sizeof(msg), format, args); + va_end(args); + + Logger::log(WARN, path_name, func_name, line_num, msg); +} + +void Logger::error(const char *path_name, const char *func_name, int line_num, const char *format, ...) +{ + char msg[1000] = {0}; + va_list args; + va_start(args, format); + vsnprintf(msg, sizeof(msg), format, args); + va_end(args); + + + Logger::log(ERROR, path_name, func_name, line_num, msg); +} + +} diff --git a/src/snowflake/connector/cpp/Logging/logging.hpp b/src/snowflake/connector/cpp/Logging/logging.hpp new file mode 100644 index 000000000..46d73b4bc --- /dev/null +++ b/src/snowflake/connector/cpp/Logging/logging.hpp @@ -0,0 +1,49 @@ +// +// Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +// + +#ifndef PC_LOGGING_HPP +#define PC_LOGGING_HPP + +#include "Python/Common.hpp" +#include + +namespace sf +{ + +class Logger +{ +public: + explicit Logger(const char *name); + + void log(int level, const char *path_name, const char *func_name, int line_num, const char *msg); + + void debug(const char *path_name, const char *func_name, int line_num, const char *format, ...); + + void info(const char *path_name, const char *func_name, int line_num, const char *format, ...); + + void warn(const char *path_name, const char *func_name, int line_num, const char *format, ...); + + void error(const char *path_name, const char *func_name, int line_num, const char *format, ...); + + static std::string formatString(const char *fmt, ...); + +private: + py::UniqueRef m_pyLogger; + const char *const m_name; + static constexpr int CRITICAL = 50; + static constexpr int FATAL = CRITICAL; + static constexpr int ERROR = 40; + static constexpr int WARNING = 30; + static constexpr int WARN = WARNING; + static constexpr int INFO = 20; + static constexpr int DEBUG = 10; + static constexpr int NOTSET = 0; + static constexpr int LINE_NUM = 0; + + void setupPyLogger(); +}; + +} // namespace sf + +#endif // PC_LOGGING_HPP diff --git a/cpp/scripts/.clang-format b/src/snowflake/connector/cpp/scripts/.clang-format similarity index 100% rename from cpp/scripts/.clang-format rename to src/snowflake/connector/cpp/scripts/.clang-format diff --git a/cpp/scripts/format.sh b/src/snowflake/connector/cpp/scripts/format.sh similarity index 100% rename from cpp/scripts/format.sh rename to src/snowflake/connector/cpp/scripts/format.sh diff --git a/src/snowflake/connector/cursor.py b/src/snowflake/connector/cursor.py new file mode 100644 index 000000000..e85aad301 --- /dev/null +++ b/src/snowflake/connector/cursor.py @@ -0,0 +1,1297 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
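+#
+# (reviewer note, illustrative sketch) typical use of the cursor implemented
+# in this module; connection parameters are placeholders:
+#
+#     import snowflake.connector
+#
+#     con = snowflake.connector.connect(account="...", user="...", password="...")
+#     cur = con.cursor()
+#     cur.execute("select current_version()")
+#     print(cur.fetchone())
+#     cur.close()
+#     con.close()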
+# + +from __future__ import annotations + +import logging +import re +import signal +import sys +import time +import uuid +from enum import Enum +from logging import getLogger +from threading import Lock, Timer +from typing import ( + IO, + TYPE_CHECKING, + Any, + Callable, + Generator, + Iterator, + NamedTuple, + NoReturn, + Sequence, +) + +from snowflake.connector.result_batch import create_batches_from_response +from snowflake.connector.result_set import ResultSet + +from .bind_upload_agent import BindUploadAgent, BindUploadError +from .compat import BASE_EXCEPTION_CLASS +from .constants import ( + FIELD_NAME_TO_ID, + PARAMETER_PYTHON_CONNECTOR_QUERY_RESULT_FORMAT, + FileTransferType, + QueryStatus, +) +from .description import CLIENT_NAME +from .errorcode import ( + ER_CURSOR_IS_CLOSED, + ER_FAILED_PROCESSING_PYFORMAT, + ER_FAILED_TO_REWRITE_MULTI_ROW_INSERT, + ER_INVALID_VALUE, + ER_NO_ARROW_RESULT, + ER_NO_PYARROW, + ER_NO_PYARROW_SNOWSQL, + ER_NOT_POSITIVE_SIZE, + ER_UNSUPPORTED_METHOD, +) +from .errors import ( + DatabaseError, + Error, + IntegrityError, + InterfaceError, + NotSupportedError, + ProgrammingError, +) +from .file_transfer_agent import SnowflakeFileTransferAgent +from .options import installed_pandas, pandas +from .sqlstate import SQLSTATE_FEATURE_NOT_SUPPORTED +from .telemetry import TelemetryData, TelemetryField +from .time_util import get_time_millis + +if TYPE_CHECKING: # pragma: no cover + from .connection import SnowflakeConnection + from .file_transfer_agent import SnowflakeProgressPercentage + from .result_batch import ResultBatch + + +logger = getLogger(__name__) + +if installed_pandas: + from pyarrow import Table +else: + logger.debug("Failed to import pyarrow. Cannot use pandas fetch API") + Table = None + +try: + from .arrow_iterator import PyArrowIterator # NOQA + + CAN_USE_ARROW_RESULT_FORMAT = True +except ImportError as e: # pragma: no cover + logger.debug( + f"Failed to import ArrowResult. No Apache Arrow result set format can be used. ImportError: {e}", + ) + CAN_USE_ARROW_RESULT_FORMAT = False + +STATEMENT_TYPE_ID_DML = 0x3000 +STATEMENT_TYPE_ID_INSERT = STATEMENT_TYPE_ID_DML + 0x100 +STATEMENT_TYPE_ID_UPDATE = STATEMENT_TYPE_ID_DML + 0x200 +STATEMENT_TYPE_ID_DELETE = STATEMENT_TYPE_ID_DML + 0x300 +STATEMENT_TYPE_ID_MERGE = STATEMENT_TYPE_ID_DML + 0x400 +STATEMENT_TYPE_ID_MULTI_TABLE_INSERT = STATEMENT_TYPE_ID_DML + 0x500 + +STATEMENT_TYPE_ID_DML_SET = frozenset( + [ + STATEMENT_TYPE_ID_DML, + STATEMENT_TYPE_ID_INSERT, + STATEMENT_TYPE_ID_UPDATE, + STATEMENT_TYPE_ID_DELETE, + STATEMENT_TYPE_ID_MERGE, + STATEMENT_TYPE_ID_MULTI_TABLE_INSERT, + ] +) + +DESC_TABLE_RE = re.compile(r"desc(?:ribe)?\s+([\w_]+)\s*;?\s*$", flags=re.IGNORECASE) + +LOG_MAX_QUERY_LENGTH = 80 + +ASYNC_NO_DATA_MAX_RETRY = 24 +ASYNC_RETRY_PATTERN = [1, 1, 2, 3, 4, 8, 10] + + +class ResultMetadata(NamedTuple): + name: str + type_code: int + display_size: int + internal_size: int + precision: int + scale: int + is_nullable: bool + + @classmethod + def from_column(cls, col: dict[str, Any]): + """Initializes a ResultMetadata object from the column description in the query response.""" + return cls( + col["name"], + FIELD_NAME_TO_ID[ + col["extTypeName"].upper() + if "extTypeName" in col + else col["type"].upper() + ], + None, + col["length"], + col["precision"], + col["scale"], + col["nullable"], + ) + + +def exit_handler(*_) -> NoReturn: + """Handler for signal. 
When called, it will log a message and raise SystemExit with exit code 1."""
+    print("\nForce exit")
+    logger.info("Force exit")
+    sys.exit(1)
+
+
+class ResultState(Enum):
+    DEFAULT = 1
+    VALID = 2
+    RESET = 3
+
+
+class SnowflakeCursor:
+    """Implementation of the Cursor object that is returned from the Connection.cursor() method.
+
+    Attributes:
+        description: A list of namedtuples with metadata about all columns.
+        rowcount: The number of records updated or selected. If not clear, -1 is returned.
+        rownumber: The current 0-based index of the cursor in the result set or None if the index cannot be
+            determined.
+        sfqid: Snowflake query id in UUID form. Include this in the problem report to customer support.
+        sqlstate: Snowflake SQL State code.
+        timestamp_output_format: Snowflake timestamp_output_format for timestamps.
+        timestamp_ltz_output_format: Snowflake output format for LTZ timestamps.
+        timestamp_tz_output_format: Snowflake output format for TZ timestamps.
+        timestamp_ntz_output_format: Snowflake output format for NTZ timestamps.
+        date_output_format: Snowflake output format for dates.
+        time_output_format: Snowflake output format for times.
+        timezone: Snowflake timezone.
+        binary_output_format: Snowflake output format for binary fields.
+        arraysize: The default number of rows fetched by fetchmany.
+        connection: The connection object by which the cursor was created.
+        errorhandler: The function that handles errors.
+        is_file_transfer: Whether the current command is a PUT or a GET.
+
+    TODO:
+        Most of these attributes have no reason to be properties, we could just store them in public variables.
+        Calling a function is expensive in Python and most of these getters are unnecessary.
+    """
+
+    PUT_SQL_RE = re.compile(r"^(?:/\*.*\*/\s*)*put\s+", flags=re.IGNORECASE)
+    GET_SQL_RE = re.compile(r"^(?:/\*.*\*/\s*)*get\s+", flags=re.IGNORECASE)
+    INSERT_SQL_RE = re.compile(r"^insert\s+into", flags=re.IGNORECASE)
+    COMMENT_SQL_RE = re.compile(r"/\*.*\*/")
+    INSERT_SQL_VALUES_RE = re.compile(
+        r".*VALUES\s*(\(.*\)).*", re.IGNORECASE | re.MULTILINE | re.DOTALL
+    )
+    ALTER_SESSION_RE = re.compile(
+        r"alter\s+session\s+set\s+(.*)=\'?([^\']+)\'?\s*;",
+        flags=re.IGNORECASE | re.MULTILINE | re.DOTALL,
+    )
+
+    @staticmethod
+    def get_file_transfer_type(sql: str) -> FileTransferType | None:
+        """Decide whether a SQL statement is a file transfer and return its type.
+
+        None is returned if the SQL statement isn't a file transfer so that this
+        function can be used in an if-statement.
+        """
+        if SnowflakeCursor.PUT_SQL_RE.match(sql):
+            return FileTransferType.PUT
+        elif SnowflakeCursor.GET_SQL_RE.match(sql):
+            return FileTransferType.GET
+        return None
+
+    def __init__(
+        self,
+        connection: SnowflakeConnection,
+        use_dict_result: bool = False,
+    ) -> None:
+        """Inits a SnowflakeCursor with a connection.
+
+        Args:
+            connection: The connection that created this cursor.
+            use_dict_result: Decides whether to use dict result or not.
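+
+        Example (illustrative; cursors are normally obtained through
+        ``connection.cursor()`` rather than constructed directly):
+
+            cur = SnowflakeCursor(connection, use_dict_result=True)
+            cur.execute("select 1 as a")
+            cur.fetchone()  # {'A': 1}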
+ """ + self._connection: SnowflakeConnection = connection + + self._errorhandler: Callable[ + [SnowflakeConnection, SnowflakeCursor, type[Error], dict[str, str]], + None, + ] = Error.default_errorhandler + self.messages: list[ + tuple[type[Error] | type[Exception], dict[str, str | bool]] + ] = [] + self._timebomb: Timer | None = None # must be here for abort_exit method + self._description: list[ResultMetadata] | None = None + self._column_idx_to_name = None + self._sfqid = None + self._sqlstate = None + self._total_rowcount = -1 + self._sequence_counter = -1 + self._request_id = None + self._is_file_transfer = False + + self._timestamp_output_format = None + self._timestamp_ltz_output_format = None + self._timestamp_ntz_output_format = None + self._timestamp_tz_output_format = None + self._date_output_format = None + self._time_output_format = None + self._timezone = None + self._binary_output_format = None + self._result: Iterator[tuple] | Iterator[dict] | None = None + self._result_set: ResultSet | None = None + self._result_state: ResultState = ResultState.DEFAULT + self._use_dict_result = use_dict_result + self.query: str | None = None + # TODO: self._query_result_format could be defined as an enum + self._query_result_format: str | None = None + + self._arraysize = 1 # PEP-0249: defaults to 1 + + self._lock_canceling = Lock() + + self._first_chunk_time = None + + self._log_max_query_length = connection.log_max_query_length + self._inner_cursor: SnowflakeCursor | None = None + self._prefetch_hook = None + self._rownumber: int | None = None + + self.reset() + + def __del__(self) -> None: # pragma: no cover + try: + self.close() + except BASE_EXCEPTION_CLASS as e: + if logger.getEffectiveLevel() <= logging.INFO: + logger.info(e) + + @property + def description(self) -> list[ResultMetadata]: + return self._description + + @property + def rowcount(self): + return self._total_rowcount if self._total_rowcount >= 0 else None + + @property + def rownumber(self): + return self._rownumber if self._rownumber >= 0 else None + + @property + def sfqid(self): + return self._sfqid + + @property + def sqlstate(self): + return self._sqlstate + + @property + def timestamp_output_format(self): + return self._timestamp_output_format + + @property + def timestamp_ltz_output_format(self): + return ( + self._timestamp_ltz_output_format + if self._timestamp_ltz_output_format + else self._timestamp_output_format + ) + + @property + def timestamp_tz_output_format(self): + return ( + self._timestamp_tz_output_format + if self._timestamp_tz_output_format + else self._timestamp_output_format + ) + + @property + def timestamp_ntz_output_format(self): + return ( + self._timestamp_ntz_output_format + if self._timestamp_ntz_output_format + else self._timestamp_output_format + ) + + @property + def date_output_format(self): + return self._date_output_format + + @property + def time_output_format(self): + return self._time_output_format + + @property + def timezone(self): + return self._timezone + + @property + def binary_output_format(self): + return self._binary_output_format + + @property + def arraysize(self): + return self._arraysize + + @arraysize.setter + def arraysize(self, value): + self._arraysize = int(value) + + @property + def connection(self): + return self._connection + + @property + def errorhandler(self): + return self._errorhandler + + @errorhandler.setter + def errorhandler(self, value): + logger.debug("setting errorhandler: %s", value) + if value is None: + raise ProgrammingError("Invalid 
errorhandler is specified") + self._errorhandler = value + + @property + def is_file_transfer(self): + """Whether the command is PUT or GET.""" + return hasattr(self, "_is_file_transfer") and self._is_file_transfer + + def callproc(self, procname, args=()): + """Not supported.""" + Error.errorhandler_wrapper( + self.connection, + self, + NotSupportedError, + { + "msg": "callproc is not supported.", + "errno": ER_UNSUPPORTED_METHOD, + "sqlstate": SQLSTATE_FEATURE_NOT_SUPPORTED, + }, + ) + + def close(self) -> bool | None: + """Closes the cursor object. + + Returns whether the cursor was closed during this call. + """ + try: + if self.is_closed(): + return False + + with self._lock_canceling: + self.reset() + self._connection = None + del self.messages[:] + return True + except Exception: + pass + + def is_closed(self): + return self._connection is None or self._connection.is_closed() + + def _execute_helper( + self, + query: str, + timeout: int = 0, + statement_params: dict[str, str] | None = None, + binding_params: tuple | dict[str, dict[str, str]] = None, + binding_stage: str | None = None, + is_internal: bool = False, + describe_only: bool = False, + _no_results: bool = False, + _is_put_get=None, + _no_retry: bool = False, + ): + del self.messages[:] + + if statement_params is not None and not isinstance(statement_params, dict): + Error.errorhandler_wrapper( + self.connection, + self, + ProgrammingError, + { + "msg": "The data type of statement params is invalid. It must be dict.", + "errno": ER_INVALID_VALUE, + }, + ) + + # check if current installation include arrow extension or not, + # if not, we set statement level query result format to be JSON + if not CAN_USE_ARROW_RESULT_FORMAT: + logger.debug("Cannot use arrow result format, fallback to json format") + if statement_params is None: + statement_params = { + PARAMETER_PYTHON_CONNECTOR_QUERY_RESULT_FORMAT: "JSON" + } + else: + result_format_val = statement_params.get( + PARAMETER_PYTHON_CONNECTOR_QUERY_RESULT_FORMAT + ) + if str(result_format_val).upper() == "ARROW": + self.check_can_use_arrow_resultset() + elif result_format_val is None: + statement_params[ + PARAMETER_PYTHON_CONNECTOR_QUERY_RESULT_FORMAT + ] = "JSON" + + self._sequence_counter = self._connection._next_sequence_counter() + self._request_id = uuid.uuid4() + + logger.debug(f"Request id: {self._request_id}") + + if logger.getEffectiveLevel() <= logging.DEBUG: + logger.debug("running query [%s]", self._format_query_for_log(query)) + if _is_put_get is not None: + # if told the query is PUT or GET, use the information + self._is_file_transfer = _is_put_get + else: + # or detect it. 
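+            # (reviewer note, illustrative) the regexes tolerate leading block
+            # comments, so both of these are detected as file transfers:
+            #   /* upload */ put file:///tmp/data.csv @~/staged
+            #   get @%mytable file:///tmp/data/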
+ self._is_file_transfer = self.PUT_SQL_RE.match( + query + ) or self.GET_SQL_RE.match(query) + logger.debug("is_file_transfer: %s", self._is_file_transfer is not None) + + real_timeout = ( + timeout if timeout and timeout > 0 else self._connection.network_timeout + ) + + if real_timeout is not None: + self._timebomb = Timer(real_timeout, self.__cancel_query, [query]) + self._timebomb.start() + logger.debug("started timebomb in %ss", real_timeout) + else: + self._timebomb = None + + original_sigint = signal.getsignal(signal.SIGINT) + + def interrupt_handler(*_): # pragma: no cover + try: + signal.signal(signal.SIGINT, exit_handler) + except (ValueError, TypeError): + # ignore failures + pass + try: + if self._timebomb is not None: + self._timebomb.cancel() + logger.debug("cancelled timebomb in finally") + self._timebomb = None + self.__cancel_query(query) + finally: + if original_sigint: + try: + signal.signal(signal.SIGINT, original_sigint) + except (ValueError, TypeError): + # ignore failures + pass + raise KeyboardInterrupt + + try: + if not original_sigint == exit_handler: + signal.signal(signal.SIGINT, interrupt_handler) + except ValueError: # pragma: no cover + logger.debug( + "Failed to set SIGINT handler. " "Not in main thread. Ignored..." + ) + ret = {"data": {}} + try: + ret = self._connection.cmd_query( + query, + self._sequence_counter, + self._request_id, + binding_params=binding_params, + binding_stage=binding_stage, + is_file_transfer=bool(self._is_file_transfer), + statement_params=statement_params, + is_internal=is_internal, + describe_only=describe_only, + _no_results=_no_results, + _no_retry=_no_retry, + ) + finally: + try: + if original_sigint: + signal.signal(signal.SIGINT, original_sigint) + except (ValueError, TypeError): # pragma: no cover + logger.debug( + "Failed to reset SIGINT handler. Not in main " "thread. Ignored..." 
+                )
+            if self._timebomb is not None:
+                self._timebomb.cancel()
+                logger.debug("cancelled timebomb in finally")
+
+        if "data" in ret and "parameters" in ret["data"]:
+            parameters = ret["data"]["parameters"]
+            # Set session parameters for cursor object
+            for kv in parameters:
+                if "TIMESTAMP_OUTPUT_FORMAT" in kv["name"]:
+                    self._timestamp_output_format = kv["value"]
+                if "TIMESTAMP_NTZ_OUTPUT_FORMAT" in kv["name"]:
+                    self._timestamp_ntz_output_format = kv["value"]
+                if "TIMESTAMP_LTZ_OUTPUT_FORMAT" in kv["name"]:
+                    self._timestamp_ltz_output_format = kv["value"]
+                if "TIMESTAMP_TZ_OUTPUT_FORMAT" in kv["name"]:
+                    self._timestamp_tz_output_format = kv["value"]
+                if "DATE_OUTPUT_FORMAT" in kv["name"]:
+                    self._date_output_format = kv["value"]
+                if "TIME_OUTPUT_FORMAT" in kv["name"]:
+                    self._time_output_format = kv["value"]
+                if "TIMEZONE" in kv["name"]:
+                    self._timezone = kv["value"]
+                if "BINARY_OUTPUT_FORMAT" in kv["name"]:
+                    self._binary_output_format = kv["value"]
+            # Set session parameters for connection object
+            self._connection._update_parameters(
+                {p["name"]: p["value"] for p in parameters}
+            )
+
+        self.query = query
+        self._sequence_counter = -1
+        return ret
+
+    def execute(
+        self,
+        command: str,
+        params: Sequence[Any] | dict[Any, Any] | None = None,
+        _bind_stage: str | None = None,
+        timeout: int | None = None,
+        _exec_async: bool = False,
+        _no_retry: bool = False,
+        _do_reset: bool = True,
+        _put_callback: SnowflakeProgressPercentage = None,
+        _put_azure_callback: SnowflakeProgressPercentage = None,
+        _put_callback_output_stream: IO[str] = sys.stdout,
+        _get_callback: SnowflakeProgressPercentage = None,
+        _get_azure_callback: SnowflakeProgressPercentage = None,
+        _get_callback_output_stream: IO[str] = sys.stdout,
+        _show_progress_bar: bool = True,
+        _statement_params: dict[str, str] | None = None,
+        _is_internal: bool = False,
+        _describe_only: bool = False,
+        _no_results: bool = False,
+        _use_ijson: bool = False,
+        _is_put_get: bool | None = None,
+        _raise_put_get_error: bool = True,
+        _force_put_overwrite: bool = False,
+        file_stream: IO[bytes] | None = None,
+    ) -> SnowflakeCursor | None:
+        """Executes a command/query.
+
+        Args:
+            command: The SQL command to be executed.
+            params: Parameters to be bound into the SQL statement.
+            _bind_stage: Path in temporary stage where binding parameters are uploaded as CSV files.
+            timeout: Number of seconds after which to abort the query.
+            _exec_async: Whether to execute this query asynchronously.
+            _no_retry: Whether or not to retry on known errors.
+            _do_reset: Whether or not the result set needs to be reset before executing the query.
+            _put_callback: Function to which a PUT command should call back.
+            _put_azure_callback: Function to which an Azure PUT command should call back.
+            _put_callback_output_stream: The output stream a PUT command's callback should report on.
+            _get_callback: Function to which a GET command should call back.
+            _get_azure_callback: Function to which an Azure GET command should call back.
+            _get_callback_output_stream: The output stream a GET command's callback should report on.
+            _show_progress_bar: Whether or not to show a progress bar.
+            _statement_params: Extra information that should be sent to Snowflake with the query.
+            _is_internal: This flag indicates whether the query is issued internally by the connector.
+            _describe_only: If true, the query will not be executed but will return the schema/description of this query.
+ _no_results: This flag tells the back-end to not return the result, just fire the query and return the + response returned by Snowflake's server. + _use_ijson: This flag doesn't do anything as ijson support has ended. + _is_put_get: Force decision of this SQL query being a PUT, or GET command. This is detected otherwise. + _raise_put_get_error: Whether to raise PUT and GET errors. + _force_put_overwrite: If the SQL query is a PUT, then this flag can force overwriting of an already + existing file on stage. + file_stream: File-like object to be uploaded with PUT + + Returns: + The cursor itself, or None if some error happened, or the response returned + by Snowflake if the _no_results flag is on. + """ + if _exec_async: + _no_results = True + logger.debug("executing SQL/command") + if self.is_closed(): + Error.errorhandler_wrapper( + self.connection, + self, + InterfaceError, + {"msg": "Cursor is closed in execute.", "errno": ER_CURSOR_IS_CLOSED}, + ) + + if _do_reset: + self.reset() + command = command.strip(" \t\n\r") if command else None + if not command: + logger.warning("execute: no query is given to execute") + return + + kwargs = { + "timeout": timeout, + "statement_params": _statement_params, + "is_internal": _is_internal, + "describe_only": _describe_only, + "_no_results": _no_results, + "_is_put_get": _is_put_get, + "_no_retry": _no_retry, + } + + if self._connection.is_pyformat: + # pyformat/format paramstyle + # client side binding + processed_params = self._connection._process_params_pyformat(params, self) + # SNOW-513061 collect telemetry for empty sequence usage before we make the breaking change announcement + if params is not None and len(params) == 0: + self._log_telemetry_job_data( + TelemetryField.EMPTY_SEQ_INTERPOLATION, + TelemetryData.TRUE + if self.connection._interpolate_empty_sequences + else TelemetryData.FALSE, + ) + if logger.getEffectiveLevel() <= logging.DEBUG: + logger.debug( + f"binding: [{self._format_query_for_log(command)}] " + f"with input=[{params}], " + f"processed=[{processed_params}]", + ) + if ( + self.connection._interpolate_empty_sequences + and processed_params is not None + ) or ( + not self.connection._interpolate_empty_sequences + and len(processed_params) > 0 + ): + query = command % processed_params + else: + query = command + else: + # qmark and numeric paramstyle + query = command + if _bind_stage: + kwargs["binding_stage"] = _bind_stage + else: + if params is not None and not isinstance(params, (list, tuple)): + errorvalue = { + "msg": f"Binding parameters must be a list: {params}", + "errno": ER_FAILED_PROCESSING_PYFORMAT, + } + Error.errorhandler_wrapper( + self.connection, self, ProgrammingError, errorvalue + ) + + kwargs["binding_params"] = self._connection._process_params_qmarks( + params, self + ) + + m = DESC_TABLE_RE.match(query) + if m: + query1 = f"describe table {m.group(1)}" + if logger.getEffectiveLevel() <= logging.WARNING: + logger.info( + "query was rewritten: org=%s, new=%s", + " ".join(line.strip() for line in query.split("\n")), + query1, + ) + query = query1 + + if logger.getEffectiveLevel() <= logging.INFO: + logger.info("query: [%s]", self._format_query_for_log(query)) + ret = self._execute_helper(query, **kwargs) + self._sfqid = ( + ret["data"]["queryId"] + if "data" in ret and "queryId" in ret["data"] + else None + ) + self._sqlstate = ( + ret["data"]["sqlState"] + if "data" in ret and "sqlState" in ret["data"] + else None + ) + self._first_chunk_time = get_time_millis() + + # if server gives a send time, log the 
time it took to arrive + if "data" in ret and "sendResultTime" in ret["data"]: + time_consume_first_result = ( + self._first_chunk_time - ret["data"]["sendResultTime"] + ) + self._log_telemetry_job_data( + TelemetryField.TIME_CONSUME_FIRST_RESULT, time_consume_first_result + ) + logger.debug("sfqid: %s", self.sfqid) + + logger.info("query execution done") + if ret["success"]: + logger.debug("SUCCESS") + data = ret["data"] + + logger.debug("PUT OR GET: %s", self.is_file_transfer) + if self.is_file_transfer: + # Decide whether to use the old, or new code path + sf_file_transfer_agent = SnowflakeFileTransferAgent( + self, + query, + ret, + put_callback=_put_callback, + put_azure_callback=_put_azure_callback, + put_callback_output_stream=_put_callback_output_stream, + get_callback=_get_callback, + get_azure_callback=_get_azure_callback, + get_callback_output_stream=_get_callback_output_stream, + show_progress_bar=_show_progress_bar, + raise_put_get_error=_raise_put_get_error, + force_put_overwrite=_force_put_overwrite + or data.get("overwrite", False), + source_from_stream=file_stream, + multipart_threshold=data.get("threshold"), + use_s3_regional_url=self._connection.enable_stage_s3_privatelink_for_us_east_1, + ) + sf_file_transfer_agent.execute() + data = sf_file_transfer_agent.result() + self._total_rowcount = len(data["rowset"]) if "rowset" in data else -1 + m = self.ALTER_SESSION_RE.match(query) + if m: + # session parameters + param = m.group(1).upper() + value = m.group(2) + self._connection.converter.set_parameter(param, value) + + if _exec_async: + self.connection._async_sfqids.add(self._sfqid) + if _no_results: + self._total_rowcount = ( + ret["data"]["total"] + if "data" in ret and "total" in ret["data"] + else -1 + ) + return data + self._init_result_and_meta(data) + else: + self._total_rowcount = ( + ret["data"]["total"] if "data" in ret and "total" in ret["data"] else -1 + ) + logger.debug(ret) + err = ret["message"] + code = ret.get("code", -1) + if "data" in ret: + err += ret["data"].get("errorMessage", "") + errvalue = { + "msg": err, + "errno": int(code), + "sqlstate": self._sqlstate, + "sfqid": self._sfqid, + } + is_integrity_error = ( + code == "100072" + ) # NULL result in a non-nullable column + error_class = IntegrityError if is_integrity_error else ProgrammingError + Error.errorhandler_wrapper(self.connection, self, error_class, errvalue) + return self + + def execute_async(self, *args, **kwargs): + """Convenience function to execute a query without waiting for results (asynchronously). + + This function takes the same arguments as execute, please refer to that function + for documentation. + """ + kwargs["_exec_async"] = True + return self.execute(*args, **kwargs) + + def describe(self, *args, **kwargs) -> list[ResultMetadata]: + """Obtain the schema of the result without executing the query. + + This function takes the same arguments as execute, please refer to that function + for documentation. + + Returns: + The schema of the result. 
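+
+        Example (illustrative):
+
+            metadata = cursor.describe("select 1 as X, 'a' as Y")
+            # [ResultMetadata(name='X', ...), ResultMetadata(name='Y', ...)]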
+ """ + kwargs["_describe_only"] = kwargs["_is_internal"] = True + self.execute(*args, **kwargs) + return self._description + + def _format_query_for_log(self, query): + return self._connection._format_query_for_log(query) + + def _is_dml(self, data): + return ( + "statementTypeId" in data + and int(data["statementTypeId"]) in STATEMENT_TYPE_ID_DML_SET + ) + + def _init_result_and_meta(self, data): + is_dml = self._is_dml(data) + self._query_result_format = data.get("queryResultFormat", "json") + logger.debug("Query result format: %s", self._query_result_format) + + if self._total_rowcount == -1 and not is_dml and data.get("total") is not None: + self._total_rowcount = data["total"] + + self._description: list[ResultMetadata] = [ + ResultMetadata.from_column(col) for col in data["rowtype"] + ] + + result_chunks = create_batches_from_response( + self, self._query_result_format, data, self._description + ) + + self._result_set = ResultSet( + self, + result_chunks, + self._connection.client_prefetch_threads, + ) + self._rownumber = -1 + self._result_state = ResultState.VALID + + # don't update the row count when the result is returned from `describe` method + if is_dml and "rowset" in data and len(data["rowset"]) > 0: + updated_rows = 0 + for idx, desc in enumerate(self._description): + if desc[0] in ( + "number of rows updated", + "number of multi-joined rows updated", + "number of rows deleted", + ) or desc[0].startswith("number of rows inserted"): + updated_rows += int(data["rowset"][0][idx]) + if self._total_rowcount == -1: + self._total_rowcount = updated_rows + else: + self._total_rowcount += updated_rows + + def check_can_use_arrow_resultset(self): + global CAN_USE_ARROW_RESULT_FORMAT + + if not CAN_USE_ARROW_RESULT_FORMAT: + if self._connection.application == "SnowSQL": + msg = "Currently SnowSQL doesn't support the result set in Apache Arrow format." + errno = ER_NO_PYARROW_SNOWSQL + else: + msg = "The result set in Apache Arrow format is not supported for the platform." 
+                errno = ER_NO_ARROW_RESULT
+
+            Error.errorhandler_wrapper(
+                self.connection,
+                self,
+                ProgrammingError,
+                {
+                    "msg": msg,
+                    "errno": errno,
+                },
+            )
+
+    def check_can_use_pandas(self):
+        if not installed_pandas:
+            msg = (
+                "Optional dependency: 'pandas' is not installed, please see the following link for install "
+                "instructions: https://docs.snowflake.com/en/user-guide/python-connector-pandas.html#installation"
+            )
+            errno = ER_NO_PYARROW
+
+            Error.errorhandler_wrapper(
+                self.connection,
+                self,
+                ProgrammingError,
+                {
+                    "msg": msg,
+                    "errno": errno,
+                },
+            )
+
+    def query_result(self, qid):
+        url = f"/queries/{qid}/result"
+        ret = self._connection.rest.request(url=url, method="get")
+        self._sfqid = (
+            ret["data"]["queryId"]
+            if "data" in ret and "queryId" in ret["data"]
+            else None
+        )
+        self._sqlstate = (
+            ret["data"]["sqlState"]
+            if "data" in ret and "sqlState" in ret["data"]
+            else None
+        )
+        logger.debug("sfqid=%s", self._sfqid)
+
+        if ret.get("success"):
+            data = ret.get("data")
+            self._init_result_and_meta(data)
+        else:
+            logger.info("failed")
+            logger.debug(ret)
+            err = ret["message"]
+            code = ret.get("code", -1)
+            if "data" in ret:
+                err += ret["data"].get("errorMessage", "")
+            errvalue = {
+                "msg": err,
+                "errno": int(code),
+                "sqlstate": self._sqlstate,
+                "sfqid": self._sfqid,
+            }
+            Error.errorhandler_wrapper(
+                self.connection, self, ProgrammingError, errvalue
+            )
+        return self
+
+    def fetch_arrow_batches(self) -> Iterator[Table]:
+        self.check_can_use_arrow_resultset()
+        if self._query_result_format != "arrow":
+            raise NotSupportedError
+        self._log_telemetry_job_data(
+            TelemetryField.ARROW_FETCH_BATCHES, TelemetryData.TRUE
+        )
+        return self._result_set._fetch_arrow_batches()
+
+    def fetch_arrow_all(self) -> Table | None:
+        self.check_can_use_arrow_resultset()
+        if self._query_result_format != "arrow":
+            raise NotSupportedError
+        self._log_telemetry_job_data(TelemetryField.ARROW_FETCH_ALL, TelemetryData.TRUE)
+        return self._result_set._fetch_arrow_all()
+
+    def fetch_pandas_batches(self, **kwargs) -> Iterator[pandas.DataFrame]:
+        """Fetches pandas DataFrames in batches, where a 'batch' is a Snowflake chunk."""
+        self.check_can_use_pandas()
+        if self._prefetch_hook is not None:
+            self._prefetch_hook()
+        if self._query_result_format != "arrow":
+            raise NotSupportedError
+        self._log_telemetry_job_data(
+            TelemetryField.PANDAS_FETCH_BATCHES, TelemetryData.TRUE
+        )
+        return self._result_set._fetch_pandas_batches(**kwargs)
+
+    def fetch_pandas_all(self, **kwargs) -> pandas.DataFrame:
+        """Fetches a single pandas DataFrame holding all of the results."""
+        self.check_can_use_pandas()
+        if self._prefetch_hook is not None:
+            self._prefetch_hook()
+        if self._query_result_format != "arrow":
+            raise NotSupportedError
+        self._log_telemetry_job_data(
+            TelemetryField.PANDAS_FETCH_ALL, TelemetryData.TRUE
+        )
+        return self._result_set._fetch_pandas_all(**kwargs)
+
+    def abort_query(self, qid):
+        url = f"/queries/{qid}/abort-request"
+        ret = self._connection.rest.request(url=url, method="post")
+        return ret.get("success")
+
+    def executemany(
+        self,
+        command: str,
+        seqparams: Sequence[Any] | dict[str, Any],
+    ) -> SnowflakeCursor:
+        """Executes a command/query with the given set of parameters sequentially."""
+        logger.debug("executing many SQLs/commands")
+        command = command.strip(" \t\n\r") if command else None
+
+        if not seqparams:
+            return self
+
+        if self.INSERT_SQL_RE.match(command):
+            if self._connection.is_pyformat:
+                logger.debug("rewriting INSERT query")
+                command_wo_comments = re.sub(self.COMMENT_SQL_RE, "", command)
+                m =
self.INSERT_SQL_VALUES_RE.match(command_wo_comments) + if not m: + Error.errorhandler_wrapper( + self.connection, + self, + InterfaceError, + { + "msg": "Failed to rewrite multi-row insert", + "errno": ER_FAILED_TO_REWRITE_MULTI_ROW_INSERT, + }, + ) + + fmt = m.group(1) + values = [] + for param in seqparams: + logger.debug(f"parameter: {param}") + values.append( + fmt % self._connection._process_params_pyformat(param, self) + ) + command = command.replace(fmt, ",".join(values), 1) + self.execute(command) + return self + else: + logger.debug("bulk insert") + # sanity check + row_size = len(seqparams[0]) + for row in seqparams: + if len(row) != row_size: + error_value = { + "msg": f"Bulk data size don't match. expected: {row_size}, " + f"got: {len(row)}, command: {command}", + "errno": ER_INVALID_VALUE, + } + Error.errorhandler_wrapper( + self.connection, self, InterfaceError, error_value + ) + return self + bind_size = len(seqparams) * row_size + bind_stage = None + if ( + bind_size + > self.connection._session_parameters[ + "CLIENT_STAGE_ARRAY_BINDING_THRESHOLD" + ] + > 0 + ): + # bind stage optimization + try: + rows = self.connection._write_params_to_byte_rows(seqparams) + bind_uploader = BindUploadAgent(self, rows) + bind_uploader.upload() + bind_stage = bind_uploader.stage_path + except BindUploadError: + logger.debug( + "Failed to upload binds to stage, sending binds to " + "Snowflake instead." + ) + except Exception as exc: + if not isinstance(exc, INCIDENT_BLACKLIST): + self.connection.incident.report_incident() + raise + binding_param = ( + None if bind_stage else list(map(list, zip(*seqparams))) + ) # transpose + self.execute(command, params=binding_param, _bind_stage=bind_stage) + return self + + self.reset() + for param in seqparams: + self.execute(command, param, _do_reset=False) + return self + + def _result_iterator( + self, + ) -> Generator[dict, None, None] | Generator[tuple, None, None]: + """Yields the elements from _result and raises an exception when appropriate.""" + try: + for _next in self._result: + if isinstance(_next, Exception): + Error.errorhandler_wrapper_from_ready_exception( + self._connection, + self, + _next, + ) + self._rownumber += 1 + yield _next + except TypeError as err: + if self._result_state == ResultState.DEFAULT: + raise err + else: + yield None + + def fetchone(self) -> dict | tuple | None: + """Fetches one row.""" + if self._prefetch_hook is not None: + self._prefetch_hook() + if self._result is None and self._result_set is not None: + self._result = iter(self._result_set) + self._result_state = ResultState.VALID + try: + return next(self._result_iterator()) + except StopIteration: + return None + + def fetchmany(self, size=None): + """Fetches the number of specified rows.""" + if size is None: + size = self.arraysize + + if size < 0: + errorvalue = { + "msg": ( + "The number of rows is not zero or " "positive number: {}" + ).format(size), + "errno": ER_NOT_POSITIVE_SIZE, + } + Error.errorhandler_wrapper( + self.connection, self, ProgrammingError, errorvalue + ) + ret = [] + while size > 0: + row = self.fetchone() + if row is None: + break + ret.append(row) + if size is not None: + size -= 1 + + return ret + + def fetchall(self) -> list[tuple] | list[dict]: + """Fetches all of the results.""" + ret = [] + while True: + row = self.fetchone() + if row is None: + break + ret.append(row) + return ret + + def nextset(self): + """Not supported.""" + logger.debug("nop") + return None + + def setinputsizes(self, _): + """Not supported.""" + 
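+        # (reviewer note) deliberately a no-op: PEP 249 requires cursors to
+        # expose setinputsizes/setoutputsize even when they do nothing.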
+        logger.debug("nop")
+
+    def setoutputsize(self, _, column=None):
+        """Not supported."""
+        del column
+        logger.debug("nop")
+
+    def scroll(self, value, mode="relative"):
+        Error.errorhandler_wrapper(
+            self.connection,
+            self,
+            NotSupportedError,
+            {
+                "msg": "scroll is not supported.",
+                "errno": ER_UNSUPPORTED_METHOD,
+                "sqlstate": SQLSTATE_FEATURE_NOT_SUPPORTED,
+            },
+        )
+
+    def reset(self):
+        """Resets the result set."""
+        self._total_rowcount = -1  # reset the rowcount
+        if self._result_state != ResultState.DEFAULT:
+            self._result_state = ResultState.RESET
+        if self._result is not None:
+            self._result = None
+        if self._inner_cursor is not None:
+            self._inner_cursor.reset()
+        self._result = None
+        self._inner_cursor = None
+        self._prefetch_hook = None
+        if not self.connection._reuse_results:
+            self._result_set = None
+
+    def __iter__(self) -> Iterator[dict] | Iterator[tuple]:
+        """Iteration over the result set."""
+        # set _result if _result_set is not None
+        if self._result is None and self._result_set is not None:
+            self._result = iter(self._result_set)
+            self._result_state = ResultState.VALID
+        return self._result_iterator()
+
+    def __cancel_query(self, query):
+        if self._sequence_counter >= 0 and not self.is_closed():
+            logger.debug("canceled. %s, request_id: %s", query, self._request_id)
+            with self._lock_canceling:
+                self._connection._cancel_query(query, self._request_id)
+
+    def _log_telemetry_job_data(
+        self, telemetry_field: TelemetryField, value: Any
+    ) -> None:
+        """Builds an instance of TelemetryData with the given field and logs it."""
+        obj = {
+            "type": telemetry_field.value,
+            "source": self._connection.application if self._connection else CLIENT_NAME,
+            "query_id": self._sfqid,
+            "value": int(value),
+        }
+        ts = get_time_millis()
+        try:
+            self._connection._log_telemetry(TelemetryData(obj, ts))
+        except AttributeError:
+            logger.warning(
+                "Cursor failed to log to telemetry. Connection object may be None.",
+                exc_info=True,
+            )
+
+    def __enter__(self):
+        """Context manager entry; returns the cursor itself."""
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        """Context manager exit; closes the cursor."""
+        self.close()
+
+    def get_results_from_sfqid(self, sfqid: str):
+        """Gets the results from a previously run query."""
+
+        def wait_until_ready():
+            """Waits until the query has finished executing, then retrieves the results."""
+            no_data_counter = 0
+            retry_pattern_pos = 0
+            while True:
+                status = self.connection.get_query_status(sfqid)
+                if not self.connection.is_still_running(status):
+                    break
+                if status == QueryStatus.NO_DATA:  # pragma: no cover
+                    no_data_counter += 1
+                    if no_data_counter > ASYNC_NO_DATA_MAX_RETRY:
+                        raise DatabaseError(
+                            "Cannot retrieve data on the status of this query. No information returned "
+                            "from server for query '{}'".format(sfqid)
+                        )
+                time.sleep(
+                    0.5 * ASYNC_RETRY_PATTERN[retry_pattern_pos]
+                )  # Same wait as JDBC
+                # If we can advance in ASYNC_RETRY_PATTERN then do so
+                if retry_pattern_pos < (len(ASYNC_RETRY_PATTERN) - 1):
+                    retry_pattern_pos += 1
+            if status != QueryStatus.SUCCESS:
+                raise DatabaseError(
+                    "Status of query '{}' is {}, results are unavailable".format(
+                        sfqid, status.name
+                    )
+                )
+            self._inner_cursor.execute(f"select * from table(result_scan('{sfqid}'))")
+            self._result = self._inner_cursor._result
+            self._query_result_format = self._inner_cursor._query_result_format
+            self._total_rowcount = self._inner_cursor._total_rowcount
+            self._description = self._inner_cursor._description
+            self._result_set = self._inner_cursor._result_set
+            self._result_state = ResultState.VALID
+            self._rownumber = 0
+            # Unset this function, so that we don't block anymore
+            self._prefetch_hook = None
+
+        self.connection.get_query_status_throw_if_error(
+            sfqid
+        )  # Trigger an exception if query failed
+        klass = self.__class__
+        self._inner_cursor = klass(self.connection)
+        self._sfqid = sfqid
+        self._prefetch_hook = wait_until_ready
+
+    def get_result_batches(self) -> list[ResultBatch] | None:
+        """Get the previously executed query's ``ResultBatch`` objects, if available.
+
+        If they are unavailable (for example when nothing has been executed yet),
+        None is returned.
+
+        For a detailed description of ``ResultBatch`` objects please see the docstring of:
+        ``snowflake.connector.result_batch.ResultBatch``
+        """
+        if self._result_set is None:
+            return None
+        self._log_telemetry_job_data(
+            TelemetryField.GET_PARTITIONS_USED, TelemetryData.TRUE
+        )
+        return self._result_set.batches
+
+
+class DictCursor(SnowflakeCursor):
+    """Cursor returning results in a dictionary."""
+
+    def __init__(self, connection):
+        super().__init__(
+            connection,
+            use_dict_result=True,
+        )
diff --git a/dbapi.py b/src/snowflake/connector/dbapi.py
similarity index 74%
rename from dbapi.py
rename to src/snowflake/connector/dbapi.py
index 6b0ae4e2c..9f59ba8cf 100644
--- a/dbapi.py
+++ b/src/snowflake/connector/dbapi.py
@@ -1,19 +1,22 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
 #
-# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved.
+# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved.
 #
-u"""
-This module implements some constructors and singletons as required by the
-DB API v2.0 (PEP-249).
-""" + +"""This module implements some constructors and singletons as required by the DB API v2.0 (PEP-249).""" + +from __future__ import annotations import datetime import json import time -from .constants import (get_string_types, get_binary_types, get_number_types, - get_timestamp_types) +from .constants import ( + get_binary_types, + get_number_types, + get_string_types, + get_timestamp_types, +) from .mixin import UnicodeMixin @@ -47,7 +50,7 @@ def TimestampFromTicks(ticks): return Timestamp(*time.localtime(ticks)[:6]) -Binary = str +Binary = bytes STRING = _DBAPITypeObject(get_string_types()) BINARY = _DBAPITypeObject(get_binary_types()) @@ -57,9 +60,7 @@ def TimestampFromTicks(ticks): class Json(UnicodeMixin): - """ - JSON adapter - """ + """JSON adapter.""" def __init__(self, value): self._value = json.dump(value) diff --git a/src/snowflake/connector/description.py b/src/snowflake/connector/description.py new file mode 100644 index 000000000..0d14f3fde --- /dev/null +++ b/src/snowflake/connector/description.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +"""Various constants.""" + +from __future__ import annotations + +import platform +import sys + +from .version import VERSION + +SNOWFLAKE_CONNECTOR_VERSION = ".".join(str(v) for v in VERSION[0:3]) +PYTHON_VERSION = ".".join(str(v) for v in sys.version_info[:3]) +OPERATING_SYSTEM = platform.system() +PLATFORM = platform.platform() +IMPLEMENTATION = platform.python_implementation() +COMPILER = platform.python_compiler() + +CLIENT_NAME = "PythonConnector" # don't change! +CLIENT_VERSION = ".".join([str(v) for v in VERSION[:3]]) diff --git a/src/snowflake/connector/encryption_util.py b/src/snowflake/connector/encryption_util.py new file mode 100644 index 000000000..28d627bf8 --- /dev/null +++ b/src/snowflake/connector/encryption_util.py @@ -0,0 +1,236 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +import base64 +import json +import os +import tempfile +from logging import getLogger +from typing import IO, TYPE_CHECKING + +from Cryptodome.Cipher import AES +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes + +from .compat import PKCS5_OFFSET, PKCS5_PAD, PKCS5_UNPAD +from .constants import UTF8, EncryptionMetadata, MaterialDescriptor, kilobyte + +block_size = int(algorithms.AES.block_size / 8) # in bytes + +if TYPE_CHECKING: # pragma: no cover + from .storage_client import SnowflakeFileEncryptionMaterial + + +def matdesc_to_unicode(matdesc): + """Convert Material Descriptor to Unicode String.""" + return str( + json.dumps( + { + "queryId": matdesc.query_id, + "smkId": str(matdesc.smk_id), + "keySize": str(matdesc.key_size), + }, + separators=(",", ":"), + ) + ) + + +class SnowflakeEncryptionUtil: + @staticmethod + def get_secure_random(byte_length: int) -> bytes: + return os.urandom(byte_length) + + @staticmethod + def encrypt_stream( + encryption_material: SnowflakeFileEncryptionMaterial, + src: IO[bytes], + out: IO[bytes], + chunk_size: int = 64 * kilobyte, # block_size * 4 * 1024, + ) -> EncryptionMetadata: + """Reads content from src and write the encrypted content into out. + + This function is sensitive to current position of src and out. + It does not seek to position 0 in neither stream objects before or after the encryption. 
+
+        Args:
+            encryption_material: The encryption material for the file.
+            src: The input stream.
+            out: The output stream.
+            chunk_size: The size of read chunks (Default value = block_size * 4 * 1024).
+
+        Returns:
+            The encryption metadata.
+        """
+        logger = getLogger(__name__)
+        use_openssl_only = os.getenv("SF_USE_OPENSSL_ONLY", "False") == "True"
+        decoded_key = base64.standard_b64decode(
+            encryption_material.query_stage_master_key
+        )
+        key_size = len(decoded_key)
+        logger.debug("key_size = %s", key_size)
+
+        # Generate key for data encryption
+        iv_data = SnowflakeEncryptionUtil.get_secure_random(block_size)
+        file_key = SnowflakeEncryptionUtil.get_secure_random(key_size)
+        if not use_openssl_only:
+            data_cipher = AES.new(key=file_key, mode=AES.MODE_CBC, IV=iv_data)
+        else:
+            backend = default_backend()
+            cipher = Cipher(
+                algorithms.AES(file_key), modes.CBC(iv_data), backend=backend
+            )
+            encryptor = cipher.encryptor()
+
+        padded = False
+        while True:
+            chunk = src.read(chunk_size)
+            if len(chunk) == 0:
+                break
+            elif len(chunk) % block_size != 0:
+                chunk = PKCS5_PAD(chunk, block_size)
+                padded = True
+            if not use_openssl_only:
+                out.write(data_cipher.encrypt(chunk))
+            else:
+                out.write(encryptor.update(chunk))
+        if not padded:
+            if not use_openssl_only:
+                out.write(
+                    data_cipher.encrypt(block_size * chr(block_size).encode(UTF8))
+                )
+            else:
+                out.write(encryptor.update(block_size * chr(block_size).encode(UTF8)))
+        if use_openssl_only:
+            out.write(encryptor.finalize())
+
+        # encrypt key with QRMK
+        if not use_openssl_only:
+            key_cipher = AES.new(key=decoded_key, mode=AES.MODE_ECB)
+            enc_kek = key_cipher.encrypt(PKCS5_PAD(file_key, block_size))
+        else:
+            cipher = Cipher(algorithms.AES(decoded_key), modes.ECB(), backend=backend)
+            encryptor = cipher.encryptor()
+            enc_kek = (
+                encryptor.update(PKCS5_PAD(file_key, block_size)) + encryptor.finalize()
+            )
+
+        mat_desc = MaterialDescriptor(
+            smk_id=encryption_material.smk_id,
+            query_id=encryption_material.query_id,
+            key_size=key_size * 8,
+        )
+        metadata = EncryptionMetadata(
+            key=base64.b64encode(enc_kek).decode("utf-8"),
+            iv=base64.b64encode(iv_data).decode("utf-8"),
+            matdesc=matdesc_to_unicode(mat_desc),
+        )
+        return metadata
+
+    @staticmethod
+    def encrypt_file(
+        encryption_material: SnowflakeFileEncryptionMaterial,
+        in_filename: str,
+        chunk_size: int = 64 * kilobyte,
+        tmp_dir: str = None,
+    ) -> tuple[EncryptionMetadata, str]:
+        """Encrypts a file in a temporary directory.
+
+        Args:
+            encryption_material: The encryption material for the file.
+            in_filename: The input file's name.
+            chunk_size: The size of read chunks (Default value = block_size * 4 * 1024).
+            tmp_dir: Temporary directory to use, optional (Default value = None).
+
+        Returns:
+            The encryption metadata and the encrypted file's location.
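+
+        Illustrative use (``material`` is assumed to come from the server
+        response)::
+
+            metadata, tmp_path = SnowflakeEncryptionUtil.encrypt_file(
+                material, "/tmp/data.csv"
+            )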
+ """ + logger = getLogger(__name__) + temp_output_fd, temp_output_file = tempfile.mkstemp( + text=False, dir=tmp_dir, prefix=os.path.basename(in_filename) + "#" + ) + logger.debug( + "unencrypted file: %s, temp file: %s, tmp_dir: %s", + in_filename, + temp_output_file, + tmp_dir, + ) + with open(in_filename, "rb") as infile: + with os.fdopen(temp_output_fd, "wb") as outfile: + metadata = SnowflakeEncryptionUtil.encrypt_stream( + encryption_material, infile, outfile, chunk_size + ) + return metadata, temp_output_file + + @staticmethod + def decrypt_file( + metadata: EncryptionMetadata, + encryption_material: SnowflakeFileEncryptionMaterial, + in_filename: str, + chunk_size: int = 64 * kilobyte, + tmp_dir=None, + ) -> str: + """Decrypts a file and stores the output in the temporary directory. + + Args: + metadata: The file's metadata input. + encryption_material: The file's encryption material. + in_filename: The name of the input file. + chunk_size: The size of read chunks (Default value = block_size * 4 * 1024). + tmp_dir: Temporary directory to use, optional (Default value = None). + + Returns: + The decrypted file's location. + """ + logger = getLogger(__name__) + use_openssl_only = os.getenv("SF_USE_OPENSSL_ONLY", "False") == "True" + key_base64 = metadata.key + iv_base64 = metadata.iv + decoded_key = base64.standard_b64decode( + encryption_material.query_stage_master_key + ) + key_bytes = base64.standard_b64decode(key_base64) + iv_bytes = base64.standard_b64decode(iv_base64) + + if not use_openssl_only: + key_cipher = AES.new(key=decoded_key, mode=AES.MODE_ECB) + file_key = PKCS5_UNPAD(key_cipher.decrypt(key_bytes)) + data_cipher = AES.new(key=file_key, mode=AES.MODE_CBC, IV=iv_bytes) + else: + backend = default_backend() + cipher = Cipher(algorithms.AES(decoded_key), modes.ECB(), backend=backend) + decryptor = cipher.decryptor() + file_key = PKCS5_UNPAD(decryptor.update(key_bytes) + decryptor.finalize()) + cipher = Cipher( + algorithms.AES(file_key), modes.CBC(iv_bytes), backend=backend + ) + decryptor = cipher.decryptor() + + temp_output_fd, temp_output_file = tempfile.mkstemp( + text=False, dir=tmp_dir, prefix=os.path.basename(in_filename) + "#" + ) + total_file_size = 0 + prev_chunk = None + logger.debug("encrypted file: %s, tmp file: %s", in_filename, temp_output_file) + with open(in_filename, "rb") as infile: + with os.fdopen(temp_output_fd, "wb") as outfile: + while True: + chunk = infile.read(chunk_size) + if len(chunk) == 0: + break + total_file_size += len(chunk) + if not use_openssl_only: + d = data_cipher.decrypt(chunk) + else: + d = decryptor.update(chunk) + outfile.write(d) + prev_chunk = d + if prev_chunk is not None: + total_file_size -= PKCS5_OFFSET(prev_chunk) + if use_openssl_only: + outfile.write(decryptor.finalize()) + outfile.truncate(total_file_size) + return temp_output_file diff --git a/src/snowflake/connector/errorcode.py b/src/snowflake/connector/errorcode.py new file mode 100644 index 000000000..bc93dbcac --- /dev/null +++ b/src/snowflake/connector/errorcode.py @@ -0,0 +1,85 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
+# + +from __future__ import annotations + +# network +ER_FAILED_TO_CONNECT_TO_DB = 250001 +ER_CONNECTION_IS_CLOSED = 250002 +ER_FAILED_TO_REQUEST = 250003 +ER_NOT_HTTPS_USED = 250004 +ER_FAILED_TO_SERVER = 250005 +ER_IDP_CONNECTION_ERROR = 250006 +ER_INCORRECT_DESTINATION = 250007 +ER_UNABLE_TO_OPEN_BROWSER = 250008 +ER_UNABLE_TO_START_WEBSERVER = 250009 +ER_INVALID_CERTIFICATE = 250011 # not used but keep here to reserve errno + +# connection +ER_NO_ACCOUNT_NAME = 251001 +ER_OLD_PYTHON = 251002 +ER_NO_WINDOWS_SUPPORT = 251003 +ER_FAILED_TO_GET_BOOTSTRAP = 251004 +ER_NO_USER = 251005 +ER_NO_PASSWORD = 251006 +ER_INVALID_VALUE = 251007 +ER_INVALID_PRIVATE_KEY = 251008 +ER_NO_HOSTNAME_FOUND = 251009 +ER_JWT_RETRY_EXPIRED = 251010 +ER_CONNECTION_TIMEOUT = 251011 + +# cursor +ER_FAILED_TO_REWRITE_MULTI_ROW_INSERT = 252001 +ER_NO_ADDITIONAL_CHUNK = 252002 +ER_NOT_POSITIVE_SIZE = 252003 +ER_FAILED_PROCESSING_PYFORMAT = 252004 +ER_FAILED_TO_CONVERT_ROW_TO_PYTHON_TYPE = 252005 +ER_CURSOR_IS_CLOSED = 252006 +ER_FAILED_TO_RENEW_SESSION = 252007 +ER_UNSUPPORTED_METHOD = 252008 +ER_NO_DATA_FOUND = 252009 +ER_CHUNK_DOWNLOAD_FAILED = 252010 +ER_NOT_IMPLICITY_SNOWFLAKE_DATATYPE = 252011 +ER_FAILED_PROCESSING_QMARK = 252012 + +# file_transfer +ER_INVALID_STAGE_FS = 253001 +ER_FAILED_TO_DOWNLOAD_FROM_STAGE = 253002 +ER_FAILED_TO_UPLOAD_TO_STAGE = 253003 +ER_INVALID_STAGE_LOCATION = 253004 +ER_LOCAL_PATH_NOT_DIRECTORY = 253005 +ER_FILE_NOT_EXISTS = 253006 +ER_COMPRESSION_NOT_SUPPORTED = 253007 +ER_INTERNAL_NOT_MATCH_ENCRYPT_MATERIAL = 253008 +ER_FAILED_TO_CHECK_EXISTING_FILES = 253009 + +# ocsp +ER_OCSP_URL_INFO_MISSING = 254001 +ER_OCSP_RESPONSE_UNAVAILABLE = 254002 +ER_OCSP_RESPONSE_FETCH_EXCEPTION = 254003 +ER_OCSP_FAILED_TO_CONNECT_CACHE_SERVER = 254004 +ER_OCSP_RESPONSE_CERT_STATUS_INVALID = 254005 +ER_OCSP_RESPONSE_CERT_STATUS_UNKNOWN = 254006 +ER_OCSP_RESPONSE_CERT_STATUS_REVOKED = 254007 +ER_OCSP_RESPONSE_STATUS_UNSUCCESSFUL = 254008 +ER_OCSP_RESPONSE_ATTACHED_CERT_INVALID = 254009 +ER_OCSP_RESPONSE_ATTACHED_CERT_EXPIRED = 254010 +ER_OCSP_RESPONSE_INVALID_SIGNATURE = 254011 +ER_OCSP_RESPONSE_INVALID_EXPIRY_INFO_MISSING = 254012 +ER_OCSP_RESPONSE_EXPIRED = 254013 +ER_OCSP_RESPONSE_FETCH_FAILURE = 254014 +ER_OCSP_RESPONSE_LOAD_FAILURE = 254015 +ER_OCSP_RESPONSE_CACHE_DOWNLOAD_FAILED = 254016 +ER_OCSP_RESPONSE_CACHE_DECODE_FAILED = 254017 +ER_INVALID_OCSP_RESPONSE_SSD = 254018 +ER_INVALID_SSD = 254019 + +# converter +ER_NOT_SUPPORT_DATA_TYPE = 255001 +ER_NO_PYARROW = 255002 +ER_NO_ARROW_RESULT = 255003 +ER_NO_PYARROW_SNOWSQL = 255004 +ER_FAILED_TO_READ_ARROW_STREAM = 255005 +ER_NO_NUMPY = 255006 diff --git a/src/snowflake/connector/errors.py b/src/snowflake/connector/errors.py new file mode 100644 index 000000000..8e4622631 --- /dev/null +++ b/src/snowflake/connector/errors.py @@ -0,0 +1,564 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
+#
+
+from __future__ import annotations
+
+import logging
+import os
+import re
+import traceback
+from logging import getLogger
+from typing import TYPE_CHECKING
+
+from .compat import BASE_EXCEPTION_CLASS
+from .description import CLIENT_NAME, SNOWFLAKE_CONNECTOR_VERSION
+from .secret_detector import SecretDetector
+from .telemetry import TelemetryData, TelemetryField
+from .telemetry_oob import TelemetryService
+from .time_util import get_time_millis
+
+if TYPE_CHECKING:  # pragma: no cover
+    from .connection import SnowflakeConnection
+    from .cursor import SnowflakeCursor
+
+logger = getLogger(__name__)
+connector_base_path = os.path.join("snowflake", "connector")
+
+
+RE_FORMATTED_ERROR = re.compile(r"^(\d{6,})(?: \((\S+)\))?:")
+
+
+class Error(BASE_EXCEPTION_CLASS):
+    """Base Snowflake exception class."""
+
+    def __init__(
+        self,
+        msg: str | None = None,
+        errno: int | None = None,
+        sqlstate: str | None = None,
+        sfqid: str | None = None,
+        done_format_msg: bool | None = None,
+        connection: SnowflakeConnection | None = None,
+        cursor: SnowflakeCursor | None = None,
+    ):
+        super().__init__(msg)
+        self.msg = msg
+        self.raw_msg = msg
+        self.errno = errno or -1
+        self.sqlstate = sqlstate or "n/a"
+        self.sfqid = sfqid
+
+        if self.msg:
+            # TODO: If there's a message, check whether errno (and maybe sqlstate)
+            # are already inserted, and if so don't insert them again. This check
+            # should eventually be removed, and we should explicitly set these at
+            # every call that creates these exceptions.
+            # However, since we shouldn't be creating them during normal execution,
+            # this should not affect performance for users and will make our error
+            # messages consistent.
+            already_formatted_msg = RE_FORMATTED_ERROR.match(msg)
+        else:
+            self.msg = "Unknown error"
+            already_formatted_msg = None
+
+        if self.errno != -1 and not done_format_msg:
+            if self.sqlstate != "n/a":
+                if not already_formatted_msg:
+                    if logger.getEffectiveLevel() in (logging.INFO, logging.DEBUG):
+                        self.msg = f"{self.errno:06d} ({self.sqlstate}): {self.sfqid}: {self.msg}"
+                    else:
+                        self.msg = f"{self.errno:06d} ({self.sqlstate}): {self.msg}"
+            else:
+                if not already_formatted_msg:
+                    if logger.getEffectiveLevel() in (logging.INFO, logging.DEBUG):
+                        self.msg = f"{self.errno:06d}: {self.sfqid}: {self.msg}"
+                    else:
+                        self.msg = f"{self.errno:06d}: {self.msg}"
+
+        # We want to skip the last frame/line in the traceback since it is the current frame
+        self.telemetry_traceback = self.generate_telemetry_stacktrace()
+        self.exception_telemetry(msg, cursor, connection)
+
+    def __repr__(self):
+        return self.__str__()
+
+    def __str__(self):
+        return self.msg
+
+    @staticmethod
+    def generate_telemetry_stacktrace() -> str:
+        # Get the current stack minus this function and the Error init function
+        stack_frames = traceback.extract_stack()[:-2]
+        filtered_frames = list()
+        for frame in stack_frames:
+            # Only add frames associated with the snowflake python connector to the telemetry stacktrace
+            if connector_base_path in frame.filename:
+                # Get the index to truncate the file path to hide any user path
+                safe_path_index = frame.filename.find(connector_base_path)
+                # Create a new frame with the truncated file name and without the line argument since that can
+                # output sensitive data
+                filtered_frames.append(
+                    traceback.FrameSummary(
+                        frame.filename[safe_path_index:],
+                        frame.lineno,
+                        frame.name,
+                        line="",
+                    )
+                )
+
+        return "".join(traceback.format_list(filtered_frames))
+
+    def telemetry_msg(self) -> str | None:
+        if self.sqlstate != "n/a":
+            return f"{self.errno:06d} ({self.sqlstate})"
f"{self.errno:06d} ({self.sqlstate})" + elif self.errno != -1: + return f"{self.errno:06d}" + else: + return None + + def generate_telemetry_exception_data(self) -> dict[str, str]: + """Generate the data to send through telemetry.""" + telemetry_data = { + TelemetryField.KEY_DRIVER_TYPE.value: CLIENT_NAME, + TelemetryField.KEY_DRIVER_VERSION.value: SNOWFLAKE_CONNECTOR_VERSION, + } + telemetry_msg = self.telemetry_msg() + if self.sfqid: + telemetry_data[TelemetryField.KEY_SFQID.value] = self.sfqid + if self.sqlstate: + telemetry_data[TelemetryField.KEY_SQLSTATE.value] = self.sqlstate + if telemetry_msg: + telemetry_data[TelemetryField.KEY_REASON.value] = telemetry_msg + if self.errno: + telemetry_data[TelemetryField.KEY_ERROR_NUMBER.value] = str(self.errno) + + telemetry_data[ + TelemetryField.KEY_STACKTRACE.value + ] = SecretDetector.mask_secrets(self.telemetry_traceback) + + return telemetry_data + + def send_exception_telemetry( + self, + connection: SnowflakeConnection | None, + telemetry_data: dict[str, str], + ) -> None: + """Send telemetry data by in-band telemetry if it is enabled, otherwise send through out-of-band telemetry.""" + if ( + connection is not None + and connection.telemetry_enabled + and not connection._telemetry.is_closed + ): + # Send with in-band telemetry + telemetry_data[ + TelemetryField.KEY_TYPE.value + ] = TelemetryField.SQL_EXCEPTION.value + telemetry_data[TelemetryField.KEY_SOURCE.value] = connection.application + telemetry_data[TelemetryField.KEY_EXCEPTION.value] = self.__class__.__name__ + ts = get_time_millis() + try: + connection._log_telemetry(TelemetryData(telemetry_data, ts)) + except AttributeError: + logger.debug("Cursor failed to log to telemetry.", exc_info=True) + elif connection is None: + # Send with out-of-band telemetry + telemetry_oob = TelemetryService.get_instance() + telemetry_oob.log_general_exception(self.__class__.__name__, telemetry_data) + + def exception_telemetry( + self, + msg: str, + cursor: SnowflakeCursor | None, + connection: SnowflakeConnection | None, + ) -> None: + """Main method to generate and send telemetry data for exceptions.""" + try: + telemetry_data = self.generate_telemetry_exception_data() + if cursor is not None: + self.send_exception_telemetry(cursor.connection, telemetry_data) + elif connection is not None: + self.send_exception_telemetry(connection, telemetry_data) + else: + self.send_exception_telemetry(None, telemetry_data) + except Exception: + # Do nothing but log if sending telemetry fails + logger.debug("Sending exception telemetry failed") + + @staticmethod + def default_errorhandler( + connection: SnowflakeConnection, + cursor: SnowflakeCursor, + error_class: type[Error], + error_value: dict[str, str], + ) -> None: + """Default error handler that raises an error. + + Args: + connection: Connections in which the error happened. + cursor: Cursor in which the error happened. + error_class: Class of error that needs handling. + error_value: A dictionary of the error details. + + Raises: + A Snowflake error. 
+ """ + raise error_class( + msg=error_value.get("msg"), + errno=error_value.get("errno"), + sqlstate=error_value.get("sqlstate"), + sfqid=error_value.get("sfqid"), + done_format_msg=error_value.get("done_format_msg"), + connection=connection, + cursor=cursor, + ) + + @staticmethod + def errorhandler_wrapper_from_cause( + connection: SnowflakeConnection, + cause: Error | Exception, + cursor: SnowflakeCursor | None = None, + ) -> None: + """Wrapper for errorhandler_wrapper, it is called with a cause instead of a dictionary. + + The dictionary is first extracted from the cause and then it's given to errorhandler_wrapper + + Args: + connection: Connections in which the error happened. + cursor: Cursor in which the error happened. + cause: Error instance that we want to handle. + + Returns: + None if no exceptions are raised by the connection's and cursor's error handlers. + + Raises: + A Snowflake error if connection and cursor are None. + """ + return Error.errorhandler_wrapper( + connection, + cursor, + type(cause), + { + "msg": cause.msg, + "errno": cause.errno, + "sqlstate": cause.sqlstate, + "done_format_msg": True, + }, + ) + + @staticmethod + def errorhandler_wrapper( + connection: SnowflakeConnection | None, + cursor: SnowflakeCursor | None, + error_class: type[Error] | type[Exception], + error_value: dict[str, str | bool | int], + ) -> None: + """Error handler wrapper that calls the errorhandler method. + + Args: + connection: Connections in which the error happened. + cursor: Cursor in which the error happened. + error_class: Class of error that needs handling. + error_value: An optional dictionary of the error details. + + Returns: + None if no exceptions are raised by the connection's and cursor's error handlers. + + Raises: + A Snowflake error if connection, or cursor are None. Otherwise it gives the + exception to the first handler in that order. + """ + + handed_over = Error.hand_to_other_handler( + connection, + cursor, + error_class, + error_value, + ) + if not handed_over: + raise Error.errorhandler_make_exception( + error_class, + error_value, + ) + + @staticmethod + def errorhandler_wrapper_from_ready_exception( + connection: SnowflakeConnection | None, + cursor: SnowflakeCursor | None, + error_exc: Error | Exception, + ) -> None: + """Like errorhandler_wrapper, but it takes a ready to go Exception.""" + if isinstance(error_exc, Error): + error_value = { + "msg": error_exc.msg, + "errno": error_exc.errno, + "sqlstate": error_exc.sqlstate, + "sfqid": error_exc.sfqid, + } + else: + error_value = error_exc.args + + handed_over = Error.hand_to_other_handler( + connection, + cursor, + type(error_exc), + error_value, + ) + if not handed_over: + raise error_exc + + @staticmethod + def hand_to_other_handler( + connection: SnowflakeConnection | None, + cursor: SnowflakeCursor | None, + error_class: type[Error] | type[Exception], + error_value: dict[str, str | bool], + ) -> bool: + """If possible give error to a higher error handler in connection, or cursor. + + Returns: + Whether it error was successfully given to a handler. 
+ """ + error_value.setdefault("done_format_msg", False) + if connection is not None: + connection.messages.append((error_class, error_value)) + if cursor is not None: + cursor.messages.append((error_class, error_value)) + cursor.errorhandler(connection, cursor, error_class, error_value) + return True + elif connection is not None: + connection.errorhandler(connection, cursor, error_class, error_value) + return True + return False + + @staticmethod + def errorhandler_make_exception( + error_class: type[Error] | type[Exception], + error_value: dict[str, str | bool], + ) -> Error | Exception: + """Helper function to errorhandler_wrapper that creates the exception.""" + error_value.setdefault("done_format_msg", False) + + if issubclass(error_class, Error): + return error_class( + msg=error_value["msg"], + errno=error_value.get("errno"), + sqlstate=error_value.get("sqlstate"), + sfqid=error_value.get("sfqid"), + ) + return error_class(error_value) + + +class _Warning(BASE_EXCEPTION_CLASS): + """Exception for important warnings.""" + + pass + + +class InterfaceError(Error): + """Exception for errors related to the interface.""" + + pass + + +class DatabaseError(Error): + """Exception for errors related to the database.""" + + pass + + +class InternalError(DatabaseError): + """Exception for errors internal database errors.""" + + pass + + +class OperationalError(DatabaseError): + """Exception for errors related to the database's operation.""" + + pass + + +class ProgrammingError(DatabaseError): + """Exception for errors programming errors.""" + + pass + + +class IntegrityError(DatabaseError): + """Exception for errors regarding relational integrity.""" + + pass + + +class DataError(DatabaseError): + """Exception for errors reporting problems with processed data.""" + + pass + + +class NotSupportedError(DatabaseError): + """Exception for errors when an unsupported database feature was used.""" + + # Not supported errors do not have any PII in their + def telemetry_msg(self): + return self.msg + + +class RevocationCheckError(OperationalError): + """Exception for errors during certificate revocation check.""" + + # We already send OCSP exception events + def exception_telemetry(self, msg, cursor, connection): + pass + + +# internal errors +class InternalServerError(Error): + """Exception for 500 HTTP code for retry.""" + + def __init__(self, **kwargs): + Error.__init__( + self, + msg=kwargs.get("msg") or "HTTP 500: Internal Server Error", + errno=kwargs.get("errno"), + sqlstate=kwargs.get("sqlstate"), + sfqid=kwargs.get("sfqid"), + ) + + +class ServiceUnavailableError(Error): + """Exception for 503 HTTP code for retry.""" + + def __init__(self, **kwargs): + Error.__init__( + self, + msg=kwargs.get("msg") or "HTTP 503: Service Unavailable", + errno=kwargs.get("errno"), + sqlstate=kwargs.get("sqlstate"), + sfqid=kwargs.get("sfqid"), + ) + + +class GatewayTimeoutError(Error): + """Exception for 504 HTTP error for retry.""" + + def __init__(self, **kwargs): + Error.__init__( + self, + msg=kwargs.get("msg") or "HTTP 504: Gateway Timeout", + errno=kwargs.get("errno"), + sqlstate=kwargs.get("sqlstate"), + sfqid=kwargs.get("sfqid"), + ) + + +class ForbiddenError(Error): + """Exception for 403 HTTP error for retry.""" + + def __init__(self, **kwargs): + Error.__init__( + self, + msg=kwargs.get("msg") or "HTTP 403: Forbidden", + errno=kwargs.get("errno"), + sqlstate=kwargs.get("sqlstate"), + sfqid=kwargs.get("sfqid"), + ) + + +class RequestTimeoutError(Error): + """Exception for 408 HTTP error for retry.""" 
+ + def __init__(self, **kwargs): + Error.__init__( + self, + msg=kwargs.get("msg") or "HTTP 408: Request Timeout", + errno=kwargs.get("errno"), + sqlstate=kwargs.get("sqlstate"), + sfqid=kwargs.get("sfqid"), + ) + + +class BadRequest(Error): + """Exception for 400 HTTP error for retry.""" + + def __init__(self, **kwargs): + Error.__init__( + self, + msg=kwargs.get("msg") or "HTTP 400: Bad Request", + errno=kwargs.get("errno"), + sqlstate=kwargs.get("sqlstate"), + sfqid=kwargs.get("sfqid"), + ) + + +class BadGatewayError(Error): + """Exception for 502 HTTP error for retry.""" + + def __init__(self, **kwargs): + Error.__init__( + self, + msg=kwargs.get("msg") or "HTTP 502: Bad Gateway", + errno=kwargs.get("errno"), + sqlstate=kwargs.get("sqlstate"), + sfqid=kwargs.get("sfqid"), + ) + + +class MethodNotAllowed(Error): + """Exception for 405 HTTP error for retry.""" + + def __init__(self, **kwargs): + Error.__init__( + self, + msg=kwargs.get("msg") or "HTTP 405: Method not allowed", + errno=kwargs.get("errno"), + sqlstate=kwargs.get("sqlstate"), + sfqid=kwargs.get("sfqid"), + ) + + +class OtherHTTPRetryableError(Error): + """Exception for other HTTP error for retry.""" + + def __init__(self, **kwargs): + code = kwargs.get("code", "n/a") + Error.__init__( + self, + msg=kwargs.get("msg") or f"HTTP {code}", + errno=kwargs.get("errno"), + sqlstate=kwargs.get("sqlstate"), + sfqid=kwargs.get("sfqid"), + ) + + +class MissingDependencyError(Error): + """Exception for missing extras dependencies.""" + + def __init__(self, dependency: str): + super().__init__(msg=f"Missing optional dependency: {dependency}") + + +class BindUploadError(Error): + """Exception for bulk array binding stage optimization fails.""" + + pass + + +class RequestExceedMaxRetryError(Error): + """Exception for REST call to remote storage API exceeding maximum retries with transient errors.""" + + pass + + +class TokenExpiredError(Error): + """Exception for REST call to remote storage API failed because of expired authentication token.""" + + pass + + +class PresignedUrlExpiredError(Error): + """Exception for REST call to remote storage API failed because of expired presigned URL.""" + + pass diff --git a/feature.py b/src/snowflake/connector/feature.py similarity index 53% rename from feature.py rename to src/snowflake/connector/feature.py index f74a84220..62fb05e00 100644 --- a/feature.py +++ b/src/snowflake/connector/feature.py @@ -1,9 +1,7 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. # - # Feature flags feature_use_pyopenssl = True # use pyopenssl API or openssl command diff --git a/src/snowflake/connector/file_compression_type.py b/src/snowflake/connector/file_compression_type.py new file mode 100644 index 000000000..a127c3fdd --- /dev/null +++ b/src/snowflake/connector/file_compression_type.py @@ -0,0 +1,121 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
+# +from __future__ import annotations + +from typing import NamedTuple + + +class CompressionType(NamedTuple): + name: str + file_extension: str + mime_type: str + mime_subtypes: list[str] + is_supported: bool + + +CompressionTypes = { + "GZIP": CompressionType( + name="GZIP", + file_extension=".gz", + mime_type="application", + mime_subtypes=["gzip", "x-gzip"], + is_supported=True, + ), + "DEFLATE": CompressionType( + name="DEFLATE", + file_extension=".deflate", + mime_type="application", + mime_subtypes=["zlib", "deflate"], + is_supported=True, + ), + "RAW_DEFLATE": CompressionType( + name="RAW_DEFLATE", + file_extension=".raw_deflate", + mime_type="application", + mime_subtypes=["raw_deflate"], + is_supported=True, + ), + "BZIP2": CompressionType( + name="BZIP2", + file_extension=".bz2", + mime_type="application", + mime_subtypes=["bzip2", "x-bzip2", "x-bz2", "x-bzip", "bz2"], + is_supported=True, + ), + "LZIP": CompressionType( + name="LZIP", + file_extension=".lz", + mime_type="application", + mime_subtypes=["lzip", "x-lzip"], + is_supported=False, + ), + "LZMA": CompressionType( + name="LZMA", + file_extension=".lzma", + mime_type="application", + mime_subtypes=["lzma", "x-lzma"], + is_supported=False, + ), + "LZO": CompressionType( + name="LZO", + file_extension=".lzo", + mime_type="application", + mime_subtypes=["lzo", "x-lzo"], + is_supported=False, + ), + "XZ": CompressionType( + name="XZ", + file_extension=".xz", + mime_type="application", + mime_subtypes=["xz", "x-xz"], + is_supported=False, + ), + "COMPRESS": CompressionType( + name="COMPRESS", + file_extension=".Z", + mime_type="application", + mime_subtypes=["compress", "x-compress"], + is_supported=False, + ), + "PARQUET": CompressionType( + name="PARQUET", + file_extension=".parquet", + mime_type="snowflake", + mime_subtypes=["parquet"], + is_supported=True, + ), + "ZSTD": CompressionType( + name="ZSTD", + file_extension=".zst", + mime_type="application", + mime_subtypes=["zstd", "x-zstd"], + is_supported=True, + ), + "BROTLI": CompressionType( + name="BROTLI", + file_extension=".br", + mime_type="application", + mime_subtypes=["br", "x-br"], + is_supported=True, + ), + "ORC": CompressionType( + name="ORC", + file_extension=".orc", + mime_type="snowflake", + mime_subtypes=["orc"], + is_supported=True, + ), +} + +subtype_to_meta: dict[str, CompressionType] = { + ms.lower(): meta for meta in CompressionTypes.values() for ms in meta.mime_subtypes +} + +# TODO: Snappy avro doesn't need to be compressed again + + +def lookup_by_mime_sub_type(mime_subtype: str) -> CompressionType | None: + """Look up a CompressionType for a specific mime subtype.""" + return subtype_to_meta.get(mime_subtype.lower()) diff --git a/src/snowflake/connector/file_transfer_agent.py b/src/snowflake/connector/file_transfer_agent.py new file mode 100644 index 000000000..7a8454a97 --- /dev/null +++ b/src/snowflake/connector/file_transfer_agent.py @@ -0,0 +1,1145 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
+# + +from __future__ import annotations + +import binascii +import glob +import mimetypes +import os +import sys +import threading +from concurrent.futures.thread import ThreadPoolExecutor +from dataclasses import dataclass +from functools import partial +from logging import getLogger +from time import time +from typing import IO, TYPE_CHECKING, Any, Callable, TypeVar + +from .azure_storage_client import SnowflakeAzureRestClient +from .compat import GET_CWD, IS_WINDOWS +from .constants import ( + AZURE_FS, + CMD_TYPE_DOWNLOAD, + CMD_TYPE_UPLOAD, + GCS_FS, + LOCAL_FS, + S3_FS, + ResultStatus, + megabyte, +) +from .converter_snowsql import SnowflakeConverterSnowSQL +from .errorcode import ( + ER_COMPRESSION_NOT_SUPPORTED, + ER_FAILED_TO_DOWNLOAD_FROM_STAGE, + ER_FAILED_TO_UPLOAD_TO_STAGE, + ER_FILE_NOT_EXISTS, + ER_INTERNAL_NOT_MATCH_ENCRYPT_MATERIAL, + ER_INVALID_STAGE_FS, + ER_INVALID_STAGE_LOCATION, + ER_LOCAL_PATH_NOT_DIRECTORY, +) +from .errors import ( + DatabaseError, + Error, + InternalError, + OperationalError, + ProgrammingError, +) +from .file_compression_type import CompressionTypes, lookup_by_mime_sub_type +from .gcs_storage_client import SnowflakeGCSRestClient +from .local_storage_client import SnowflakeLocalStorageClient +from .s3_storage_client import SnowflakeS3RestClient +from .storage_client import SnowflakeFileEncryptionMaterial, SnowflakeStorageClient + +if TYPE_CHECKING: # pragma: no cover + from .connection import SnowflakeConnection + from .cursor import SnowflakeCursor + from .file_compression_type import CompressionType + +VALID_STORAGE = [LOCAL_FS, S3_FS, AZURE_FS, GCS_FS] + +INJECT_WAIT_IN_PUT = 0 + +logger = getLogger(__name__) + + +def result_text_column_desc(name): + return { + "name": name, + "type": "text", + "length": 16777216, + "precision": None, + "scale": None, + "nullable": False, + } + + +def result_fixed_column_desc(name): + return { + "name": name, + "type": "fixed", + "length": 5, + "precision": 0, + "scale": 0, + "nullable": False, + } + + +# TODO: rewrite, we use this class to store information about file transfers +# It'd make more sense to define a new object, like FileTransferMeta that then has +# more FileMetas inside of it. This would help in some cases, where for example +# consider the case where we run into an unrecoverable error for the whole transfer +# job and we need to convey an error to the main thread and that error needs to be +# raised by the main thread. Where should this go? Currently the answer could be +# all of the current FileMetas. Hmmm... 
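+# An illustrative construction of the dataclass below (values are made up):
+#
+#   SnowflakeFileMeta(
+#       name="data.csv.gz",
+#       src_file_name="/tmp/data.csv.gz",
+#       stage_location_type="S3",
+#   )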
+@dataclass +class SnowflakeFileMeta: + """Class to keep track of information necessary for file operations.""" + + name: str + src_file_name: str + stage_location_type: str + result_status: ResultStatus | None = None + + self: SnowflakeFileTransferAgent | None = None + put_callback: type[SnowflakeProgressPercentage] | None = None + put_azure_callback: type[SnowflakeProgressPercentage] | None = None + put_callback_output_stream: IO[str] | None = None + get_callback: type[SnowflakeProgressPercentage] | None = None + get_azure_callback: type[SnowflakeProgressPercentage] | None = None + get_callback_output_stream: IO[str] | None = None + show_progress_bar: bool = False + multipart_threshold: int = 67108864 # Historical value + presigned_url: str | None = None + overwrite: bool = False + sha256_digest: str | None = None + upload_size: int | None = None + real_src_file_name: str | None = None + error_details: Exception | None = None + last_error: Exception | None = None + no_sleeping_time: bool = False + gcs_file_header_digest: str | None = None + gcs_file_header_content_length: int | None = None + gcs_file_header_encryption_metadata: dict[str, Any] | None = None + + encryption_material: SnowflakeFileEncryptionMaterial | None = None + # Specific to Uploads only + src_file_size: int = 0 + src_compression_type: CompressionType | None = None + dst_compression_type: CompressionType = None + require_compress: bool = False + dst_file_name: str | None = None + dst_file_size: int = -1 + intermediate_stream: IO[bytes] | None = None + src_stream: IO[bytes] | None = None + # Specific to Downloads only + local_location: str | None = None + + +def _update_progress( + file_name: str, + start_time: float, + total_size: float, + progress: float | int, + output_stream: IO | None = sys.stdout, + show_progress_bar: bool | None = True, +) -> bool: + bar_length = 10 # Modify this to change the length of the progress bar + total_size /= megabyte + status = "" + elapsed_time = time() - start_time + throughput = (total_size / elapsed_time) if elapsed_time != 0.0 else 0.0 + if isinstance(progress, int): + progress = float(progress) + if not isinstance(progress, float): + progress = 0 + status = "error: progress var must be float\r\n" + if progress < 0: + progress = 0 + status = "Halt...\r\n" + if progress >= 1: + progress = 1 + status = f"Done ({elapsed_time:.3f}s, {throughput:.2f}MB/s).\r\n" + if not status and show_progress_bar: + status = f"({elapsed_time:.3f}s, {throughput:.2f}MB/s)" + if status: + block = int(round(bar_length * progress)) + + text = ( + f"\r{file_name}({total_size:.2f}MB): " + f"[{'#' * block + '-' * (bar_length - block)}] " + f"{progress * 100.0:.2f}% {status}" + ) + output_stream.write(text) + output_stream.flush() + logger.debug( + f"filename: {file_name}, start_time: {start_time}, total_size: {total_size}, " + f"progress: {progress}, show_progress_bar: {show_progress_bar}" + ) + return progress == 1.0 + + +def percent(seen_so_far: int, size: float) -> float: + return 1.0 if seen_so_far >= size or size <= 0 else float(seen_so_far / size) + + +class SnowflakeProgressPercentage: + """Built-in Progress bar for PUT commands.""" + + def __init__( + self, + filename: str, + filesize: int | float, + output_stream: IO | None = sys.stdout, + show_progress_bar: bool | None = True, + ): + last_pound_char = filename.rfind("#") + if last_pound_char < 0: + last_pound_char = len(filename) + self._filename = os.path.basename(filename[0:last_pound_char]) + self._output_stream = output_stream + 
self._show_progress_bar = show_progress_bar + self._size = float(filesize) + self._seen_so_far = 0 + self._done = False + self._start_time = time() + self._lock = threading.Lock() + + def __call__(self, bytes_amount: int): + raise NotImplementedError + + +class SnowflakeS3ProgressPercentage(SnowflakeProgressPercentage): + def __init__( + self, + filename: str, + filesize: int | float, + output_stream: IO | None = sys.stdout, + show_progress_bar: bool | None = True, + ): + super().__init__( + filename, + filesize, + output_stream=output_stream, + show_progress_bar=show_progress_bar, + ) + + def __call__(self, bytes_amount: int): + with self._lock: + if self._output_stream: + self._seen_so_far += bytes_amount + percentage = percent(self._seen_so_far, self._size) + if not self._done: + self._done = _update_progress( + self._filename, + self._start_time, + self._size, + percentage, + output_stream=self._output_stream, + show_progress_bar=self._show_progress_bar, + ) + + +class SnowflakeAzureProgressPercentage(SnowflakeProgressPercentage): + def __init__( + self, + filename: str, + filesize: int | float, + output_stream: IO | None = sys.stdout, + show_progress_bar: bool | None = True, + ): + super().__init__( + filename, + filesize, + output_stream=output_stream, + show_progress_bar=show_progress_bar, + ) + + def __call__(self, current: int): + with self._lock: + if self._output_stream: + self._seen_so_far = current + percentage = percent(self._seen_so_far, self._size) + if not self._done: + self._done = _update_progress( + self._filename, + self._start_time, + self._size, + percentage, + output_stream=self._output_stream, + show_progress_bar=self._show_progress_bar, + ) + + +class StorageCredential: + def __init__( + self, + credentials: dict[str, Any], + connection: SnowflakeConnection, + command: str, + ): + self.creds = credentials + self.timestamp = time() + self.lock = threading.Lock() + self.connection = connection + self._command = command + + def update(self, cur_timestamp): + with self.lock: + if cur_timestamp < self.timestamp: + return + logger.debug("Renewing expired storage token.") + ret = self.connection.cursor()._execute_helper(self._command) + self.creds = ret["data"]["stageInfo"]["creds"] + self.timestamp = time() + + +@dataclass +class TransferMetadata: + num_files_started: int = 0 + num_files_completed: int = 0 + chunks_in_queue: int = 0 + + +class SnowflakeFileTransferAgent: + """Snowflake File Transfer Agent provides cloud provider independent implementation for putting/getting files.""" + + def __init__( + self, + cursor: SnowflakeCursor, + command: str, + ret: dict[str, Any], + put_callback: type[SnowflakeProgressPercentage] | None = None, + put_azure_callback: type[SnowflakeProgressPercentage] | None = None, + put_callback_output_stream: IO[str] = sys.stdout, + get_callback: type[SnowflakeProgressPercentage] | None = None, + get_azure_callback: type[SnowflakeProgressPercentage] | None = None, + get_callback_output_stream: IO[str] = sys.stdout, + show_progress_bar: bool = True, + raise_put_get_error: bool = True, + force_put_overwrite: bool = True, + multipart_threshold: int | None = None, + source_from_stream: IO[bytes] | None = None, + use_s3_regional_url: bool = False, + ): + self._cursor = cursor + self._command = command + self._ret = ret + self._put_callback = put_callback + self._put_azure_callback = ( + put_azure_callback if put_azure_callback else put_callback + ) + self._put_callback_output_stream = put_callback_output_stream + self._get_callback = get_callback 
+ self._get_azure_callback = ( + get_azure_callback if get_azure_callback else get_callback + ) + self._get_callback_output_stream = get_callback_output_stream + # when we have not checked whether we should use accelerate, this boolean is None + # _use_accelerate_endpoint in SnowflakeFileTransferAgent could be passed to each SnowflakeS3RestClient + # so we could avoid check accelerate configuration for each S3 client created for each file meta. + self._use_accelerate_endpoint: bool | None = None + self._raise_put_get_error = raise_put_get_error + self._show_progress_bar = show_progress_bar + self._force_put_overwrite = force_put_overwrite + self._source_from_stream = source_from_stream + # The list of self-sufficient file metas that are sent to + # remote storage clients to get operated on. + self._file_metadata: list[SnowflakeFileMeta] = [] + self._results: list[SnowflakeFileMeta] = [] + self._multipart_threshold = multipart_threshold or 67108864 # Historical value + self._use_s3_regional_url = use_s3_regional_url + self._credentials: StorageCredential | None = None + + def execute(self) -> None: + self._parse_command() + self._init_file_metadata() + + if self._command_type == CMD_TYPE_UPLOAD: + self._process_file_compression_type() + + for m in self._file_metadata: + m.self = self + + self._transfer_accelerate_config() + + if self._command_type == CMD_TYPE_DOWNLOAD: + if not os.path.isdir(self._local_location): + os.makedirs(self._local_location) + + if self._stage_location_type == LOCAL_FS: + if not os.path.isdir(self._stage_info["location"]): + os.makedirs(self._stage_info["location"]) + + for m in self._file_metadata: + m.overwrite = self._overwrite + m.self = self + if self._stage_location_type != LOCAL_FS: + m.put_callback = self._put_callback + m.put_azure_callback = self._put_azure_callback + m.put_callback_output_stream = self._put_callback_output_stream + m.get_callback = self._get_callback + m.get_azure_callback = self._get_azure_callback + m.get_callback_output_stream = self._get_callback_output_stream + m.show_progress_bar = self._show_progress_bar + + # multichunk threshold + m.multipart_threshold = self._multipart_threshold + + logger.debug(f"parallel=[{self._parallel}]") + if self._raise_put_get_error and not self._file_metadata: + Error.errorhandler_wrapper( + self._cursor.connection, + self._cursor, + OperationalError, + { + "msg": "While getting file(s) there was an error: " + "the file does not exist.", + "errno": ER_FILE_NOT_EXISTS, + }, + ) + self.transfer(self._file_metadata) + + # turn enum to string, in order to have backward compatible interface + for result in self._results: + result.result_status = result.result_status.value + + def transfer(self, metas: list[SnowflakeFileMeta]) -> None: + max_concurrency = self._parallel + network_tpe = ThreadPoolExecutor(max_concurrency) + preprocess_tpe = ThreadPoolExecutor(min(len(metas), os.cpu_count())) + postprocess_tpe = ThreadPoolExecutor(min(len(metas), os.cpu_count())) + logger.debug(f"Chunk ThreadPoolExecutor size: {max_concurrency}") + cv_main_thread = threading.Condition() # to signal the main thread + cv_chunk_process = ( + threading.Condition() + ) # to get more chunks into the chunk_tpe + files = [self._create_file_transfer_client(m) for m in metas] + num_total_files = len(metas) + transfer_metadata = TransferMetadata() # this is protected by cv_chunk_process + is_upload = self._command_type == CMD_TYPE_UPLOAD + exception_caught_in_callback: Exception | None = None + + def notify_file_completed(): + # Increment 
the number of completed files, then notify the main thread. + with cv_main_thread: + transfer_metadata.num_files_completed += 1 + cv_main_thread.notify() + + def preprocess_done_cb( + success: bool, + result: Any, + file_meta: SnowflakeFileMeta, + done_client: SnowflakeStorageClient, + ): + if not success: + logger.debug(f"Failed to prepare {done_client.meta.name}.") + if is_upload: + done_client.finish_upload() + done_client.delete_client_data() + else: + done_client.finish_download() + notify_file_completed() + elif done_client.meta.result_status == ResultStatus.SKIPPED: + # this case applies to upload only + notify_file_completed() + else: + logger.debug(f"Finished preparing file {done_client.meta.name}") + with cv_chunk_process: + while transfer_metadata.chunks_in_queue > 2 * max_concurrency: + logger.debug( + "Chunk queue busy, waiting in file done callback..." + ) + cv_chunk_process.wait() + for _chunk_id in range(done_client.num_of_chunks): + _callback = partial( + transfer_done_cb, + done_client=done_client, + chunk_id=_chunk_id, + ) + if is_upload: + network_tpe.submit( + function_and_callback_wrapper, + # Work fn + done_client.upload_chunk, + # Callback fn + _callback, + file_meta, + # Arguments for work fn + _chunk_id, + ) + else: + network_tpe.submit( + function_and_callback_wrapper, + # Work fn + done_client.download_chunk, + # Callback fn + _callback, + file_meta, + # Arguments for work fn + _chunk_id, + ) + transfer_metadata.chunks_in_queue += 1 + cv_chunk_process.notify() + + def transfer_done_cb( + success: bool, + result: Any, + file_meta: SnowflakeFileMeta, + done_client: SnowflakeStorageClient, + chunk_id: int, + ): + # Note: chunk_id is 0 based while num_of_chunks is count + logger.debug( + f"Chunk {chunk_id}/{done_client.num_of_chunks} of file {done_client.meta.name} reached callback" + ) + with cv_chunk_process: + transfer_metadata.chunks_in_queue -= 1 + cv_chunk_process.notify() + + with done_client.lock: + if not success: + # TODO: Cancel other chunks? + done_client.failed_transfers += 1 + logger.debug( + f"Chunk {chunk_id} of file {done_client.meta.name} failed to transfer for unexpected exception {result}" + ) + else: + done_client.successful_transfers += 1 + logger.debug( + f"Chunk progress: {done_client.meta.name}: completed: {done_client.successful_transfers} failed: {done_client.failed_transfers} total: {done_client.num_of_chunks}" + ) + if ( + done_client.successful_transfers + done_client.failed_transfers + == done_client.num_of_chunks + ): + if is_upload: + done_client.finish_upload() + done_client.delete_client_data() + notify_file_completed() + else: + postprocess_tpe.submit( + function_and_callback_wrapper, + # Work fn + done_client.finish_download, + # Callback fn + partial(postprocess_done_cb, done_client=done_client), + transfer_metadata, + ) + logger.debug( + f"submitting {done_client.meta.name} to done_postprocess" + ) + + def postprocess_done_cb( + success: bool, + result: Any, + file_meta: SnowflakeFileMeta, + done_client: SnowflakeStorageClient, + ): + logger.debug(f"File {done_client.meta.name} reached postprocess callback") + + with done_client.lock: + if not success: + done_client.failed_transfers += 1 + logger.debug( + f"File {done_client.meta.name} failed to transfer for unexpected exception {result}" + ) + # Whether there was an exception or not, we're done the file. 
+ notify_file_completed() + + _T = TypeVar("_T") + + def function_and_callback_wrapper( + work: Callable[..., _T], + _callback: Callable[[bool, _T | Exception, SnowflakeFileMeta], None], + file_meta: SnowflakeFileMeta, + *args: Any, + **kwargs: Any, + ) -> None: + """This wrapper makes sure that callbacks are called from the TPEs. + + If the main thread adds a callback to a future that has already been + fulfilled then the callback is executed by the main thread. This can + lead to unexpected slowdowns and behavior. + """ + try: + result: tuple[bool, _T | Exception] = ( + True, + work(*args, **kwargs), + ) + except Exception as e: + logger.error(f"An exception was raised in {repr(work)}", exc_info=True) + file_meta.error_details = e + result = (False, e) + try: + _callback(*result, file_meta) + except Exception as e: + # TODO: if an exception happens in a callback, the exception will not + # propagate to the main thread. We need to save these Exceptions + # somewhere and then re-raise by the main thread. For now let's log + # this exception, but for a long term solution see my + # TODO comment for SnowflakeFileMeta + with cv_main_thread: + nonlocal exception_caught_in_callback + exception_caught_in_callback = e + cv_main_thread.notify() + if not result[0]: + # Re-raising the exception from the work function, it would already + # be logged at this point + logger.error( + f"An exception was raised in {repr(callback)}", exc_info=True + ) + + for file_client in files: + callback = partial(preprocess_done_cb, done_client=file_client) + if is_upload: + preprocess_tpe.submit( + function_and_callback_wrapper, + # Work fn + file_client.prepare_upload, + # Callback fn + callback, + file_client.meta, + ) + else: + preprocess_tpe.submit( + function_and_callback_wrapper, + # Work fn + file_client.prepare_download, + # Callback fn + callback, + file_client.meta, + ) + transfer_metadata.num_files_started += 1 # TODO: do we need this? 
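+
+        # Illustrative summary of the pipeline wired up above: preprocess_tpe
+        # prepares each file, network_tpe transfers its chunks, postprocess_tpe
+        # finalizes downloads, and every path eventually calls
+        # notify_file_completed(), which releases the wait below.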
+ + with cv_main_thread: + while transfer_metadata.num_files_completed < num_total_files: + cv_main_thread.wait() + if exception_caught_in_callback is not None: + raise exception_caught_in_callback + + self._results = metas + + def _create_file_transfer_client( + self, meta: SnowflakeFileMeta + ) -> SnowflakeStorageClient: + from .constants import AZURE_CHUNK_SIZE, S3_CHUNK_SIZE + + if self._stage_location_type == LOCAL_FS: + return SnowflakeLocalStorageClient( + meta, + self._stage_info, + 4 * megabyte, + use_s3_regional_url=self._use_s3_regional_url, + ) + elif self._stage_location_type == AZURE_FS: + return SnowflakeAzureRestClient( + meta, + self._credentials, + AZURE_CHUNK_SIZE, + self._stage_info, + use_s3_regional_url=self._use_s3_regional_url, + ) + elif self._stage_location_type == S3_FS: + return SnowflakeS3RestClient( + meta, + self._credentials, + self._stage_info, + S3_CHUNK_SIZE, + use_accelerate_endpoint=self._use_accelerate_endpoint, + use_s3_regional_url=self._use_s3_regional_url, + ) + elif self._stage_location_type == GCS_FS: + return SnowflakeGCSRestClient( + meta, + self._credentials, + self._stage_info, + self._cursor._connection, + self._command, + use_s3_regional_url=self._use_s3_regional_url, + ) + raise Exception(f"{self._stage_location_type} is an unknown stage type") + + def _transfer_accelerate_config(self) -> None: + if self._stage_location_type == S3_FS and self._file_metadata: + client = self._create_file_transfer_client(self._file_metadata[0]) + self._use_accelerate_endpoint = client.transfer_accelerate_config() + + def result(self): + converter_class = self._cursor._connection.converter_class + rowset = [] + if self._command_type == CMD_TYPE_UPLOAD: + if hasattr(self, "_results"): + for meta in self._results: + if meta.src_compression_type is not None: + src_compression_type = meta.src_compression_type.name + else: + src_compression_type = "NONE" + + if meta.dst_compression_type is not None: + dst_compression_type = meta.dst_compression_type.name + else: + dst_compression_type = "NONE" + + error_details: str = ( + repr(meta.error_details) + if meta.error_details is not None + else "" + ) + + src_file_size = ( + meta.src_file_size + if converter_class != SnowflakeConverterSnowSQL + else str(meta.src_file_size) + ) + + dst_file_size = ( + meta.dst_file_size + if converter_class != SnowflakeConverterSnowSQL + else str(meta.dst_file_size) + ) + + logger.debug( + f"raise_put_get_error: {self._raise_put_get_error}, " + f"{meta.result_status}, {type(meta.result_status)}, " + f"{ResultStatus.ERROR}, {type(ResultStatus.ERROR)}", + ) + if self._raise_put_get_error and error_details: + Error.errorhandler_wrapper( + self._cursor.connection, + self._cursor, + OperationalError, + { + "msg": "While putting file(s) there was an error: " + f"'{error_details}', this might be caused by " + f"your access to the blob storage provider, " + f"or by Snowflake.", + "errno": ER_FAILED_TO_UPLOAD_TO_STAGE, + }, + ) + rowset.append( + [ + meta.name, + meta.dst_file_name, + src_file_size, + dst_file_size, + src_compression_type, + dst_compression_type, + meta.result_status, + error_details, + ] + ) + return { + "rowtype": [ + result_text_column_desc("source"), + result_text_column_desc("target"), + result_fixed_column_desc("source_size"), + result_fixed_column_desc("target_size"), + result_text_column_desc("source_compression"), + result_text_column_desc("target_compression"), + result_text_column_desc("status"), + result_text_column_desc("message"), + ], + "rowset": sorted(rowset), + 
} + else: # DOWNLOAD + if hasattr(self, "_results"): + for meta in self._results: + dst_file_size = ( + meta.dst_file_size + if converter_class != SnowflakeConverterSnowSQL + else str(meta.dst_file_size) + ) + + error_details: str = ( + repr(meta.error_details) + if meta.error_details is not None + else "" + ) + + if self._raise_put_get_error and error_details: + Error.errorhandler_wrapper( + self._cursor.connection, + self._cursor, + OperationalError, + { + "msg": "While getting file(s) there was an error: " + f"'{error_details}', this might be caused by " + f"your access to the blob storage provider, " + f"or by Snowflake.", + "errno": ER_FAILED_TO_DOWNLOAD_FROM_STAGE, + }, + ) + + rowset.append( + [ + meta.dst_file_name, + dst_file_size, + meta.result_status, + error_details, + ] + ) + return { + "rowtype": [ + result_text_column_desc("file"), + result_fixed_column_desc("size"), + result_text_column_desc("status"), + result_text_column_desc("message"), + ], + "rowset": sorted(rowset), + } + + def _expand_filenames(self, locations): + canonical_locations = [] + for file_name in locations: + if self._command_type == CMD_TYPE_UPLOAD: + file_name = os.path.expanduser(file_name) + if not os.path.isabs(file_name): + file_name = os.path.join(GET_CWD(), file_name) + if ( + IS_WINDOWS + and len(file_name) > 2 + and file_name[0] == "/" + and file_name[2] == ":" + ): + # Windows path: /C:/data/file1.txt where it starts with slash + # followed by a drive letter and colon. + file_name = file_name[1:] + files = glob.glob(file_name) + canonical_locations += files + else: + canonical_locations.append(file_name) + + return canonical_locations + + def _init_encryption_material(self) -> None: + self._encryption_material = [] + if self._ret["data"].get("encryptionMaterial") is not None: + root_node = self._ret["data"]["encryptionMaterial"] + logger.debug(self._command_type) + + if self._command_type == CMD_TYPE_UPLOAD: + self._encryption_material.append( + SnowflakeFileEncryptionMaterial( + query_stage_master_key=root_node["queryStageMasterKey"], + query_id=root_node["queryId"], + smk_id=root_node["smkId"], + ) + ) + else: + for elem in root_node: + if elem is not None: + self._encryption_material.append( + SnowflakeFileEncryptionMaterial( + query_stage_master_key=elem["queryStageMasterKey"], + query_id=elem["queryId"], + smk_id=elem["smkId"], + ) + ) + + def _parse_command(self) -> None: + + if "data" not in self._ret: + Error.errorhandler_wrapper( + self._cursor.connection, + self._cursor, + DatabaseError, + { + "msg": "Failed to parse server's response", + "errno": ER_INVALID_STAGE_LOCATION, + }, + ) + + response = self._ret["data"] + + self._command_type = response["command"] + + self._init_encryption_material() + + if not isinstance(response.get("src_locations"), list): + Error.errorhandler_wrapper( + self._cursor.connection, + self._cursor, + DatabaseError, + { + "msg": "Failed to parse the location", + "errno": ER_INVALID_STAGE_LOCATION, + }, + ) + + self._src_locations = response["src_locations"] + + self._parallel = response.get("parallel", 1) + self._overwrite = self._force_put_overwrite or response.get("overwrite", False) + self._stage_location_type = response["stageInfo"]["locationType"].upper() + + if self._stage_location_type not in VALID_STORAGE: + Error.errorhandler_wrapper( + self._cursor.connection, + self._cursor, + OperationalError, + { + "msg": f"Destination location type is not valid: {self._stage_location_type}", + "errno": ER_INVALID_STAGE_FS, + }, + ) + + self._stage_location = 
response["stageInfo"]["location"] + self._stage_info = response["stageInfo"] + self._credentials = StorageCredential( + self._stage_info["creds"], self._cursor.connection, self._command + ) + self._presigned_urls = self._ret["data"].get("presignedUrls") + + if self._command_type == CMD_TYPE_UPLOAD: + if self._source_from_stream: + self._src_files = self._src_locations + else: + self._src_files = list(self._expand_filenames(self._src_locations)) + self._auto_compress = ( + "autoCompress" not in response or response["autoCompress"] + ) + self._source_compression = ( + response["sourceCompression"].lower() + if "sourceCompression" in response + else "" + ) + else: + self._src_files = list(self._src_locations) + self._src_file_to_encryption_material = {} + if len(response["src_locations"]) == len(self._encryption_material): + for idx, src_file in enumerate(self._src_files): + logger.debug(src_file) + self._src_file_to_encryption_material[ + src_file + ] = self._encryption_material[idx] + elif len(self._encryption_material) != 0: + # some encryption material exists. Zero means no encryption + Error.errorhandler_wrapper( + self._cursor.connection, + self._cursor, + InternalError, + { + "msg": ( + "The number of downloading files doesn't match " + f"the encryption materials: files={len(response['src_locations'])}, " + f"encmat={len(self._encryption_material)}" + ), + "errno": ER_INTERNAL_NOT_MATCH_ENCRYPT_MATERIAL, + }, + ) + + self._local_location = os.path.expanduser(response["localLocation"]) + if not os.path.isdir(self._local_location): + # NOTE: isdir follows the symlink + Error.errorhandler_wrapper( + self._cursor.connection, + self._cursor, + ProgrammingError, + { + "msg": f"The local path is not a directory: {self._local_location}", + "errno": ER_LOCAL_PATH_NOT_DIRECTORY, + }, + ) + + def _init_file_metadata(self) -> None: + logger.debug(f"command type: {self._command_type}") + + if self._command_type == CMD_TYPE_UPLOAD: + if not self._src_files: + file_name = ( + self._ret["data"]["src_locations"] + if "data" in self._ret and "src_locations" in self._ret["data"] + else "None" + ) + Error.errorhandler_wrapper( + self._cursor.connection, + self._cursor, + ProgrammingError, + { + "msg": f"File doesn't exist: {file_name}", + "errno": ER_FILE_NOT_EXISTS, + }, + ) + if self._source_from_stream: + self._file_metadata.append( + SnowflakeFileMeta( + name=os.path.basename(self._src_files[0]), + src_file_name=self._src_files[0], + intermediate_stream=self._source_from_stream, + src_file_size=self._source_from_stream.seek(0, os.SEEK_END), + stage_location_type=self._stage_location_type, + encryption_material=self._encryption_material[0] + if len(self._encryption_material) > 0 + else None, + ) + ) + self._source_from_stream.seek(0) + else: + for file_name in self._src_files: + if not os.path.exists(file_name): + Error.errorhandler_wrapper( + self._cursor.connection, + self._cursor, + ProgrammingError, + { + "msg": f"File doesn't exist: {file_name}", + "errno": ER_FILE_NOT_EXISTS, + }, + ) + elif os.path.isdir(file_name): + Error.errorhandler_wrapper( + self._cursor.connection, + self._cursor, + ProgrammingError, + { + "msg": f"Not a file but a directory: {file_name}", + "errno": ER_FILE_NOT_EXISTS, + }, + ) + statinfo = os.stat(file_name) + self._file_metadata.append( + SnowflakeFileMeta( + name=os.path.basename(file_name), + src_file_name=file_name, + src_file_size=statinfo.st_size, + stage_location_type=self._stage_location_type, + encryption_material=self._encryption_material[0] + if 
len(self._encryption_material) > 0 + else None, + ) + ) + elif self._command_type == CMD_TYPE_DOWNLOAD: + for idx, file_name in enumerate(self._src_files): + if not file_name: + continue + first_path_sep = file_name.find("/") + dst_file_name = ( + file_name[first_path_sep + 1 :] + if first_path_sep >= 0 + else file_name + ) + url = None + if self._presigned_urls and idx < len(self._presigned_urls): + url = self._presigned_urls[idx] + self._file_metadata.append( + SnowflakeFileMeta( + name=os.path.basename(file_name), + src_file_name=file_name, + dst_file_name=dst_file_name, + stage_location_type=self._stage_location_type, + local_location=self._local_location, + presigned_url=url, + encryption_material=self._src_file_to_encryption_material[ + file_name + ] + if file_name in self._src_file_to_encryption_material + else None, + ) + ) + + def _process_file_compression_type(self) -> None: + user_specified_source_compression = None + if self._source_compression == "auto_detect": + auto_detect = True + elif self._source_compression == "none": + auto_detect = False + else: + user_specified_source_compression = lookup_by_mime_sub_type( + self._source_compression + ) + if ( + user_specified_source_compression is None + or not user_specified_source_compression.is_supported + ): + Error.errorhandler_wrapper( + self._cursor.connection, + self._cursor, + ProgrammingError, + { + "msg": f"Feature is not supported: {user_specified_source_compression}", + "errno": ER_COMPRESSION_NOT_SUPPORTED, + }, + ) + + auto_detect = False + + for m in self._file_metadata: + file_name = m.src_file_name + + current_file_compression_type: CompressionType | None = None + if auto_detect: + mimetypes.init() + _, encoding = mimetypes.guess_type(file_name) + + if encoding is None: + test = None + if not self._source_from_stream: + with open(file_name, "rb") as f: + test = f.read(4) + else: + test = self._source_from_stream.read(4) + self._source_from_stream.seek(0) + if file_name.endswith(".br"): + encoding = "br" + elif test and test[:3] == b"ORC": + encoding = "orc" + elif test and test == b"PAR1": + encoding = "parquet" + elif test and (int(binascii.hexlify(test), 16) == 0x28B52FFD): + encoding = "zstd" + + if encoding is not None: + logger.debug( + f"detected the encoding {encoding}: file={file_name}", + ) + current_file_compression_type = lookup_by_mime_sub_type(encoding) + else: + logger.debug(f"no file encoding was detected: file={file_name}") + + if ( + current_file_compression_type is not None + and not current_file_compression_type.is_supported + ): + Error.errorhandler_wrapper( + self._cursor.connection, + self._cursor, + ProgrammingError, + { + "msg": f"Feature is not supported: {current_file_compression_type}", + "errno": ER_COMPRESSION_NOT_SUPPORTED, + }, + ) + else: + current_file_compression_type = user_specified_source_compression + + if current_file_compression_type is not None: + m.src_compression_type = current_file_compression_type + if current_file_compression_type.is_supported: + m.dst_compression_type = current_file_compression_type + m.require_compress = False + m.dst_file_name = m.name + else: + Error.errorhandler_wrapper( + self._cursor.connection, + self._cursor, + ProgrammingError, + { + "msg": f"Feature is not supported: {current_file_compression_type}", + "errno": ER_COMPRESSION_NOT_SUPPORTED, + }, + ) + else: + # src is not compressed but the destination want to be + # compressed unless the users disable it + m.require_compress = self._auto_compress + m.src_compression_type = None + if 
self._auto_compress:
+                m.dst_file_name = m.name + CompressionTypes["GZIP"].file_extension
+                m.dst_compression_type = CompressionTypes["GZIP"]
+            else:
+                m.dst_file_name = m.name
+                m.dst_compression_type = None
diff --git a/src/snowflake/connector/file_util.py b/src/snowflake/connector/file_util.py
new file mode 100644
index 000000000..7aa718035
--- /dev/null
+++ b/src/snowflake/connector/file_util.py
@@ -0,0 +1,164 @@
+#
+# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved.
+#
+
+from __future__ import annotations
+
+import base64
+import gzip
+import os
+import shutil
+import struct
+from io import BytesIO
+from logging import getLogger
+from typing import IO
+
+from Cryptodome.Hash import SHA256
+from cryptography.hazmat.backends import default_backend
+from cryptography.hazmat.primitives import hashes
+
+from .constants import UTF8, kilobyte
+
+logger = getLogger(__name__)
+
+
+class SnowflakeFileUtil:
+    @staticmethod
+    def get_digest_and_size(src: IO[bytes]) -> tuple[str, int]:
+        """Gets stream digest and size.
+
+        Args:
+            src: The input stream.
+
+        Returns:
+            Tuple of src's digest and src's size in bytes.
+        """
+        use_openssl_only = os.getenv("SF_USE_OPENSSL_ONLY", "False") == "True"
+        CHUNK_SIZE = 64 * kilobyte
+        if not use_openssl_only:
+            m = SHA256.new()
+        else:
+            backend = default_backend()
+            chosen_hash = hashes.SHA256()
+            hasher = hashes.Hash(chosen_hash, backend)
+        while True:
+            chunk = src.read(CHUNK_SIZE)
+            if chunk == b"":
+                break
+            if not use_openssl_only:
+                m.update(chunk)
+            else:
+                hasher.update(chunk)
+
+        if not use_openssl_only:
+            digest = base64.standard_b64encode(m.digest()).decode(UTF8)
+        else:
+            digest = base64.standard_b64encode(hasher.finalize()).decode(UTF8)
+
+        size = src.tell()
+        src.seek(0)
+        return digest, size
+
+    @staticmethod
+    def compress_with_gzip_from_stream(src_stream: IO[bytes]) -> tuple[IO[bytes], int]:
+        """Compresses a stream of bytes with GZIP.
+
+        Args:
+            src_stream: bytes stream
+
+        Returns:
+            A tuple of byte stream and size.
+        """
+        compressed_data = gzip.compress(src_stream.read())
+        src_stream.seek(0)
+        return BytesIO(compressed_data), len(compressed_data)
+
+    @staticmethod
+    def compress_file_with_gzip(file_name: str, tmp_dir: str) -> tuple[str, int]:
+        """Compresses a file with GZIP.
+
+        Args:
+            file_name: Local path to file to be compressed.
+            tmp_dir: Temporary directory where a GZIP file will be created.
+
+        Returns:
+            A tuple of gzip file name and size.
+        """
+        base_name = os.path.basename(file_name)
+        gzip_file_name = os.path.join(tmp_dir, base_name + "_c.gz")
+        logger.debug("gzip file: %s, original file: %s", gzip_file_name, file_name)
+        with open(file_name, "rb") as fr:
+            with gzip.GzipFile(gzip_file_name, "wb") as fw:
+                shutil.copyfileobj(fr, fw, length=64 * kilobyte)
+        SnowflakeFileUtil.normalize_gzip_header(gzip_file_name)
+
+        statinfo = os.stat(gzip_file_name)
+        return gzip_file_name, statinfo.st_size
+
+    @staticmethod
+    def normalize_gzip_header(gzip_file_name: str) -> None:
+        """Normalizes GZIP file header.
+
+        For consistent file digest, this removes creation timestamp and file name from the header.
+        For more information see http://www.zlib.org/rfc-gzip.html#file-format
+
+        Args:
+            gzip_file_name: Local path of gzip file.
+        """
+        with open(gzip_file_name, "r+b") as f:
+            # reset the timestamp in gzip header
+            f.seek(3, 0)
+            # Read flags bit
+            flag_byte = f.read(1)
+            flags = struct.unpack("B", flag_byte)[0]
+            f.seek(4, 0)
+            f.write(struct.pack("<I", 0))
+            # Zero out the original file name in the header, if the FNAME
+            # flag says one is present, so that identical inputs always
+            # produce identical gzip bytes.
+            if flags & 8:
+                f.seek(10, 0)
+                # skip over the extra field when the FEXTRA flag is set
+                if flags & 4:
+                    xlen_bytes = f.read(2)
+                    xlen = struct.unpack("<H", xlen_bytes)[0]
+                    f.seek(10 + 2 + xlen)
+                byte = f.read(1)
+                while byte:
+                    value = struct.unpack("B", byte)[0]
+                    if value == 0:
+                        break
+                    f.seek(-1, 1)  # step back over the byte just read
+                    f.write(struct.pack("B", 0))
+                    byte = f.read(1)
+
+    @staticmethod
+    def get_digest_and_size_for_stream(src_stream: IO[bytes]) -> tuple[str, int]:
+        """Gets stream digest and size.
+ + Args: + src_stream: The input source stream. + + Returns: + Tuple of src_stream's digest and src_stream's size in bytes. + """ + digest, size = SnowflakeFileUtil.get_digest_and_size(src_stream) + logger.debug("getting digest and size for stream: %s, %s", digest, size) + return digest, size + + @staticmethod + def get_digest_and_size_for_file(file_name: str) -> tuple[str, int]: + """Gets file digest and size. + + Args: + file_name: Local path to a file. + + Returns: + Tuple of file's digest and file size in bytes. + """ + digest, size = None, None + with open(file_name, "rb") as src: + digest, size = SnowflakeFileUtil.get_digest_and_size(src) + logger.debug( + "getting digest and size: %s, %s, file=%s", digest, size, file_name + ) + return digest, size diff --git a/src/snowflake/connector/gcs_storage_client.py b/src/snowflake/connector/gcs_storage_client.py new file mode 100644 index 000000000..572888f98 --- /dev/null +++ b/src/snowflake/connector/gcs_storage_client.py @@ -0,0 +1,396 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +import json +import os +from logging import getLogger +from typing import TYPE_CHECKING, Any, NamedTuple + +from .compat import quote +from .constants import ( + FILE_PROTOCOL, + HTTP_HEADER_CONTENT_ENCODING, + FileHeader, + ResultStatus, + kilobyte, +) +from .encryption_util import EncryptionMetadata +from .storage_client import SnowflakeStorageClient +from .vendored import requests + +if TYPE_CHECKING: # pragma: no cover + from .connection import SnowflakeConnection + from .file_transfer_agent import SnowflakeFileMeta, StorageCredential + +logger = getLogger(__name__) + +GCS_METADATA_PREFIX = "x-goog-meta-" +GCS_METADATA_SFC_DIGEST = GCS_METADATA_PREFIX + "sfc-digest" +GCS_METADATA_MATDESC_KEY = GCS_METADATA_PREFIX + "matdesc" +GCS_METADATA_ENCRYPTIONDATAPROP = GCS_METADATA_PREFIX + "encryptiondata" +GCS_FILE_HEADER_DIGEST = "gcs-file-header-digest" +GCS_FILE_HEADER_CONTENT_LENGTH = "gcs-file-header-content-length" +GCS_FILE_HEADER_ENCRYPTION_METADATA = "gcs-file-header-encryption-metadata" +CONTENT_CHUNK_SIZE = 10 * kilobyte +ACCESS_TOKEN = "GCS_ACCESS_TOKEN" + + +class GcsLocation(NamedTuple): + bucket_name: str + path: str + + +class SnowflakeGCSRestClient(SnowflakeStorageClient): + def __init__( + self, + meta: SnowflakeFileMeta, + credentials: StorageCredential, + stage_info: dict[str, Any], + cnx: SnowflakeConnection, + command: str, + use_s3_regional_url=False, + ) -> None: + """Creates a client object with given stage credentials. + + Args: + stage_info: Access credentials and info of a stage. + + Returns: + The client to communicate with GCS. 
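The digest helpers above always read in 64 KiB chunks, base64-encode the raw SHA-256 digest, and rewind the stream before returning. A minimal standalone sketch of the same computation, using only the standard library's `hashlib` in place of the `Cryptodome`/`cryptography` branches:

```
import base64
import hashlib
from io import BytesIO
from typing import IO

CHUNK_SIZE = 64 * 1024  # 64 KiB, matching the helper above


def digest_and_size(src: IO[bytes]) -> tuple[str, int]:
    """Return (base64 SHA-256 digest, size in bytes) and rewind the stream."""
    h = hashlib.sha256()
    while True:
        chunk = src.read(CHUNK_SIZE)
        if not chunk:
            break
        h.update(chunk)
    size = src.tell()  # position after reading everything == total size
    src.seek(0)        # rewind so the caller can reuse the stream
    return base64.standard_b64encode(h.digest()).decode("utf-8"), size


digest, size = digest_and_size(BytesIO(b"hello world"))
print(digest, size)  # uU0nuZNNPgilLlLX2n2r+sSE7+N6U4DukIj3rOLvzek= 11
```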
+ """ + super().__init__( + meta, stage_info, -1, credentials=credentials, chunked_transfer=False + ) + self.stage_info = stage_info + self._command = command + self.meta = meta + self._cursor = cnx.cursor() + # presigned_url in meta is for downloading + self.presigned_url: str = meta.presigned_url or stage_info.get("presignedUrl") + self.security_token = credentials.creds.get("GCS_ACCESS_TOKEN") + + if self.security_token: + logger.debug(f"len(GCS_ACCESS_TOKEN): {len(self.security_token)}") + else: + logger.debug("No access token received from GS, requesting presigned url") + self._update_presigned_url() + + def _has_expired_token(self, response: requests.Response) -> bool: + return self.security_token and response.status_code == 401 + + def _has_expired_presigned_url(self, response: requests.Response) -> bool: + # Presigned urls can be generated for any xml-api operation + # offered by GCS. Hence the error codes expected are similar + # to xml api. + # https://cloud.google.com/storage/docs/xml-api/reference-status + + presigned_url_expired = ( + not self.security_token + ) and response.status_code == 400 + if presigned_url_expired and self.last_err_is_presigned_url: + logger.debug("Presigned url expiration error two times in a row.") + response.raise_for_status() + self.last_err_is_presigned_url = presigned_url_expired + return presigned_url_expired + + def _upload_chunk(self, chunk_id: int, chunk: bytes) -> None: + meta = self.meta + + content_encoding = "" + if meta.dst_compression_type is not None: + content_encoding = meta.dst_compression_type.name.lower() + + # We set the contentEncoding to blank for GZIP files. We don't + # want GCS to think our gzip files are gzips because it makes + # them download uncompressed, and none of the other providers do + # that. There's essentially no way for us to prevent that + # behavior. Bad Google. 
+ if content_encoding and content_encoding == "gzip": + content_encoding = "" + + gcs_headers = { + HTTP_HEADER_CONTENT_ENCODING: content_encoding, + GCS_METADATA_SFC_DIGEST: meta.sha256_digest, + } + + if self.encryption_metadata: + gcs_headers.update( + { + GCS_METADATA_ENCRYPTIONDATAPROP: json.dumps( + { + "EncryptionMode": "FullBlob", + "WrappedContentKey": { + "KeyId": "symmKey1", + "EncryptedKey": self.encryption_metadata.key, + "Algorithm": "AES_CBC_256", + }, + "EncryptionAgent": { + "Protocol": "1.0", + "EncryptionAlgorithm": "AES_CBC_256", + }, + "ContentEncryptionIV": self.encryption_metadata.iv, + "KeyWrappingMetadata": {"EncryptionLibrary": "Java 5.3.0"}, + } + ), + GCS_METADATA_MATDESC_KEY: self.encryption_metadata.matdesc, + } + ) + + def generate_url_and_rest_args() -> tuple[ + str, dict[str, dict[str | Any, str | None] | bytes] + ]: + if not self.presigned_url: + upload_url = self.generate_file_url( + self.stage_info["location"], meta.dst_file_name.lstrip("/") + ) + access_token = self.security_token + else: + upload_url = self.presigned_url + access_token: str | None = None + if access_token: + gcs_headers.update({"Authorization": f"Bearer {access_token}"}) + rest_args = {"headers": gcs_headers, "data": chunk} + return upload_url, rest_args + + response = self._send_request_with_retry( + "PUT", generate_url_and_rest_args, chunk_id + ) + response.raise_for_status() + meta.gcs_file_header_digest = gcs_headers[GCS_METADATA_SFC_DIGEST] + meta.gcs_file_header_content_length = meta.upload_size + meta.gcs_file_header_encryption_metadata = json.loads( + gcs_headers.get(GCS_METADATA_ENCRYPTIONDATAPROP, "null") + ) + + def download_chunk(self, chunk_id: int) -> None: + meta = self.meta + + def generate_url_and_rest_args() -> tuple[ + str, dict[str, dict[str, str] | bool] + ]: + gcs_headers = {} + if not self.presigned_url: + download_url = self.generate_file_url( + self.stage_info["location"], meta.src_file_name.lstrip("/") + ) + access_token = self.security_token + gcs_headers["Authorization"] = f"Bearer {access_token}" + else: + download_url = self.presigned_url + rest_args = {"headers": gcs_headers, "stream": True} + return download_url, rest_args + + response = self._send_request_with_retry( + "GET", generate_url_and_rest_args, chunk_id + ) + response.raise_for_status() + + self.write_downloaded_chunk(chunk_id, response.content) + + encryption_metadata = None + + if response.headers.get(GCS_METADATA_ENCRYPTIONDATAPROP, None): + encryptiondata = json.loads( + response.headers[GCS_METADATA_ENCRYPTIONDATAPROP] + ) + + if encryptiondata: + encryption_metadata = EncryptionMetadata( + key=encryptiondata["WrappedContentKey"]["EncryptedKey"], + iv=encryptiondata["ContentEncryptionIV"], + matdesc=response.headers[GCS_METADATA_MATDESC_KEY] + if GCS_METADATA_MATDESC_KEY in response.headers + else None, + ) + + meta.gcs_file_header_digest = response.headers.get(GCS_METADATA_SFC_DIGEST) + meta.gcs_file_header_content_length = len(response.content) + meta.gcs_file_header_encryption_metadata = encryption_metadata + + def finish_download(self) -> None: + super().finish_download() + # Sadly, we can only determine the src file size after we've + # downloaded it, unlike the other cloud providers where the + # metadata can be read beforehand. + self.meta.src_file_size = os.path.getsize(self.intermediate_dst_path) + + def _update_presigned_url(self) -> None: + """Updates the file metas with presigned urls if any. 
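The `encryptiondata` metadata built in `_upload_chunk` above (and parsed back in `download_chunk`) is plain JSON carried in the `x-goog-meta-encryptiondata` header. A round-trip sketch with placeholder key/IV values shows exactly what goes over the wire:

```
import json

# Placeholder values; in the connector these come from EncryptionMetadata.
wrapped_key_b64 = "AAAA..."  # hypothetical wrapped content key
iv_b64 = "BBBB..."           # hypothetical content-encryption IV

header_value = json.dumps(
    {
        "EncryptionMode": "FullBlob",
        "WrappedContentKey": {
            "KeyId": "symmKey1",
            "EncryptedKey": wrapped_key_b64,
            "Algorithm": "AES_CBC_256",
        },
        "EncryptionAgent": {"Protocol": "1.0", "EncryptionAlgorithm": "AES_CBC_256"},
        "ContentEncryptionIV": iv_b64,
        "KeyWrappingMetadata": {"EncryptionLibrary": "Java 5.3.0"},
    }
)

# Parsing it back, as download_chunk does:
parsed = json.loads(header_value)
assert parsed["WrappedContentKey"]["EncryptedKey"] == wrapped_key_b64
```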
+ + Currently only the file metas generated for PUT/GET on a GCP account need the presigned urls. + """ + logger.debug("Updating presigned url") + + # Rewrite the command such that a new PUT call is made for each file + # represented by the regex (if present) separately. This is the only + # way to get the presigned url for that file. + file_path_to_be_replaced = self._get_local_file_path_from_put_command() + + if not file_path_to_be_replaced: + # This prevents GET statements to proceed + return + + # At this point the connector has already figured out and + # validated that the local file exists and has also decided + # upon the destination file name and the compression type. + # The only thing that's left to do is to get the presigned + # url for the destination file. If the command originally + # referred to a single file, then the presigned url got in + # that case is simply ignore, since the file name is not what + # we want. + + # GS only looks at the file name at the end of local file + # path to figure out the remote object name. Hence the prefix + # for local path is not necessary in the reconstructed command. + file_path_to_replace_with = self.meta.dst_file_name + command_with_single_file = self._command + command_with_single_file = command_with_single_file.replace( + file_path_to_be_replaced, file_path_to_replace_with + ) + + logger.debug("getting presigned url for %s", file_path_to_replace_with) + ret = self._cursor._execute_helper(command_with_single_file) + + stage_info = ret.get("data", dict()).get("stageInfo", dict()) + self.meta.presigned_url = stage_info.get("presignedUrl") + self.presigned_url = stage_info.get("presignedUrl") + + def _get_local_file_path_from_put_command(self) -> str | None: + """Get the local file path from PUT command (Logic adopted from JDBC, written by Polita). + + Args: + command: Command to be parsed and get the local file path out of. + + Returns: + The local file path. + """ + command = self._command + if FILE_PROTOCOL not in self._command or not self._cursor.PUT_SQL_RE.match( + command + ): + return None + + file_path_begin_index = command.find(FILE_PROTOCOL) + is_file_path_quoted = command[file_path_begin_index - 1] == "'" + file_path_begin_index += len(FILE_PROTOCOL) + + file_path = "" + + if is_file_path_quoted: + file_path_end_index = command.find("'", file_path_begin_index) + + if file_path_end_index > file_path_begin_index: + file_path = command[file_path_begin_index:file_path_end_index] + else: + index_list = [] + for delimiter in [" ", "\n", ";"]: + index = command.find(delimiter, file_path_begin_index) + if index != -1: + index_list += [index] + + file_path_end_index = min(index_list) if index_list else -1 + + if file_path_end_index > file_path_begin_index: + file_path = command[file_path_begin_index:file_path_end_index] + elif file_path_end_index == -1: + file_path = command[file_path_begin_index:] + + return file_path + + def get_file_header(self, filename: str) -> FileHeader | None: + """Gets the remote file's metadata. + + Args: + filename: Not applicable to GCS. + + Returns: + The file header, with expected properties populated or None, based on how the request goes with the + storage provider. + + Notes: + Sometimes this method is called to verify that the file has indeed been uploaded. In cases of presigned + url, we have no way of verifying that, except with the http status code of 200 which we have already + confirmed and set the meta.result_status = UPLOADED/DOWNLOADED. 
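The `file://` path extraction above can be exercised on its own. This sketch keeps only the quoting/delimiter logic (the real method additionally checks the cursor's `PUT_SQL_RE` before parsing):

```
from __future__ import annotations

FILE_PROTOCOL = "file://"


def local_path_from_put(command: str) -> str | None:
    """Simplified sketch of _get_local_file_path_from_put_command."""
    begin = command.find(FILE_PROTOCOL)
    if begin == -1:
        return None
    quoted = command[begin - 1] == "'"
    begin += len(FILE_PROTOCOL)
    if quoted:
        end = command.find("'", begin)
        return command[begin:end] if end > begin else ""
    # unquoted: path ends at the first space/newline/semicolon, or runs to EOL
    ends = [i for d in (" ", "\n", ";") if (i := command.find(d, begin)) != -1]
    end = min(ends) if ends else -1
    return command[begin:end] if end > begin else command[begin:]


print(local_path_from_put("PUT file:///tmp/data/file.csv @~/stage"))  # /tmp/data/file.csv
```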
+ """ + meta = self.meta + if ( + meta.result_status == ResultStatus.UPLOADED + or meta.result_status == ResultStatus.DOWNLOADED + ): + return FileHeader( + digest=meta.gcs_file_header_digest, + content_length=meta.gcs_file_header_content_length, + encryption_metadata=meta.gcs_file_header_encryption_metadata, + ) + elif self.presigned_url: + meta.result_status = ResultStatus.NOT_FOUND_FILE + else: + + def generate_url_and_authenticated_headers(): + url = self.generate_file_url( + self.stage_info["location"], filename.lstrip("/") + ) + gcs_headers = {"Authorization": f"Bearer {self.security_token}"} + rest_args = {"headers": gcs_headers} + return url, rest_args + + retry_id = "HEAD" + self.retry_count[retry_id] = 0 + response = self._send_request_with_retry( + "HEAD", generate_url_and_authenticated_headers, retry_id + ) + if response.status_code == 404: + meta.result_status = ResultStatus.NOT_FOUND_FILE + return None + elif response.status_code == 200: + digest = response.headers.get(GCS_METADATA_SFC_DIGEST, None) + content_length = response.headers.get("content-length", None) + + encryption_metadata = EncryptionMetadata("", "", "") + if response.headers.get(GCS_METADATA_ENCRYPTIONDATAPROP, None): + encryption_data = json.loads( + response.headers[GCS_METADATA_ENCRYPTIONDATAPROP] + ) + + if encryption_data: + encryption_metadata = EncryptionMetadata( + key=encryption_data["WrappedContentKey"]["EncryptedKey"], + iv=encryption_data["ContentEncryptionIV"], + matdesc=response.headers[GCS_METADATA_MATDESC_KEY] + if GCS_METADATA_MATDESC_KEY in response.headers + else None, + ) + meta.result_status = ResultStatus.UPLOADED + return FileHeader( + digest=digest, + content_length=content_length, + encryption_metadata=encryption_metadata, + ) + response.raise_for_status() + return None + + @staticmethod + def extract_bucket_name_and_path(stage_location: str) -> GcsLocation: + container_name = stage_location + path = "" + + # split stage location as bucket name and path + if "/" in stage_location: + container_name = stage_location[0 : stage_location.index("/")] + path = stage_location[stage_location.index("/") + 1 :] + if path and not path.endswith("/"): + path += "/" + + return GcsLocation(bucket_name=container_name, path=path) + + @staticmethod + def generate_file_url(stage_location: str, filename: str) -> str: + gcs_location = SnowflakeGCSRestClient.extract_bucket_name_and_path( + stage_location + ) + full_file_path = f"{gcs_location.path}{filename}" + return f"https://storage.googleapis.com/{gcs_location.bucket_name}/{quote(full_file_path)}" diff --git a/src/snowflake/connector/gzip_decoder.py b/src/snowflake/connector/gzip_decoder.py new file mode 100644 index 000000000..deaab6d6e --- /dev/null +++ b/src/snowflake/connector/gzip_decoder.py @@ -0,0 +1,87 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +import io +import subprocess +import zlib +from logging import getLogger +from typing import IO + +CHUNK_SIZE = 16384 +MAGIC_NUMBER = 16 # magic number from .vendored.requests/packages/urllib3/response.py + +logger = getLogger(__name__) + + +def decompress_raw_data(raw_data_fd: IO, add_bracket: bool = True) -> bytes: + """Decompresses raw data from file like object with zlib. + + Args: + raw_data_fd: File descriptor object. + add_bracket: Whether, or not to add brackets around the output. (Default value = True) + + Returns: + A byte array of the decompressed file. 
+ """ + obj = zlib.decompressobj(MAGIC_NUMBER + zlib.MAX_WBITS) + writer = io.BytesIO() + if add_bracket: + writer.write(b"[") + d = raw_data_fd.read(CHUNK_SIZE) + while d: + writer.write(obj.decompress(d)) + while obj.unused_data != b"": + unused_data = obj.unused_data + obj = zlib.decompressobj(MAGIC_NUMBER + zlib.MAX_WBITS) + writer.write(obj.decompress(unused_data)) + d = raw_data_fd.read(CHUNK_SIZE) + writer.write(obj.flush()) + if add_bracket: + writer.write(b"]") + return writer.getvalue() + + +def decompress_raw_data_by_zcat(raw_data_fd: IO, add_bracket: bool = True) -> bytes: + """Experimental: Decompresses raw data from file like object with zcat. Otherwise same as decompress_raw_data. + + Args: + raw_data_fd: File descriptor object. + add_bracket: Whether, or not to add brackets around the output. (Default value = True) + + Returns: + A byte array of the decompressed file. + """ + writer = io.BytesIO() + if add_bracket: + writer.write(b"[") + p = subprocess.Popen(["zcat"], stdin=subprocess.PIPE, stdout=subprocess.PIPE) + writer.write(p.communicate(input=raw_data_fd.read())[0]) + if add_bracket: + writer.write(b"]") + return writer.getvalue() + + +def decompress_raw_data_to_unicode_stream(raw_data_fd: IO) -> str: + """Decompresses a raw data in file like object and yields a Unicode string. + + Args: + raw_data_fd: File descriptor object. + + Yields: + A string of the decompressed file in chunks. + """ + obj = zlib.decompressobj(MAGIC_NUMBER + zlib.MAX_WBITS) + yield "[" + d = raw_data_fd.read(CHUNK_SIZE) + while d: + yield obj.decompress(d).decode("utf-8") + while obj.unused_data != b"": + unused_data = obj.unused_data + obj = zlib.decompressobj(MAGIC_NUMBER + zlib.MAX_WBITS) + yield obj.decompress(unused_data).decode("utf-8") + d = raw_data_fd.read(CHUNK_SIZE) + yield obj.flush().decode("utf-8") + "]" diff --git a/src/snowflake/connector/local_storage_client.py b/src/snowflake/connector/local_storage_client.py new file mode 100644 index 000000000..8de8c2e25 --- /dev/null +++ b/src/snowflake/connector/local_storage_client.py @@ -0,0 +1,88 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
+# + +from __future__ import annotations + +import os +from logging import getLogger +from math import ceil +from typing import TYPE_CHECKING, Any + +from .constants import ResultStatus +from .storage_client import SnowflakeStorageClient +from .vendored import requests + +if TYPE_CHECKING: # pragma: no cover + from .file_transfer_agent import SnowflakeFileMeta + +logger = getLogger(__name__) + + +class SnowflakeLocalStorageClient(SnowflakeStorageClient): + def __init__( + self, + meta: SnowflakeFileMeta, + stage_info: dict[str, Any], + chunk_size: int, + use_s3_regional_url: bool = False, + ) -> None: + super().__init__(meta, stage_info, chunk_size) + self.data_file = meta.src_file_name + self.full_dst_file_name: str = os.path.join( + stage_info["location"], os.path.basename(meta.dst_file_name) + ) + + def get_file_header(self, filename: str) -> None: + """ + Notes: + Checks whether the file exits in specified directory, does not return FileHeader + """ + # TODO return a FileHeader sometime + target_dir = os.path.join( + os.path.expanduser(self.stage_info["location"]), + filename, + ) + if os.path.isfile(target_dir): + self.meta.result_status = ResultStatus.UPLOADED + else: + self.meta.result_status = ResultStatus.NOT_FOUND_FILE + + def download_chunk(self, chunk_id: int) -> None: + with open(self.full_dst_file_name, "rb") as sfd: + with open( + os.path.join( + self.meta.local_location, os.path.basename(self.meta.dst_file_name) + ), + "wb", + ) as tfd: + tfd.seek(chunk_id * self.chunk_size) + sfd.seek(chunk_id * self.chunk_size) + tfd.write(sfd.read(self.chunk_size)) + + def finish_download(self) -> None: + self.meta.dst_file_size = os.stat(self.full_dst_file_name).st_size + self.meta.result_status = ResultStatus.DOWNLOADED + + def _has_expired_token(self, response: requests.Response) -> bool: + return False + + def prepare_upload(self) -> None: + super().prepare_upload() + if ( + self.meta.upload_size < self.meta.multipart_threshold + or not self.chunked_transfer + ): + self.num_of_chunks = 1 + else: + self.num_of_chunks = ceil(self.meta.upload_size / self.chunk_size) + + def _upload_chunk(self, chunk_id: int, chunk: bytes) -> None: + with open(self.full_dst_file_name, "wb") as tfd: + tfd.seek(chunk_id * self.chunk_size) + tfd.write(chunk) + + def finish_upload(self) -> None: + self.meta.result_status = ResultStatus.UPLOADED + self.meta.dst_file_size = self.meta.upload_size diff --git a/src/snowflake/connector/mixin.py b/src/snowflake/connector/mixin.py new file mode 100644 index 000000000..97bc9a61f --- /dev/null +++ b/src/snowflake/connector/mixin.py @@ -0,0 +1,10 @@ +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + + +class UnicodeMixin: + """Mixin class to handle defining the proper __str__/__unicode__ methods in Python 2 or 3.""" + + def __str__(self): + return self.__unicode__() diff --git a/src/snowflake/connector/network.py b/src/snowflake/connector/network.py new file mode 100644 index 000000000..bae5cc93c --- /dev/null +++ b/src/snowflake/connector/network.py @@ -0,0 +1,1145 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +import collections +import contextlib +import gzip +import itertools +import json +import logging +import time +import traceback +import uuid +from io import BytesIO +from threading import Lock +from typing import TYPE_CHECKING + +import OpenSSL.SSL + +from . 
import ssl_wrap_socket +from .compat import ( + BAD_GATEWAY, + BAD_REQUEST, + FORBIDDEN, + GATEWAY_TIMEOUT, + INTERNAL_SERVER_ERROR, + METHOD_NOT_ALLOWED, + OK, + REQUEST_TIMEOUT, + SERVICE_UNAVAILABLE, + UNAUTHORIZED, + BadStatusLine, + IncompleteRead, + urlencode, + urlparse, +) +from .constants import ( + HTTP_HEADER_ACCEPT, + HTTP_HEADER_CONTENT_TYPE, + HTTP_HEADER_SERVICE_NAME, + HTTP_HEADER_USER_AGENT, +) +from .description import ( + CLIENT_NAME, + CLIENT_VERSION, + COMPILER, + IMPLEMENTATION, + OPERATING_SYSTEM, + PLATFORM, + PYTHON_VERSION, + SNOWFLAKE_CONNECTOR_VERSION, +) +from .errorcode import ( + ER_CONNECTION_IS_CLOSED, + ER_CONNECTION_TIMEOUT, + ER_FAILED_TO_CONNECT_TO_DB, + ER_FAILED_TO_RENEW_SESSION, + ER_FAILED_TO_REQUEST, +) +from .errors import ( + BadGatewayError, + BadRequest, + DatabaseError, + Error, + ForbiddenError, + GatewayTimeoutError, + InterfaceError, + InternalServerError, + MethodNotAllowed, + OperationalError, + OtherHTTPRetryableError, + ProgrammingError, + ServiceUnavailableError, +) +from .sqlstate import ( + SQLSTATE_CONNECTION_NOT_EXISTS, + SQLSTATE_CONNECTION_REJECTED, + SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED, + SQLSTATE_IO_ERROR, +) +from .telemetry_oob import TelemetryService +from .time_util import ( + DEFAULT_MASTER_VALIDITY_IN_SECONDS, + DecorrelateJitterBackoff, + get_time_millis, +) +from .tool.probe_connection import probe_connection +from .vendored import requests +from .vendored.requests import Response, Session +from .vendored.requests.adapters import HTTPAdapter +from .vendored.requests.auth import AuthBase +from .vendored.requests.exceptions import ( + ConnectionError, + ConnectTimeout, + InvalidProxyURL, + ReadTimeout, + SSLError, +) +from .vendored.requests.utils import prepend_scheme_if_needed, select_proxy +from .vendored.urllib3.exceptions import ProtocolError +from .vendored.urllib3.util.url import parse_url + +if TYPE_CHECKING: + from .connection import SnowflakeConnection +logger = logging.getLogger(__name__) + +""" +Monkey patch for PyOpenSSL Socket wrapper +""" +ssl_wrap_socket.inject_into_urllib3() + +# known applications +APPLICATION_SNOWSQL = "SnowSQL" + +# requests parameters +REQUESTS_RETRY = 1 # requests library builtin retry +DEFAULT_SOCKET_CONNECT_TIMEOUT = 1 * 60 # don't reduce less than 45 seconds + +# return codes +QUERY_IN_PROGRESS_CODE = "333333" # GS code: the query is in progress +QUERY_IN_PROGRESS_ASYNC_CODE = "333334" # GS code: the query is detached + +ID_TOKEN_EXPIRED_GS_CODE = "390110" +SESSION_EXPIRED_GS_CODE = "390112" # GS code: session expired. need to renew +MASTER_TOKEN_NOTFOUND_GS_CODE = "390113" +MASTER_TOKEN_EXPIRED_GS_CODE = "390114" +MASTER_TOKEN_INVALD_GS_CODE = "390115" +ID_TOKEN_INVALID_LOGIN_REQUEST_GS_CODE = "390195" +BAD_REQUEST_GS_CODE = "390400" + +# other constants +CONTENT_TYPE_APPLICATION_JSON = "application/json" +ACCEPT_TYPE_APPLICATION_SNOWFLAKE = "application/snowflake" + +REQUEST_TYPE_RENEW = "RENEW" + +HEADER_AUTHORIZATION_KEY = "Authorization" +HEADER_SNOWFLAKE_TOKEN = 'Snowflake Token="{token}"' + +REQUEST_ID = "requestId" +REQUEST_GUID = "request_guid" +SNOWFLAKE_HOST_SUFFIX = ".snowflakecomputing.com" + +SNOWFLAKE_CONNECTOR_VERSION = SNOWFLAKE_CONNECTOR_VERSION +PYTHON_VERSION = PYTHON_VERSION +OPERATING_SYSTEM = OPERATING_SYSTEM +PLATFORM = PLATFORM +IMPLEMENTATION = IMPLEMENTATION +COMPILER = COMPILER + +CLIENT_NAME = CLIENT_NAME # don't change! 
+CLIENT_VERSION = CLIENT_VERSION +PYTHON_CONNECTOR_USER_AGENT = f"{CLIENT_NAME}/{SNOWFLAKE_CONNECTOR_VERSION} ({PLATFORM}) {IMPLEMENTATION}/{PYTHON_VERSION}" + +NO_TOKEN = "no-token" + +STATUS_TO_EXCEPTION: dict[int, type[Error]] = { + INTERNAL_SERVER_ERROR: InternalServerError, + FORBIDDEN: ForbiddenError, + SERVICE_UNAVAILABLE: ServiceUnavailableError, + GATEWAY_TIMEOUT: GatewayTimeoutError, + BAD_REQUEST: BadRequest, + BAD_GATEWAY: BadGatewayError, + METHOD_NOT_ALLOWED: MethodNotAllowed, +} + +DEFAULT_AUTHENTICATOR = "SNOWFLAKE" # default authenticator name +EXTERNAL_BROWSER_AUTHENTICATOR = "EXTERNALBROWSER" +KEY_PAIR_AUTHENTICATOR = "SNOWFLAKE_JWT" +OAUTH_AUTHENTICATOR = "OAUTH" +ID_TOKEN_AUTHENTICATOR = "ID_TOKEN" +USR_PWD_MFA_AUTHENTICATOR = "USERNAME_PASSWORD_MFA" + + +def is_retryable_http_code(code: int) -> bool: + """Decides whether code is a retryable HTTP issue.""" + return 500 <= code < 600 or code in ( + BAD_REQUEST, # 400 + FORBIDDEN, # 403 + METHOD_NOT_ALLOWED, # 405 + REQUEST_TIMEOUT, # 408 + ) + + +def get_http_retryable_error(status_code: int) -> Error: + error_class: type[Error] = STATUS_TO_EXCEPTION.get( + status_code, OtherHTTPRetryableError + ) + return error_class(errno=status_code) + + +def raise_okta_unauthorized_error( + connection: SnowflakeConnection | None, response: Response +) -> None: + Error.errorhandler_wrapper( + connection, + None, + DatabaseError, + { + "msg": f"Failed to get authentication by OKTA: {response.status_code}: {response.reason}", + "errno": ER_FAILED_TO_CONNECT_TO_DB, + "sqlstate": SQLSTATE_CONNECTION_REJECTED, + }, + ) + + +def raise_failed_request_error( + connection: SnowflakeConnection | None, + url: str, + method: str, + response: Response, +) -> None: + TelemetryService.get_instance().log_http_request_error( + f"HttpError{response.status_code}", + url, + method, + SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED, + ER_FAILED_TO_REQUEST, + response=response, + ) + Error.errorhandler_wrapper( + connection, + None, + InterfaceError, + { + "msg": f"{response.status_code} {response.reason}: {method} {url}", + "errno": ER_FAILED_TO_REQUEST, + "sqlstate": SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED, + }, + ) + + +class ProxySupportAdapter(HTTPAdapter): + """This Adapter creates proper headers for Proxy CONNECT messages.""" + + def get_connection(self, url, proxies=None): + proxy = select_proxy(url, proxies) + parsed_url = urlparse(url) + + if proxy: + proxy = prepend_scheme_if_needed(proxy, "http") + proxy_url = parse_url(proxy) + if not proxy_url.host: + raise InvalidProxyURL( + "Please check proxy URL. It is malformed" + " and could be missing the host." 
+ ) + proxy_manager = self.proxy_manager_for(proxy) + + # Add Host to proxy header SNOW-232777 + proxy_manager.proxy_headers["Host"] = parsed_url.hostname + conn = proxy_manager.connection_from_url(url) + else: + # Only scheme should be lower case + url = parsed_url.geturl() + conn = self.poolmanager.connection_from_url(url) + + return conn + + +class RetryRequest(Exception): + """Signal to retry request.""" + + pass + + +class ReauthenticationRequest(Exception): + """Signal to reauthenticate.""" + + def __init__(self, cause): + self.cause = cause + + +class SnowflakeAuth(AuthBase): + """Attaches HTTP Authorization header for Snowflake.""" + + def __init__(self, token): + # setup any auth-related data here + self.token = token + + def __call__(self, r): + """Modifies and returns the request.""" + if HEADER_AUTHORIZATION_KEY in r.headers: + del r.headers[HEADER_AUTHORIZATION_KEY] + if self.token != NO_TOKEN: + r.headers[HEADER_AUTHORIZATION_KEY] = HEADER_SNOWFLAKE_TOKEN.format( + token=self.token + ) + return r + + +class SessionPool: + def __init__(self, rest: SnowflakeRestful): + # A stack of the idle sessions + self._idle_sessions: list[Session] = [] + self._active_sessions: set[Session] = set() + self._rest: SnowflakeRestful = rest + + def get_session(self) -> Session: + """Returns a session from the session pool or creates a new one.""" + try: + session = self._idle_sessions.pop() + except IndexError: + session = self._rest.make_requests_session() + self._active_sessions.add(session) + return session + + def return_session(self, session: Session) -> None: + """Places an active session back into the idle session stack.""" + try: + self._active_sessions.remove(session) + except KeyError: + logger.debug("session doesn't exist in the active session pool. 
Ignored...") + self._idle_sessions.append(session) + + def __str__(self): + total_sessions = len(self._active_sessions) + len(self._idle_sessions) + return ( + f"SessionPool {len(self._active_sessions)}/{total_sessions} active sessions" + ) + + def close(self) -> None: + """Closes all active and idle sessions in this session pool.""" + if self._active_sessions: + logger.debug(f"Closing {len(self._active_sessions)} active sessions") + for s in itertools.chain(self._active_sessions, self._idle_sessions): + try: + s.close() + except Exception as e: + logger.info(f"Session cleanup failed: {e}") + self._active_sessions.clear() + self._idle_sessions.clear() + + +class SnowflakeRestful: + """Snowflake Restful class.""" + + def __init__( + self, + host="127.0.0.1", + port=8080, + protocol="http", + inject_client_pause=0, + connection: Optional[SnowflakeConnection] = None, + ): + self._host = host + self._port = port + self._protocol = protocol + self._inject_client_pause = inject_client_pause + self._connection = connection + self._lock_token = Lock() + self._sessions_map: dict[str | None, SessionPool] = collections.defaultdict( + lambda: SessionPool(self) + ) + + # OCSP mode (OCSPMode.FAIL_OPEN by default) + ssl_wrap_socket.FEATURE_OCSP_MODE = ( + self._connection._ocsp_mode() + if self._connection + else ssl_wrap_socket.DEFAULT_OCSP_MODE + ) + # cache file name (enabled by default) + ssl_wrap_socket.FEATURE_OCSP_RESPONSE_CACHE_FILE_NAME = ( + self._connection._ocsp_response_cache_filename if self._connection else None + ) + + # This is to address the issue where requests hangs + _ = "dummy".encode("idna").decode("utf-8") + + @property + def token(self): + return self._token if hasattr(self, "_token") else None + + @property + def master_token(self): + return self._master_token if hasattr(self, "_master_token") else None + + @property + def master_validity_in_seconds(self): + return ( + self._master_validity_in_seconds + if hasattr(self, "_master_validity_in_seconds") + and self._master_validity_in_seconds + else DEFAULT_MASTER_VALIDITY_IN_SECONDS + ) + + @master_validity_in_seconds.setter + def master_validity_in_seconds(self, value): + self._master_validity_in_seconds = ( + value if value else DEFAULT_MASTER_VALIDITY_IN_SECONDS + ) + + @property + def id_token(self): + return getattr(self, "_id_token", None) + + @id_token.setter + def id_token(self, value): + self._id_token = value + + @property + def mfa_token(self): + return getattr(self, "_mfa_token", None) + + @mfa_token.setter + def mfa_token(self, value): + self._mfa_token = value + + def close(self): + if hasattr(self, "_token"): + del self._token + if hasattr(self, "_master_token"): + del self._master_token + if hasattr(self, "_id_token"): + del self._id_token + if hasattr(self, "_mfa_token"): + del self._mfa_token + + for session_pool in self._sessions_map.values(): + session_pool.close() + + def request( + self, + url, + body=None, + method="post", + client="sfsql", + _no_results=False, + timeout=None, + _include_retry_params=False, + _no_retry=False, + ): + if body is None: + body = {} + if self.master_token is None and self.token is None: + Error.errorhandler_wrapper( + self._connection, + None, + DatabaseError, + { + "msg": "Connection is closed", + "errno": ER_CONNECTION_IS_CLOSED, + "sqlstate": SQLSTATE_CONNECTION_NOT_EXISTS, + }, + ) + + if client == "sfsql": + accept_type = ACCEPT_TYPE_APPLICATION_SNOWFLAKE + else: + accept_type = CONTENT_TYPE_APPLICATION_JSON + + if timeout is None: + timeout = self._connection.network_timeout 
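The `SessionPool` above is a stack of idle `Session` objects plus a set of active ones; reusing a session keeps its TLS connections warm instead of re-creating them per request. A framework-free sketch of the same pattern, with plain `requests.Session` standing in for the vendored session:

```
import requests


class TinySessionPool:
    def __init__(self) -> None:
        self._idle: list[requests.Session] = []
        self._active: set[requests.Session] = set()

    def get(self) -> requests.Session:
        # hand out an idle session if one exists, otherwise create one
        session = self._idle.pop() if self._idle else requests.Session()
        self._active.add(session)
        return session

    def put_back(self, session: requests.Session) -> None:
        self._active.discard(session)
        self._idle.append(session)

    def close(self) -> None:
        for s in [*self._active, *self._idle]:
            s.close()
        self._active.clear()
        self._idle.clear()


pool = TinySessionPool()
s = pool.get()
pool.put_back(s)
assert pool.get() is s  # the idle session is reused, not recreated
pool.close()
```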
+ + headers = { + HTTP_HEADER_CONTENT_TYPE: CONTENT_TYPE_APPLICATION_JSON, + HTTP_HEADER_ACCEPT: accept_type, + HTTP_HEADER_USER_AGENT: PYTHON_CONNECTOR_USER_AGENT, + } + if self._connection.service_name: + headers[HTTP_HEADER_SERVICE_NAME] = self._connection.service_name + if method == "post": + return self._post_request( + url, + headers, + json.dumps(body), + token=self.token, + _no_results=_no_results, + timeout=timeout, + _include_retry_params=_include_retry_params, + no_retry=_no_retry, + ) + else: + return self._get_request(url, headers, token=self.token, timeout=timeout) + + def update_tokens( + self, + session_token, + master_token, + master_validity_in_seconds=None, + id_token=None, + mfa_token=None, + ): + """Updates session and master tokens and optionally temporary credential.""" + with self._lock_token: + self._token = session_token + self._master_token = master_token + self._id_token = id_token + self._mfa_token = mfa_token + self._master_validity_in_seconds = master_validity_in_seconds + + def _renew_session(self): + """Renew a session and master token.""" + return self._token_request(REQUEST_TYPE_RENEW) + + def _token_request(self, request_type): + logger.debug( + "updating session. master_token: {}".format( + "****" if self.master_token else None + ) + ) + headers = { + HTTP_HEADER_CONTENT_TYPE: CONTENT_TYPE_APPLICATION_JSON, + HTTP_HEADER_ACCEPT: CONTENT_TYPE_APPLICATION_JSON, + HTTP_HEADER_USER_AGENT: PYTHON_CONNECTOR_USER_AGENT, + } + if self._connection.service_name: + headers[HTTP_HEADER_SERVICE_NAME] = self._connection.service_name + request_id = str(uuid.uuid4()) + logger.debug("request_id: %s", request_id) + url = "/session/token-request?" + urlencode({REQUEST_ID: request_id}) + + # NOTE: ensure an empty key if master token is not set. + # This avoids HTTP 400. 
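The renewal request being assembled here has a fixed shape: the master token travels in the `Authorization` header (an empty string rather than an absent header, per the note above), the old session token travels in the body, and both tokens are swapped on success. A schematic sketch, where `post` is a hypothetical stand-in for `_post_request`:

```
from __future__ import annotations

from typing import Any, Callable


def renew_session(
    post: Callable[..., dict[str, Any]],
    session_token: str | None,
    master_token: str | None,
) -> tuple[str, str | None]:
    body = {"oldSessionToken": session_token, "requestType": "RENEW"}
    # fall back to "" so the token header is always present
    ret = post("/session/token-request", body=body, token=master_token or "")
    if ret.get("success") and ret.get("data", {}).get("sessionToken"):
        data = ret["data"]
        return data["sessionToken"], data.get("masterToken")
    raise RuntimeError(ret.get("message", "session renewal failed"))


fake_post = lambda url, body, token: {
    "success": True,
    "data": {"sessionToken": "new-session", "masterToken": "new-master"},
}
print(renew_session(fake_post, "old-session", "master"))  # ('new-session', 'new-master')
```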
+ header_token = self.master_token or "" + body = { + "oldSessionToken": self.token, + "requestType": request_type, + } + ret = self._post_request( + url, + headers, + json.dumps(body), + token=header_token, + timeout=self._connection.network_timeout, + ) + if ret.get("success") and ret.get("data", {}).get("sessionToken"): + logger.debug("success: %s", ret) + self.update_tokens( + ret["data"]["sessionToken"], + ret["data"].get("masterToken"), + master_validity_in_seconds=ret["data"].get("masterValidityInSeconds"), + ) + logger.debug("updating session completed") + return ret + else: + logger.debug("failed: %s", ret) + err = ret.get("message") + if err is not None and ret.get("data"): + err += ret["data"].get("errorMessage", "") + errno = ret.get("code") or ER_FAILED_TO_RENEW_SESSION + if errno in ( + ID_TOKEN_EXPIRED_GS_CODE, + SESSION_EXPIRED_GS_CODE, + MASTER_TOKEN_NOTFOUND_GS_CODE, + MASTER_TOKEN_EXPIRED_GS_CODE, + MASTER_TOKEN_INVALD_GS_CODE, + BAD_REQUEST_GS_CODE, + ): + raise ReauthenticationRequest( + ProgrammingError( + msg=err, + errno=int(errno), + sqlstate=SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED, + ) + ) + Error.errorhandler_wrapper( + self._connection, + None, + ProgrammingError, + { + "msg": err, + "errno": int(errno), + "sqlstate": SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED, + }, + ) + + def _heartbeat(self): + headers = { + HTTP_HEADER_CONTENT_TYPE: CONTENT_TYPE_APPLICATION_JSON, + HTTP_HEADER_ACCEPT: CONTENT_TYPE_APPLICATION_JSON, + HTTP_HEADER_USER_AGENT: PYTHON_CONNECTOR_USER_AGENT, + } + if self._connection.service_name: + headers[HTTP_HEADER_SERVICE_NAME] = self._connection.service_name + request_id = str(uuid.uuid4()) + logger.debug("request_id: %s", request_id) + url = "/session/heartbeat?" + urlencode({REQUEST_ID: request_id}) + ret = self._post_request( + url, + headers, + None, + token=self.token, + timeout=self._connection.network_timeout, + ) + if not ret.get("success"): + logger.error("Failed to heartbeat. code: %s, url: %s", ret.get("code"), url) + + def delete_session(self, retry=False): + """Deletes the session.""" + if self.master_token is None: + Error.errorhandler_wrapper( + self._connection, + None, + DatabaseError, + { + "msg": "Connection is closed", + "errno": ER_CONNECTION_IS_CLOSED, + "sqlstate": SQLSTATE_CONNECTION_NOT_EXISTS, + }, + ) + + url = "/session?" + urlencode({"delete": "true"}) + headers = { + HTTP_HEADER_CONTENT_TYPE: CONTENT_TYPE_APPLICATION_JSON, + HTTP_HEADER_ACCEPT: CONTENT_TYPE_APPLICATION_JSON, + HTTP_HEADER_USER_AGENT: PYTHON_CONNECTOR_USER_AGENT, + } + if self._connection.service_name: + headers[HTTP_HEADER_SERVICE_NAME] = self._connection.service_name + + body = {} + retry_limit = 3 if retry else 1 + num_retries = 0 + should_retry = True + while should_retry and (num_retries < retry_limit): + try: + should_retry = False + ret = self._post_request( + url, + headers, + json.dumps(body), + token=self.token, + timeout=5, + no_retry=True, + ) + if not ret: + if retry: + should_retry = True + else: + return + elif ret.get("success"): + return + err = ret.get("message") + if err is not None and ret.get("data"): + err += ret["data"].get("errorMessage", "") + # no exception is raised + logger.debug("error in deleting session. ignoring...: %s", err) + except Exception as e: + logger.debug("error in deleting session. 
ignoring...: %s", e) + finally: + num_retries += 1 + + def _get_request( + self, + url, + headers, + token=None, + timeout=None, + socket_timeout=DEFAULT_SOCKET_CONNECT_TIMEOUT, + ): + if "Content-Encoding" in headers: + del headers["Content-Encoding"] + if "Content-Length" in headers: + del headers["Content-Length"] + + full_url = f"{self._protocol}://{self._host}:{self._port}{url}" + ret = self.fetch( + "get", + full_url, + headers, + timeout=timeout, + token=token, + socket_timeout=socket_timeout, + ) + if ret.get("code") == SESSION_EXPIRED_GS_CODE: + try: + ret = self._renew_session() + except ReauthenticationRequest as ex: + if self._connection._authenticator != EXTERNAL_BROWSER_AUTHENTICATOR: + raise ex.cause + ret = self._connection._reauthenticate_by_webbrowser() + logger.debug( + "ret[code] = {code} after renew_session".format( + code=(ret.get("code", "N/A")) + ) + ) + if ret.get("success"): + return self._get_request(url, headers, token=self.token) + + return ret + + def _post_request( + self, + url, + headers, + body, + token=None, + timeout=None, + _no_results=False, + no_retry=False, + socket_timeout=DEFAULT_SOCKET_CONNECT_TIMEOUT, + _include_retry_params=False, + ): + full_url = f"{self._protocol}://{self._host}:{self._port}{url}" + if self._connection._probe_connection: + from pprint import pprint + + ret = probe_connection(full_url) + pprint(ret) + + ret = self.fetch( + "post", + full_url, + headers, + data=body, + timeout=timeout, + token=token, + no_retry=no_retry, + socket_timeout=socket_timeout, + _include_retry_params=_include_retry_params, + ) + logger.debug( + "ret[code] = {code}, after post request".format( + code=(ret.get("code", "N/A")) + ) + ) + + if ret.get("code") == SESSION_EXPIRED_GS_CODE: + try: + ret = self._renew_session() + except ReauthenticationRequest as ex: + if self._connection._authenticator != EXTERNAL_BROWSER_AUTHENTICATOR: + raise ex.cause + ret = self._connection._reauthenticate_by_webbrowser() + logger.debug( + "ret[code] = {code} after renew_session".format( + code=(ret.get("code", "N/A")) + ) + ) + if ret.get("success"): + return self._post_request( + url, headers, body, token=self.token, timeout=timeout + ) + + if isinstance(ret.get("data"), dict) and ret["data"].get("queryId"): + logger.debug("Query id: {}".format(ret["data"]["queryId"])) + + if ret.get("code") == QUERY_IN_PROGRESS_ASYNC_CODE and _no_results: + return ret + + while ret.get("code") in (QUERY_IN_PROGRESS_CODE, QUERY_IN_PROGRESS_ASYNC_CODE): + if self._inject_client_pause > 0: + logger.debug("waiting for %s...", self._inject_client_pause) + time.sleep(self._inject_client_pause) + # ping pong + result_url = ret["data"]["getResultUrl"] + logger.debug("ping pong starting...") + ret = self._get_request( + result_url, headers, token=self.token, timeout=timeout + ) + logger.debug("ret[code] = %s", ret.get("code", "N/A")) + logger.debug("ping pong done") + + return ret + + def fetch(self, method, full_url, headers, data=None, timeout=None, **kwargs): + """Carry out API request with session management.""" + + class RetryCtx: + def __init__(self, timeout, _include_retry_params=False): + self.total_timeout = timeout + self.timeout = timeout + self.cnt = 0 + self.sleeping_time = 1 + self.start_time = get_time_millis() + self._include_retry_params = _include_retry_params + # backoff between 1 and 16 seconds + self._backoff = DecorrelateJitterBackoff(1, 16) + + def next_sleep(self): + self.sleeping_time = self._backoff.next_sleep( + self.cnt, self.sleeping_time + ) + return 
self.sleeping_time + + def add_retry_params(self, full_url): + if self._include_retry_params and self.cnt > 0: + suffix = urlencode( + {"clientStartTime": self.start_time, "retryCount": self.cnt} + ) + sep = "&" if urlparse(full_url).query else "?" + return full_url + sep + suffix + else: + return full_url + + include_retry_params = kwargs.pop("_include_retry_params", False) + + with self._use_requests_session(full_url) as session: + retry_ctx = RetryCtx(timeout, include_retry_params) + while True: + ret = self._request_exec_wrapper( + session, method, full_url, headers, data, retry_ctx, **kwargs + ) + if ret is not None: + return ret + + @staticmethod + def add_request_guid(full_url): + """Adds request_guid parameter for HTTP request tracing.""" + parsed_url = urlparse(full_url) + if not parsed_url.hostname.endswith(SNOWFLAKE_HOST_SUFFIX): + return full_url + request_guid = str(uuid.uuid4()) + suffix = urlencode({REQUEST_GUID: request_guid}) + logger.debug(f"Request guid: {request_guid}") + sep = "&" if parsed_url.query else "?" + # url has query string already, just add fields + return full_url + sep + suffix + + def _request_exec_wrapper( + self, + session, + method, + full_url, + headers, + data, + retry_ctx, + no_retry=False, + token=NO_TOKEN, + **kwargs, + ): + + conn = self._connection + logger.debug( + "remaining request timeout: %s, retry cnt: %s", + retry_ctx.timeout, + retry_ctx.cnt + 1, + ) + + start_request_thread = time.time() + full_url = retry_ctx.add_retry_params(full_url) + full_url = SnowflakeRestful.add_request_guid(full_url) + try: + return_object = self._request_exec( + session=session, + method=method, + full_url=full_url, + headers=headers, + data=data, + token=token, + **kwargs, + ) + if return_object is not None: + return return_object + self._handle_unknown_error(method, full_url, headers, data, conn) + TelemetryService.get_instance().log_http_request_error( + "HttpRequestUnknownError", + full_url, + method, + SQLSTATE_IO_ERROR, + ER_FAILED_TO_REQUEST, + retry_timeout=retry_ctx.total_timeout, + retry_count=retry_ctx.cnt, + ) + return {} + except RetryRequest as e: + if ( + retry_ctx.cnt + == TelemetryService.get_instance().num_of_retry_to_trigger_telemetry + ): + TelemetryService.get_instance().log_http_request_error( + "HttpRequestRetry%dTimes" % retry_ctx.cnt, + full_url, + method, + SQLSTATE_IO_ERROR, + ER_FAILED_TO_REQUEST, + retry_timeout=retry_ctx.total_timeout, + retry_count=retry_ctx.cnt, + exception=str(e), + stack_trace=traceback.format_exc(), + ) + cause = e.args[0] + if no_retry: + self.log_and_handle_http_error_with_cause(e, full_url, method, retry_ctx.total_timeout, retry_ctx.cnt, + conn, timed_out=False) + return {} # required for tests + if retry_ctx.timeout is not None: + retry_ctx.timeout -= int(time.time() - start_request_thread) + if retry_ctx.timeout <= 0: + self.log_and_handle_http_error_with_cause(e, full_url, method, retry_ctx.total_timeout, + retry_ctx.cnt, conn) + return {} # required for tests + sleeping_time = retry_ctx.next_sleep() + logger.debug( + "retrying: errorclass=%s, " + "error=%s, " + "counter=%s, " + "sleeping=%s(s)", + type(cause), + cause, + retry_ctx.cnt + 1, + sleeping_time, + ) + time.sleep(sleeping_time) + retry_ctx.cnt += 1 + if retry_ctx.timeout is not None: + retry_ctx.timeout -= sleeping_time + return None # retry + except Exception as e: + if not no_retry: + raise e + logger.debug("Ignored error", exc_info=True) + return {} + + def log_and_handle_http_error_with_cause( + self, + e: Exception, + full_url: str, + 
method: str, + retry_timeout: int, + retry_count: int, + conn: SnowflakeConnection, + timed_out: bool = True + ) -> None: + cause = e.args[0] + logger.error(cause, exc_info=True) + TelemetryService.get_instance().log_http_request_error( + "HttpRequestRetryTimeout" if timed_out else f"HttpRequestError: {cause}", + full_url, + method, + SQLSTATE_IO_ERROR, + ER_FAILED_TO_REQUEST, + retry_timeout=retry_timeout, + retry_count=retry_count, + exception=str(e), + stack_trace=traceback.format_exc(), + ) + if isinstance(cause, Error): + Error.errorhandler_wrapper_from_cause(conn, cause) + else: + self.handle_invalid_certificate_error(conn, full_url, cause) + + def handle_invalid_certificate_error(self, conn, full_url, cause): + # all other errors raise exception + Error.errorhandler_wrapper( + conn, + None, + OperationalError, + { + "msg": f"Failed to execute request: {cause}", + "errno": ER_FAILED_TO_REQUEST, + }, + ) + + def _handle_unknown_error(self, method, full_url, headers, data, conn): + """Handles unknown errors.""" + if data: + try: + decoded_data = json.loads(data) + if decoded_data.get("data") and decoded_data["data"].get("PASSWORD"): + # masking the password + decoded_data["data"]["PASSWORD"] = "********" + data = json.dumps(decoded_data) + except Exception: + logger.info("data is not JSON") + logger.error( + f"Failed to get the response. Hanging? " + f"method: {method}, url: {full_url}, headers:{headers}, " + f"data: {data}" + ) + Error.errorhandler_wrapper( + conn, + None, + OperationalError, + { + "msg": f"Failed to get the response. Hanging? method: {method}, url: {full_url}", + "errno": ER_FAILED_TO_REQUEST, + }, + ) + + def _request_exec( + self, + session, + method, + full_url, + headers, + data, + token, + catch_okta_unauthorized_error=False, + is_raw_text=False, + is_raw_binary=False, + binary_data_handler=None, + socket_timeout=DEFAULT_SOCKET_CONNECT_TIMEOUT, + ): + if socket_timeout > DEFAULT_SOCKET_CONNECT_TIMEOUT: + # socket timeout should not be more than the default. + # A shorter timeout may be specified for login time, but + # for query, it should be at least 45 seconds. + socket_timeout = DEFAULT_SOCKET_CONNECT_TIMEOUT + logger.debug("socket timeout: %s", socket_timeout) + try: + if not catch_okta_unauthorized_error and data and len(data) > 0: + gzdata = BytesIO() + gzip.GzipFile(fileobj=gzdata, mode="wb").write(data.encode("utf-8")) + gzdata.seek(0, 0) + headers["Content-Encoding"] = "gzip" + input_data = gzdata + else: + input_data = data + + download_start_time = get_time_millis() + # socket timeout is constant. You should be able to receive + # the response within the time. If not, ConnectReadTimeout or + # ReadTimeout is raised. + raw_ret = session.request( + method=method, + url=full_url, + headers=headers, + data=input_data, + timeout=socket_timeout, + verify=True, + stream=is_raw_binary, + auth=SnowflakeAuth(token), + ) + download_end_time = get_time_millis() + + try: + if raw_ret.status_code == OK: + logger.debug("SUCCESS") + if is_raw_text: + ret = raw_ret.text + elif is_raw_binary: + ret = binary_data_handler.to_iterator( + raw_ret.raw, download_end_time - download_start_time + ) + else: + ret = raw_ret.json() + return ret + + if is_retryable_http_code(raw_ret.status_code): + error = get_http_retryable_error(raw_ret.status_code) + logger.debug(f"{error}. 
Retrying...") + # retryable server exceptions + raise RetryRequest(error) + + elif ( + raw_ret.status_code == UNAUTHORIZED + and catch_okta_unauthorized_error + ): + # OKTA Unauthorized errors + raise_okta_unauthorized_error(self._connection, raw_ret) + return None # required for tests + else: + raise_failed_request_error( + self._connection, full_url, method, raw_ret + ) + return None # required for tests + finally: + raw_ret.close() # ensure response is closed + except SSLError as se: + logger.debug("Hit non-retryable SSL error, %s", str(se)) + TelemetryService.get_instance().log_http_request_error( + "CertificateException%s" % str(se), + full_url, + method, + SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED, + ER_FAILED_TO_REQUEST, + exception=se, + stack_trace=traceback.format_exc(), + ) + + except ( + BadStatusLine, + ConnectionError, + ConnectTimeout, + IncompleteRead, + ProtocolError, # from urllib3 # from urllib3 + OpenSSL.SSL.SysCallError, + KeyError, # SNOW-39175: asn1crypto.keys.PublicKeyInfo + ValueError, + ReadTimeout, + RuntimeError, + AttributeError, # json decoding error + ) as err: + parsed_url = parse_url(full_url) + if "login-request" in parsed_url.path: + logger.debug( + "Hit a timeout error while logging in. Will be handled by " + f"authenticator. Ignore the following. Error stack: {err}", + exc_info=True, + ) + raise OperationalError( + msg="ConnectionTimeout occurred. Will be handled by authenticator", + errno=ER_CONNECTION_TIMEOUT, + ) + else: + logger.debug( + "Hit retryable client error. Retrying... Ignore the following " + f"error stack: {err}", + exc_info=True, + ) + raise RetryRequest(err) + except Exception as err: + TelemetryService.get_instance().log_http_request_error( + "HttpException%s" % str(err), + full_url, + method, + SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED, + ER_FAILED_TO_REQUEST, + exception=err, + stack_trace=traceback.format_exc(), + ) + raise err + + def make_requests_session(self): + s = requests.Session() + s.mount("http://", ProxySupportAdapter(max_retries=REQUESTS_RETRY)) + s.mount("https://", ProxySupportAdapter(max_retries=REQUESTS_RETRY)) + s._reuse_count = itertools.count() + return s + + @contextlib.contextmanager + def _use_requests_session(self, url: str | None = None): + """Session caching context manager. + + Notes: + The session is not closed until close() is called so each session may be used multiple times. + """ + # short-lived session, not added to the _sessions_map + if self._connection.disable_request_pooling: + session = self.make_requests_session() + try: + yield session + finally: + session.close() + else: + try: + hostname = urlparse(url).hostname + except Exception: + hostname = None + + session_pool: SessionPool = self._sessions_map[hostname] + session = session_pool.get_session() + logger.debug(f"Session status for SessionPool '{hostname}', {session_pool}") + try: + yield session + finally: + session_pool.return_session(session) + logger.debug( + f"Session status for SessionPool '{hostname}', {session_pool}" + ) diff --git a/src/snowflake/connector/ocsp_asn1crypto.py b/src/snowflake/connector/ocsp_asn1crypto.py new file mode 100644 index 000000000..cb3e8a8cb --- /dev/null +++ b/src/snowflake/connector/ocsp_asn1crypto.py @@ -0,0 +1,476 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
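Stepping back to `_request_exec` above: request bodies are gzip-compressed into an in-memory buffer and sent with `Content-Encoding: gzip` so the server knows to inflate them. A self-contained sketch of that wire preparation (using a `with` block, which also guarantees the gzip trailer is flushed before the buffer is read):

```
import gzip
import json
from io import BytesIO

payload = json.dumps({"sqlText": "select 1"})

# Compress the UTF-8 body into an in-memory buffer, then rewind it.
gzdata = BytesIO()
with gzip.GzipFile(fileobj=gzdata, mode="wb") as gz:
    gz.write(payload.encode("utf-8"))
gzdata.seek(0, 0)

headers = {"Content-Encoding": "gzip"}
# session.request(method="post", url=..., headers=headers, data=gzdata, ...)

# The receiving side can verify the round trip:
assert gzip.decompress(gzdata.getvalue()).decode("utf-8") == payload
```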
+# + +from __future__ import annotations + +import os +import platform +import sys +import warnings +from base64 import b64decode, b64encode +from collections import OrderedDict +from datetime import datetime, timezone +from logging import getLogger +from os import getenv + +from asn1crypto.algos import DigestAlgorithm +from asn1crypto.core import Integer, OctetString +from asn1crypto.ocsp import ( + CertId, + OCSPRequest, + OCSPResponse, + Request, + Requests, + TBSRequest, + Version, +) +from asn1crypto.x509 import Certificate +from Cryptodome.Hash import SHA1, SHA256, SHA384, SHA512 +from Cryptodome.PublicKey import RSA +from Cryptodome.Signature import PKCS1_v1_5 +from cryptography.exceptions import InvalidSignature +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import padding, utils + +from snowflake.connector.errorcode import ( + ER_OCSP_RESPONSE_ATTACHED_CERT_EXPIRED, + ER_OCSP_RESPONSE_ATTACHED_CERT_INVALID, + ER_OCSP_RESPONSE_CERT_STATUS_INVALID, + ER_OCSP_RESPONSE_INVALID_SIGNATURE, + ER_OCSP_RESPONSE_LOAD_FAILURE, + ER_OCSP_RESPONSE_STATUS_UNSUCCESSFUL, +) +from snowflake.connector.errors import RevocationCheckError +from snowflake.connector.ocsp_snowflake import SnowflakeOCSP +from snowflake.connector.ssd_internal_keys import ret_wildcard_hkey + +with warnings.catch_warnings(): + warnings.simplefilter("ignore") + # force versioned dylibs onto oscrypto ssl on catalina + if sys.platform == "darwin" and platform.mac_ver()[0].startswith("10.15"): + from oscrypto import _module_values, use_openssl + + if _module_values["backend"] is None: + use_openssl( + libcrypto_path="/usr/lib/libcrypto.35.dylib", + libssl_path="/usr/lib/libssl.35.dylib", + ) + from oscrypto import asymmetric + + +logger = getLogger(__name__) + + +class SnowflakeOCSPAsn1Crypto(SnowflakeOCSP): + """OCSP checks by asn1crypto.""" + + # map signature algorithm name to digest class + SIGNATURE_ALGORITHM_TO_DIGEST_CLASS = { + "sha256": SHA256, + "sha384": SHA384, + "sha512": SHA512, + } + + SIGNATURE_ALGORITHM_TO_DIGEST_CLASS_OPENSSL = { + "sha256": hashes.SHA256, + "sha384": hashes.SHA3_384, + "sha512": hashes.SHA3_512, + } + + WILDCARD_CERTID = None + + def __init__(self, **kwargs): + super().__init__(**kwargs) + self.WILDCARD_CERTID = self.encode_cert_id_key(ret_wildcard_hkey()) + + def encode_cert_id_key(self, hkey): + issuer_name_hash, issuer_key_hash, serial_number = hkey + issuer_name_hash = OctetString.load(issuer_name_hash) + issuer_key_hash = OctetString.load(issuer_key_hash) + serial_number = Integer.load(serial_number) + cert_id = CertId( + { + "hash_algorithm": DigestAlgorithm( + {"algorithm": "sha1", "parameters": None} + ), + "issuer_name_hash": issuer_name_hash, + "issuer_key_hash": issuer_key_hash, + "serial_number": serial_number, + } + ) + return cert_id + + def decode_cert_id_key(self, cert_id): + return ( + cert_id["issuer_name_hash"].dump(), + cert_id["issuer_key_hash"].dump(), + cert_id["serial_number"].dump(), + ) + + def decode_cert_id_base64(self, cert_id_base64): + return CertId.load(b64decode(cert_id_base64)) + + def encode_cert_id_base64(self, hkey): + return b64encode(self.encode_cert_id_key(hkey).dump()).decode("ascii") + + def read_cert_bundle(self, ca_bundle_file, storage=None): + """Reads a certificate file including certificates in PEM format.""" + if storage is None: + storage = SnowflakeOCSP.ROOT_CERTIFICATES_DICT + logger.debug("reading certificate bundle: 
%s", ca_bundle_file) + with open(ca_bundle_file, "rb") as all_certs: + # don't lock storage + from asn1crypto import pem + + pem_certs = pem.unarmor(all_certs.read(), multiple=True) + for type_name, _, der_bytes in pem_certs: + if type_name == "CERTIFICATE": + crt = Certificate.load(der_bytes) + storage[crt.subject.sha256] = crt + + def create_ocsp_request(self, issuer, subject): + """Creates CertId and OCSPRequest.""" + cert_id = CertId( + { + "hash_algorithm": DigestAlgorithm( + {"algorithm": "sha1", "parameters": None} + ), + "issuer_name_hash": OctetString(subject.issuer.sha1), + "issuer_key_hash": OctetString(issuer.public_key.sha1), + "serial_number": subject.serial_number, + } + ) + ocsp_request = OCSPRequest( + { + "tbs_request": TBSRequest( + { + "version": Version(0), + "request_list": Requests( + [ + Request( + { + "req_cert": cert_id, + } + ) + ] + ), + } + ), + } + ) + return cert_id, ocsp_request + + def extract_ocsp_url(self, cert): + urls = cert.ocsp_urls + ocsp_url = urls[0] if urls else None + return ocsp_url + + def decode_ocsp_request(self, ocsp_request): + return ocsp_request.dump() + + def decode_ocsp_request_b64(self, ocsp_request): + data = self.decode_ocsp_request(ocsp_request) # convert to DER + b64data = b64encode(data).decode("ascii") + return b64data + + def extract_good_status(self, single_response): + """Extracts GOOD status.""" + this_update_native = single_response["this_update"].native + next_update_native = single_response["next_update"].native + + return this_update_native, next_update_native + + def extract_revoked_status(self, single_response): + """Extracts REVOKED status.""" + revoked_info = single_response["cert_status"] + revocation_time = revoked_info.native["revocation_time"] + revocation_reason = revoked_info.native["revocation_reason"] + return revocation_time, revocation_reason + + def check_cert_time_validity(self, cur_time, ocsp_cert): + + val_start = ocsp_cert["tbs_certificate"]["validity"]["not_before"].native + val_end = ocsp_cert["tbs_certificate"]["validity"]["not_after"].native + + if cur_time > val_end or cur_time < val_start: + debug_msg = ( + "Certificate attached to OCSP response is invalid. OCSP response " + "current time - {} certificate not before time - {} certificate " + "not after time - {}. Consider running curl -o ocsp.der {}".format( + cur_time, + val_start, + val_end, + super().debug_ocsp_failure_url, + ) + ) + + return False, debug_msg + else: + return True, None + + """ + is_valid_time - checks various components of the OCSP Response + for expiry. + :param cert_id - certificate id corresponding to OCSP Response + :param ocsp_response + :return True/False depending on time validity within the response + """ + + def is_valid_time(self, cert_id, ocsp_response): + res = OCSPResponse.load(ocsp_response) + + if res["response_status"].native != "successful": + raise RevocationCheckError( + msg="Invalid Status: {}".format(res["response_status"].native), + errno=ER_OCSP_RESPONSE_STATUS_UNSUCCESSFUL, + ) + + basic_ocsp_response = res.basic_ocsp_response + if basic_ocsp_response["certs"].native: + ocsp_cert = basic_ocsp_response["certs"][0] + logger.debug( + "Verifying the attached certificate is signed by " + "the issuer. Valid Not After: %s", + ocsp_cert["tbs_certificate"]["validity"]["not_after"].native, + ) + + cur_time = datetime.now(timezone.utc) + + """ + Note: + We purposefully do not verify certificate signature here. 
+ The OCSP Response is extracted from the OCSP Response Cache + which is expected to have OCSP Responses with verified + attached signature. Moreover this OCSP Response is eventually + going to be processed by the driver before being consumed by + the driver. + This step ensures that the OCSP Response cache does not have + any invalid entries. + """ + cert_valid, debug_msg = self.check_cert_time_validity(cur_time, ocsp_cert) + if not cert_valid: + logger.debug(debug_msg) + return False + + tbs_response_data = basic_ocsp_response["tbs_response_data"] + + single_response = tbs_response_data["responses"][0] + cert_status = single_response["cert_status"].name + + try: + if cert_status == "good": + self._process_good_status(single_response, cert_id, ocsp_response) + except Exception as ex: + logger.debug("Failed to validate ocsp response %s", ex) + return False + + return True + + def process_ocsp_response(self, issuer, cert_id, ocsp_response): + try: + res = OCSPResponse.load(ocsp_response) + if self.test_mode is not None: + ocsp_load_failure = getenv("SF_TEST_OCSP_FORCE_BAD_OCSP_RESPONSE") + if ocsp_load_failure is not None: + raise RevocationCheckError( + "Force fail", errno=ER_OCSP_RESPONSE_LOAD_FAILURE + ) + except Exception: + raise RevocationCheckError( + msg="Invalid OCSP Response", errno=ER_OCSP_RESPONSE_LOAD_FAILURE + ) + + if res["response_status"].native != "successful": + raise RevocationCheckError( + msg="Invalid Status: {}".format(res["response_status"].native), + errno=ER_OCSP_RESPONSE_STATUS_UNSUCCESSFUL, + ) + + basic_ocsp_response = res.basic_ocsp_response + if basic_ocsp_response["certs"].native: + logger.debug("Certificate is attached in Basic OCSP Response") + ocsp_cert = basic_ocsp_response["certs"][0] + logger.debug( + "Verifying the attached certificate is signed by " "the issuer" + ) + logger.debug( + "Valid Not After: %s", + ocsp_cert["tbs_certificate"]["validity"]["not_after"].native, + ) + + cur_time = datetime.now(timezone.utc) + + try: + """ + Signature verification should happen before any kind of + validation + """ + self.verify_signature( + ocsp_cert.hash_algo, + ocsp_cert.signature, + issuer, + ocsp_cert["tbs_certificate"], + ) + except RevocationCheckError as rce: + raise RevocationCheckError( + msg=rce.msg, errno=ER_OCSP_RESPONSE_ATTACHED_CERT_INVALID + ) + cert_valid, debug_msg = self.check_cert_time_validity(cur_time, ocsp_cert) + + if not cert_valid: + raise RevocationCheckError( + msg=debug_msg, errno=ER_OCSP_RESPONSE_ATTACHED_CERT_EXPIRED + ) + + else: + logger.debug( + "Certificate is NOT attached in Basic OCSP Response. 
" + "Using issuer's certificate" + ) + ocsp_cert = issuer + + tbs_response_data = basic_ocsp_response["tbs_response_data"] + + logger.debug("Verifying the OCSP response is signed by the issuer.") + try: + self.verify_signature( + basic_ocsp_response["signature_algorithm"].hash_algo, + basic_ocsp_response["signature"].native, + ocsp_cert, + tbs_response_data, + ) + except RevocationCheckError as rce: + raise RevocationCheckError( + msg=rce.msg, errno=ER_OCSP_RESPONSE_INVALID_SIGNATURE + ) + + single_response = tbs_response_data["responses"][0] + cert_status = single_response["cert_status"].name + if self.test_mode is not None: + test_cert_status = getenv("SF_TEST_OCSP_CERT_STATUS") + if test_cert_status == "revoked": + cert_status = "revoked" + elif test_cert_status == "unknown": + cert_status = "unknown" + elif test_cert_status == "good": + cert_status = "good" + + try: + if cert_status == "good": + self._process_good_status(single_response, cert_id, ocsp_response) + SnowflakeOCSP.OCSP_CACHE.update_cache(self, cert_id, ocsp_response) + elif cert_status == "revoked": + self._process_revoked_status(single_response, cert_id) + elif cert_status == "unknown": + self._process_unknown_status(cert_id) + else: + debug_msg = ( + "Unknown revocation status was returned." + "OCSP response may be malformed: {}.".format(cert_status) + ) + raise RevocationCheckError( + msg=debug_msg, errno=ER_OCSP_RESPONSE_CERT_STATUS_INVALID + ) + except RevocationCheckError as op_er: + debug_msg = "{} Consider running curl -o ocsp.der {}".format( + op_er.msg, self.debug_ocsp_failure_url + ) + raise RevocationCheckError(msg=debug_msg, errno=op_er.errno) + + def verify_signature(self, signature_algorithm, signature, cert, data): + use_openssl_only = os.getenv("SF_USE_OPENSSL_ONLY", "False") == "True" + if not use_openssl_only: + pubkey = asymmetric.load_public_key(cert.public_key).unwrap().dump() + rsakey = RSA.importKey(pubkey) + signer = PKCS1_v1_5.new(rsakey) + if ( + signature_algorithm + in SnowflakeOCSPAsn1Crypto.SIGNATURE_ALGORITHM_TO_DIGEST_CLASS + ): + digest = SnowflakeOCSPAsn1Crypto.SIGNATURE_ALGORITHM_TO_DIGEST_CLASS[ + signature_algorithm + ].new() + else: + # the last resort. should not happen. + digest = SHA1.new() + digest.update(data.dump()) + if not signer.verify(digest, signature): + raise RevocationCheckError(msg="Failed to verify the signature") + + else: + backend = default_backend() + public_key = serialization.load_der_public_key( + cert.public_key.dump(), backend=default_backend() + ) + if ( + signature_algorithm + in SnowflakeOCSPAsn1Crypto.SIGNATURE_ALGORITHM_TO_DIGEST_CLASS + ): + chosen_hash = ( + SnowflakeOCSPAsn1Crypto.SIGNATURE_ALGORITHM_TO_DIGEST_CLASS_OPENSSL[ + signature_algorithm + ]() + ) + else: + # the last resort. should not happen. 
+ chosen_hash = hashes.SHA1() + hasher = hashes.Hash(chosen_hash, backend) + hasher.update(data.dump()) + digest = hasher.finalize() + try: + public_key.verify( + signature, digest, padding.PKCS1v15(), utils.Prehashed(chosen_hash) + ) + except InvalidSignature: + raise RevocationCheckError(msg="Failed to verify the signature") + + def extract_certificate_chain(self, connection): + """Gets certificate chain and extract the key info from OpenSSL connection.""" + from OpenSSL.crypto import FILETYPE_ASN1, dump_certificate + + cert_map = OrderedDict() + logger.debug("# of certificates: %s", len(connection.get_peer_cert_chain())) + + for cert_openssl in connection.get_peer_cert_chain(): + cert_der = dump_certificate(FILETYPE_ASN1, cert_openssl) + cert = Certificate.load(cert_der) + logger.debug( + "subject: %s, issuer: %s", cert.subject.native, cert.issuer.native + ) + cert_map[cert.subject.sha256] = cert + + return self.create_pair_issuer_subject(cert_map) + + def create_pair_issuer_subject(self, cert_map): + """Creates pairs of issuer and subject certificates.""" + issuer_subject = [] + for subject_der in cert_map: + subject = cert_map[subject_der] + if subject.ocsp_no_check_value or subject.ca and not subject.ocsp_urls: + # Root certificate will not be validated + # but it is used to validate the subject certificate + continue + issuer_hash = subject.issuer.sha256 + if issuer_hash not in cert_map: + # IF NO ROOT certificate is attached in the certificate chain + # read it from the local disk + self._lazy_read_ca_bundle() + logger.debug("not found issuer_der: %s", subject.issuer.native) + if issuer_hash not in SnowflakeOCSP.ROOT_CERTIFICATES_DICT: + raise RevocationCheckError( + msg="CA certificate is NOT found in the root " + "certificate list. Make sure you use the latest " + "Python Connector package and the URL is valid." + ) + issuer = SnowflakeOCSP.ROOT_CERTIFICATES_DICT[issuer_hash] + else: + issuer = cert_map[issuer_hash] + + issuer_subject.append((issuer, subject)) + return issuer_subject + + def subject_name(self, subject): + return subject.subject.native diff --git a/ocsp_snowflake.py b/src/snowflake/connector/ocsp_snowflake.py similarity index 52% rename from ocsp_snowflake.py rename to src/snowflake/connector/ocsp_snowflake.py index 4b40308ed..d3bf653d1 100644 --- a/ocsp_snowflake.py +++ b/src/snowflake/connector/ocsp_snowflake.py @@ -1,9 +1,10 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. # +from __future__ import annotations + import codecs import json import os @@ -17,31 +18,48 @@ from copy import deepcopy from datetime import datetime, timedelta from logging import getLogger -from os import path, environ +from os import environ, path from os.path import expanduser -from threading import (Lock) +from threading import Lock, RLock from time import gmtime, strftime import jwt + +# We use regular requests and urlib3 when we reach out to do OCSP checks, basically in this very narrow +# part of the code where we want to call out to check for revoked certificates, +# we don't want to use our hardened version of requests. 
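+# (Routing these fetches through the connector's hardened session could itself
+# trigger revocation checks against the OCSP endpoints, so plain requests is
+# used instead.)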
import requests as generic_requests -from snowflake.connector.compat import (urlsplit, OK) +from snowflake.connector.compat import OK, urlsplit from snowflake.connector.constants import HTTP_HEADER_USER_AGENT from snowflake.connector.errorcode import ( - ER_INVALID_OCSP_RESPONSE, - ER_INVALID_OCSP_RESPONSE_CODE, + ER_INVALID_OCSP_RESPONSE_SSD, ER_INVALID_SSD, - ER_SERVER_CERTIFICATE_UNKNOWN, - ER_SERVER_CERTIFICATE_REVOKED, - ER_OCSP_FAILED_TO_CONNECT_HOST, + ER_OCSP_FAILED_TO_CONNECT_CACHE_SERVER, + ER_OCSP_RESPONSE_ATTACHED_CERT_EXPIRED, + ER_OCSP_RESPONSE_ATTACHED_CERT_INVALID, + ER_OCSP_RESPONSE_CACHE_DECODE_FAILED, + ER_OCSP_RESPONSE_CACHE_DOWNLOAD_FAILED, + ER_OCSP_RESPONSE_CERT_STATUS_INVALID, + ER_OCSP_RESPONSE_CERT_STATUS_REVOKED, + ER_OCSP_RESPONSE_CERT_STATUS_UNKNOWN, + ER_OCSP_RESPONSE_EXPIRED, + ER_OCSP_RESPONSE_FETCH_EXCEPTION, + ER_OCSP_RESPONSE_FETCH_FAILURE, + ER_OCSP_RESPONSE_INVALID_EXPIRY_INFO_MISSING, + ER_OCSP_RESPONSE_INVALID_SIGNATURE, + ER_OCSP_RESPONSE_LOAD_FAILURE, + ER_OCSP_RESPONSE_STATUS_UNSUCCESSFUL, + ER_OCSP_RESPONSE_UNAVAILABLE, + ER_OCSP_URL_INFO_MISSING, ) from snowflake.connector.errors import RevocationCheckError from snowflake.connector.network import PYTHON_CONNECTOR_USER_AGENT from snowflake.connector.ssd_internal_keys import ( - ocsp_internal_ssd_pub_dep1, - ocsp_internal_ssd_pub_dep2, ocsp_internal_dep1_key_ver, ocsp_internal_dep2_key_ver, + ocsp_internal_ssd_pub_dep1, + ocsp_internal_ssd_pub_dep2, ) from snowflake.connector.telemetry_oob import TelemetryService from snowflake.connector.time_util import DecorrelateJitterBackoff @@ -49,9 +67,55 @@ logger = getLogger(__name__) -class OCSPTelemetryData(object): +class OCSPTelemetryData: + + CERTIFICATE_EXTRACTION_FAILED = "CertificateExtractionFailed" + OCSP_URL_MISSING = "OCSPURLMissing" + OCSP_RESPONSE_UNAVAILABLE = "OCSPResponseUnavailable" + OCSP_RESPONSE_FETCH_EXCEPTION = "OCSPResponseFetchException" + OCSP_RESPONSE_FAILED_TO_CONNECT_CACHE_SERVER = ( + "OCSPResponseFailedToConnectCacheServer" + ) + OCSP_RESPONSE_CERT_STATUS_INVALID = "OCSPResponseCertStatusInvalid" + OCSP_RESPONSE_CERT_STATUS_REVOKED = "OCSPResponseCertStatusRevoked" + OCSP_RESPONSE_CERT_STATUS_UNKNOWN = "OCSPResponseCertStatusUnknown" + OCSP_RESPONSE_STATUS_UNSUCCESSFUL = "OCSPResponseStatusUnsuccessful" + OCSP_RESPONSE_ATTACHED_CERT_INVALID = "OCSPResponseAttachedCertInvalid" + OCSP_RESPONSE_ATTACHED_CERT_EXPIRED = "OCSPResponseAttachedCertExpired" + OCSP_RESPONSE_INVALID_SIGNATURE = "OCSPResponseSignatureInvalid" + OCSP_RESPONSE_EXPIRY_INFO_MISSING = "OCSPResponseExpiryInfoMissing" + OCSP_RESPONSE_EXPIRED = "OCSPResponseExpired" + OCSP_RESPONSE_FETCH_FAILURE = "OCSPResponseFetchFailure" + OCSP_RESPONSE_CACHE_DOWNLOAD_FAILED = "OCSPResponseCacheDownloadFailed" + OCSP_RESPONSE_CACHE_DECODE_FAILED = "OCSPResponseCacheDecodeFailed" + OCSP_RESPONSE_LOAD_FAILURE = "OCSPResponseLoadFailure" + OCSP_RESPONSE_INVALID_SSD = "OCSPResponseInvalidSSD" + + ERROR_CODE_MAP = { + ER_OCSP_URL_INFO_MISSING: OCSP_URL_MISSING, + ER_OCSP_RESPONSE_UNAVAILABLE: OCSP_RESPONSE_UNAVAILABLE, + ER_OCSP_RESPONSE_FETCH_EXCEPTION: OCSP_RESPONSE_FETCH_EXCEPTION, + ER_OCSP_FAILED_TO_CONNECT_CACHE_SERVER: OCSP_RESPONSE_FAILED_TO_CONNECT_CACHE_SERVER, + ER_OCSP_RESPONSE_CERT_STATUS_INVALID: OCSP_RESPONSE_CERT_STATUS_INVALID, + ER_OCSP_RESPONSE_CERT_STATUS_REVOKED: OCSP_RESPONSE_CERT_STATUS_REVOKED, + ER_OCSP_RESPONSE_CERT_STATUS_UNKNOWN: OCSP_RESPONSE_CERT_STATUS_UNKNOWN, + ER_OCSP_RESPONSE_STATUS_UNSUCCESSFUL: OCSP_RESPONSE_STATUS_UNSUCCESSFUL, + 
ER_OCSP_RESPONSE_ATTACHED_CERT_INVALID: OCSP_RESPONSE_ATTACHED_CERT_INVALID, + ER_OCSP_RESPONSE_ATTACHED_CERT_EXPIRED: OCSP_RESPONSE_ATTACHED_CERT_EXPIRED, + ER_OCSP_RESPONSE_INVALID_SIGNATURE: OCSP_RESPONSE_INVALID_SIGNATURE, + ER_OCSP_RESPONSE_INVALID_EXPIRY_INFO_MISSING: OCSP_RESPONSE_EXPIRY_INFO_MISSING, + ER_OCSP_RESPONSE_EXPIRED: OCSP_RESPONSE_EXPIRED, + ER_OCSP_RESPONSE_FETCH_FAILURE: OCSP_RESPONSE_FETCH_FAILURE, + ER_OCSP_RESPONSE_LOAD_FAILURE: OCSP_RESPONSE_LOAD_FAILURE, + ER_OCSP_RESPONSE_CACHE_DOWNLOAD_FAILED: OCSP_RESPONSE_CACHE_DOWNLOAD_FAILED, + ER_OCSP_RESPONSE_CACHE_DECODE_FAILED: OCSP_RESPONSE_CACHE_DECODE_FAILED, + ER_INVALID_OCSP_RESPONSE_SSD: OCSP_RESPONSE_INVALID_SSD, + ER_INVALID_SSD: OCSP_RESPONSE_INVALID_SSD, + } def __init__(self): + self.event_sub_type = None + self.ocsp_connection_method = None self.cert_id = None self.sfc_peer_host = None self.ocsp_url = None @@ -62,6 +126,24 @@ def __init__(self): self.fail_open = False self.insecure_mode = False + def set_event_sub_type(self, event_sub_type: str) -> None: + """ + Sets sub type for OCSP Telemetry Event. + + There can be multiple event_sub_type that could have happened + during a single connection establishment. Ensure that all of them + are captured. + :param event_sub_type: + :return: + """ + if self.event_sub_type is not None: + self.event_sub_type = f"{self.event_sub_type}|{event_sub_type}" + else: + self.event_sub_type = event_sub_type + + def set_ocsp_connection_method(self, ocsp_conn_method: str) -> None: + self.ocsp_connection_method = ocsp_conn_method + def set_cert_id(self, cert_id): self.cert_id = cert_id @@ -95,9 +177,10 @@ def set_insecure_mode(self, insecure_mode): self.insecure_mode = insecure_mode def generate_telemetry_data(self, event_type, urgent=False): - cls, exception, stack_trace = sys.exc_info() + _, exception, _ = sys.exc_info() telemetry_data = {} telemetry_data.update({"eventType": event_type}) + telemetry_data.update({"eventSubType": self.event_sub_type}) telemetry_data.update({"sfcPeerHost": self.sfc_peer_host}) telemetry_data.update({"certId": self.cert_id}) telemetry_data.update({"ocspRequestBase64": self.ocsp_req}) @@ -109,8 +192,13 @@ def generate_telemetry_data(self, event_type, urgent=False): telemetry_data.update({"cacheHit": self.cache_hit}) telemetry_client = TelemetryService.get_instance() - telemetry_client.log_ocsp_exception(event_type, telemetry_data, exception=str(exception), - stack_trace=traceback.format_exc(), urgent=urgent) + telemetry_client.log_ocsp_exception( + event_type, + telemetry_data, + exception=str(exception), + stack_trace=traceback.format_exc(), + urgent=urgent, + ) return telemetry_data # To be updated once Python Driver has out of band telemetry. 
@@ -118,8 +206,7 @@ def generate_telemetry_data(self, event_type, urgent=False): # telemetry_client.add_log_to_batch(TelemetryData(telemetry_data, datetime.utcnow())) -class SSDPubKey(object): - +class SSDPubKey: def __init__(self): self._key_ver = None self._key = None @@ -135,157 +222,148 @@ def get_key(self): return self._key -class OCSPServer(object): - - MAX_RETRY = int(os.getenv('OCSP_MAX_RETRY', '3')) +class OCSPServer: + MAX_RETRY = int(os.getenv("OCSP_MAX_RETRY", "3")) def __init__(self): self.DEFAULT_CACHE_SERVER_URL = "http://ocsp.snowflakecomputing.com" - ''' + """ The following will change to something like http://ocspssd.snowflakecomputing.com/ocsp/ once the endpoint is up in the backend - ''' - self.NEW_DEFAULT_CACHE_SERVER_BASE_URL = "https://ocspssd.snowflakecomputing.com/ocsp/" + """ + self.NEW_DEFAULT_CACHE_SERVER_BASE_URL = ( + "https://ocspssd.snowflakecomputing.com/ocsp/" + ) if not OCSPServer.is_enabled_new_ocsp_endpoint(): self.CACHE_SERVER_URL = os.getenv( "SF_OCSP_RESPONSE_CACHE_SERVER_URL", - "{0}/{1}".format( + "{}/{}".format( self.DEFAULT_CACHE_SERVER_URL, - OCSPCache.OCSP_RESPONSE_CACHE_FILE_NAME)) + OCSPCache.OCSP_RESPONSE_CACHE_FILE_NAME, + ), + ) else: - self.CACHE_SERVER_URL = os.getenv( - "SF_OCSP_RESPONSE_CACHE_SERVER_URL") + self.CACHE_SERVER_URL = os.getenv("SF_OCSP_RESPONSE_CACHE_SERVER_URL") - self.CACHE_SERVER_ENABLED = os.getenv( - "SF_OCSP_RESPONSE_CACHE_SERVER_ENABLED", "true") != "false" + self.CACHE_SERVER_ENABLED = ( + os.getenv("SF_OCSP_RESPONSE_CACHE_SERVER_ENABLED", "true") != "false" + ) # OCSP dynamic cache server URL pattern self.OCSP_RETRY_URL = None @staticmethod def is_enabled_new_ocsp_endpoint(): - """ - Check if new OCSP Endpoint has been - enabled - :return: True or False - """ - return os.getenv("SF_OCSP_ACTIVATE_NEW_ENDPOINT", - "false").lower() == "true" + """Checks if new OCSP Endpoint has been enabled.""" + return os.getenv("SF_OCSP_ACTIVATE_NEW_ENDPOINT", "false").lower() == "true" def reset_ocsp_endpoint(self, hname): + """Resets current object members CACHE_SERVER_URL and RETRY_URL_PATTERN. - """ - Update current object members - CACHE_SERVER_URL and RETRY_URL_PATTERN - to point to new OCSP Fetch and Retry endpoints - respectively - - The new OCSP Endpoint address is based on the - hostname the customer is trying to connect to. - The deployment or in case of client failover, - the replication ID is copied from the hostname. - - :param hname: hostname customer is trying to connect - to + They will point at the new OCSP Fetch and Retry endpoints respectively. The new OCSP Endpoint address is based + on the hostname the customer is trying to connect to. The deployment or in case of client failover, the + replication ID is copied from the hostname. 
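+
+        For example, a privatelink hostname <host> maps to the endpoints
+        https://ocspssd.<host>/ocsp/fetch and https://ocspssd.<host>/ocsp/retry.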
""" if hname.endswith("privatelink.snowflakecomputing.com"): temp_ocsp_endpoint = "".join(["https://ocspssd.", hname, "/ocsp/"]) elif hname.endswith("global.snowflakecomputing.com"): - rep_id_begin = hname[hname.find("-"):] - temp_ocsp_endpoint = "".join( - ["https://ocspssd", rep_id_begin, "/ocsp/"]) + rep_id_begin = hname[hname.find("-") :] + temp_ocsp_endpoint = "".join(["https://ocspssd", rep_id_begin, "/ocsp/"]) elif not hname.endswith("snowflakecomputing.com"): temp_ocsp_endpoint = self.NEW_DEFAULT_CACHE_SERVER_BASE_URL else: - hname_wo_acc = hname[hname.find("."):] - temp_ocsp_endpoint = "".join( - ["https://ocspssd", hname_wo_acc, "/ocsp/"]) + hname_wo_acc = hname[hname.find(".") :] + temp_ocsp_endpoint = "".join(["https://ocspssd", hname_wo_acc, "/ocsp/"]) self.CACHE_SERVER_URL = "".join([temp_ocsp_endpoint, "fetch"]) self.OCSP_RETRY_URL = "".join([temp_ocsp_endpoint, "retry"]) def reset_ocsp_dynamic_cache_server_url(self, use_ocsp_cache_server): - """ - Reset OCSP dynamic cache server url pattern. + """Resets OCSP dynamic cache server url pattern. This is used only when OCSP cache server is updated. """ - if use_ocsp_cache_server is not None: self.CACHE_SERVER_ENABLED = use_ocsp_cache_server if self.CACHE_SERVER_ENABLED: - logger.debug("OCSP response cache server is enabled: %s", - self.CACHE_SERVER_URL) + logger.debug( + "OCSP response cache server is enabled: %s", self.CACHE_SERVER_URL + ) else: logger.debug("OCSP response cache server is disabled") if self.OCSP_RETRY_URL is None: - if self.CACHE_SERVER_URL is not None and \ - (not self.CACHE_SERVER_URL.startswith( - self.DEFAULT_CACHE_SERVER_URL)): + if self.CACHE_SERVER_URL is not None and ( + not self.CACHE_SERVER_URL.startswith(self.DEFAULT_CACHE_SERVER_URL) + ): # only if custom OCSP cache server is used. - parsed_url = urlsplit( - self.CACHE_SERVER_URL) + parsed_url = urlsplit(self.CACHE_SERVER_URL) if not OCSPCache.ACTIVATE_SSD: if parsed_url.port: - self.OCSP_RETRY_URL = \ - u"{0}://{1}:{2}/retry/".format( - parsed_url.scheme, parsed_url.hostname, - parsed_url.port) + u"{0}/{1}" + self.OCSP_RETRY_URL = ( + "{}://{}:{}/retry/".format( + parsed_url.scheme, parsed_url.hostname, parsed_url.port + ) + + "{0}/{1}" + ) else: - self.OCSP_RETRY_URL = \ - u"{0}://{1}/retry/".format( - parsed_url.scheme, - parsed_url.hostname) + u"{0}/{1}" + self.OCSP_RETRY_URL = ( + "{}://{}/retry/".format( + parsed_url.scheme, parsed_url.hostname + ) + + "{0}/{1}" + ) else: if parsed_url.port: - self.OCSP_RETRY_URL = \ - u"{0}://{1}:{2}/retry".format( - parsed_url.scheme, parsed_url.hostname, - parsed_url.port) + self.OCSP_RETRY_URL = "{}://{}:{}/retry".format( + parsed_url.scheme, parsed_url.hostname, parsed_url.port + ) else: - self.OCSP_RETRY_URL = \ - u"{0}://{1}/retry".format( - parsed_url.scheme, parsed_url.hostname) - logger.debug( - "OCSP dynamic cache server RETRY URL: %s", - self.OCSP_RETRY_URL) + self.OCSP_RETRY_URL = "{}://{}/retry".format( + parsed_url.scheme, parsed_url.hostname + ) + logger.debug("OCSP dynamic cache server RETRY URL: %s", self.OCSP_RETRY_URL) def download_cache_from_server(self, ocsp): if self.CACHE_SERVER_ENABLED: # if any of them is not cache, download the cache file from # OCSP response cache server. 
try: - retval = OCSPServer._download_ocsp_response_cache(ocsp, - self.CACHE_SERVER_URL) + retval = OCSPServer._download_ocsp_response_cache( + ocsp, self.CACHE_SERVER_URL + ) if not retval: - raise RevocationCheckError(msg="OCSP Cache Server Unavailable.") - logger.debug("downloaded OCSP response cache file from %s", - self.CACHE_SERVER_URL) + raise RevocationCheckError( + msg="OCSP Cache Server Unavailable.", + errno=ER_OCSP_RESPONSE_CACHE_DOWNLOAD_FAILED, + ) + logger.debug( + "downloaded OCSP response cache file from %s", self.CACHE_SERVER_URL + ) logger.debug("# of certificates: %s", len(OCSPCache.CACHE)) except RevocationCheckError as rce: - logger.debug("OCSP Response cache download failed. The client" - "will reach out to the OCSP Responder directly for" - "any missing OCSP responses %s\n" % rce.msg) + logger.debug( + "OCSP Response cache download failed. The client" + "will reach out to the OCSP Responder directly for" + "any missing OCSP responses %s\n" % rce.msg + ) + raise @staticmethod def _download_ocsp_response_cache(ocsp, url, do_retry=True): - """ - Download OCSP response cache from the cache server - :param url: OCSP response cache server - :param do_retry: retry if connection fails up to N times - """ + """Downloads OCSP response cache from the cache server.""" headers = {HTTP_HEADER_USER_AGENT: PYTHON_CONNECTOR_USER_AGENT} sf_timeout = SnowflakeOCSP.OCSP_CACHE_SERVER_CONNECTION_TIMEOUT try: start_time = time.time() - logger.debug( - "started downloading OCSP response cache file: %s", url) + logger.debug("started downloading OCSP response cache file: %s", url) if ocsp.test_mode is not None: - test_timeout = os.getenv("SF_TEST_OCSP_CACHE_SERVER_CONNECTION_TIMEOUT", None) + test_timeout = os.getenv( + "SF_TEST_OCSP_CACHE_SERVER_CONNECTION_TIMEOUT", None + ) sf_cache_server_url = os.getenv("SF_TEST_OCSP_CACHE_SERVER_URL", None) if test_timeout is not None: sf_timeout = int(test_timeout) @@ -296,7 +374,7 @@ def _download_ocsp_response_cache(ocsp, url, do_retry=True): max_retry = SnowflakeOCSP.OCSP_CACHE_SERVER_MAX_RETRY if do_retry else 1 sleep_time = 1 backoff = DecorrelateJitterBackoff(sleep_time, 16) - for attempt in range(max_retry): + for _ in range(max_retry): response = session.get( url, timeout=sf_timeout, # socket timeout @@ -307,43 +385,46 @@ def _download_ocsp_response_cache(ocsp, url, do_retry=True): elapsed_time = time.time() - start_time logger.debug( "ended downloading OCSP response cache file. " - "elapsed time: %ss", elapsed_time) + "elapsed time: %ss", + elapsed_time, + ) break elif max_retry > 1: sleep_time = backoff.next_sleep(1, sleep_time) logger.debug( "OCSP server returned %s. 
Retrying in %s(s)", - response.status_code, sleep_time) + response.status_code, + sleep_time, + ) time.sleep(sleep_time) else: logger.error( - "Failed to get OCSP response after %s attempt.", - max_retry) + "Failed to get OCSP response after %s attempt.", max_retry + ) return False return True - except Exception as e: - logger.debug("Failed to get OCSP response cache from %s: %s", url, - e) + logger.debug("Failed to get OCSP response cache from %s: %s", url, e) raise RevocationCheckError( - msg="Failed to get OCSP Response Cache from {0}: {1}".format( - url, - e), - errno=ER_OCSP_FAILED_TO_CONNECT_HOST) + msg=f"Failed to get OCSP Response Cache from {url}: {e}", + errno=ER_OCSP_FAILED_TO_CONNECT_CACHE_SERVER, + ) def generate_get_url(self, ocsp_url, b64data): parsed_url = urlsplit(ocsp_url) if self.OCSP_RETRY_URL is None: - target_url = "{0}/{1}".format(ocsp_url, b64data) + target_url = f"{ocsp_url}/{b64data}" else: - target_url = self.OCSP_RETRY_URL.format( - parsed_url.hostname, b64data) + target_url = self.OCSP_RETRY_URL.format(parsed_url.hostname, b64data) logger.debug("OCSP Retry URL is - %s", target_url) return target_url -class OCSPCache(object): +class OCSPCache: + # Activate server side directive support + ACTIVATE_SSD = False + CACHE = {} # OCSP cache lock @@ -352,105 +433,123 @@ class OCSPCache(object): # OCSP cache update flag CACHE_UPDATED = False - # Cache Expiration in seconds (24 hours). OCSP validation cache is - # invalidated every 24 hours - CACHE_EXPIRATION = 86400 + # Cache Expiration in seconds (120 hours). OCSP validation cache is + # invalidated every 120 hours (5 days) + CACHE_EXPIRATION = 432000 # OCSP Response Cache URI OCSP_RESPONSE_CACHE_URI = None # OCSP response cache file name - OCSP_RESPONSE_CACHE_FILE_NAME = 'ocsp_response_cache.json' + OCSP_RESPONSE_CACHE_FILE_NAME = "ocsp_response_cache.json" # Cache directory - CACHE_ROOT_DIR = os.getenv('SF_OCSP_RESPONSE_CACHE_DIR') or \ - expanduser("~") or tempfile.gettempdir() CACHE_DIR = None - # Activate server side directive support - ACTIVATE_SSD = False + @staticmethod + def reset_cache_dir(): + # Cache directory + OCSPCache.CACHE_DIR = os.getenv("SF_OCSP_RESPONSE_CACHE_DIR") + if OCSPCache.CACHE_DIR is None: + cache_root_dir = expanduser("~") or tempfile.gettempdir() + if platform.system() == "Windows": + OCSPCache.CACHE_DIR = path.join( + cache_root_dir, "AppData", "Local", "Snowflake", "Caches" + ) + elif platform.system() == "Darwin": + OCSPCache.CACHE_DIR = path.join( + cache_root_dir, "Library", "Caches", "Snowflake" + ) + else: + OCSPCache.CACHE_DIR = path.join(cache_root_dir, ".cache", "snowflake") + logger.debug("cache directory: %s", OCSPCache.CACHE_DIR) - if platform.system() == 'Windows': - CACHE_DIR = path.join(CACHE_ROOT_DIR, 'AppData', 'Local', 'Snowflake', - 'Caches') - elif platform.system() == 'Darwin': - CACHE_DIR = path.join(CACHE_ROOT_DIR, 'Library', 'Caches', 'Snowflake') - else: - CACHE_DIR = path.join(CACHE_ROOT_DIR, '.cache', 'snowflake') + if not path.exists(OCSPCache.CACHE_DIR): + try: + os.makedirs(OCSPCache.CACHE_DIR, mode=0o700) + except Exception as ex: + logger.debug( + "cannot create a cache directory: [%s], err=[%s]", + OCSPCache.CACHE_DIR, + ex, + ) + OCSPCache.CACHE_DIR = None - if not path.exists(CACHE_DIR): - try: - os.makedirs(CACHE_DIR, mode=0o700) - except Exception as ex: - logger.debug('cannot create a cache directory: [%s], err=[%s]', - CACHE_DIR, ex) - CACHE_DIR = None - logger.debug("cache directory: %s", CACHE_DIR) + @staticmethod + def del_cache_file(): + 
"""Deletes the OCSP response cache file if exists.""" + cache_file = path.join( + OCSPCache.CACHE_DIR, OCSPCache.OCSP_RESPONSE_CACHE_FILE_NAME + ) + if path.exists(cache_file): + logger.debug(f"deleting cache file {cache_file}") + os.unlink(cache_file) @staticmethod def set_ssd_status(ssd_status): OCSPCache.ACTIVATE_SSD = ssd_status @staticmethod - def reset_ocsp_response_cache_uri( - ocsp_response_cache_uri): + def reset_ocsp_response_cache_uri(ocsp_response_cache_uri): if ocsp_response_cache_uri is None and OCSPCache.CACHE_DIR is not None: - OCSPCache.OCSP_RESPONSE_CACHE_URI = 'file://' + path.join( - OCSPCache.CACHE_DIR, - OCSPCache.OCSP_RESPONSE_CACHE_FILE_NAME) + OCSPCache.OCSP_RESPONSE_CACHE_URI = "file://" + path.join( + OCSPCache.CACHE_DIR, OCSPCache.OCSP_RESPONSE_CACHE_FILE_NAME + ) else: OCSPCache.OCSP_RESPONSE_CACHE_URI = ocsp_response_cache_uri if OCSPCache.OCSP_RESPONSE_CACHE_URI is not None: # normalize URI for Windows - OCSPCache.OCSP_RESPONSE_CACHE_URI = \ - OCSPCache.OCSP_RESPONSE_CACHE_URI.replace('\\', '/') + OCSPCache.OCSP_RESPONSE_CACHE_URI = ( + OCSPCache.OCSP_RESPONSE_CACHE_URI.replace("\\", "/") + ) - logger.debug("ocsp_response_cache_uri: %s", - OCSPCache.OCSP_RESPONSE_CACHE_URI) - logger.debug( - "OCSP_VALIDATION_CACHE size: %s", len(OCSPCache.CACHE)) + logger.debug("ocsp_response_cache_uri: %s", OCSPCache.OCSP_RESPONSE_CACHE_URI) + logger.debug("OCSP_VALIDATION_CACHE size: %s", len(OCSPCache.CACHE)) @staticmethod def read_file(ocsp): - """ - Read OCSP Response cache data from the URI, which is very likely a file. - """ + """Reads OCSP Response cache data from the URI, which is very likely a file.""" try: parsed_url = urlsplit(OCSPCache.OCSP_RESPONSE_CACHE_URI) - if parsed_url.scheme == 'file': + if parsed_url.scheme == "file": OCSPCache.read_ocsp_response_cache_file( - ocsp, - path.join(parsed_url.netloc, parsed_url.path)) + ocsp, path.join(parsed_url.netloc, parsed_url.path) + ) else: - raise Exception( - "Unsupported OCSP URI: %s", - OCSPCache.OCSP_RESPONSE_CACHE_URI) - except Exception as e: + msg = "Unsupported OCSP URI: {}".format( + OCSPCache.OCSP_RESPONSE_CACHE_URI + ) + raise Exception(msg) + except (RevocationCheckError, Exception) as rce: logger.debug( "Failed to read OCSP response cache file %s: %s, " "No worry. It will validate with OCSP server. " "Ignoring...", - OCSPCache.OCSP_RESPONSE_CACHE_URI, e, exc_info=True) + OCSPCache.OCSP_RESPONSE_CACHE_URI, + rce, + exc_info=True, + ) @staticmethod def read_ocsp_response_cache_file(ocsp, filename): - """ - Reads OCSP Response cache - """ + """Reads OCSP Response cache.""" try: - if OCSPCache.check_ocsp_response_cache_lock_dir(filename) and \ - path.exists(filename): - with codecs.open(filename, 'r', encoding='utf-8', - errors='ignore') as f: + if OCSPCache.check_ocsp_response_cache_lock_dir(filename) and path.exists( + filename + ): + with codecs.open(filename, "r", encoding="utf-8", errors="ignore") as f: ocsp.decode_ocsp_response_cache(json.load(f)) - logger.debug("Read OCSP response cache file: %s, count=%s", - filename, len(OCSPCache.CACHE)) + logger.debug( + "Read OCSP response cache file: %s, count=%s", + filename, + len(OCSPCache.CACHE), + ) else: logger.debug( "Failed to locate OCSP response cache file. " "No worry. 
It will validate with OCSP server: %s", - filename + filename, ) except Exception as ex: logger.debug("Caught - %s", ex) @@ -458,27 +557,23 @@ def read_ocsp_response_cache_file(ocsp, filename): @staticmethod def update_file(ocsp): - """ - Update OCSP Respone Cache file - """ + """Updates OCSP Respone Cache file.""" with OCSPCache.CACHE_LOCK: if OCSPCache.CACHE_UPDATED: OCSPCache.update_ocsp_response_cache_file( - ocsp, - OCSPCache.OCSP_RESPONSE_CACHE_URI) + ocsp, OCSPCache.OCSP_RESPONSE_CACHE_URI + ) OCSPCache.CACHE_UPDATED = False @staticmethod def update_ocsp_response_cache_file(ocsp, ocsp_response_cache_uri): - """ - Updates OCSP Response Cache - """ + """Updates OCSP Response Cache.""" if ocsp_response_cache_uri is not None: try: parsed_url = urlsplit(ocsp_response_cache_uri) - if parsed_url.scheme == 'file': + if parsed_url.scheme == "file": filename = path.join(parsed_url.netloc, parsed_url.path) - lock_dir = filename + '.lck' + lock_dir = filename + ".lck" for _ in range(100): # wait until the lck file has been removed # or up to 1 second (0.01 x 100) @@ -486,46 +581,49 @@ def update_ocsp_response_cache_file(ocsp, ocsp_response_cache_uri): break time.sleep(0.01) try: - OCSPCache.write_ocsp_response_cache_file( - ocsp, - filename) + OCSPCache.write_ocsp_response_cache_file(ocsp, filename) finally: OCSPCache.unlock_cache_file(lock_dir) else: logger.debug( "No OCSP response cache file is written, because the " "given URI is not a file: %s. Ignoring...", - ocsp_response_cache_uri) + ocsp_response_cache_uri, + ) except Exception as e: logger.debug( "Failed to write OCSP response cache " "file. file: %s, error: %s, Ignoring...", - ocsp_response_cache_uri, e, exc_info=True) + ocsp_response_cache_uri, + e, + exc_info=True, + ) @staticmethod def write_ocsp_response_cache_file(ocsp, filename): - """ - Writes OCSP Response Cache - """ - logger.debug('writing OCSP response cache file') + """Writes OCSP Response Cache.""" + logger.debug(f"writing OCSP response cache file to {filename}") file_cache_data = {} ocsp.encode_ocsp_response_cache(file_cache_data) - with codecs.open(filename, 'w', encoding='utf-8', errors='ignore') as f: + with codecs.open(filename, "w", encoding="utf-8", errors="ignore") as f: json.dump(file_cache_data, f) @staticmethod def check_ocsp_response_cache_lock_dir(filename): - """ - Checks if the lock directory exists. True if it can update the cache - file or False when some other process may be updating the cache file. + """Checks if the lock directory exists. + + Returns: + True if it can update the cache file or False when some other process may be updating the cache file. """ current_time = int(time.time()) - lock_dir = filename + '.lck' + lock_dir = filename + ".lck" try: ts_cache_file = OCSPCache._file_timestamp(filename) - if not path.exists(lock_dir) and \ - current_time - OCSPCache.CACHE_EXPIRATION <= ts_cache_file: + if ( + not path.exists(lock_dir) + and current_time - OCSPCache.CACHE_EXPIRATION <= ts_cache_file + ): # use cache only if no lock directory exists and the cache file # was created last 24 hours return True @@ -538,22 +636,30 @@ def check_ocsp_response_cache_lock_dir(filename): logger.debug( "The lock directory is older than 60 seconds. " "Deleted the lock directory and ignoring the cache: %s", - lock_dir + lock_dir, ) else: logger.debug( - 'The lock directory exists. Other process may be ' - 'updating the cache file: %s, %s', filename, lock_dir) + "The lock directory exists. 
Other process may be " + "updating the cache file: %s, %s", + filename, + lock_dir, + ) else: os.unlink(filename) logger.debug( - "The cache is older than 1 day. " - "Deleted the cache file: %s", filename) + "The cache is older than 1 day. " "Deleted the cache file: %s", + filename, + ) except Exception as e: logger.debug( "Failed to check OCSP response cache file. No worry. It will " "validate with OCSP server: file: %s, lock directory: %s, " - "error: %s", filename, lock_dir, e) + "error: %s", + filename, + lock_dir, + e, + ) return False @staticmethod @@ -570,10 +676,11 @@ def find_cache(ocsp, cert_id, subject): try: # is_valid_time can raise exception if the cache # entry is a SSD. - if OCSPCache.is_cache_fresh(current_time, ts) and \ - ocsp.is_valid_time(cert_id, cache): + if OCSPCache.is_cache_fresh(current_time, ts) and ocsp.is_valid_time( + cert_id, cache + ): if subject_name: - logger.debug('hit cache for subject: %s', subject_name) + logger.debug("hit cache for subject: %s", subject_name) return True, cache else: OCSPCache.delete_cache(ocsp, cert_id) @@ -581,24 +688,25 @@ def find_cache(ocsp, cert_id, subject): if OCSPCache.ACTIVATE_SSD: logger.debug( "Potentially tried to validate SSD as OCSP Response." - "Attempting to validate as SSD", ex) + "Attempting to validate as SSD", + ex, + ) try: - if OCSPCache.is_cache_fresh(current_time, ts) and \ - SFSsd.validate(cache): + if OCSPCache.is_cache_fresh( + current_time, ts + ) and SFSsd.validate(cache): if subject_name: - logger.debug('hit cache for subject: %s', - subject_name) + logger.debug("hit cache for subject: %s", subject_name) return True, cache else: OCSPCache.delete_cache(ocsp, cert_id) except Exception: OCSPCache.delete_cache(ocsp, cert_id) else: - logger.debug("Could not validate cache entry %s %s", - cert_id, ex) + logger.debug("Could not validate cache entry %s %s", cert_id, ex) OCSPCache.CACHE_UPDATED = True if subject_name: - logger.debug('not hit cache for subject: %s', subject_name) + logger.debug("not hit cache for subject: %s", subject_name) return False, None @staticmethod @@ -618,8 +726,7 @@ def update_or_delete_cache(ocsp, cert_id, ocsp_response, ts): @staticmethod def iterate_cache(): - for rec in OCSPCache.CACHE.items(): - yield rec + yield from OCSPCache.CACHE.items() @staticmethod def update_cache(ocsp, cert_id, ocsp_response): @@ -643,38 +750,42 @@ def delete_cache(ocsp, cert_id): logger.debug("Could not acquire lock", ex) @staticmethod - def merge_cache(ocsp, previous_cache_filename, current_cache_filename, - output_filename): - """ - Merge two cache files into one cache and save to the output. + def merge_cache( + ocsp, previous_cache_filename, current_cache_filename, output_filename + ): + """Merges two cache files into one cache and save to the output. + current_cache takes precedence over previous_cache. 
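+
+        Raises:
+            Exception: a RevocationCheckError hit while reading or writing the
+                cache files is re-raised as a generic Exception for the calling
+                function / OCSP Cache Server to handle.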
""" - OCSPCache.clear_cache() - if previous_cache_filename: - OCSPCache.read_ocsp_response_cache_file( - ocsp, previous_cache_filename) - previous_cache = deepcopy(OCSPCache.CACHE) + try: + OCSPCache.clear_cache() + if previous_cache_filename: + OCSPCache.read_ocsp_response_cache_file(ocsp, previous_cache_filename) + previous_cache = deepcopy(OCSPCache.CACHE) - OCSPCache.clear_cache() - OCSPCache.read_ocsp_response_cache_file(ocsp, current_cache_filename) - current_cache = deepcopy(OCSPCache.CACHE) + OCSPCache.clear_cache() + OCSPCache.read_ocsp_response_cache_file(ocsp, current_cache_filename) + current_cache = deepcopy(OCSPCache.CACHE) - # overwrite the previous one with the current one - previous_cache.update(current_cache) + # overwrite the previous one with the current one + previous_cache.update(current_cache) - OCSPCache.CACHE = previous_cache - OCSPCache.write_ocsp_response_cache_file(ocsp, output_filename) + OCSPCache.CACHE = previous_cache + OCSPCache.write_ocsp_response_cache_file(ocsp, output_filename) + except RevocationCheckError as rce: + # Catch any revocation check error and raise a generic exception + # Any other generic exception along with this one should be + # handled by the calling function / OCSP Cache Server. + raise Exception(rce.msg) @staticmethod def _file_timestamp(filename): - """ - Last created timestamp of the file/dir - """ - if platform.system() == 'Windows': + """Gets the last created timestamp of the file/dir.""" + if platform.system() == "Windows": ts = int(path.getctime(filename)) else: stat = os.stat(filename) - if hasattr(stat, 'st_birthtime'): # odx + if hasattr(stat, "st_birthtime"): # odx ts = int(stat.st_birthtime) else: ts = int(stat.st_mtime) # linux @@ -682,57 +793,52 @@ def _file_timestamp(filename): @staticmethod def lock_cache_file(fname): - """ - Lock a cache file by creating a directory. - """ + """Locks a cache file by creating a directory.""" try: os.mkdir(fname) return True - except IOError: + except OSError: return False @staticmethod def unlock_cache_file(fname): - """ - Unlock a cache file by deleting a directory - """ + """Unlocks a cache file by deleting a directory.""" try: os.rmdir(fname) return True - except IOError: + except OSError: return False @staticmethod def delete_cache_file(): - """ - Delete the cache file. Used by tests only - """ + """Deletes the cache file. 
Used by tests only.""" parsed_url = urlsplit(OCSPCache.OCSP_RESPONSE_CACHE_URI) fname = path.join(parsed_url.netloc, parsed_url.path) OCSPCache.lock_cache_file(fname) try: + logger.debug(f"deleting cache file, used by tests only {fname}") os.unlink(fname) finally: OCSPCache.unlock_cache_file(fname) @staticmethod def clear_cache(): - """ - Clear cache - """ + """Clears cache.""" with OCSPCache.CACHE_LOCK: OCSPCache.CACHE = {} @staticmethod def cache_size(): - """ - Cache size - """ + """Returns the cache's size.""" with OCSPCache.CACHE_LOCK: return len(OCSPCache.CACHE) -class SFSsd(object): +# Reset OCSP cache directory +OCSPCache.reset_cache_dir() + + +class SFSsd: # Support for Server Side Directives ACTIVATE_SSD = False @@ -746,28 +852,31 @@ class SFSsd(object): ssd_pub_key_dep1 = SSDPubKey() ssd_pub_key_dep2 = SSDPubKey() - SSD_ROOT_DIR = os.getenv('SF_OCSP_SSD_DIR') or \ - expanduser("~") or tempfile.gettempdir() + SSD_ROOT_DIR = ( + os.getenv("SF_OCSP_SSD_DIR") or expanduser("~") or tempfile.gettempdir() + ) - if platform.system() == 'Windows': - SSD_DIR = path.join(SSD_ROOT_DIR, 'AppData', 'Local', 'Snowflake', - 'Caches') - elif platform.system() == 'Darwin': - SSD_DIR = path.join(SSD_ROOT_DIR, 'Library', 'Caches', 'Snowflake') + if platform.system() == "Windows": + SSD_DIR = path.join(SSD_ROOT_DIR, "AppData", "Local", "Snowflake", "Caches") + elif platform.system() == "Darwin": + SSD_DIR = path.join(SSD_ROOT_DIR, "Library", "Caches", "Snowflake") else: - SSD_DIR = path.join(SSD_ROOT_DIR, '.cache', 'snowflake') + SSD_DIR = path.join(SSD_ROOT_DIR, ".cache", "snowflake") def __init__(self): - SFSsd.ssd_pub_key_dep1.update(ocsp_internal_dep1_key_ver, - ocsp_internal_ssd_pub_dep1) - SFSsd.ssd_pub_key_dep2.update(ocsp_internal_dep2_key_ver, - ocsp_internal_ssd_pub_dep2) + SFSsd.ssd_pub_key_dep1.update( + ocsp_internal_dep1_key_ver, ocsp_internal_ssd_pub_dep1 + ) + SFSsd.ssd_pub_key_dep2.update( + ocsp_internal_dep2_key_ver, ocsp_internal_ssd_pub_dep2 + ) @staticmethod def check_ssd_support(): # Activate server side directive support - SFSsd.ACTIVATE_SSD = os.getenv("SF_OCSP_ACTIVATE_SSD", - "false").lower() == "true" + SFSsd.ACTIVATE_SSD = ( + os.getenv("SF_OCSP_ACTIVATE_SSD", "false").lower() == "true" + ) @staticmethod def add_to_ssd_persistent_cache(hostname, ssd): @@ -793,35 +902,36 @@ def find_in_ssd_cache(account_name): @staticmethod def ret_ssd_pub_key(iss_name): - if iss_name == 'dep1': + if iss_name == "dep1": return SFSsd.ssd_pub_key_dep1.get_key() - elif iss_name == 'dep2': + elif iss_name == "dep2": return SFSsd.ssd_pub_key_dep2.get_key() else: return None @staticmethod def ret_ssd_pub_key_ver(iss_name): - if iss_name == 'dep1': + if iss_name == "dep1": return SFSsd.ssd_pub_key_dep1.get_key_version() - elif iss_name == 'dep2': + elif iss_name == "dep2": return SFSsd.ssd_pub_key_dep2.get_key_version() else: return None @staticmethod def update_pub_key(ssd_issuer, ssd_pub_key_ver, ssd_pub_key_new): - if ssd_issuer == 'dep1': + if ssd_issuer == "dep1": SFSsd.ssd_pub_key_dep1.update(ssd_pub_key_ver, ssd_pub_key_new) - elif ssd_issuer == 'dep2': + elif ssd_issuer == "dep2": SFSsd.ssd_pub_key_dep2.update(ssd_pub_key_ver, ssd_pub_key_new) @staticmethod def validate(ssd): try: ssd_header = jwt.get_unverified_header(ssd) - jwt.decode(ssd, SFSsd.ret_ssd_pub_key(ssd_header['ssd_iss']), - algorithm='RS512') + jwt.decode( + ssd, SFSsd.ret_ssd_pub_key(ssd_header["ssd_iss"]), algorithm="RS512" + ) except Exception as ex: logger.debug("Error while validating SSD Token", ex) return False @@ 
-829,16 +939,14 @@ def validate(ssd): return True -class SnowflakeOCSP(object): - """ - OCSP validator using PyOpenSSL and asn1crypto/pyasn1 - """ +class SnowflakeOCSP: + """OCSP validator using PyOpenSSL and asn1crypto/pyasn1.""" # root certificate cache ROOT_CERTIFICATES_DICT = {} # root certificates # root certificate cache lock - ROOT_CERTIFICATES_DICT_LOCK = Lock() + ROOT_CERTIFICATES_DICT_LOCK = RLock() # ssd cache object SSD = SFSsd() @@ -847,15 +955,18 @@ class SnowflakeOCSP(object): OCSP_CACHE = OCSPCache() OCSP_WHITELIST = re.compile( - r'^' - r'(.*\.snowflakecomputing\.com$' - r'|(?:|.*\.)s3.*\.amazonaws\.com$' # start with s3 or .s3 in the middle - r'|.*\.okta\.com$' - r'|.*\.blob\.core\.windows\.net$)') + r"^" + r"(.*\.snowflakecomputing\.com$" + r"|(?:|.*\.)s3.*\.amazonaws\.com$" # start with s3 or .s3 in the middle + r"|.*\.okta\.com$" + r"|(?:|.*\.)storage\.googleapis\.com$" + r"|.*\.blob\.core\.windows\.net$" + r"|.*\.blob\.core\.usgovcloudapi\.net$)" + ) # Tolerable validity date range ratio. The OCSP response is valid up - # to (next update timestap) + (next update timestamp - - # this update timestap) * TOLERABLE_VALIDITY_RANGE_RATIO. This buffer + # to (next update timestamp) + (next update timestamp - + # this update timestamp) * TOLERABLE_VALIDITY_RANGE_RATIO. This buffer # yields some time for Root CA to update intermediate CA's certificate # OCSP response. In fact, they don't update OCSP response in time. In Dec # 2016, they left OCSP response expires for 5 hours at least, and it @@ -871,7 +982,7 @@ class SnowflakeOCSP(object): ZERO_EPOCH = datetime.utcfromtimestamp(0) # Timestamp format for logging - OUTPUT_TIMESTAMP_FORMAT = '%Y-%m-%d %H:%M:%SZ' + OUTPUT_TIMESTAMP_FORMAT = "%Y-%m-%d %H:%M:%SZ" # Connection timeout in seconds for CA OCSP Responder CA_OCSP_RESPONDER_CONNECTION_TIMEOUT = 10 @@ -889,15 +1000,16 @@ class SnowflakeOCSP(object): OCSP_CACHE_SERVER_MAX_RETRY = 1 def __init__( - self, - ocsp_response_cache_uri=None, - use_ocsp_cache_server=None, - use_post_method=True, - use_fail_open=True): + self, + ocsp_response_cache_uri=None, + use_ocsp_cache_server=None, + use_post_method=True, + use_fail_open=True, + ): self.test_mode = os.getenv("SF_OCSP_TEST_MODE", None) - if self.test_mode is 'true': + if self.test_mode == "true": logger.debug("WARNING - DRIVER CONFIGURED IN TEST MODE") self._use_post_method = use_post_method @@ -909,29 +1021,26 @@ def __init__( if os.getenv("SF_OCSP_FAIL_OPEN") is not None: # failOpen Env Variable is for internal usage/ testing only. # Using it in production is not advised and not supported. 
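+            # Only the exact value "true" (case-insensitive) enables fail-open here.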
- self.FAIL_OPEN = os.getenv("SF_OCSP_FAIL_OPEN").lower() == 'true' + self.FAIL_OPEN = os.getenv("SF_OCSP_FAIL_OPEN").lower() == "true" else: self.FAIL_OPEN = use_fail_open if SnowflakeOCSP.SSD.ACTIVATE_SSD: - SnowflakeOCSP.OCSP_CACHE.set_ssd_status( - SnowflakeOCSP.SSD.ACTIVATE_SSD) + SnowflakeOCSP.OCSP_CACHE.set_ssd_status(SnowflakeOCSP.SSD.ACTIVATE_SSD) SnowflakeOCSP.SSD.clear_ssd_cache() SnowflakeOCSP.read_directives() - SnowflakeOCSP.OCSP_CACHE.reset_ocsp_response_cache_uri( - ocsp_response_cache_uri) + SnowflakeOCSP.OCSP_CACHE.reset_ocsp_response_cache_uri(ocsp_response_cache_uri) if not OCSPServer.is_enabled_new_ocsp_endpoint(): self.OCSP_CACHE_SERVER.reset_ocsp_dynamic_cache_server_url( - use_ocsp_cache_server) + use_ocsp_cache_server + ) SnowflakeOCSP.OCSP_CACHE.read_file(self) def validate_certfile(self, cert_filename, no_exception=False): - """ - Validates the certificate is NOT revoked - """ + """Validates that the certificate is NOT revoked.""" cert_map = {} telemetry_data = OCSPTelemetryData() telemetry_data.set_cache_enabled(self.OCSP_CACHE_SERVER.CACHE_SERVER_ENABLED) @@ -946,19 +1055,18 @@ def validate_certfile(self, cert_filename, no_exception=False): raise ex return self._validate( - None, cert_data, telemetry_data, do_retry=False, no_exception=no_exception) + None, cert_data, telemetry_data, do_retry=False, no_exception=no_exception + ) def validate(self, hostname, connection, no_exception=False): - """ - Validates the certificate is not revoked using OCSP - """ - logger.debug(u'validating certificate: %s', hostname) + """Validates the certificate is not revoked using OCSP.""" + logger.debug("validating certificate: %s", hostname) do_retry = SnowflakeOCSP.get_ocsp_retry_choice() m = not SnowflakeOCSP.OCSP_WHITELIST.match(hostname) if m or hostname.startswith("ocspssd"): - logger.debug(u'skipping OCSP check: %s', hostname) + logger.debug("skipping OCSP check: %s", hostname) return [None, None, None, None, None] if OCSPServer.is_enabled_new_ocsp_endpoint(): @@ -973,29 +1081,38 @@ def validate(self, hostname, connection, no_exception=False): try: cert_data = self.extract_certificate_chain(connection) except RevocationCheckError: - logger.debug(telemetry_data.generate_telemetry_data("RevocationCheckFailure")) + telemetry_data.set_event_sub_type( + OCSPTelemetryData.CERTIFICATE_EXTRACTION_FAILED + ) + logger.debug( + telemetry_data.generate_telemetry_data("RevocationCheckFailure") + ) return None - return self._validate(hostname, cert_data, telemetry_data, do_retry, no_exception) + return self._validate( + hostname, cert_data, telemetry_data, do_retry, no_exception + ) def _validate( - self, hostname, cert_data, telemetry_data, do_retry=True, no_exception=False): + self, hostname, cert_data, telemetry_data, do_retry=True, no_exception=False + ): # Validate certs sequentially if OCSP response cache server is used results = self._validate_certificates_sequential( - cert_data, telemetry_data, hostname, do_retry=do_retry) + cert_data, telemetry_data, hostname, do_retry=do_retry + ) SnowflakeOCSP.OCSP_CACHE.update_file(self) any_err = False - for err, issuer, subject, cert_id, ocsp_response in results: + for err, _issuer, _subject, _cert_id, _ocsp_response in results: if isinstance(err, RevocationCheckError): - err.msg += u' for {}'.format(hostname) + err.msg += f" for {hostname}" if not no_exception and err is not None: raise err elif err is not None: any_err = True - logger.debug('ok' if not any_err else 'failed') + logger.debug("ok" if not any_err else "failed") return results 
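+    # SF_OCSP_DO_RETRY defaults to "true"; any other value disables OCSP retries.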
@staticmethod @@ -1003,27 +1120,30 @@ def get_ocsp_retry_choice(): return os.getenv("SF_OCSP_DO_RETRY", "true") == "true" def is_cert_id_in_cache(self, cert_id, subject): + """Decides whether OCSP CertID is in cache. + + Args: + cert_id: OCSP CertID. + subject: Subject certificate. + + Returns: + True if in cache otherwise False, followed by the cached OCSP Response. """ - Is OCSP CertID in cache? - :param cert_id: OCSP CertID - :param subject: subject certificate - :return: True if in cache otherwise False, - followed by the cached OCSP Response - """ - found, cache = SnowflakeOCSP.OCSP_CACHE.find_cache( - self, cert_id, subject) + found, cache = SnowflakeOCSP.OCSP_CACHE.find_cache(self, cert_id, subject) return found, cache - def get_account_from_hostname(self, hostname): - """ - Extract the account name part - from the hostname - :param hostname: - :return: account name + def get_account_from_hostname(self, hostname: str) -> str: + """Extracts the account name part from the hostname. + + Args: + hostname: Hostname that account name is in. + + Returns: + The extracted account name. """ - split_hname = hostname.split('.') + split_hname = hostname.split(".") if "global" in split_hname: - acc_name = split_hname[0].split('-')[0] + acc_name = split_hname[0].split("-")[0] else: acc_name = split_hname[0] return acc_name @@ -1033,14 +1153,18 @@ def is_enabled_fail_open(self): @staticmethod def print_fail_open_warning(ocsp_log): - static_warning = "WARNING!!! Using fail-open to connect. Driver is connecting to an "\ - "HTTPS endpoint without OCSP based Certificate Revocation checking "\ - "as it could not obtain a valid OCSP Response to use from the CA OCSP "\ - "responder. Details:" - ocsp_warning = "{0} \n {1}".format(static_warning, ocsp_log) + static_warning = ( + "WARNING!!! Using fail-open to connect. Driver is connecting to an " + "HTTPS endpoint without OCSP based Certificate Revocation checking " + "as it could not obtain a valid OCSP Response to use from the CA OCSP " + "responder. Details:" + ) + ocsp_warning = f"{static_warning} \n {ocsp_log}" logger.error(ocsp_warning) - def validate_by_direct_connection(self, issuer, subject, telemetry_data, hostname=None, do_retry=True): + def validate_by_direct_connection( + self, issuer, subject, telemetry_data, hostname=None, do_retry=True + ): ssd_cache_status = False cache_status = False ocsp_response = None @@ -1049,81 +1173,97 @@ def validate_by_direct_connection(self, issuer, subject, telemetry_data, hostnam cert_id, req = self.create_ocsp_request(issuer, subject) if SnowflakeOCSP.SSD.ACTIVATE_SSD: ssd_cache_status, ssd = SnowflakeOCSP.SSD.find_in_ssd_cache( - self.get_account_from_hostname(hostname)) + self.get_account_from_hostname(hostname) + ) if not ssd_cache_status: - cache_status, ocsp_response = \ - self.is_cert_id_in_cache(cert_id, subject) + cache_status, ocsp_response = self.is_cert_id_in_cache(cert_id, subject) err = None try: if SnowflakeOCSP.SSD.ACTIVATE_SSD: if ssd_cache_status: - if SnowflakeOCSP.process_ocsp_bypass_directive(ssd, cert_id, - hostname): + if SnowflakeOCSP.process_ocsp_bypass_directive( + ssd, cert_id, hostname + ): return None, issuer, subject, cert_id, ssd else: - SnowflakeOCSP.SSD.remove_from_ssd_persistent_cache( - hostname) + SnowflakeOCSP.SSD.remove_from_ssd_persistent_cache(hostname) raise RevocationCheckError( msg="The account specific SSD being used is invalid. 
" - "Please contact Snowflake support", + "Please contact Snowflake support", errno=ER_INVALID_SSD, ) else: - wildcard_ssd_status, \ - ssd = SnowflakeOCSP.OCSP_CACHE.find_cache( - self, self.WILDCARD_CERTID, subject) + wildcard_ssd_status, ssd = SnowflakeOCSP.OCSP_CACHE.find_cache( + self, self.WILDCARD_CERTID, subject + ) # if the wildcard SSD is invalid # fall back to normal OCSP checking try: - if wildcard_ssd_status and \ - self.process_ocsp_bypass_directive( - ssd, self.WILDCARD_CERTID, '*'): + if wildcard_ssd_status and self.process_ocsp_bypass_directive( + ssd, self.WILDCARD_CERTID, "*" + ): return None, issuer, subject, cert_id, ssd except Exception as ex: logger.debug( "Failed to validate wildcard SSD, falling back to " - "specific OCSP Responses", ex) + "specific OCSP Responses", + ex, + ) SnowflakeOCSP.OCSP_CACHE.delete_cache( - self, self.WILDCARD_CERTID) + self, self.WILDCARD_CERTID + ) if not cache_status: telemetry_data.set_cache_hit(False) logger.debug("getting OCSP response from CA's OCSP server") - ocsp_response = self._fetch_ocsp_response(req, subject, - cert_id, telemetry_data, - hostname, do_retry) + ocsp_response = self._fetch_ocsp_response( + req, subject, cert_id, telemetry_data, hostname, do_retry + ) else: ocsp_url = self.extract_ocsp_url(subject) - cert_id_enc = self.encode_cert_id_base64(self.decode_cert_id_key(cert_id)) + cert_id_enc = self.encode_cert_id_base64( + self.decode_cert_id_key(cert_id) + ) telemetry_data.set_cache_hit(True) self.debug_ocsp_failure_url = SnowflakeOCSP.create_ocsp_debug_info( - self, req, ocsp_url) + self, req, ocsp_url + ) telemetry_data.set_ocsp_url(ocsp_url) telemetry_data.set_ocsp_req(req) telemetry_data.set_cert_id(cert_id_enc) logger.debug("using OCSP response cache") if not ocsp_response: - logger.debug('No OCSP URL is found.') - raise RevocationCheckError(msg="Could not retrieve OCSP Response. Cannot perform Revocation Check", - errno=ER_SERVER_CERTIFICATE_UNKNOWN) + telemetry_data.set_event_sub_type( + OCSPTelemetryData.OCSP_RESPONSE_UNAVAILABLE + ) + raise RevocationCheckError( + msg="Could not retrieve OCSP Response. 
Cannot perform Revocation Check", + errno=ER_OCSP_RESPONSE_UNAVAILABLE, + ) try: self.process_ocsp_response(issuer, cert_id, ocsp_response) err = None except RevocationCheckError as op_er: - if SnowflakeOCSP.SSD.ACTIVATE_SSD and op_er.errno == ER_INVALID_OCSP_RESPONSE: - logger.debug( - "Potentially the response is a server side directive") - if self.process_ocsp_bypass_directive(ocsp_response, - cert_id, hostname): + if ( + SnowflakeOCSP.SSD.ACTIVATE_SSD + and op_er.errno == ER_INVALID_OCSP_RESPONSE_SSD + ): + logger.debug("Potentially the response is a server side directive") + if self.process_ocsp_bypass_directive( + ocsp_response, cert_id, hostname + ): err = None else: # TODO - Remove this potentially broken OCSP Response / SSD raise op_er else: + telemetry_data.set_event_sub_type( + OCSPTelemetryData.ERROR_CODE_MAP[op_er.errno] + ) raise op_er except RevocationCheckError as rce: @@ -1140,42 +1280,65 @@ def validate_by_direct_connection(self, issuer, subject, telemetry_data, hostnam def verify_fail_open(self, ex_obj, telemetry_data): if not self.is_enabled_fail_open(): - if ex_obj.errno is ER_SERVER_CERTIFICATE_REVOKED: - logger.debug(telemetry_data.generate_telemetry_data("RevokedCertificateError", True)) + if ex_obj.errno is ER_OCSP_RESPONSE_CERT_STATUS_REVOKED: + logger.debug( + telemetry_data.generate_telemetry_data( + "RevokedCertificateError", True + ) + ) else: - logger.debug(telemetry_data.generate_telemetry_data("RevocationCheckFailure")) + logger.debug( + telemetry_data.generate_telemetry_data("RevocationCheckFailure") + ) return ex_obj else: - if ex_obj.errno is ER_SERVER_CERTIFICATE_REVOKED: - logger.debug(telemetry_data.generate_telemetry_data("RevokedCertificateError", True)) + if ex_obj.errno is ER_OCSP_RESPONSE_CERT_STATUS_REVOKED: + logger.debug( + telemetry_data.generate_telemetry_data( + "RevokedCertificateError", True + ) + ) return ex_obj else: SnowflakeOCSP.print_fail_open_warning( - telemetry_data.generate_telemetry_data("RevocationCheckFailure")) + telemetry_data.generate_telemetry_data("RevocationCheckFailure") + ) return None - def _validate_certificates_sequential(self, cert_data, telemetry_data, - hostname=None, do_retry=True): + def _validate_certificates_sequential( + self, cert_data, telemetry_data, hostname=None, do_retry=True + ): results = [] - self._check_ocsp_response_cache_server(cert_data) + try: + self._check_ocsp_response_cache_server(cert_data) + except RevocationCheckError as rce: + telemetry_data.set_event_sub_type( + OCSPTelemetryData.ERROR_CODE_MAP[rce.errno] + ) + except Exception as ex: + logger.debug( + "Caught unknown exception - %s. Continue to validate by direct connection", + str(ex), + ) + for issuer, subject in cert_data: r = self.validate_by_direct_connection( - issuer, subject, telemetry_data, hostname, do_retry=do_retry) + issuer, subject, telemetry_data, hostname, do_retry=do_retry + ) results.append(r) return results def _check_ocsp_response_cache_server(self, cert_data): - """ - Checks if OCSP response is in cache, and if not download the OCSP - response cache from the server. - :param cert_data: pairs of issuer and subject certificates + """Checks if OCSP response is in cache, and if not it downloads the OCSP response cache from the server. + + Args: + cert_data: Tuple of issuer and subject certificates. 
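Since the label handling in `get_account_from_hostname` above is easy to misread, here is a worked restatement: per the code, hostnames containing a `global` label yield the first label up to its first dash, while all other hostnames yield the first label whole. The hostnames below are fabricated for illustration.

```python
# Hedged restatement of the parsing rule in get_account_from_hostname above.
def account_from_hostname(hostname: str) -> str:
    labels = hostname.split(".")
    if "global" in labels:
        return labels[0].split("-")[0]
    return labels[0]

assert account_from_hostname("myacct-extra.global.snowflakecomputing.com") == "myacct"
assert account_from_hostname("myacct.us-east-1.snowflakecomputing.com") == "myacct"
```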
""" in_cache = False for issuer, subject in cert_data: # check if any OCSP response is NOT in cache cert_id, _ = self.create_ocsp_request(issuer, subject) - in_cache, cache = SnowflakeOCSP.OCSP_CACHE.find_cache( - self, cert_id, subject) + in_cache, _ = SnowflakeOCSP.OCSP_CACHE.find_cache(self, cert_id, subject) if not in_cache: # not found any break @@ -1184,38 +1347,45 @@ def _check_ocsp_response_cache_server(self, cert_data): self.OCSP_CACHE_SERVER.download_cache_from_server(self) def _lazy_read_ca_bundle(self): - """ - Reads the local cabundle file and cache it in memory - """ + """Reads the local cabundle file and cache it in memory.""" with SnowflakeOCSP.ROOT_CERTIFICATES_DICT_LOCK: if SnowflakeOCSP.ROOT_CERTIFICATES_DICT: # return if already loaded return try: - ca_bundle = (environ.get('REQUESTS_CA_BUNDLE') or - environ.get('CURL_CA_BUNDLE')) + ca_bundle = environ.get("REQUESTS_CA_BUNDLE") or environ.get( + "CURL_CA_BUNDLE" + ) if ca_bundle and path.exists(ca_bundle): # if the user/application specifies cabundle. self.read_cert_bundle(ca_bundle) else: import sys - from botocore.vendored.requests import certs - if hasattr(certs, '__file__') and \ - path.exists(certs.__file__) and \ - path.exists(path.join( - path.dirname(certs.__file__), 'cacert.pem')): + + # This import that depends on these libraries is to import certificates from them, + # we would like to have these as up to date as possible. + from requests import certs + + if ( + hasattr(certs, "__file__") + and path.exists(certs.__file__) + and path.exists( + path.join(path.dirname(certs.__file__), "cacert.pem") + ) + ): # if cacert.pem exists next to certs.py in request # package. ca_bundle = path.join( - path.dirname(certs.__file__), 'cacert.pem') + path.dirname(certs.__file__), "cacert.pem" + ) self.read_cert_bundle(ca_bundle) - elif hasattr(sys, '_MEIPASS'): + elif hasattr(sys, "_MEIPASS"): # if pyinstaller includes cacert.pem cabundle_candidates = [ - ['botocore', 'vendored', 'requests', 'cacert.pem'], - ['requests', 'cacert.pem'], - ['cacert.pem'], + ["botocore", "vendored", "requests", "cacert.pem"], + ["requests", "cacert.pem"], + ["cacert.pem"], ] for filename in cabundle_candidates: ca_bundle = path.join(sys._MEIPASS, *filename) @@ -1223,25 +1393,32 @@ def _lazy_read_ca_bundle(self): self.read_cert_bundle(ca_bundle) break else: - logger.error( - 'No cabundle file is found in _MEIPASS') + logger.error("No cabundle file is found in _MEIPASS") try: import certifi + self.read_cert_bundle(certifi.where()) except Exception: - logger.debug('no certifi is installed. ignored.') + logger.debug("no certifi is installed. ignored.") except Exception as e: - logger.error('Failed to read ca_bundle: %s', e) + logger.error("Failed to read ca_bundle: %s", e) if not SnowflakeOCSP.ROOT_CERTIFICATES_DICT: - logger.error('No CA bundle file is found in the system. ' - 'Set REQUESTS_CA_BUNDLE to the file.') + logger.error( + "No CA bundle file is found in the system. " + "Set REQUESTS_CA_BUNDLE to the file." 
+ ) @staticmethod def _calculate_tolerable_validity(this_update, next_update): - return max(int(SnowflakeOCSP.TOLERABLE_VALIDITY_RANGE_RATIO * ( - next_update - this_update)), SnowflakeOCSP.MAX_CLOCK_SKEW) + return max( + int( + SnowflakeOCSP.TOLERABLE_VALIDITY_RANGE_RATIO + * (next_update - this_update) + ), + SnowflakeOCSP.MAX_CLOCK_SKEW, + ) @staticmethod def _is_validaity_range(current_time, this_update, next_update, test_mode=None): @@ -1251,30 +1428,35 @@ def _is_validaity_range(current_time, this_update, next_update, test_mode=None): return False tolerable_validity = SnowflakeOCSP._calculate_tolerable_validity( - this_update, next_update) - logger.debug(u'Tolerable Validity range for OCSP response: +%s(s)', - tolerable_validity) - return this_update - SnowflakeOCSP.MAX_CLOCK_SKEW <= \ - current_time <= next_update + tolerable_validity + this_update, next_update + ) + return ( + this_update - SnowflakeOCSP.MAX_CLOCK_SKEW + <= current_time + <= next_update + tolerable_validity + ) @staticmethod def _validity_error_message(current_time, this_update, next_update): tolerable_validity = SnowflakeOCSP._calculate_tolerable_validity( - this_update, next_update) - return (u"Response is unreliable. Its validity " - u"date is out of range: current_time={0}, " - u"this_update={1}, next_update={2}, " - u"tolerable next_update={3}. A potential cause is " - u"client clock is skewed, CA fails to update OCSP " - u"response in time.".format( - strftime(SnowflakeOCSP.OUTPUT_TIMESTAMP_FORMAT, - gmtime(current_time)), - strftime(SnowflakeOCSP.OUTPUT_TIMESTAMP_FORMAT, - gmtime(this_update)), - strftime(SnowflakeOCSP.OUTPUT_TIMESTAMP_FORMAT, - gmtime(next_update)), - strftime(SnowflakeOCSP.OUTPUT_TIMESTAMP_FORMAT, - gmtime(next_update + tolerable_validity)))) + this_update, next_update + ) + return ( + "Response is unreliable. Its validity " + "date is out of range: current_time={}, " + "this_update={}, next_update={}, " + "tolerable next_update={}. A potential cause is " + "client clock is skewed, CA fails to update OCSP " + "response in time.".format( + strftime(SnowflakeOCSP.OUTPUT_TIMESTAMP_FORMAT, gmtime(current_time)), + strftime(SnowflakeOCSP.OUTPUT_TIMESTAMP_FORMAT, gmtime(this_update)), + strftime(SnowflakeOCSP.OUTPUT_TIMESTAMP_FORMAT, gmtime(next_update)), + strftime( + SnowflakeOCSP.OUTPUT_TIMESTAMP_FORMAT, + gmtime(next_update + tolerable_validity), + ), + ) + ) @staticmethod def clear_cache(): @@ -1291,67 +1473,83 @@ def delete_cache_file(): @staticmethod def create_ocsp_debug_info(ocsp, ocsp_request, ocsp_url): b64data = ocsp.decode_ocsp_request_b64(ocsp_request) - target_url = "{0}/{1}".format(ocsp_url, b64data) + target_url = f"{ocsp_url}/{b64data}" return target_url - def _fetch_ocsp_response(self, ocsp_request, subject, cert_id, - telemetry_data, hostname=None, do_retry=True): - """ - Fetch OCSP response using OCSPRequest - """ + def _fetch_ocsp_response( + self, + ocsp_request, + subject, + cert_id, + telemetry_data, + hostname=None, + do_retry=True, + ): + """Fetches OCSP response using OCSPRequest.""" sf_timeout = SnowflakeOCSP.CA_OCSP_RESPONDER_CONNECTION_TIMEOUT ocsp_url = self.extract_ocsp_url(subject) - cert_id_enc = self.encode_cert_id_base64( - self.decode_cert_id_key(cert_id)) + cert_id_enc = self.encode_cert_id_base64(self.decode_cert_id_key(cert_id)) if not ocsp_url: - raise RevocationCheckError(msg="No OCSP URL found in cert. 
Cannot perform Certificate Revocation check", - errno=ER_SERVER_CERTIFICATE_UNKNOWN) + telemetry_data.set_event_sub_type(OCSPTelemetryData.OCSP_URL_MISSING) + raise RevocationCheckError( + msg="No OCSP URL found in cert. Cannot perform Certificate Revocation check", + errno=ER_OCSP_URL_INFO_MISSING, + ) headers = {HTTP_HEADER_USER_AGENT: PYTHON_CONNECTOR_USER_AGENT} - if not SnowflakeOCSP.SSD.ACTIVATE_SSD and \ - not OCSPServer.is_enabled_new_ocsp_endpoint(): - actual_method = 'post' if self._use_post_method else 'get' + if ( + not SnowflakeOCSP.SSD.ACTIVATE_SSD + and not OCSPServer.is_enabled_new_ocsp_endpoint() + ): + actual_method = "post" if self._use_post_method else "get" if self.OCSP_CACHE_SERVER.OCSP_RETRY_URL: # no POST is supported for Retry URL at the moment. - actual_method = 'get' + actual_method = "get" - if actual_method == 'get': + if actual_method == "get": b64data = self.decode_ocsp_request_b64(ocsp_request) - target_url = self.OCSP_CACHE_SERVER.generate_get_url( - ocsp_url, b64data) + target_url = self.OCSP_CACHE_SERVER.generate_get_url(ocsp_url, b64data) payload = None else: target_url = ocsp_url payload = self.decode_ocsp_request(ocsp_request) - headers['Content-Type'] = 'application/ocsp-request' + headers["Content-Type"] = "application/ocsp-request" else: - actual_method = 'post' + actual_method = "post" target_url = self.OCSP_CACHE_SERVER.OCSP_RETRY_URL ocsp_req_enc = self.decode_ocsp_request_b64(ocsp_request) - payload = json.dumps({'hostname': hostname, - 'ocsp_request': ocsp_req_enc, - 'cert_id': cert_id_enc, - 'ocsp_responder_url': ocsp_url}) - headers['Content-Type'] = 'application/json' - + payload = json.dumps( + { + "hostname": hostname, + "ocsp_request": ocsp_req_enc, + "cert_id": cert_id_enc, + "ocsp_responder_url": ocsp_url, + } + ) + headers["Content-Type"] = "application/json" + + telemetry_data.set_ocsp_connection_method(actual_method) if self.test_mode is not None: logger.debug("WARNING - DRIVER IS CONFIGURED IN TESTMODE.") test_ocsp_url = os.getenv("SF_TEST_OCSP_URL", None) - test_timeout = os.getenv("SF_TEST_CA_OCSP_RESPONDER_CONNECTION_TIMEOUT", None) + test_timeout = os.getenv( + "SF_TEST_CA_OCSP_RESPONDER_CONNECTION_TIMEOUT", None + ) if test_timeout is not None: sf_timeout = int(test_timeout) if test_ocsp_url is not None: target_url = test_ocsp_url self.debug_ocsp_failure_url = SnowflakeOCSP.create_ocsp_debug_info( - self, ocsp_request, ocsp_url) + self, ocsp_request, ocsp_url + ) telemetry_data.set_ocsp_req(self.decode_ocsp_request_b64(ocsp_request)) telemetry_data.set_ocsp_url(ocsp_url) telemetry_data.set_cert_id(cert_id_enc) ret = None - logger.debug('url: %s', target_url) + logger.debug("url: %s", target_url) sf_max_retry = SnowflakeOCSP.CA_OCSP_RESPONDER_MAX_RETRY_FO if not self.is_enabled_fail_open(): sf_max_retry = SnowflakeOCSP.CA_OCSP_RESPONDER_MAX_RETRY_FC @@ -1360,7 +1558,7 @@ def _fetch_ocsp_response(self, ocsp_request, subject, cert_id, max_retry = sf_max_retry if do_retry else 1 sleep_time = 1 backoff = DecorrelateJitterBackoff(sleep_time, 16) - for attempt in range(max_retry): + for _ in range(max_retry): try: response = session.request( headers=headers, @@ -1372,135 +1570,155 @@ def _fetch_ocsp_response(self, ocsp_request, subject, cert_id, if response.status_code == OK: logger.debug( "OCSP response was successfully returned from OCSP " - "server.") + "server." + ) ret = response.content break elif max_retry > 1: sleep_time = backoff.next_sleep(1, sleep_time) logger.debug( "OCSP server returned %s. 
Retrying in %s(s)", - response.status_code, sleep_time) + response.status_code, + sleep_time, + ) time.sleep(sleep_time) except Exception as ex: if max_retry > 1: sleep_time = backoff.next_sleep(1, sleep_time) - logger.debug("Could not fetch OCSP Response from server" - "Retrying in %s(s)", sleep_time) + logger.debug( + "Could not fetch OCSP Response from server" + "Retrying in %s(s)", + sleep_time, + ) time.sleep(sleep_time) else: + telemetry_data.set_event_sub_type( + OCSPTelemetryData.OCSP_RESPONSE_FETCH_EXCEPTION + ) raise RevocationCheckError( msg="Could not fetch OCSP Response from server. Consider" - "checking your whitelists : Exception - {}".format( - str(ex)), - errno=ER_OCSP_FAILED_TO_CONNECT_HOST) + "checking your whitelists : Exception - {}".format(str(ex)), + errno=ER_OCSP_RESPONSE_FETCH_EXCEPTION, + ) else: logger.error( - "Failed to get OCSP response after {0} attempt. Consider checking " - "for OCSP URLs being blocked".format(max_retry)) + "Failed to get OCSP response after {} attempt. Consider checking " + "for OCSP URLs being blocked".format(max_retry) + ) + telemetry_data.set_event_sub_type( + OCSPTelemetryData.OCSP_RESPONSE_FETCH_FAILURE + ) raise RevocationCheckError( msg="Failed to get OCSP response after {} attempt.".format( - max_retry), - errno=ER_INVALID_OCSP_RESPONSE_CODE) + max_retry + ), + errno=ER_OCSP_RESPONSE_FETCH_FAILURE, + ) return ret def _process_good_status(self, single_response, cert_id, ocsp_response): - """ - Process GOOD status - """ + """Processes GOOD status.""" current_time = int(time.time()) - this_update_native, next_update_native = \ - self.extract_good_status(single_response) + this_update_native, next_update_native = self.extract_good_status( + single_response + ) if this_update_native is None or next_update_native is None: raise RevocationCheckError( - msg=u"Either this update or next " - u"update is None. this_update: {}, next_update: {}".format( - this_update_native, next_update_native), - errno=ER_INVALID_OCSP_RESPONSE) - - this_update = (this_update_native.replace( - tzinfo=None) - SnowflakeOCSP.ZERO_EPOCH).total_seconds() - next_update = (next_update_native.replace( - tzinfo=None) - SnowflakeOCSP.ZERO_EPOCH).total_seconds() + msg="Either this update or next " + "update is None. 
this_update: {}, next_update: {}".format( + this_update_native, next_update_native + ), + errno=ER_INVALID_OCSP_RESPONSE_VALIDITY_INFO_MISSING, + ) + + this_update = ( + this_update_native.replace(tzinfo=None) - SnowflakeOCSP.ZERO_EPOCH + ).total_seconds() + next_update = ( + next_update_native.replace(tzinfo=None) - SnowflakeOCSP.ZERO_EPOCH + ).total_seconds() if not SnowflakeOCSP._is_validaity_range( - current_time, this_update, next_update, self.test_mode): + current_time, this_update, next_update, self.test_mode + ): raise RevocationCheckError( msg=SnowflakeOCSP._validity_error_message( - current_time, this_update, next_update), - errno=ER_INVALID_OCSP_RESPONSE) + current_time, this_update, next_update + ), + errno=ER_OCSP_RESPONSE_EXPIRED, + ) def _process_revoked_status(self, single_response, cert_id): - """ - Process REVOKED status - """ + """Processes REVOKED status.""" current_time = int(time.time()) if self.test_mode is not None: test_cert_status = os.getenv("SF_TEST_OCSP_CERT_STATUS") - if test_cert_status == 'revoked': + if test_cert_status == "revoked": raise RevocationCheckError( - msg="The certificate has been revoked: current_time={0}, " - "revocation_time={1}, reason={2}".format( + msg="The certificate has been revoked: current_time={}, " + "revocation_time={}, reason={}".format( strftime( - SnowflakeOCSP.OUTPUT_TIMESTAMP_FORMAT, - gmtime(current_time)), + SnowflakeOCSP.OUTPUT_TIMESTAMP_FORMAT, gmtime(current_time) + ), strftime( - SnowflakeOCSP.OUTPUT_TIMESTAMP_FORMAT, - gmtime(current_time)), - "Force Revoke"), - errno=ER_SERVER_CERTIFICATE_REVOKED + SnowflakeOCSP.OUTPUT_TIMESTAMP_FORMAT, gmtime(current_time) + ), + "Force Revoke", + ), + errno=ER_OCSP_RESPONSE_CERT_STATUS_REVOKED, ) SnowflakeOCSP.OCSP_CACHE.delete_cache(self, cert_id) revocation_time, revocation_reason = self.extract_revoked_status( - single_response) + single_response + ) raise RevocationCheckError( - msg="The certificate has been revoked: current_time={0}, " - "revocation_time={1}, reason={2}".format( - strftime( - SnowflakeOCSP.OUTPUT_TIMESTAMP_FORMAT, - gmtime(current_time)), - revocation_time.strftime( - SnowflakeOCSP.OUTPUT_TIMESTAMP_FORMAT), - revocation_reason), - errno=ER_SERVER_CERTIFICATE_REVOKED + msg="The certificate has been revoked: current_time={}, " + "revocation_time={}, reason={}".format( + strftime(SnowflakeOCSP.OUTPUT_TIMESTAMP_FORMAT, gmtime(current_time)), + revocation_time.strftime(SnowflakeOCSP.OUTPUT_TIMESTAMP_FORMAT), + revocation_reason, + ), + errno=ER_OCSP_RESPONSE_CERT_STATUS_REVOKED, ) def _process_unknown_status(self, cert_id): - """ - Process UNKNOWN status - """ + """Processes UNKNOWN status.""" SnowflakeOCSP.OCSP_CACHE.delete_cache(self, cert_id) raise RevocationCheckError( - msg=u"The certificate is in UNKNOWN revocation status.", - errno=ER_SERVER_CERTIFICATE_UNKNOWN, + msg="The certificate is in UNKNOWN revocation status.", + errno=ER_OCSP_RESPONSE_CERT_STATUS_UNKNOWN, ) def decode_ocsp_response_cache(self, ocsp_response_cache_json): - """ - Decodes OCSP response cache from JSON - """ + """Decodes OCSP response cache from JSON.""" try: - for cert_id_base64, ( - ts, ocsp_response) in ocsp_response_cache_json.items(): + for cert_id_base64, (ts, ocsp_response) in ocsp_response_cache_json.items(): cert_id = self.decode_cert_id_base64(cert_id_base64) if not self.is_valid_time(cert_id, b64decode(ocsp_response)): continue SnowflakeOCSP.OCSP_CACHE.update_or_delete_cache( - self, cert_id, b64decode(ocsp_response), ts) + self, cert_id, b64decode(ocsp_response), ts + ) except 
Exception as ex: logger.debug("Caught here - %s", ex) - raise ex + ermsg = "Exception raised while decoding OCSP Response Cache {}".format( + str(ex) + ) + raise RevocationCheckError( + msg=ermsg, errno=ER_OCSP_RESPONSE_CACHE_DECODE_FAILED + ) def encode_ocsp_response_cache(self, ocsp_response_cache_json): - """ - Encodes OCSP response cache to JSON - """ - logger.debug('encoding OCSP response cache to JSON') - for hkey, (current_time, ocsp_response) in \ - SnowflakeOCSP.OCSP_CACHE.iterate_cache(): + """Encodes OCSP response cache to JSON.""" + logger.debug("encoding OCSP response cache to JSON") + for hkey, ( + current_time, + ocsp_response, + ) in SnowflakeOCSP.OCSP_CACHE.iterate_cache(): k = self.encode_cert_id_base64(hkey) - v = b64encode(ocsp_response).decode('ascii') + v = b64encode(ocsp_response).decode("ascii") ocsp_response_cache_json[k] = (current_time, v) @staticmethod @@ -1508,61 +1726,62 @@ def read_directives(): key_update_ssd = path.join(SFSsd.SSD_DIR, "key_upd_ssd.ssd") host_specific_ssd = path.join(SFSsd.SSD_DIR, "host_spec_bypass_ssd.ssd") if path.exists(key_update_ssd): - with codecs.open(key_update_ssd, 'r', encoding='utf-8', - errors='ignore') as f: + with codecs.open( + key_update_ssd, "r", encoding="utf-8", errors="ignore" + ) as f: ssd_json = json.load(f) for issuer, ssd in ssd_json.items(): SnowflakeOCSP.process_key_update_directive(issuer, ssd) if path.exists(host_specific_ssd): - with codecs.open(host_specific_ssd, 'r', encoding='utf-8', - errors='ignore') as f: + with codecs.open( + host_specific_ssd, "r", encoding="utf-8", errors="ignore" + ) as f: ssd_json = json.load(f) for account_name, ssd in ssd_json.items(): - SnowflakeOCSP.SSD.add_to_ssd_persistent_cache(account_name, - ssd) + SnowflakeOCSP.SSD.add_to_ssd_persistent_cache(account_name, ssd) + + def process_ocsp_bypass_directive( + self, ssd_dir_enc, sfc_cert_id, sfc_endpoint + ) -> bool: + """Parses the jwt token as ocsp bypass directive and decides if SSD is valid. - def process_ocsp_bypass_directive(self, ssd_dir_enc, sfc_cert_id, - sfc_endpoint): - """ - Parse the jwt token as ocsp bypass directive. Expected format: - Payload: - { - “sfcEndpoint” : - “certID” : - “nbf” : - “exp” : - } - Return True for valid SSD else return False - :param ssd_dir_enc: - :param sfc_cert_id: - :param sfc_endpoint: - :return: True/ False + Payload: + { + “sfcEndpoint” : + “certID” : + “nbf” : + “exp” : + } """ - logger.debug("Received an OCSP Bypass Server Side Directive") jwt_ssd_header = jwt.get_unverified_header(ssd_dir_enc) - jwt_ssd_decoded = jwt.decode(ssd_dir_enc, - SnowflakeOCSP.SSD.ret_ssd_pub_key( - jwt_ssd_header['ssd_iss']), - algorithm='RS512') - - if datetime.fromtimestamp(jwt_ssd_decoded['exp']) - \ - datetime.fromtimestamp(jwt_ssd_decoded['nbf']) \ - > timedelta(days=7): + jwt_ssd_decoded = jwt.decode( + ssd_dir_enc, + SnowflakeOCSP.SSD.ret_ssd_pub_key(jwt_ssd_header["ssd_iss"]), + algorithm="RS512", + ) + + if datetime.fromtimestamp(jwt_ssd_decoded["exp"]) - datetime.fromtimestamp( + jwt_ssd_decoded["nbf"] + ) > timedelta(days=7): logger.debug( " Server Side Directive is invalid. Validity exceeds 7 days start - " - "start {0} end {1} ". - format(datetime.fromtimestamp(jwt_ssd_decoded['nbf']). - strftime("%m/%d/%Y, %H:%M:%S"), - datetime.fromtimestamp(jwt_ssd_decoded['exp']). 
- strftime("%m/%d/%Y, %H:%M:%S"))) + "start {} end {} ".format( + datetime.fromtimestamp(jwt_ssd_decoded["nbf"]).strftime( + "%m/%d/%Y, %H:%M:%S" + ), + datetime.fromtimestamp(jwt_ssd_decoded["exp"]).strftime( + "%m/%d/%Y, %H:%M:%S" + ), + ) + ) return False # Check if the directive is generic (endpoint = *) # or if it is meant for a specific account - if jwt_ssd_decoded['sfcEndpoint'] != '*': + if jwt_ssd_decoded["sfcEndpoint"] != "*": """ In case there are multiple hostnames associated with the same account, @@ -1572,13 +1791,13 @@ def process_ocsp_bypass_directive(self, ssd_dir_enc, sfc_cert_id, list of all the hostnames that can be associated with the account in question. """ - split_string = jwt_ssd_decoded['sfcEndpoint'].split() + split_string = jwt_ssd_decoded["sfcEndpoint"].split() if sfc_endpoint in split_string: return True else: return False - ssd_cert_id_b64 = jwt_ssd_decoded['certId'] + ssd_cert_id_b64 = jwt_ssd_decoded["certId"] ssd_cert_id = self.decode_cert_id_base64(ssd_cert_id_b64) hkey_ssd = self.decode_cert_id_key(ssd_cert_id) @@ -1588,44 +1807,42 @@ def process_ocsp_bypass_directive(self, ssd_dir_enc, sfc_cert_id, else: logger.debug( "Found error in SSD. CertId key in OCSP Cache and CertID in SSD do not match", - sfc_cert_id, jwt_ssd_decoded['certId']) + sfc_cert_id, + jwt_ssd_decoded["certId"], + ) return False @staticmethod def process_key_update_directive(issuer, key_upd_dir_enc): - """ - Parse the jwt token as key update directive. - If the key version in directive < internal key version - do nothing as the internal key is already latest. - Otherwise update in memory pub key corresponding to - the issuer in the directive. - - Expected Format: - Payload: - { - “keyVer” : - “pubKeyTyp” : - “pubKey” : - } - - :param issuer: - :param key_upd_dir_enc - """ + """Parses the jwt token as key update directive. + + If the key version in directive < internal key versio do nothing as the internal key is already latest. + Otherwise update in memory pub key corresponding to the issuer in the directive. + Expected Format: + Payload: + { + “keyVer” : + “pubKeyTyp” : + “pubKey” : + } + """ logger.debug( - "Received an OCSP Key Update Server Side Directive from Issuer - ", - issuer) + "Received an OCSP Key Update Server Side Directive from Issuer - ", issuer + ) jwt_ssd_header = jwt.get_unverified_header(key_upd_dir_enc) - ssd_issuer = jwt_ssd_header['ssd_iss'] + ssd_issuer = jwt_ssd_header["ssd_iss"] # Use the in memory public key corresponding to 'issuer' # for JWT signature validation. 
- jwt_ssd_decoded = jwt.decode(key_upd_dir_enc, - SnowflakeOCSP.SSD.ret_ssd_pub_key( - ssd_issuer), algorithm='RS512') + jwt_ssd_decoded = jwt.decode( + key_upd_dir_enc, + SnowflakeOCSP.SSD.ret_ssd_pub_key(ssd_issuer), + algorithm="RS512", + ) - ssd_pub_key_ver = float(jwt_ssd_decoded['keyVer']) - ssd_pub_key_new = jwt_ssd_decoded['pubKey'] + ssd_pub_key_ver = float(jwt_ssd_decoded["keyVer"]) + ssd_pub_key_new = jwt_ssd_decoded["pubKey"] """ Check for consistency in issuer name @@ -1635,108 +1852,76 @@ def process_key_update_directive(issuer, key_upd_dir_enc): """ if ssd_issuer == issuer and ssd_pub_key_ver > SFSsd.ret_ssd_pub_key_ver( - ssd_issuer): - SnowflakeOCSP.SSD.update_pub_key(ssd_issuer, ssd_pub_key_ver, - ssd_pub_key_new) + ssd_issuer + ): + SnowflakeOCSP.SSD.update_pub_key( + ssd_issuer, ssd_pub_key_ver, ssd_pub_key_new + ) def read_cert_bundle(self, ca_bundle_file, storage=None): - """ - Reads a certificate file including certificates in PEM format - """ + """Reads a certificate file including certificates in PEM format.""" raise NotImplementedError def encode_cert_id_key(self, _): - """ - Encode Cert ID key to native CertID - """ + """Encodes Cert ID key to native CertID.""" raise NotImplementedError def decode_cert_id_key(self, _): - """ - Decode name CertID to Cert ID key - """ + """Decodes name CertID to Cert ID key.""" raise NotImplementedError def encode_cert_id_base64(self, hkey): - """ - Encode native CertID to base64 Cert ID - """ + """Encodes native CertID to base64 Cert ID.""" raise NotImplementedError def decode_cert_id_base64(self, cert_id_base64): - """ - Decode base64 Cert ID to native CertID - """ + """Decodes base64 Cert ID to native CertID.""" raise NotImplementedError def create_ocsp_request(self, issuer, subject): - """ - Create CertId and OCSPRequest - """ + """Creates CertId and OCSPRequest.""" raise NotImplementedError def extract_ocsp_url(self, cert): - """ - Extract OCSP URL from Certificate - """ + """Extracts OCSP URL from Certificate.""" raise NotImplementedError def decode_ocsp_request(self, ocsp_request): - """ - Decode OCSP request to DER - """ + """Decodes OCSP request to DER.""" raise NotImplementedError def decode_ocsp_request_b64(self, ocsp_request): - """ - Decode OCSP Request object to b64 - """ + """Decodes OCSP Request object to b64.""" raise NotImplementedError def extract_good_status(self, single_response): - """ - Extract Revocation Status GOOD - """ + """Extracts Revocation Status GOOD.""" raise NotImplementedError def extract_revoked_status(self, single_response): - """ - Extract Revocation Status REVOKED - """ + """Extracts Revocation Status REVOKED.""" raise NotImplementedError def process_ocsp_response(self, issuer, cert_id, ocsp_response): - """ - Process OCSP response - """ + """Processes OCSP response.""" raise NotImplementedError def verify_signature(self, signature_algorithm, signature, cert, data): - """ - Verify signature - """ + """Verifies signature.""" raise NotImplementedError def extract_certificate_chain(self, connection): - """ - Gets certificate chain and extract the key info from OpenSSL connection - """ + """Gets certificate chain and extract the key info from OpenSSL connection.""" raise NotImplementedError def create_pair_issuer_subject(self, cert_map): - """ - Creates pairs of issuer and subject certificates - """ + """Creates pairs of issuer and subject certificates.""" raise NotImplementedError def subject_name(self, subject): - """ - Human readable Subject name - """ + """Gets human readable Subject name.""" 
raise NotImplementedError def is_valid_time(self, cert_id, ocsp_response): - """ - Check whether ocsp_response is in valid time range - """ + """Checks whether ocsp_response is in valid time range.""" raise NotImplementedError diff --git a/src/snowflake/connector/options.py b/src/snowflake/connector/options.py new file mode 100644 index 000000000..cf880c4c5 --- /dev/null +++ b/src/snowflake/connector/options.py @@ -0,0 +1,126 @@ +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +import importlib +import warnings +from logging import getLogger +from types import ModuleType +from typing import Union + +import pkg_resources + +from .errors import MissingDependencyError + +logger = getLogger(__name__) + +"""This module helps to manage optional dependencies. + +It implements MissingOptionalDependency as a base class. If a module is unavailable an instance of this will be +returned. These derived classes can be seen in this file pre-defined. The point of these classes is that if someone +tries to use pyarrow code then by importing pyarrow from this module if they did pyarrow.xxx then that would raise +a MissingDependencyError. +""" + + +class MissingOptionalDependency: + """A class to replace missing dependencies. + + The only thing this class is supposed to do is raise a MissingDependencyError when __getattr__ is called. + This will be triggered whenever module.member is going to be called. + """ + + _dep_name = "not set" + + def __getattr__(self, item): + raise MissingDependencyError(self._dep_name) + + +class MissingPandas(MissingOptionalDependency): + """The class is specifically for pandas optional dependency.""" + + _dep_name = "pandas" + + +class MissingKeyring(MissingOptionalDependency): + """The class is specifically for sso optional dependency.""" + + _dep_name = "keyring" + + +ModuleLikeObject = Union[ModuleType, MissingOptionalDependency] + + +def warn_incompatible_dep( + dep_name: str, installed_ver: str, expected_ver: pkg_resources.Requirement +) -> None: + warnings.warn( + "You have an incompatible version of '{}' installed ({}), please install a version that " + "adheres to: '{}'".format(dep_name, installed_ver, expected_ver), + stacklevel=2, + ) + + +def _import_or_missing_pandas_option() -> tuple[ + ModuleLikeObject, ModuleLikeObject, bool +]: + """This function tries importing the following packages: pandas, pyarrow. + + If available it returns pandas and pyarrow packages with a flag of whether they were imported. + It also warns users if they have an unsupported pyarrow version installed if possible. 
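The placeholder pattern that `options.py` builds on is worth seeing end to end; below is a stripped-down, self-contained variant. The plain `Exception` subclass is a simplification for illustration, not the connector's `MissingDependencyError`.

```python
import importlib

class MissingDependencyError(Exception):
    """Simplified stand-in for the connector's error class."""

class MissingOptionalDependency:
    # Any attribute access on the placeholder raises, so a missing
    # optional package fails at use time rather than at import time.
    _dep_name = "not set"

    def __getattr__(self, item):
        raise MissingDependencyError(self._dep_name)

def import_or_placeholder(name: str):
    try:
        return importlib.import_module(name)
    except ImportError:
        placeholder = MissingOptionalDependency()
        placeholder._dep_name = name
        return placeholder

pandas = import_or_placeholder("pandas")  # usable only if pandas is installed
```

The design choice mirrors the module docstring above: callers can always `from ... import pandas`, and only code paths that actually touch the optional dependency pay the cost of its absence.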
+ """ + try: + pandas = importlib.import_module("pandas") + # since we enable relative imports without dots this import gives us an issues when ran from test directory + from pandas import DataFrame # NOQA + + pyarrow = importlib.import_module("pyarrow") + # Check whether we have the currently supported pyarrow installed + installed_packages = pkg_resources.working_set.by_key + if all( + k in installed_packages for k in ("snowflake-connector-python", "pyarrow") + ): + _pandas_extras = installed_packages["snowflake-connector-python"]._dep_map[ + "pandas" + ] + _expected_pyarrow_version = [ + dep for dep in _pandas_extras if dep.name == "pyarrow" + ][0] + _installed_pyarrow_version = installed_packages["pyarrow"] + if ( + _installed_pyarrow_version + and _installed_pyarrow_version.version not in _expected_pyarrow_version + ): + warn_incompatible_dep( + "pyarrow", + _installed_pyarrow_version.version, + _expected_pyarrow_version, + ) + + else: + logger.info( + "Cannot determine if compatible pyarrow is installed because of missing package(s) from " + "{}".format(installed_packages.keys()) + ) + return pandas, pyarrow, True + except ImportError: + return MissingPandas(), MissingPandas(), False + + +def _import_or_missing_keyring_option() -> tuple[ModuleLikeObject, bool]: + """This function tries importing the following packages: keyring. + + If available it returns keyring package with a flag of whether it was imported. + """ + try: + keyring = importlib.import_module("keyring") + return keyring, True + except ImportError: + return MissingKeyring(), False + + +# Create actual constants to be imported from this file +pandas, pyarrow, installed_pandas = _import_or_missing_pandas_option() +keyring, installed_keyring = _import_or_missing_keyring_option() diff --git a/src/snowflake/connector/pandas_tools.py b/src/snowflake/connector/pandas_tools.py new file mode 100644 index 000000000..eb1e220bd --- /dev/null +++ b/src/snowflake/connector/pandas_tools.py @@ -0,0 +1,320 @@ +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +import os +import random +import string +from functools import partial +from logging import getLogger +from tempfile import TemporaryDirectory +from typing import TYPE_CHECKING, Callable, Iterable, Iterator, Sequence, TypeVar + +from snowflake.connector import ProgrammingError +from snowflake.connector.options import pandas + +if TYPE_CHECKING: # pragma: no cover + from .connection import SnowflakeConnection + + try: + import sqlalchemy + except ImportError: + sqlalchemy = None + +T = TypeVar("T", bound=Sequence) + +logger = getLogger(__name__) + + +def chunk_helper(lst: T, n: int) -> Iterator[tuple[int, T]]: + """Helper generator to chunk a sequence efficiently with current index like if enumerate was called on sequence.""" + for i in range(0, len(lst), n): + yield int(i / n), lst[i : i + n] + + +def write_pandas( + conn: SnowflakeConnection, + df: pandas.DataFrame, + table_name: str, + database: str | None = None, + schema: str | None = None, + chunk_size: int | None = None, + compression: str = "gzip", + on_error: str = "abort_statement", + parallel: int = 4, + quote_identifiers: bool = True, + auto_create_table: bool = False, + create_temp_table: bool = False, +) -> tuple[ + bool, + int, + int, + Sequence[ + tuple[ + str, + str, + int, + int, + int, + int, + str | None, + int | None, + int | None, + str | None, + ] + ], +]: + """Allows users to most efficiently write back a pandas DataFrame to Snowflake. 
+ + It works by dumping the DataFrame into Parquet files, uploading them and finally copying their data into the table. + + Returns whether all files were ingested correctly, number of chunks uploaded, and number of rows ingested + with all of the COPY INTO command's output for debugging purposes. + + Example usage: + import pandas + from snowflake.connector.pandas_tools import write_pandas + + df = pandas.DataFrame([('Mark', 10), ('Luke', 20)], columns=['name', 'balance']) + success, nchunks, nrows, _ = write_pandas(cnx, df, 'customers') + + Args: + conn: Connection to be used to communicate with Snowflake. + df: Dataframe we'd like to write back. + table_name: Table name where we want to insert into. + database: Database schema and table is in, if not provided the default one will be used (Default value = None). + schema: Schema table is in, if not provided the default one will be used (Default value = None). + chunk_size: Number of elements to be inserted once, if not provided all elements will be dumped once + (Default value = None). + compression: The compression used on the Parquet files, can only be gzip, or snappy. Gzip gives supposedly a + better compression, while snappy is faster. Use whichever is more appropriate (Default value = 'gzip'). + on_error: Action to take when COPY INTO statements fail, default follows documentation at: + https://docs.snowflake.com/en/sql-reference/sql/copy-into-table.html#copy-options-copyoptions + (Default value = 'abort_statement'). + parallel: Number of threads to be used when uploading chunks, default follows documentation at: + https://docs.snowflake.com/en/sql-reference/sql/put.html#optional-parameters (Default value = 4). + quote_identifiers: By default, identifiers, specifically database, schema, table and column names + (from df.columns) will be quoted. If set to False, identifiers are passed on to Snowflake without quoting. + I.e. identifiers will be coerced to uppercase by Snowflake. (Default value = True) + auto_create_table: When true, will automatically create a table with corresponding columns for each column in + the passed in DataFrame. The table will not be created if it already exists + create_temp_table: Will make the auto-created table as a temporary table + + Returns: + Returns the COPY INTO command's results to verify ingestion in the form of a tuple of whether all chunks were + ingested correctly, # of chunks, # of ingested rows, and ingest's output. + """ + if database is not None and schema is None: + raise ProgrammingError( + "Schema has to be provided to write_pandas when a database is provided" + ) + # This dictionary maps the compression algorithm to Snowflake put copy into command type + # https://docs.snowflake.com/en/sql-reference/sql/copy-into-table.html#type-parquet + compression_map = {"gzip": "auto", "snappy": "snappy"} + if compression not in compression_map.keys(): + raise ProgrammingError( + "Invalid compression '{}', only acceptable values are: {}".format( + compression, compression_map.keys() + ) + ) + if quote_identifiers: + location = ( + (('"' + database + '".') if database else "") + + (('"' + schema + '".') if schema else "") + + ('"' + table_name + '"') + ) + else: + location = ( + (database + "." if database else "") + + (schema + "." 
if schema else "") + + (table_name) + ) + if chunk_size is None: + chunk_size = len(df) + cursor = conn.cursor() + stage_name = None # Forward declaration + while True: + try: + stage_name = "".join( + random.choice(string.ascii_lowercase) for _ in range(5) + ) + create_stage_sql = ( + "create temporary stage /* Python:snowflake.connector.pandas_tools.write_pandas() */ " + '"{stage_name}"' + ).format(stage_name=stage_name) + logger.debug(f"creating stage with '{create_stage_sql}'") + cursor.execute(create_stage_sql, _is_internal=True).fetchall() + break + except ProgrammingError as pe: + if pe.msg.endswith("already exists."): + continue + raise + + with TemporaryDirectory() as tmp_folder: + for i, chunk in chunk_helper(df, chunk_size): + chunk_path = os.path.join(tmp_folder, f"file{i}.txt") + # Dump chunk into parquet file + chunk.to_parquet(chunk_path, compression=compression) + # Upload parquet file + upload_sql = ( + "PUT /* Python:snowflake.connector.pandas_tools.write_pandas() */ " + "'file://{path}' @\"{stage_name}\" PARALLEL={parallel}" + ).format( + path=chunk_path.replace("\\", "\\\\").replace("'", "\\'"), + stage_name=stage_name, + parallel=parallel, + ) + logger.debug(f"uploading files with '{upload_sql}'") + cursor.execute(upload_sql, _is_internal=True) + # Remove chunk file + os.remove(chunk_path) + if quote_identifiers: + columns = '"' + '","'.join(list(df.columns)) + '"' + else: + columns = ",".join(list(df.columns)) + + if auto_create_table: + file_format_name = None + while True: + try: + file_format_name = ( + '"' + + "".join(random.choice(string.ascii_lowercase) for _ in range(5)) + + '"' + ) + file_format_sql = ( + f"CREATE FILE FORMAT {file_format_name} " + f"/* Python:snowflake.connector.pandas_tools.write_pandas() */ " + f"TYPE=PARQUET COMPRESSION={compression_map[compression]}" + ) + logger.debug(f"creating file format with '{file_format_sql}'") + cursor.execute(file_format_sql, _is_internal=True) + break + except ProgrammingError as pe: + if pe.msg.endswith("already exists."): + continue + raise + infer_schema_sql = f"SELECT COLUMN_NAME, TYPE FROM table(infer_schema(location=>'@\"{stage_name}\"', file_format=>'{file_format_name}'))" + logger.debug(f"inferring schema with '{infer_schema_sql}'") + column_type_mapping = dict( + cursor.execute(infer_schema_sql, _is_internal=True).fetchall() + ) + # Infer schema can return the columns out of order depending on the chunking we do when uploading + # so we have to iterate through the dataframe columns to make sure we create the table with its + # columns in order + quote = '"' if quote_identifiers else "" + create_table_columns = ", ".join( + [f"{quote}{c}{quote} {column_type_mapping[c]}" for c in df.columns] + ) + create_table_sql = ( + f"CREATE {'TEMP ' if create_temp_table else ''}TABLE IF NOT EXISTS {location} " + f"({create_table_columns})" + f" /* Python:snowflake.connector.pandas_tools.write_pandas() */ " + ) + logger.debug(f"auto creating table with '{create_table_sql}'") + cursor.execute(create_table_sql, _is_internal=True) + drop_file_format_sql = f"DROP FILE FORMAT IF EXISTS {file_format_name}" + logger.debug(f"dropping file format with '{drop_file_format_sql}'") + cursor.execute(drop_file_format_sql, _is_internal=True) + + # in Snowflake, all parquet data is stored in a single column, $1, so we must select columns explicitly + # see (https://docs.snowflake.com/en/user-guide/script-data-load-transform-parquet.html) + if quote_identifiers: + parquet_columns = "$1:" + ",$1:".join(f'"{c}"' for c in df.columns) + 
else: + parquet_columns = "$1:" + ",$1:".join(df.columns) + copy_into_sql = ( + "COPY INTO {location} /* Python:snowflake.connector.pandas_tools.write_pandas() */ " + "({columns}) " + 'FROM (SELECT {parquet_columns} FROM @"{stage_name}") ' + "FILE_FORMAT=(TYPE=PARQUET COMPRESSION={compression}) " + "PURGE=TRUE ON_ERROR={on_error}" + ).format( + location=location, + columns=columns, + parquet_columns=parquet_columns, + stage_name=stage_name, + compression=compression_map[compression], + on_error=on_error, + ) + logger.debug(f"copying into with '{copy_into_sql}'") + copy_results = cursor.execute(copy_into_sql, _is_internal=True).fetchall() + cursor.close() + return ( + all(e[1] == "LOADED" for e in copy_results), + len(copy_results), + sum(int(e[3]) for e in copy_results), + copy_results, + ) + + +def make_pd_writer( + quote_identifiers: bool = True, +) -> Callable[ + [ + pandas.io.sql.SQLTable, + sqlalchemy.engine.Engine | sqlalchemy.engine.Connection, + Iterable, + Iterable, + ], + None, +]: + """This returns a pd_writer with the desired arguments. + + Example usage: + import pandas as pd + from snowflake.connector.pandas_tools import pd_writer + + sf_connector_version_df = pd.DataFrame([('snowflake-connector-python', '1.0')], columns=['NAME', 'NEWEST_VERSION']) + sf_connector_version_df.to_sql('driver_versions', engine, index=False, method=make_pd_writer()) + + # to use quote_identifiers=False, + from functools import partial + sf_connector_version_df.to_sql( + 'driver_versions', engine, index=False, method=make_pd_writer(quote_identifiers=False))) + + Args: + quote_identifiers: if True (default), the pd_writer will pass quote identifiers to Snowflake. + If False, the created pd_writer will not quote identifiers (and typically coerced to uppercase by Snowflake) + """ + return partial(pd_writer, quote_identifiers=quote_identifiers) + + +def pd_writer( + table: pandas.io.sql.SQLTable, + conn: sqlalchemy.engine.Engine | sqlalchemy.engine.Connection, + keys: Iterable, + data_iter: Iterable, + quote_identifiers: bool = True, +) -> None: + """This is a wrapper on top of write_pandas to make it compatible with to_sql method in pandas. + + Example usage: + import pandas as pd + from snowflake.connector.pandas_tools import pd_writer + + sf_connector_version_df = pd.DataFrame([('snowflake-connector-python', '1.0')], columns=['NAME', 'NEWEST_VERSION']) + sf_connector_version_df.to_sql('driver_versions', engine, index=False, method=pd_writer) + + # to use quote_identifiers=False, see `make_pd_writer` + + Args: + table: Pandas package's table object. + conn: SQLAlchemy engine object to talk to Snowflake. + keys: Column names that we are trying to insert. + data_iter: Iterator over the rows. + quote_identifiers: if True (default), quote identifiers passed to Snowflake. If False, identifiers are not + quoted (and typically coerced to uppercase by Snowflake) + """ + sf_connection = conn.connection.connection + df = pandas.DataFrame(data_iter, columns=keys) + write_pandas( + conn=sf_connection, + df=df, + # Note: Our sqlalchemy connector creates tables case insensitively + table_name=table.name.upper(), + schema=table.schema, + quote_identifiers=quote_identifiers, + ) diff --git a/src/snowflake/connector/proxy.py b/src/snowflake/connector/proxy.py new file mode 100644 index 000000000..884c0558f --- /dev/null +++ b/src/snowflake/connector/proxy.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
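The `quote_identifiers` branches in `write_pandas` above produce two different SQL renderings of the same column list, plus the `$1:`-prefixed form used when selecting out of staged Parquet. This tiny, runnable illustration shows all three; the column names are fabricated.

```python
# Hedged illustration of the column renderings built above.
columns = ["name", "balance"]  # fabricated DataFrame columns
quoted = '"' + '","'.join(columns) + '"'    # '"name","balance"'
unquoted = ",".join(columns)                # 'name,balance' (coerced to uppercase by Snowflake)
parquet_columns = "$1:" + ",$1:".join(f'"{c}"' for c in columns)
print(quoted, unquoted, parquet_columns)    # last: '$1:"name",$1:"balance"'
```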
+# + +from __future__ import annotations + +import os + + +def set_proxies(proxy_host, proxy_port, proxy_user=None, proxy_password=None): + """Sets proxy dict for requests.""" + PREFIX_HTTP = "http://" + PREFIX_HTTPS = "https://" + proxies = None + if proxy_host and proxy_port: + if proxy_host.startswith(PREFIX_HTTP): + proxy_host = proxy_host[len(PREFIX_HTTP) :] + elif proxy_host.startswith(PREFIX_HTTPS): + proxy_host = proxy_host[len(PREFIX_HTTPS) :] + if proxy_user or proxy_password: + proxy_auth = "{proxy_user}:{proxy_password}@".format( + proxy_user=proxy_user if proxy_user is not None else "", + proxy_password=proxy_password if proxy_password is not None else "", + ) + else: + proxy_auth = "" + proxies = { + "http": "http://{proxy_auth}{proxy_host}:{proxy_port}".format( + proxy_host=proxy_host, + proxy_port=str(proxy_port), + proxy_auth=proxy_auth, + ), + "https": "http://{proxy_auth}{proxy_host}:{proxy_port}".format( + proxy_host=proxy_host, + proxy_port=str(proxy_port), + proxy_auth=proxy_auth, + ), + } + os.environ["HTTP_PROXY"] = proxies["http"] + os.environ["HTTPS_PROXY"] = proxies["https"] + return proxies diff --git a/src/snowflake/connector/py.typed b/src/snowflake/connector/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/src/snowflake/connector/result_batch.py b/src/snowflake/connector/result_batch.py new file mode 100644 index 000000000..f5397be21 --- /dev/null +++ b/src/snowflake/connector/result_batch.py @@ -0,0 +1,721 @@ +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# +from __future__ import annotations + +import abc +import io +import json +import time +from base64 import b64decode +from enum import Enum, unique +from logging import getLogger +from typing import TYPE_CHECKING, Any, Iterator, NamedTuple, Sequence + +from .arrow_context import ArrowConverterContext +from .compat import OK, UNAUTHORIZED, urlparse +from .constants import IterUnit +from .errorcode import ER_FAILED_TO_CONVERT_ROW_TO_PYTHON_TYPE, ER_NO_PYARROW +from .errors import Error, InterfaceError, NotSupportedError, ProgrammingError +from .network import ( + RetryRequest, + get_http_retryable_error, + is_retryable_http_code, + raise_failed_request_error, + raise_okta_unauthorized_error, +) +from .options import installed_pandas, pandas +from .secret_detector import SecretDetector +from .time_util import DecorrelateJitterBackoff, TimerContextManager +from .vendored import requests + +logger = getLogger(__name__) + +MAX_DOWNLOAD_RETRY = 10 +DOWNLOAD_TIMEOUT = 7 # seconds + +if TYPE_CHECKING: # pragma: no cover + from .connection import SnowflakeConnection + from .converter import SnowflakeConverterType + from .cursor import ResultMetadata, SnowflakeCursor + from .vendored.requests import Response + +if installed_pandas: + from pyarrow import DataType, Table + from pyarrow import binary as pa_bin + from pyarrow import bool_ as pa_bool + from pyarrow import date64 as pa_date64 + from pyarrow import field + from pyarrow import float64 as pa_flt64 + from pyarrow import int64 as pa_int64 + from pyarrow import schema + from pyarrow import string as pa_str + from pyarrow import time64 as pa_time64 + from pyarrow import timestamp as pa_ts +else: + DataType, Table = None, None + +# emtpy pyarrow type array corresponding to FIELD_TYPES +FIELD_TYPE_TO_PA_TYPE: list[DataType] = [] + +# qrmk related constants +SSE_C_ALGORITHM = "x-amz-server-side-encryption-customer-algorithm" +SSE_C_KEY = "x-amz-server-side-encryption-customer-key" +SSE_C_AES = "AES256" + + +@unique 
+class DownloadMetrics(Enum): + """Defines the keywords by which to store metrics for chunks.""" + + download = "download" # Download time in milliseconds + parse = "parse" # Parsing time to final data types + load = "load" # Parsing time from initial type to intermediate types + + +class RemoteChunkInfo(NamedTuple): + """Small class that holds information about chunks that are given by back-end.""" + + url: str + uncompressedSize: int + compressedSize: int + + +def create_batches_from_response( + cursor: SnowflakeCursor, + _format: str, + data: dict[str, Any], + schema: Sequence[ResultMetadata], +) -> list[ResultBatch]: + column_converters: list[tuple[str, SnowflakeConverterType]] = [] + arrow_context: ArrowConverterContext | None = None + rowtypes = data["rowtype"] + total_len: int = data.get("total", 0) + first_chunk_len = total_len + rest_of_chunks: list[ResultBatch] = [] + if _format == "json": + + def col_to_converter(col: dict[str, Any]) -> tuple[str, SnowflakeConverterType]: + type_name = col["type"].upper() + python_method = cursor._connection.converter.to_python_method( + type_name, col + ) + return type_name, python_method + + column_converters: list[tuple[str, SnowflakeConverterType]] = [ + col_to_converter(c) for c in rowtypes + ] + else: + rowset_b64 = data.get("rowsetBase64") + arrow_context = ArrowConverterContext(cursor._connection._session_parameters) + if "chunks" in data: + chunks = data["chunks"] + logger.debug(f"chunk size={len(chunks)}") + # prepare the downloader for further fetch + qrmk = data.get("qrmk") + chunk_headers: dict[str, Any] = {} + if "chunkHeaders" in data: + chunk_headers = {} + for header_key, header_value in data["chunkHeaders"].items(): + chunk_headers[header_key] = header_value + if "encryption" not in header_key: + logger.debug( + f"added chunk header: key={header_key}, value={header_value}" + ) + elif qrmk is not None: + logger.debug(f"qrmk={SecretDetector.mask_secrets(qrmk)}") + chunk_headers[SSE_C_ALGORITHM] = SSE_C_AES + chunk_headers[SSE_C_KEY] = qrmk + + def remote_chunk_info(c: dict[str, Any]) -> RemoteChunkInfo: + return RemoteChunkInfo( + url=c["url"], + uncompressedSize=c["uncompressedSize"], + compressedSize=c["compressedSize"], + ) + + if _format == "json": + rest_of_chunks = [ + JSONResultBatch( + c["rowCount"], + chunk_headers, + remote_chunk_info(c), + schema, + column_converters, + cursor._use_dict_result, + ) + for c in chunks + ] + else: + rest_of_chunks = [ + ArrowResultBatch( + c["rowCount"], + chunk_headers, + remote_chunk_info(c), + arrow_context, + cursor._use_dict_result, + cursor._connection._numpy, + schema, + cursor._connection._arrow_number_to_decimal, + ) + for c in chunks + ] + for c in rest_of_chunks: + first_chunk_len -= c.rowcount + if _format == "json": + first_chunk = JSONResultBatch.from_data( + data.get("rowset"), + first_chunk_len, + schema, + column_converters, + cursor._use_dict_result, + ) + elif rowset_b64 is not None: + first_chunk = ArrowResultBatch.from_data( + rowset_b64, + first_chunk_len, + arrow_context, + cursor._use_dict_result, + cursor._connection._numpy, + schema, + cursor._connection._arrow_number_to_decimal, + ) + else: + logger.error(f"Don't know how to construct ResultBatches from response: {data}") + first_chunk = ArrowResultBatch.from_data( + "", + 0, + arrow_context, + cursor._use_dict_result, + cursor._connection._numpy, + schema, + cursor._connection._arrow_number_to_decimal, + ) + + return [first_chunk] + rest_of_chunks + + +class ResultBatch(abc.ABC): + """Represents what the 
back-end calls a result chunk. + + These are parts of a result set of a query. They each know how to retrieve their + own results and convert them into Python native formats. + + As you are iterating through a ResultBatch you should check whether the yielded + value is an ``Exception`` in case there was some error parsing the current row + we might yield one of these to allow iteration to continue instead of raising the + ``Exception`` when it occurs. + + These objects are pickleable for easy distribution and replication. + + Please note that the URLs stored in these do expire. The lifetime is dictated by the + Snowflake back-end, at the time of writing this this is 6 hours. + + They can be iterated over multiple times and in different ways. Please follow the + code in ``cursor.py`` to make sure that you are using this class correctly. + + """ + + def __init__( + self, + rowcount: int, + chunk_headers: dict[str, str] | None, + remote_chunk_info: RemoteChunkInfo | None, + schema: Sequence[ResultMetadata], + use_dict_result: bool, + ): + self.rowcount = rowcount + self._chunk_headers = chunk_headers + self._remote_chunk_info = remote_chunk_info + self.schema = schema + self._use_dict_result = use_dict_result + self._metrics: dict[str, int] = {} + self._data: str | list[tuple[Any, ...]] | None = None + if self._remote_chunk_info: + parsed_url = urlparse(self._remote_chunk_info.url) + path_parts = parsed_url.path.rsplit("/", 1) + self.id = path_parts[-1] + else: + self.id = str(self.rowcount) + + @property + def _local(self) -> bool: + """Whether this chunk is local.""" + return self._data is not None + + @property + def compressed_size(self) -> int | None: + """Returns the size of chunk in bytes in compressed form. + + If it's a local chunk this function returns None. + """ + if self._local: + return None + return self._remote_chunk_info.compressedSize + + @property + def uncompressed_size(self) -> int | None: + """Returns the size of chunk in bytes in uncompressed form. + + If it's a local chunk this function returns None. + """ + if self._local: + return None + return self._remote_chunk_info.uncompressedSize + + @property + def column_names(self) -> list[str]: + return [col.name for col in self.schema] + + def __iter__( + self, + ) -> Iterator[dict | Exception] | Iterator[tuple | Exception]: + """Returns an iterator through the data this chunk holds. + + In case of this chunk being a local one it iterates through the local already + parsed data and if it's a remote chunk it will download, parse its data and + return an iterator through it. 
+ """ + return self.create_iter() + + def _download( + self, connection: SnowflakeConnection | None = None, **kwargs + ) -> Response: + """Downloads the data that the ``ResultBatch`` is pointing at.""" + sleep_timer = 1 + backoff = DecorrelateJitterBackoff(1, 16) + for retry in range(MAX_DOWNLOAD_RETRY): + try: + with TimerContextManager() as download_metric: + logger.debug(f"started downloading result batch id: {self.id}") + chunk_url = self._remote_chunk_info.url + request_data = { + "url": chunk_url, + "headers": self._chunk_headers, + "timeout": DOWNLOAD_TIMEOUT, + } + # Try to reuse a connection if possible + if connection and connection._rest is not None: + with connection._rest._use_requests_session() as session: + logger.debug( + f"downloading result batch id: {self.id} with existing session {session}" + ) + response = session.request("get", **request_data) + else: + logger.debug( + f"downloading result batch id: {self.id} with new session" + ) + response = requests.get(**request_data) + + if response.status_code == OK: + logger.debug( + f"successfully downloaded result batch id: {self.id}" + ) + break + + # Raise error here to correctly go in to exception clause + if is_retryable_http_code(response.status_code): + # retryable server exceptions + error: Error = get_http_retryable_error(response.status_code) + raise RetryRequest(error) + elif response.status_code == UNAUTHORIZED: + # make a unauthorized error + raise_okta_unauthorized_error(None, response) + else: + raise_failed_request_error(None, chunk_url, "get", response) + + except (RetryRequest, Exception) as e: + if retry == MAX_DOWNLOAD_RETRY - 1: + # Re-throw if we failed on the last retry + e = e.args[0] if isinstance(e, RetryRequest) else e + raise e + sleep_timer = backoff.next_sleep(1, sleep_timer) + logger.exception( + f"Failed to fetch the large result set batch " + f"{self.id} for the {retry + 1} th time, " + f"backing off for {sleep_timer}s for the reason: '{e}'" + ) + time.sleep(sleep_timer) + + self._metrics[ + DownloadMetrics.download.value + ] = download_metric.get_timing_millis() + return response + + @abc.abstractmethod + def create_iter( + self, **kwargs + ) -> ( + Iterator[dict | Exception] + | Iterator[tuple | Exception] + | Iterator[Table] + | Iterator[pandas.DataFrame] + ): + """Downloads the data from from blob storage that this ResultChunk points at. + + This function is the one that does the actual work for ``self.__iter__``. + + It is necessary because a ``ResultBatch`` can return multiple types of + iterators. A good example of this is simply iterating through + ``SnowflakeCursor`` and calling ``fetch_pandas_batches`` on it. 
+ """ + raise NotImplementedError() + + def _check_can_use_pandas(self) -> None: + if not installed_pandas: + msg = ( + "Optional dependency: 'pandas' is not installed, please see the following link for install " + "instructions: https://docs.snowflake.com/en/user-guide/python-connector-pandas.html#installation" + ) + errno = ER_NO_PYARROW + + raise Error.errorhandler_make_exception( + ProgrammingError, + { + "msg": msg, + "errno": errno, + }, + ) + + @abc.abstractmethod + def to_pandas(self) -> pandas.DataFrame: + raise NotImplementedError() + + @abc.abstractmethod + def to_arrow(self) -> Table: + raise NotImplementedError() + + +class JSONResultBatch(ResultBatch): + def __init__( + self, + rowcount: int, + chunk_headers: dict[str, str] | None, + remote_chunk_info: RemoteChunkInfo | None, + schema: Sequence[ResultMetadata], + column_converters: Sequence[tuple[str, SnowflakeConverterType]], + use_dict_result: bool, + ): + super().__init__( + rowcount, + chunk_headers, + remote_chunk_info, + schema, + use_dict_result, + ) + self.column_converters = column_converters + + @classmethod + def from_data( + cls, + data: Sequence[Sequence[Any]], + data_len: int, + schema: Sequence[ResultMetadata], + column_converters: Sequence[tuple[str, SnowflakeConverterType]], + use_dict_result: bool, + ): + """Initializes a ``JSONResultBatch`` from static, local data.""" + new_chunk = cls( + len(data), + None, + None, + schema, + column_converters, + use_dict_result, + ) + new_chunk._data: ( + list[dict | Exception] | list[tuple | Exception] + ) = new_chunk._parse(data) + return new_chunk + + def _load(self, response: Response) -> list: + """This function loads a compressed JSON file into memory. + + Returns: + Whatever ``json.loads`` return, but in a list. + Unfortunately there's not type hint for this. 
+        """
+        read_data = response.text
+        return json.loads("".join(["[", read_data, "]"]))
+
+    def _parse(
+        self, downloaded_data
+    ) -> list[dict | Exception] | list[tuple | Exception]:
+        """Parses downloaded data into its final form."""
+        logger.debug(f"parsing for result batch id: {self.id}")
+        result_list = []
+        if self._use_dict_result:
+            for row in downloaded_data:
+                row_result = {}
+                try:
+                    for (_t, c), v, col in zip(
+                        self.column_converters,
+                        row,
+                        self.schema,
+                    ):
+                        row_result[col.name] = v if c is None or v is None else c(v)
+                    result_list.append(row_result)
+                except Exception as error:
+                    msg = f"Failed to convert: field {col.name}: {_t}::{v}, Error: {error}"
+                    logger.exception(msg)
+                    result_list.append(
+                        Error.errorhandler_make_exception(
+                            InterfaceError,
+                            {
+                                "msg": msg,
+                                "errno": ER_FAILED_TO_CONVERT_ROW_TO_PYTHON_TYPE,
+                            },
+                        )
+                    )
+        else:
+            for row in downloaded_data:
+                row_result = [None] * len(self.schema)
+                try:
+                    idx = 0
+                    for (_t, c), v, _col in zip(
+                        self.column_converters,
+                        row,
+                        self.schema,
+                    ):
+                        row_result[idx] = v if c is None or v is None else c(v)
+                        idx += 1
+                    result_list.append(tuple(row_result))
+                except Exception as error:
+                    msg = f"Failed to convert: field {_col.name}: {_t}::{v}, Error: {error}"
+                    logger.exception(msg)
+                    result_list.append(
+                        Error.errorhandler_make_exception(
+                            InterfaceError,
+                            {
+                                "msg": msg,
+                                "errno": ER_FAILED_TO_CONVERT_ROW_TO_PYTHON_TYPE,
+                            },
+                        )
+                    )
+        return result_list
+
+    def __repr__(self) -> str:
+        return f"JSONResultChunk({self.id})"
+
+    def create_iter(
+        self, connection: SnowflakeConnection | None = None, **kwargs
+    ) -> Iterator[dict | Exception] | Iterator[tuple | Exception]:
+        if self._local:
+            return iter(self._data)
+        response = self._download(connection=connection)
+        # Load data into an intermediate form
+        logger.debug(f"started loading result batch id: {self.id}")
+        with TimerContextManager() as load_metric:
+            downloaded_data = self._load(response)
+        logger.debug(f"finished loading result batch id: {self.id}")
+        self._metrics[DownloadMetrics.load.value] = load_metric.get_timing_millis()
+        # Process downloaded data
+        with TimerContextManager() as parse_metric:
+            parsed_data = self._parse(downloaded_data)
+        self._metrics[DownloadMetrics.parse.value] = parse_metric.get_timing_millis()
+        return iter(parsed_data)
+
+    def _arrow_fetching_error(self):
+        return NotSupportedError(
+            f"Trying to use arrow fetching on {type(self)} which "
+            f"is not ArrowResultChunk"
+        )
+
+    def to_pandas(self):
+        raise self._arrow_fetching_error()
+
+    def to_arrow(self):
+        raise self._arrow_fetching_error()
+
+
+class ArrowResultBatch(ResultBatch):
+    def __init__(
+        self,
+        rowcount: int,
+        chunk_headers: dict[str, str] | None,
+        remote_chunk_info: RemoteChunkInfo | None,
+        context: ArrowConverterContext,
+        use_dict_result: bool,
+        numpy: bool,
+        schema: Sequence[ResultMetadata],
+        number_to_decimal: bool,
+    ):
+        super().__init__(
+            rowcount,
+            chunk_headers,
+            remote_chunk_info,
+            schema,
+            use_dict_result,
+        )
+        self._context = context
+        self._numpy = numpy
+        self._number_to_decimal = number_to_decimal
+
+    def __repr__(self) -> str:
+        return f"ArrowResultChunk({self.id})"
+
+    def _load(
+        self, response: Response, row_unit: IterUnit
+    ) -> Iterator[dict | Exception] | Iterator[tuple | Exception]:
+        """Creates a ``PyArrowIterator`` from a response.
+
+        This is used to iterate through results in different ways, depending on
+        which mode ``PyArrowIterator`` is in.
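+
+        A hedged sketch of the two modes::
+
+            self._load(response, IterUnit.ROW_UNIT)    # yields tuples or dicts
+            self._load(response, IterUnit.TABLE_UNIT)  # yields pyarrow.Table objects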
+ """ + from .arrow_iterator import PyArrowIterator + + iter = PyArrowIterator( + None, + io.BytesIO(response.content), + self._context, + self._use_dict_result, + self._numpy, + self._number_to_decimal, + ) + if row_unit == IterUnit.TABLE_UNIT: + iter.init_table_unit() + + return iter + + def _from_data( + self, data: str, iter_unit: IterUnit + ) -> Iterator[dict | Exception] | Iterator[tuple | Exception]: + """Creates a ``PyArrowIterator`` files from a str. + + This is used to iterate through results in different ways depending on which + mode that ``PyArrowIterator`` is in. + """ + from .arrow_iterator import PyArrowIterator + + if len(data) == 0: + return iter([]) + + _iter = PyArrowIterator( + None, + io.BytesIO(b64decode(data)), + self._context, + self._use_dict_result, + self._numpy, + self._number_to_decimal, + ) + if iter_unit == IterUnit.TABLE_UNIT: + _iter.init_table_unit() + else: + _iter.init_row_unit() + return _iter + + @classmethod + def from_data( + cls, + data: str, + data_len: int, + context: ArrowConverterContext, + use_dict_result: bool, + numpy: bool, + schema: Sequence[ResultMetadata], + number_to_decimal: bool, + ): + """Initializes an ``ArrowResultBatch`` from static, local data.""" + new_chunk = cls( + data_len, + None, + None, + context, + use_dict_result, + numpy, + schema, + number_to_decimal, + ) + new_chunk._data = data + + return new_chunk + + def _create_iter( + self, iter_unit: IterUnit, connection: SnowflakeConnection | None = None + ) -> (Iterator[dict | Exception] | Iterator[tuple | Exception] | Iterator[Table]): + """Create an iterator for the ResultBatch. Used by get_arrow_iter.""" + if self._local: + return self._from_data(self._data, iter_unit) + response = self._download(connection=connection) + logger.debug(f"started loading result batch id: {self.id}") + with TimerContextManager() as load_metric: + loaded_data = self._load(response, iter_unit) + logger.debug(f"finished loading result batch id: {self.id}") + self._metrics[DownloadMetrics.load.value] = load_metric.get_timing_millis() + return loaded_data + + def _get_arrow_iter( + self, connection: SnowflakeConnection | None = None + ) -> Iterator[Table]: + """Returns an iterator for this batch which yields a pyarrow Table""" + return self._create_iter(iter_unit=IterUnit.TABLE_UNIT, connection=connection) + + def _create_empty_table(self) -> Table: + """Returns emtpy Arrow table based on schema""" + if installed_pandas: + # initialize pyarrow type array corresponding to FIELD_TYPES + FIELD_TYPE_TO_PA_TYPE = [ + pa_int64(), + pa_flt64(), + pa_str(), + pa_date64(), + pa_time64("ns"), + pa_str(), + pa_ts("ns"), + pa_ts("ns"), + pa_ts("ns"), + pa_str(), + pa_str(), + pa_bin(), + pa_time64("ns"), + pa_bool(), + ] + fields = [ + field(s.name, FIELD_TYPE_TO_PA_TYPE[s.type_code]) for s in self.schema + ] + return schema(fields).empty_table() + + def to_arrow(self, connection: SnowflakeConnection | None = None) -> Table: + """Returns this batch as a pyarrow Table""" + val = next(self._get_arrow_iter(connection=connection), None) + if val is not None: + return val + return self._create_empty_table() + + def to_pandas( + self, connection: SnowflakeConnection | None = None, **kwargs + ) -> pandas.DataFrame: + """Returns this batch as a pandas DataFrame""" + self._check_can_use_pandas() + table = self.to_arrow(connection=connection) + return table.to_pandas(**kwargs) + + def _get_pandas_iter( + self, connection: SnowflakeConnection | None = None, **kwargs + ) -> Iterator[pandas.DataFrame]: + """An iterator for 
this batch which yields a pandas DataFrame""" + iterator_data = [] + dataframe = self.to_pandas(connection=connection, **kwargs) + if not dataframe.empty: + iterator_data.append(dataframe) + return iter(iterator_data) + + def create_iter( + self, connection: SnowflakeConnection | None = None, **kwargs + ) -> ( + Iterator[dict | Exception] + | Iterator[tuple | Exception] + | Iterator[Table] + | Iterator[pandas.DataFrame] + ): + """The interface used by ResultSet to create an iterator for this ResultBatch.""" + iter_unit: IterUnit = kwargs.pop("iter_unit", IterUnit.ROW_UNIT) + if iter_unit == IterUnit.TABLE_UNIT: + structure = kwargs.pop("structure", "pandas") + if structure == "pandas": + return self._get_pandas_iter(connection=connection, **kwargs) + else: + return self._get_arrow_iter(connection=connection) + else: + return self._create_iter(iter_unit=iter_unit, connection=connection) diff --git a/src/snowflake/connector/result_set.py b/src/snowflake/connector/result_set.py new file mode 100644 index 000000000..226a5b4ab --- /dev/null +++ b/src/snowflake/connector/result_set.py @@ -0,0 +1,247 @@ +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# +from __future__ import annotations + +from collections import deque +from concurrent.futures import Future +from concurrent.futures.thread import ThreadPoolExecutor +from logging import getLogger +from typing import TYPE_CHECKING, Any, Callable, Deque, Iterable, Iterator + +from .constants import IterUnit +from .errors import NotSupportedError +from .options import installed_pandas, pandas +from .result_batch import ( + ArrowResultBatch, + DownloadMetrics, + JSONResultBatch, + ResultBatch, +) +from .telemetry import TelemetryField +from .time_util import get_time_millis + +if TYPE_CHECKING: # pragma: no cover + from snowflake.connector.cursor import SnowflakeCursor + +if installed_pandas: + from pyarrow import Table, concat_tables +else: + Table = None + +logger = getLogger(__name__) + + +def result_set_iterator( + first_batch_iter: Iterator[tuple], + unconsumed_batches: Deque[Future[Iterator[tuple]]], + unfetched_batches: Deque[ResultBatch], + final: Callable[[], None], + prefetch_thread_num: int, + **kw: Any, +) -> (Iterator[dict | Exception] | Iterator[tuple | Exception] | Iterator[Table]): + """Creates an iterator over some other iterators. + + Very similar to itertools.chain but we need some keywords to be propagated to + ``_download`` functions later. + + We need this to have ResultChunks fall out of usage so that they can be garbage + collected. + + Just like ``ResultBatch`` iterator, this might yield an ``Exception`` to allow users + to continue iterating through the rest of the ``ResultBatch``. 
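+
+    A hedged sketch of the equivalent behaviour with the prefetch window and
+    telemetry stripped away::
+
+        def naive_result_set_iterator(first_batch_iter, unfetched_batches, final):
+            yield from first_batch_iter
+            for batch in unfetched_batches:
+                yield from batch.create_iter()
+            final()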
+ """ + + with ThreadPoolExecutor(prefetch_thread_num) as pool: + # Fill up window + + logger.debug("beginning to schedule result batch downloads") + + for _ in range(min(prefetch_thread_num, len(unfetched_batches))): + logger.debug( + f"queuing download of result batch id: {unfetched_batches[0].id}" + ) + unconsumed_batches.append( + pool.submit(unfetched_batches.popleft().create_iter, **kw) + ) + + yield from first_batch_iter + + i = 1 + while unconsumed_batches: + logger.debug(f"user requesting to consume result batch {i}") + + # Submit the next un-fetched batch to the pool + if unfetched_batches: + logger.debug( + f"queuing download of result batch id: {unfetched_batches[0].id}" + ) + future = pool.submit(unfetched_batches.popleft().create_iter, **kw) + unconsumed_batches.append(future) + + future = unconsumed_batches.popleft() + + # this will raise an exception if one has occurred + batch_iterator = future.result() + + logger.debug(f"user began consuming result batch {i}") + yield from batch_iterator + logger.debug(f"user finished consuming result batch {i}") + + i += 1 + final() + + +class ResultSet(Iterable[list]): + """This class retrieves the results of a query with the historical strategy. + + It pre-downloads the first up to 4 ResultChunks (this doesn't include the 1st chunk + as that is embedded in the response JSON from Snowflake) upon creating an Iterator + on it. + + It also reports telemetry data about its ``ResultBatch``es once it's done iterating + through them. + + Currently we do not support mixing multiple ``ResultBatch`` types and having + different column definitions types per ``ResultBatch``. + """ + + def __init__( + self, + cursor: SnowflakeCursor, + result_chunks: list[JSONResultBatch] | list[ArrowResultBatch], + prefetch_thread_num: int, + ): + self.batches = result_chunks + self._cursor = cursor + self.prefetch_thread_num = prefetch_thread_num + + def _report_metrics(self) -> None: + """Report all metrics totalled up. + + This includes TIME_CONSUME_LAST_RESULT, TIME_DOWNLOADING_CHUNKS and + TIME_PARSING_CHUNKS in that order. 
+ """ + if self._cursor._first_chunk_time is not None: + time_consume_last_result = ( + get_time_millis() - self._cursor._first_chunk_time + ) + self._cursor._log_telemetry_job_data( + TelemetryField.TIME_CONSUME_LAST_RESULT, time_consume_last_result + ) + metrics = self._get_metrics() + if DownloadMetrics.download.value in metrics: + self._cursor._log_telemetry_job_data( + TelemetryField.TIME_DOWNLOADING_CHUNKS, + metrics.get(DownloadMetrics.download.value), + ) + if DownloadMetrics.parse.value in metrics: + self._cursor._log_telemetry_job_data( + TelemetryField.TIME_PARSING_CHUNKS, + metrics.get(DownloadMetrics.parse.value), + ) + + def _finish_iterating(self): + """Used for any cleanup after the result set iterator is done.""" + + self._report_metrics() + + def _can_create_arrow_iter(self) -> None: + # For now we don't support mixed ResultSets, so assume first partition's type + # represents them all + head_type = type(self.batches[0]) + if head_type != ArrowResultBatch: + raise NotSupportedError( + f"Trying to use arrow fetching on {head_type} which " + f"is not ArrowResultChunk" + ) + + def _fetch_arrow_batches( + self, + ) -> Iterator[Table]: + """Fetches all the results as Arrow Tables, chunked by Snowflake back-end.""" + self._can_create_arrow_iter() + return self._create_iter(iter_unit=IterUnit.TABLE_UNIT, structure="arrow") + + def _fetch_arrow_all(self) -> Table | None: + """Fetches a single Arrow Table from all of the ``ResultBatch``.""" + tables = list(self._fetch_arrow_batches()) + if tables: + return concat_tables(tables) + else: + return None + + def _fetch_pandas_batches(self, **kwargs) -> Iterator[pandas.DataFrame]: + """Fetches Pandas dataframes in batches, where batch refers to Snowflake Chunk. + + Thus, the batch size (the number of rows in dataframe) is determined by + Snowflake's back-end. + """ + self._can_create_arrow_iter() + return self._create_iter(iter_unit=IterUnit.TABLE_UNIT, structure="pandas") + + def _fetch_pandas_all(self, **kwargs) -> pandas.DataFrame: + """Fetches a single Pandas dataframe.""" + dataframes = list(self._fetch_pandas_batches()) + if dataframes: + return pandas.concat( + dataframes, + ignore_index=True, # Don't keep in result batch indexes + **kwargs, + ) + return pandas.DataFrame(columns=self.batches[0].column_names) + + def _get_metrics(self) -> dict[str, int]: + """Sum up all the chunks' metrics and show them together.""" + overall_metrics: dict[str, int] = {} + for c in self.batches: + for n, v in c._metrics.items(): + overall_metrics[n] = overall_metrics.get(n, 0) + v + return overall_metrics + + def __iter__(self) -> Iterator[tuple]: + """Returns a new iterator through all batches with default values.""" + return self._create_iter() + + def _create_iter( + self, + **kwargs, + ) -> ( + Iterator[dict | Exception] + | Iterator[tuple | Exception] + | Iterator[Table] + | Iterator[pandas.DataFrame] + ): + """Set up a new iterator through all batches with first 5 chunks downloaded. + + This function is a helper function to ``__iter__`` and it was introduced for the + cases where we need to propagate some values to later ``_download`` calls. 
+ """ + # add connection so that result batches can use sessions + kwargs["connection"] = self._cursor.connection + + first_batch_iter = self.batches[0].create_iter(**kwargs) + + # Iterator[Tuple] Futures that have not been consumed by the user + unconsumed_batches: Deque[Future[Iterator[tuple]]] = deque() + + # batches that have not been fetched + unfetched_batches = deque(self.batches[1:]) + for num, batch in enumerate(unfetched_batches): + logger.debug(f"result batch {num + 1} has id: {batch.id}") + + return result_set_iterator( + first_batch_iter, + unconsumed_batches, + unfetched_batches, + self._finish_iterating, + self.prefetch_thread_num, + **kwargs, + ) + + def total_row_index(self) -> int: + """Returns the total rowcount of the ``ResultSet`` .""" + total = 0 + for p in self.batches: + total += p.rowcount + return total diff --git a/src/snowflake/connector/s3_storage_client.py b/src/snowflake/connector/s3_storage_client.py new file mode 100644 index 000000000..5e02475d7 --- /dev/null +++ b/src/snowflake/connector/s3_storage_client.py @@ -0,0 +1,577 @@ +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +import binascii +import re +import xml.etree.ElementTree as ET +from datetime import datetime +from io import IOBase +from logging import getLogger +from operator import itemgetter +from typing import TYPE_CHECKING, Any, NamedTuple + +from cryptography.hazmat.primitives import hashes, hmac + +from .compat import quote, urlparse +from .constants import ( + HTTP_HEADER_CONTENT_TYPE, + HTTP_HEADER_VALUE_OCTET_STREAM, + FileHeader, + ResultStatus, +) +from .encryption_util import EncryptionMetadata +from .storage_client import SnowflakeStorageClient +from .vendored import requests + +if TYPE_CHECKING: # pragma: no cover + from .file_transfer_agent import SnowflakeFileMeta, StorageCredential + +logger = getLogger(__name__) + +META_PREFIX = "x-amz-meta-" +SFC_DIGEST = "sfc-digest" + +AMZ_MATDESC = "x-amz-matdesc" +AMZ_KEY = "x-amz-key" +AMZ_IV = "x-amz-iv" + +ERRORNO_WSAECONNABORTED = 10053 # network connection was aborted + +EXPIRED_TOKEN = "ExpiredToken" +ADDRESSING_STYLE = "virtual" # explicit force to use virtual addressing style +UNSIGNED_PAYLOAD = "UNSIGNED-PAYLOAD" + +RE_MULTIPLE_SPACES = re.compile(r" +") + + +class S3Location(NamedTuple): + bucket_name: str + path: str + + +class SnowflakeS3RestClient(SnowflakeStorageClient): + def __init__( + self, + meta: SnowflakeFileMeta, + credentials: StorageCredential, + stage_info: dict[str, Any], + chunk_size: int, + use_accelerate_endpoint: bool | None = None, + use_s3_regional_url=False, + ) -> None: + """Rest client for S3 storage. + + Args: + stage_info: + """ + super().__init__(meta, stage_info, chunk_size, credentials=credentials) + # Signature version V4 + # Addressing style Virtual Host + self.region_name: str = stage_info["region"] + # Multipart upload only + self.upload_id: str | None = None + self.etags: list[str] | None = None + self.s3location: S3Location = ( + SnowflakeS3RestClient._extract_bucket_name_and_path( + self.stage_info["location"] + ) + ) + self.use_s3_regional_url = use_s3_regional_url + + # if GS sends us an endpoint, it's likely for FIPS. Use it. + self.endpoint: str | None = None + if stage_info["endPoint"]: + self.endpoint = ( + f"https://{self.s3location.bucket_name}." 
+                stage_info["endPoint"]
+            )
+        self.transfer_accelerate_config(use_accelerate_endpoint)
+
+    def transfer_accelerate_config(
+        self, use_accelerate_endpoint: bool | None = None
+    ) -> bool:
+        # if self.endpoint has been set, e.g. by metadata, no more config is needed.
+        if self.endpoint is not None:
+            return self.endpoint.find("s3-accelerate.amazonaws.com") >= 0
+        if self.use_s3_regional_url:
+            self.endpoint = (
+                f"https://{self.s3location.bucket_name}."
+                f"s3.{self.region_name}.amazonaws.com"
+            )
+            return False
+        else:
+            if use_accelerate_endpoint is None:
+                use_accelerate_endpoint = self._get_bucket_accelerate_config(
+                    self.s3location.bucket_name
+                )
+
+            if use_accelerate_endpoint:
+                self.endpoint = (
+                    f"https://{self.s3location.bucket_name}.s3-accelerate.amazonaws.com"
+                )
+            else:
+                self.endpoint = (
+                    f"https://{self.s3location.bucket_name}.s3.amazonaws.com"
+                )
+            return use_accelerate_endpoint
+
+    @staticmethod
+    def _sign_bytes(secret_key: bytes, _input: str) -> bytes:
+        """Applies HMAC-SHA-256 to the given string with secret_key."""
+        h = hmac.HMAC(secret_key, hashes.SHA256())
+        h.update(_input.encode("utf-8"))
+        return h.finalize()
+
+    @staticmethod
+    def _sign_bytes_hex(secret_key: bytes, _input: str) -> bytes:
+        """Convenience function; same as _sign_bytes, but returns the result in hex form."""
+        return binascii.hexlify(SnowflakeS3RestClient._sign_bytes(secret_key, _input))
+
+    @staticmethod
+    def _hash_bytes(_input: bytes) -> bytes:
+        """Applies a SHA-256 hash to the given bytes."""
+        digest = hashes.Hash(hashes.SHA256())
+        digest.update(_input)
+        return digest.finalize()
+
+    @staticmethod
+    def _hash_bytes_hex(_input: bytes) -> bytes:
+        """Convenience function; same as _hash_bytes, but returns the result in hex form."""
+        return binascii.hexlify(SnowflakeS3RestClient._hash_bytes(_input))
+
+    @staticmethod
+    def _construct_query_string(
+        query_parts: tuple[tuple[str, str], ...],
+    ) -> str:
+        """Convenience function to build the query part of a URL from key-value pairs.
+
+        It filters out empty strings from the key, value pairs.
+        """
+        return "&".join(["=".join(filter(bool, e)) for e in query_parts])
+
+    @staticmethod
+    def _construct_canonicalized_and_signed_headers(
+        headers: dict[str, str | list[str]]
+    ) -> tuple[str, str]:
+        """Constructs canonical headers as per AWS specs; also returns the signed headers.
+
+        Sorting by value when keys are equal is not supported, so don't send in
+        duplicate keys - which is not possible with a dictionary anyway.
+        """
+        res = []
+        low_key_dict = {k.lower(): v for k, v in headers.items()}
+        sorted_headers = sorted(low_key_dict.keys())
+        _res = [(k, low_key_dict[k]) for k in sorted_headers]
+
+        for k, v in _res:
+            # if the value is a list, convert it to a comma-delimited string
+            if isinstance(v, list):
+                v = ",".join(v)
+            # if the header spans multiple lines, replace newlines with spaces
+            k = k.replace("\n", " ")
+            res.append(k.strip() + ":" + RE_MULTIPLE_SPACES.sub(" ", v.strip()))
+
+        ans = "\n".join(res)
+        if ans:
+            ans += "\n"
+
+        return ans, ";".join(sorted_headers)
+
+    @staticmethod
+    def _construct_canonical_request_and_signed_headers(
+        verb: str,
+        canonical_uri_parameter: str,
+        query_parts: dict[str, str],
+        canonical_headers: dict[str, str | list[str]] | None = None,
+        payload_hash: str = "",
+    ) -> tuple[str, str]:
+        """Builds the canonical request and also returns the signed headers.
+
+        Note: this doesn't support sorting by values in case the same key is given
+        more than once, but doing so is also not possible with a dictionary.
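+
+        The resulting string follows the AWS SigV4 canonical request layout; a
+        hedged illustration with made-up values::
+
+            GET
+            /stage-path/data_0_0_0
+            partNumber=1&uploadId=some-upload-id
+            host:my-bucket.s3.amazonaws.com
+            x-amz-date:20210101T000000Z
+
+            host;x-amz-date
+            <hex SHA-256 of the payload>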
+ """ + canonical_query_string = "&".join( + "=".join([k, v]) for k, v in sorted(query_parts.items(), key=itemgetter(0)) + ) + ( + canonical_headers, + signed_headers, + ) = SnowflakeS3RestClient._construct_canonicalized_and_signed_headers( + canonical_headers + ) + + return ( + "\n".join( + [ + verb, + canonical_uri_parameter or "/", + canonical_query_string, + canonical_headers, + signed_headers, + payload_hash, + ] + ), + signed_headers, + ) + + @staticmethod + def _construct_string_to_sign( + region_name: str, + service_name: str, + amzdate: str, + short_amzdate: str, + canonical_request_hash: bytes, + ) -> tuple[str, str]: + """Given all the necessary information construct a V4 string to sign. + + As per AWS specs it requires the scope, the hash of the canonical request and + the current date in the following format: YYYYMMDDTHHMMSSZ where T and Z are + constant characters. + This function generates the scope from the amzdate (which is just the date + portion of amzdate), region name and service we want to use (this is only s3 + in our case). + """ + scope = f"{short_amzdate}/{region_name}/{service_name}/aws4_request" + return ( + "\n".join( + [ + "AWS4-HMAC-SHA256", + amzdate, + scope, + canonical_request_hash.decode("utf-8"), + ] + ), + scope, + ) + + def _has_expired_token(self, response: requests.Response) -> bool: + """Extract error code and error message from the S3's error response. + + Expected format: + https://docs.aws.amazon.com/AmazonS3/latest/API/ErrorResponses.html#RESTErrorResponses + + Args: + response: Rest error response in XML format + + Returns: True if the error response is caused by token expiration + + """ + if response.status_code != 400: + return False + message = response.text + if not message or message.isspace(): + return False + err = ET.fromstring(message) + return err.find("Code").text == EXPIRED_TOKEN + + @staticmethod + def _extract_bucket_name_and_path(stage_location) -> S3Location: + # split stage location as bucket name and path + bucket_name, _, path = stage_location.partition("/") + if path and not path.endswith("/"): + path += "/" + + return S3Location(bucket_name=bucket_name, path=path) + + def _send_request_with_authentication_and_retry( + self, + url: str, + verb: str, + retry_id: int | str, + query_parts: dict[str, str] | None = None, + x_amz_headers: dict[str, str] | None = None, + headers: dict[str, str] | None = None, + payload: bytes | bytearray | IOBase | None = None, + unsigned_payload: bool = False, + ) -> requests.Response: + if x_amz_headers is None: + x_amz_headers = {} + if headers is None: + headers = {} + if payload is None: + payload = b"" + if query_parts is None: + query_parts = {} + parsed_url = urlparse(url) + x_amz_headers["x-amz-security-token"] = self.credentials.creds.get( + "AWS_TOKEN", "" + ) + x_amz_headers["host"] = parsed_url.hostname + if unsigned_payload: + x_amz_headers["x-amz-content-sha256"] = UNSIGNED_PAYLOAD + else: + x_amz_headers["x-amz-content-sha256"] = ( + SnowflakeS3RestClient._hash_bytes_hex(payload).lower().decode() + ) + + def generate_authenticated_url_and_args_v4() -> tuple[bytes, dict[str, bytes]]: + t = datetime.utcnow() + amzdate = t.strftime("%Y%m%dT%H%M%SZ") + short_amzdate = amzdate[:8] + x_amz_headers["x-amz-date"] = amzdate + + ( + canonical_request, + signed_headers, + ) = self._construct_canonical_request_and_signed_headers( + verb=verb, + canonical_uri_parameter=parsed_url.path + + (f";{parsed_url.params}" if parsed_url.params else ""), + query_parts=query_parts, + 
canonical_headers=x_amz_headers, + payload_hash=x_amz_headers["x-amz-content-sha256"], + ) + string_to_sign, scope = self._construct_string_to_sign( + self.region_name, + "s3", + amzdate, + short_amzdate, + self._hash_bytes_hex(canonical_request.encode("utf-8")).lower(), + ) + kDate = self._sign_bytes( + ("AWS4" + self.credentials.creds["AWS_SECRET_KEY"]).encode("utf-8"), + short_amzdate, + ) + kRegion = self._sign_bytes(kDate, self.region_name) + kService = self._sign_bytes(kRegion, "s3") + signing_key = self._sign_bytes(kService, "aws4_request") + + signature = self._sign_bytes_hex(signing_key, string_to_sign).lower() + authorization_header = ( + "AWS4-HMAC-SHA256 " + + f"Credential={self.credentials.creds['AWS_KEY_ID']}/{scope}, " + + f"SignedHeaders={signed_headers}, " + + f"Signature={signature.decode('utf-8')}" + ) + headers.update(x_amz_headers) + headers["Authorization"] = authorization_header + rest_args = {"headers": headers} + + if payload: + rest_args["data"] = payload + + return url.encode("utf-8"), rest_args + + return self._send_request_with_retry( + verb, generate_authenticated_url_and_args_v4, retry_id + ) + + def get_file_header(self, filename: str) -> FileHeader | None: + """Gets the metadata of file in specified location. + + Args: + filename: Name of remote file. + + Returns: + None if HEAD returns 404, otherwise a FileHeader instance populated + with metadata + """ + path = quote(self.s3location.path + filename.lstrip("/")) + url = self.endpoint + f"/{path}" + + retry_id = "HEAD" + self.retry_count[retry_id] = 0 + response = self._send_request_with_authentication_and_retry( + url=url, verb="HEAD", retry_id=retry_id + ) + if response.status_code == 200: + self.meta.result_status = ResultStatus.UPLOADED + metadata = response.headers + encryption_metadata = ( + EncryptionMetadata( + key=metadata.get(META_PREFIX + AMZ_KEY), + iv=metadata.get(META_PREFIX + AMZ_IV), + matdesc=metadata.get(META_PREFIX + AMZ_MATDESC), + ) + if metadata.get(META_PREFIX + AMZ_KEY) + else None + ) + return FileHeader( + digest=metadata.get(META_PREFIX + SFC_DIGEST), + content_length=int(metadata.get("Content-Length")), + encryption_metadata=encryption_metadata, + ) + elif response.status_code == 404: + logger.debug( + f"not found. bucket: {self.s3location.bucket_name}, path: {path}" + ) + self.meta.result_status = ResultStatus.NOT_FOUND_FILE + return None + else: + response.raise_for_status() + + def _prepare_file_metadata(self) -> dict[str, Any]: + """Construct metadata for a file to be uploaded. + + Returns: File metadata in a dict. 
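+
+        A hedged example of the shape for an encrypted upload::
+
+            {
+                "x-amz-meta-sfc-digest": "<file's SHA-256 digest>",
+                "x-amz-meta-x-amz-iv": "<initialization vector>",
+                "x-amz-meta-x-amz-key": "<wrapped file key>",
+                "x-amz-meta-x-amz-matdesc": "<material description>",
+            }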
+ + """ + s3_metadata = { + META_PREFIX + SFC_DIGEST: self.meta.sha256_digest, + } + if self.encryption_metadata: + s3_metadata.update( + { + META_PREFIX + AMZ_IV: self.encryption_metadata.iv, + META_PREFIX + AMZ_KEY: self.encryption_metadata.key, + META_PREFIX + AMZ_MATDESC: self.encryption_metadata.matdesc, + } + ) + return s3_metadata + + def _initiate_multipart_upload(self) -> None: + query_parts = (("uploads", ""),) + path = quote(self.s3location.path + self.meta.dst_file_name.lstrip("/")) + query_string = self._construct_query_string(query_parts) + url = self.endpoint + f"/{path}?{query_string}" + s3_metadata = self._prepare_file_metadata() + # initiate multipart upload + retry_id = "Initiate" + self.retry_count[retry_id] = 0 + response = self._send_request_with_authentication_and_retry( + url=url, + verb="POST", + retry_id=retry_id, + x_amz_headers=s3_metadata, + headers={HTTP_HEADER_CONTENT_TYPE: HTTP_HEADER_VALUE_OCTET_STREAM}, + query_parts=dict(query_parts), + ) + if response.status_code == 200: + self.upload_id = ET.fromstring(response.content)[2].text + self.etags = [None] * self.num_of_chunks + else: + response.raise_for_status() + + def _upload_chunk(self, chunk_id: int, chunk: bytes) -> None: + path = quote(self.s3location.path + self.meta.dst_file_name.lstrip("/")) + url = self.endpoint + f"/{path}" + + if self.num_of_chunks == 1: # single request + s3_metadata = self._prepare_file_metadata() + response = self._send_request_with_authentication_and_retry( + url=url, + verb="PUT", + retry_id=chunk_id, + payload=chunk, + x_amz_headers=s3_metadata, + headers={HTTP_HEADER_CONTENT_TYPE: HTTP_HEADER_VALUE_OCTET_STREAM}, + unsigned_payload=True, + ) + response.raise_for_status() + else: + # multipart PUT + query_parts = ( + ("partNumber", str(chunk_id + 1)), + ("uploadId", self.upload_id), + ) + query_string = self._construct_query_string(query_parts) + chunk_url = f"{url}?{query_string}" + response = self._send_request_with_authentication_and_retry( + url=chunk_url, + verb="PUT", + retry_id=chunk_id, + payload=chunk, + unsigned_payload=True, + query_parts=dict(query_parts), + ) + if response.status_code == 200: + self.etags[chunk_id] = response.headers["ETag"] + response.raise_for_status() + + def _complete_multipart_upload(self) -> None: + query_parts = (("uploadId", self.upload_id),) + path = quote(self.s3location.path + self.meta.dst_file_name.lstrip("/")) + query_string = self._construct_query_string(query_parts) + url = self.endpoint + f"/{path}?{query_string}" + logger.debug("Initiating multipart upload complete") + # Complete multipart upload + root = ET.Element("CompleteMultipartUpload") + for idx, etag_str in enumerate(self.etags): + part = ET.Element("Part") + etag = ET.Element("ETag") + etag.text = etag_str + part.append(etag) + part_number = ET.Element("PartNumber") + part_number.text = str(idx + 1) + part.append(part_number) + root.append(part) + retry_id = "Complete" + self.retry_count[retry_id] = 0 + response = self._send_request_with_authentication_and_retry( + url=url, + verb="POST", + retry_id=retry_id, + payload=ET.tostring(root), + query_parts=dict(query_parts), + ) + response.raise_for_status() + + def _abort_multipart_upload(self) -> None: + if self.upload_id is None: + return + query_parts = (("uploadId", self.upload_id),) + path = quote(self.s3location.path + self.meta.dst_file_name.lstrip("/")) + query_string = self._construct_query_string(query_parts) + url = self.endpoint + f"/{path}?{query_string}" + + retry_id = "Abort" + self.retry_count[retry_id] = 
0 + response = self._send_request_with_authentication_and_retry( + url=url, + verb="DELETE", + retry_id=retry_id, + query_parts=dict(query_parts), + ) + response.raise_for_status() + + def download_chunk(self, chunk_id: int) -> None: + logger.debug(f"Downloading chunk {chunk_id}") + path = quote(self.s3location.path + self.meta.src_file_name.lstrip("/")) + url = self.endpoint + f"/{path}" + if self.num_of_chunks == 1: + response = self._send_request_with_authentication_and_retry( + url=url, verb="GET", retry_id=chunk_id + ) + if response.status_code == 200: + self.write_downloaded_chunk(0, response.content) + self.meta.result_status = ResultStatus.DOWNLOADED + response.raise_for_status() + else: + chunk_size = self.chunk_size + if chunk_id < self.num_of_chunks - 1: + _range = f"{chunk_id * chunk_size}-{(chunk_id+1)*chunk_size-1}" + else: + _range = f"{chunk_id * chunk_size}-" + + response = self._send_request_with_authentication_and_retry( + url=url, + verb="GET", + retry_id=chunk_id, + headers={"Range": f"bytes={_range}"}, + ) + if response.status_code in (200, 206): + self.write_downloaded_chunk(chunk_id, response.content) + response.raise_for_status() + + def _get_bucket_accelerate_config(self, bucket_name: str) -> bool: + query_parts = (("accelerate", ""),) + query_string = self._construct_query_string(query_parts) + url = f"https://{bucket_name}.s3.amazonaws.com/?{query_string}" + retry_id = "accelerate" + self.retry_count[retry_id] = 0 + response = self._send_request_with_authentication_and_retry( + url=url, verb="GET", retry_id=retry_id, query_parts=dict(query_parts) + ) + if response.status_code == 200: + config = ET.fromstring(response.text) + namespace = config.tag[: config.tag.index("}") + 1] + statusTag = f"{namespace}Status" + found = config.find(statusTag) + use_accelerate_endpoint = ( + False if found is None else (found.text == "Enabled") + ) + logger.debug(f"use_accelerate_endpoint: {use_accelerate_endpoint}") + return use_accelerate_endpoint + return False diff --git a/src/snowflake/connector/secret_detector.py b/src/snowflake/connector/secret_detector.py new file mode 100644 index 000000000..0d21f5ef9 --- /dev/null +++ b/src/snowflake/connector/secret_detector.py @@ -0,0 +1,162 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +"""The secret detector detects sensitive information. + +It masks secrets that might be leaked from two potential avenues + 1. Out of Band Telemetry + 2. 
Logging
+"""
+from __future__ import annotations
+
+import logging
+import os
+import re
+
+MIN_TOKEN_LEN = int(os.getenv("MIN_TOKEN_LEN", 32))
+MIN_PWD_LEN = int(os.getenv("MIN_PWD_LEN", 8))
+
+
+class SecretDetector(logging.Formatter):
+    AWS_KEY_PATTERN = re.compile(
+        r"(aws_key_id|aws_secret_key|access_key_id|secret_access_key)\s*=\s*'([^']+)'",
+        flags=re.IGNORECASE,
+    )
+    AWS_TOKEN_PATTERN = re.compile(
+        r'(accessToken|tempToken|keySecret)"\s*:\s*"([a-z0-9/+]{32,}={0,2})"',
+        flags=re.IGNORECASE,
+    )
+    SAS_TOKEN_PATTERN = re.compile(
+        r"(sig|signature|AWSAccessKeyId|password|passcode)=(?P<secret>[a-z0-9%/+]{16,})",
+        flags=re.IGNORECASE,
+    )
+    PRIVATE_KEY_PATTERN = re.compile(
+        r"-----BEGIN PRIVATE KEY-----\\n([a-z0-9/+=\\n]{32,})\\n-----END PRIVATE KEY-----",
+        flags=re.MULTILINE | re.IGNORECASE,
+    )
+    PRIVATE_KEY_DATA_PATTERN = re.compile(
+        r'"privateKeyData": "([a-z0-9/+=\\n]{10,})"', flags=re.MULTILINE | re.IGNORECASE
+    )
+    CONNECTION_TOKEN_PATTERN = re.compile(
+        r"(token|assertion content)" r"([\'\"\s:=]+)" r"([a-z0-9=/_\-\+]{8,})",
+        flags=re.IGNORECASE,
+    )
+
+    PASSWORD_PATTERN = re.compile(
+        r"(password"
+        r"|pwd)"
+        r"([\'\"\s:=]+)"
+        r"([a-z0-9!\"#\$%&\\\'\(\)\*\+\,-\./:;<=>\?\@\[\]\^_`\{\|\}~]{8,})",
+        flags=re.IGNORECASE,
+    )
+
+    @staticmethod
+    def mask_connection_token(text):
+        return SecretDetector.CONNECTION_TOKEN_PATTERN.sub(r"\1\2****", text)
+
+    @staticmethod
+    def mask_password(text):
+        return SecretDetector.PASSWORD_PATTERN.sub(r"\1\2****", text)
+
+    @staticmethod
+    def mask_aws_keys(text):
+        return SecretDetector.AWS_KEY_PATTERN.sub(r"\1='****'", text)
+
+    @staticmethod
+    def mask_sas_tokens(text):
+        return SecretDetector.SAS_TOKEN_PATTERN.sub(r"\1=****", text)
+
+    @staticmethod
+    def mask_aws_tokens(text):
+        return SecretDetector.AWS_TOKEN_PATTERN.sub(r'\1":"XXXX"', text)
+
+    @staticmethod
+    def mask_private_key(text):
+        return SecretDetector.PRIVATE_KEY_PATTERN.sub(
+            "-----BEGIN PRIVATE KEY-----\\\\nXXXX\\\\n-----END PRIVATE KEY-----", text
+        )
+
+    @staticmethod
+    def mask_private_key_data(text):
+        return SecretDetector.PRIVATE_KEY_DATA_PATTERN.sub(
+            '"privateKeyData": "XXXX"', text
+        )
+
+    @staticmethod
+    def mask_secrets(text: str) -> tuple[bool, str, str]:
+        """Masks any secrets. This is the method that should be used by outside classes.
+
+        Args:
+            text: A string which may contain a secret.
+
+        Returns:
+            A tuple of: whether masking occurred, the masked string, and an error
+            string if masking itself failed.
+        """
+        if text is None:
+            return (False, None, None)
+
+        masked = False
+        err_str = None
+        try:
+            masked_text = SecretDetector.mask_connection_token(
+                SecretDetector.mask_password(
+                    SecretDetector.mask_private_key_data(
+                        SecretDetector.mask_private_key(
+                            SecretDetector.mask_aws_tokens(
+                                SecretDetector.mask_sas_tokens(
+                                    SecretDetector.mask_aws_keys(text)
+                                )
+                            )
+                        )
+                    )
+                )
+            )
+            if masked_text != text:
+                masked = True
+        except Exception as ex:
+            # We'll assume that the exception was raised during masking. To be
+            # safe, consider that the log has sensitive information and do not
+            # re-raise; report the error through the returned tuple instead.
+            masked = True
+            masked_text = str(ex)
+            err_str = str(ex)
+
+        return masked, masked_text, err_str
+
+    def format(self, record: logging.LogRecord) -> str:
+        """Wrapper around the logging module's formatter.
+
+        This ensures that the formatted message is free from sensitive credentials.
+
+        Args:
+            record: The logging record.
+
+        Returns:
+            Formatted desensitized log string.
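+
+        A minimal usage sketch (hedged; the handler and format string are
+        illustrative)::
+
+            import logging
+
+            handler = logging.StreamHandler()
+            handler.setFormatter(SecretDetector("%(asctime)s - %(message)s"))
+            logging.getLogger("snowflake.connector").addHandler(handler)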
+ """ + try: + unsanitized_log = super().format(record) + masked, sanitized_log, err_str = SecretDetector.mask_secrets( + unsanitized_log + ) + if masked and err_str is not None: + sanitized_log = "{} - {} {} - {} - {} - {}".format( + record.asctime, + record.threadName, + "secret_detector.py", + "sanitize_log_str", + record.levelname, + err_str, + ) + except Exception as ex: + sanitized_log = "{} - {} {} - {} - {} - {}".format( + record.asctime, + record.threadName, + "secret_detector.py", + "sanitize_log_str", + record.levelname, + "EXCEPTION - " + str(ex), + ) + return sanitized_log diff --git a/src/snowflake/connector/sfbinaryformat.py b/src/snowflake/connector/sfbinaryformat.py new file mode 100644 index 000000000..78187c3d1 --- /dev/null +++ b/src/snowflake/connector/sfbinaryformat.py @@ -0,0 +1,39 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +from base64 import b16decode, b16encode, standard_b64encode + +from .errors import InternalError + +# Converts a Snowflake binary value into a "bytes" object. +binary_to_python = b16decode + + +def binary_to_snowflake(binary_value) -> bytes | bytearray: + """Encodes a "bytes" object for passing to Snowflake.""" + result = b16encode(binary_value) + + if isinstance(binary_value, bytearray): + return bytearray(result) + return result + + +class SnowflakeBinaryFormat: + """Formats binary values ("bytes" objects) in hex or base64.""" + + def __init__(self, name): + name = name.upper() + if name == "HEX": + self._encode = b16encode + elif name == "BASE64": + self._encode = standard_b64encode + else: + raise InternalError(f"Unrecognized binary format {name}") + + def format(self, binary_value): + """Formats a "bytes" object, returning a string.""" + return self._encode(binary_value).decode("ascii") diff --git a/sfdatetime.py b/src/snowflake/connector/sfdatetime.py similarity index 54% rename from sfdatetime.py rename to src/snowflake/connector/sfdatetime.py index 79f97251f..c96b05133 100644 --- a/sfdatetime.py +++ b/src/snowflake/connector/sfdatetime.py @@ -1,47 +1,44 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
# -from collections import namedtuple -import time -from datetime import timedelta, datetime, date +from __future__ import annotations -from .compat import TO_UNICODE +import time +from collections import namedtuple +from datetime import date, datetime, timedelta ZERO_TIMEDELTA = timedelta(0) ElementType = { - u'Year2digit_ElementType': [u"YY", u"%y"], - u'Year_ElementType': [u"YYYY", u"%Y"], - u'Month_ElementType': [u"MM", u"%m"], - u'MonthAbbrev_ElementType': [u"MON", u"%b"], - u'DayOfMonth_ElementType': [u"DD", u"%d"], - u'DayOfWeekAbbrev_ElementType': [u"DY", u"%a"], - u'Hour24_ElementType': [u"HH24", u"%H"], - u'Hour12_ElementType': [u"HH12", u"%I"], - u'Hour_ElementType': [u"HH", u"%H"], - u'Ante_Meridiem_ElementType': [u"AM", u"%p"], - u'Post_Meridiem_ElementType': [u"PM", u"%p"], - u'Minute_ElementType': [u"MI", u"%M"], - u'Second_ElementType': [u"SS", u"%S"], - u'MilliSecond_ElementType': [u"FF", u""], + "Year2digit_ElementType": ["YY", "%y"], + "Year_ElementType": ["YYYY", "%Y"], + "Month_ElementType": ["MM", "%m"], + "MonthAbbrev_ElementType": ["MON", "%b"], + "DayOfMonth_ElementType": ["DD", "%d"], + "DayOfWeekAbbrev_ElementType": ["DY", "%a"], + "Hour24_ElementType": ["HH24", "%H"], + "Hour12_ElementType": ["HH12", "%I"], + "Hour_ElementType": ["HH", "%H"], + "Ante_Meridiem_ElementType": ["AM", "%p"], + "Post_Meridiem_ElementType": ["PM", "%p"], + "Minute_ElementType": ["MI", "%M"], + "Second_ElementType": ["SS", "%S"], + "MilliSecond_ElementType": ["FF", ""], # special code for parsing fractions - u'TZOffsetHourColonMin_ElementType': [u"TZH:TZM", u"%z"], - u'TZOffsetHourMin_ElementType': [u"TZHTZM", u"%z"], - u'TZOffsetHourOnly_ElementType': [u"TZH", u"%z"], - u'TZAbbr_ElementType': [u"TZD", u"%Z"], + "TZOffsetHourColonMin_ElementType": ["TZH:TZM", "%z"], + "TZOffsetHourMin_ElementType": ["TZHTZM", "%z"], + "TZOffsetHourOnly_ElementType": ["TZH", "%z"], + "TZAbbr_ElementType": ["TZD", "%Z"], } def sfdatetime_total_seconds_from_timedelta(td): - return (td.microseconds + ( - td.seconds + td.days * 24 * 3600) * 10 ** 6) // 10 ** 6 + return (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) // 10**6 -SnowflakeDateTime = namedtuple( - 'SnowflakeDateTime', 'datetime nanosecond scale') +SnowflakeDateTime = namedtuple("SnowflakeDateTime", "datetime nanosecond scale") def _support_negative_year(value, year_len): @@ -55,7 +52,7 @@ def _support_negative_year_datetime(value, year_len): def _build_year_format(dt, year_len): - if hasattr(dt, 'year'): + if hasattr(dt, "year"): # datetime year_raw_value = dt.year else: @@ -70,13 +67,13 @@ def _support_negative_year_struct_time(dt, year_len): def _build_raw_year_format(year_raw_value, year_len): - sign_char = u'' + sign_char = "" if year_raw_value < 0: - sign_char = u'-' + sign_char = "-" year_raw_value *= -1 if year_len == 2: year_raw_value %= 100 - fmt = sign_char + u'{:0' + TO_UNICODE(year_len) + u'd}' + fmt = sign_char + "{:0" + str(year_len) + "d}" return fmt.format(year_raw_value) @@ -87,15 +84,15 @@ def _support_negative_year_date(value, year_len): def _inject_fraction(value, fraction_len): # if FF is included - nano_str = u'{:09d}' + nano_str = "{:09d}" - if hasattr(value, 'microsecond'): - nano_str = u'{:06d}' + if hasattr(value, "microsecond"): + nano_str = "{:06d}" fraction = value.microsecond - elif hasattr(value, 'nanosecond'): + elif hasattr(value, "nanosecond"): fraction = value.nanosecond else: - nano_str = u'{:01d}' + nano_str = "{:01d}" fraction = 0 # struct_time. 
no fraction of second if fraction_len > 0: @@ -104,9 +101,9 @@ def _inject_fraction(value, fraction_len): else: # no length of FF is specified nano_value = nano_str.format(fraction) - if hasattr(value, 'scale'): + if hasattr(value, "scale"): # but scale is specified - nano_value = nano_value[:value.scale] + nano_value = nano_value[: value.scale] return nano_value @@ -119,24 +116,23 @@ def _inject_others(_, value0): _support_negative_year_datetime, _support_negative_year_struct_time, _support_negative_year_date, - _inject_fraction + _inject_fraction, } -class SnowflakeDateTimeFormat(object): - """ - Snowflake DateTime Formatter - """ +class SnowflakeDateTimeFormat: + """Snowflake DateTime Formatter.""" def __init__( - self, - sql_format, - data_type=u'TIMESTAMP_NTZ', - datetime_class=datetime, - support_negative_year=True, - inject_fraction=True): + self, + sql_format, + data_type="TIMESTAMP_NTZ", + datetime_class=datetime, + support_negative_year=True, + inject_fraction=True, + ): self._sql_format = sql_format - self._ignore_tz = data_type in (u'TIMESTAMP_NTZ', u'DATE') + self._ignore_tz = data_type in ("TIMESTAMP_NTZ", "DATE") if datetime_class == datetime: self._support_negative_year_method = _support_negative_year_datetime elif datetime_class == time.struct_time: @@ -147,39 +143,34 @@ def __init__( self._support_negative_year_method = _support_negative_year # format method - self.format = getattr(self, u'_format_{type_name}'.format( - type_name=datetime_class.__name__)) + self.format = getattr(self, f"_format_{datetime_class.__name__}") self._compile( - support_negative_year=support_negative_year, - inject_fraction=inject_fraction) + support_negative_year=support_negative_year, inject_fraction=inject_fraction + ) def _pre_format(self, value): fmt = [] for e in self._elements: f = e[0] fmt.append(f(value, e[1])) - return u''.join(fmt) + return "".join(fmt) def _format_SnowflakeDateTime(self, value): - """ - Formats SnowflakeDateTime object - """ + """Formats SnowflakeDateTime object.""" fmt = self._pre_format(value) dt = value.datetime if isinstance(dt, time.struct_time): - return TO_UNICODE(time.strftime(fmt, dt)) + return str(time.strftime(fmt, dt)) if dt.year < 1000: # NOTE: still not supported return dt.isoformat() return dt.strftime(fmt) def _format_datetime(self, value): - """ - Formats datetime object - """ + """Formats datetime object.""" fmt = self._pre_format(value) if isinstance(value, time.struct_time): - return TO_UNICODE(time.strftime(fmt, value)) + return str(time.strftime(fmt, value)) if value.year < 1000: # NOTE: still not supported. 
return value.isoformat() @@ -195,8 +186,7 @@ def _match_token(self, sql_fmt, candidates, ignore=False): return 1 def _add_raw_char(self, ch): - self._elements.append( - (_inject_others, u'%%' if ch == u'%' else ch)) + self._elements.append((_inject_others, "%%" if ch == "%" else ch)) def _compile(self, support_negative_year=True, inject_fraction=True): self._elements = [] @@ -205,126 +195,119 @@ def _compile(self, support_negative_year=True, inject_fraction=True): while idx < len(u_sql_format): ch = u_sql_format[idx] - if ch == u'A': + if ch == "A": idx += self._match_token( u_sql_format[idx:], [ - ElementType[u'Ante_Meridiem_ElementType'], - ]) - elif ch == u'D': + ElementType["Ante_Meridiem_ElementType"], + ], + ) + elif ch == "D": idx += self._match_token( u_sql_format[idx:], [ - ElementType[u'DayOfMonth_ElementType'], - ElementType[u'DayOfWeekAbbrev_ElementType'], - ] + ElementType["DayOfMonth_ElementType"], + ElementType["DayOfWeekAbbrev_ElementType"], + ], ) - elif ch == u'H': + elif ch == "H": idx += self._match_token( u_sql_format[idx:], [ - ElementType[u'Hour24_ElementType'], - ElementType[u'Hour12_ElementType'], - ElementType[u'Hour_ElementType'], - ] + ElementType["Hour24_ElementType"], + ElementType["Hour12_ElementType"], + ElementType["Hour_ElementType"], + ], ) - elif ch == u'M': + elif ch == "M": idx += self._match_token( u_sql_format[idx:], [ - ElementType[u'MonthAbbrev_ElementType'], - ElementType[u'Month_ElementType'], - ElementType[u'Minute_ElementType'], - ] + ElementType["MonthAbbrev_ElementType"], + ElementType["Month_ElementType"], + ElementType["Minute_ElementType"], + ], ) - elif ch == u'P': + elif ch == "P": idx += self._match_token( u_sql_format[idx:], [ - ElementType[u'Post_Meridiem_ElementType'], - ] + ElementType["Post_Meridiem_ElementType"], + ], ) - elif ch == u'S': + elif ch == "S": idx += self._match_token( u_sql_format[idx:], [ - ElementType[u'Second_ElementType'], - ] + ElementType["Second_ElementType"], + ], ) - elif ch == u'T': + elif ch == "T": # ignore TZ format if data type doesn't have TZ. 
idx += self._match_token( u_sql_format[idx:], [ - ElementType[u'TZOffsetHourColonMin_ElementType'], - ElementType[u'TZOffsetHourMin_ElementType'], - ElementType[u'TZOffsetHourOnly_ElementType'], - ElementType[u'TZAbbr_ElementType'], + ElementType["TZOffsetHourColonMin_ElementType"], + ElementType["TZOffsetHourMin_ElementType"], + ElementType["TZOffsetHourOnly_ElementType"], + ElementType["TZAbbr_ElementType"], ], ignore=self._ignore_tz, ) - elif ch == u'Y': + elif ch == "Y": idx += self._match_token( u_sql_format[idx:], [ - ElementType[u'Year_ElementType'], - ElementType[u'Year2digit_ElementType'], - ] + ElementType["Year_ElementType"], + ElementType["Year2digit_ElementType"], + ], ) if support_negative_year: # Add a special directive to handle YYYY/YY last_element = self._elements[-1] - if last_element[1] == '%Y': + if last_element[1] == "%Y": del self._elements[-1] - self._elements.append( - (self._support_negative_year_method, 4)) - elif last_element[1] == '%y': + self._elements.append((self._support_negative_year_method, 4)) + elif last_element[1] == "%y": del self._elements[-1] - self._elements.append( - (self._support_negative_year_method, 2)) + self._elements.append((self._support_negative_year_method, 2)) - elif ch == u'.': - if idx + 1 < len(u_sql_format) and \ - u_sql_format[idx + 1:].startswith( - ElementType[u'MilliSecond_ElementType'][0]): + elif ch == ".": + if idx + 1 < len(u_sql_format) and u_sql_format[idx + 1 :].startswith( + ElementType["MilliSecond_ElementType"][0] + ): # Will be FF, just mark that there's a dot before FF - self._elements.append((_inject_others, u'.')) + self._elements.append((_inject_others, ".")) self._fractions_with_dot = True else: self._add_raw_char(ch) idx += 1 - elif ch == u'F': + elif ch == "F": if u_sql_format[idx:].startswith( - ElementType[u'MilliSecond_ElementType'][0]): - idx += len(ElementType[u'MilliSecond_ElementType'][0]) + ElementType["MilliSecond_ElementType"][0] + ): + idx += len(ElementType["MilliSecond_ElementType"][0]) if inject_fraction: # Construct formatter to find fractions position. fractions_len = -1 - if idx < len(u_sql_format) and \ - u_sql_format[idx].isdigit(): + if idx < len(u_sql_format) and u_sql_format[idx].isdigit(): # followed by a single digit? 
fractions_len = int(u_sql_format[idx]) idx += 1 - self._elements.append( - (_inject_fraction, fractions_len)) + self._elements.append((_inject_fraction, fractions_len)) else: - self._elements.append((_inject_others, u'0')) + self._elements.append((_inject_others, "0")) else: self._add_raw_char(ch) idx += 1 - elif ch == u'"': + elif ch == '"': # copy a double quoted string to the python format idx += 1 start_idx = idx - while idx < len(self._sql_format) and \ - self._sql_format[idx] != u'"': + while idx < len(self._sql_format) and self._sql_format[idx] != '"': idx += 1 - self._elements.append( - ( - _inject_others, - self._sql_format[start_idx:idx] - )) + self._elements.append((_inject_others, self._sql_format[start_idx:idx])) if idx < len(self._sql_format): idx += 1 else: @@ -343,22 +326,20 @@ def _optimize_elements(self): return del self._elements[-1] del self._elements[-1] - self._elements.append(( - _inject_others, - second_last_element[1] + last_element[1])) + self._elements.append( + (_inject_others, second_last_element[1] + last_element[1]) + ) class SnowflakeDateFormat(SnowflakeDateTimeFormat): def __init__(self, sql_format, **kwargs): - kwargs['inject_fraction'] = False # no fraction - super(SnowflakeDateFormat, self).__init__(sql_format, **kwargs) + kwargs["inject_fraction"] = False # no fraction + super().__init__(sql_format, **kwargs) def _format_struct_time(self, value): - """ - Formats struct_time - """ + """Formats struct_time.""" fmt = self._pre_format(value) - return TO_UNICODE(time.strftime(fmt, value)) + return str(time.strftime(fmt, value)) def _format_date(self, value): fmt = self._pre_format(value) diff --git a/src/snowflake/connector/snow_logging.py b/src/snowflake/connector/snow_logging.py new file mode 100644 index 000000000..681bd781c --- /dev/null +++ b/src/snowflake/connector/snow_logging.py @@ -0,0 +1,96 @@ +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +import logging +import warnings + + +def getSnowLogger(name=None, extra=None): + if name: + logger = logging.getLogger(name) + return SnowLogger(logger, extra) + + +class SnowLogger(logging.LoggerAdapter): + """Snowflake Python logger wrapper of the built-in Python logger. + + This logger wrapper supports user-provided logging info about + file name, function name and line number. This wrapper can be + used in Cython code (.pyx). 
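+
+    A hedged usage sketch (the location arguments are caller-provided, since
+    compiled Cython frames don't carry them automatically)::
+
+        slogger = getSnowLogger(__name__)
+        slogger.debug(
+            "fetching chunk",
+            path_name="arrow_iterator.pyx",  # illustrative values
+            func_name="fetch",
+            line_num=42,
+        )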
+ """ + + def debug(self, msg, path_name=None, func_name=None, *args, **kwargs): + self.log(logging.DEBUG, msg, path_name, func_name, *args, **kwargs) + + def info(self, msg, path_name=None, func_name=None, *args, **kwargs): + self.log(logging.INFO, msg, path_name, func_name, *args, **kwargs) + + def warning(self, msg, path_name=None, func_name=None, *args, **kwargs): + self.log(logging.WARNING, msg, path_name, func_name, *args, **kwargs) + + def warn(self, msg, path_name=None, func_name=None, *args, **kwargs): + warnings.warn( + "The 'warn' method is deprecated, " "use 'warning' instead", + DeprecationWarning, + 2, + ) + self.warning(msg, path_name, func_name, *args, **kwargs) + + def error(self, msg, path_name=None, func_name=None, *args, **kwargs): + self.log(logging.ERROR, msg, path_name, func_name, *args, **kwargs) + + def exception( + self, msg, path_name=None, func_name=None, *args, exc_info=True, **kwargs + ): + """Convenience method for logging an ERROR with exception information.""" + self.error(msg, path_name, func_name, *args, exc_info=exc_info, **kwargs) + + def critical(self, msg, path_name=None, func_name=None, *args, **kwargs): + self.log(logging.CRITICAL, msg, path_name, func_name, *args, **kwargs) + + fatal = critical + + def log( + self, + level: int, + msg: str, + path_name: str | None = None, + func_name: str | None = None, + line_num: int = 0, + *args, + **kwargs, + ): + """Generalized log method of SnowLogger wrapper. + + Args: + level: Logging level. + msg: Logging message. + path_name: Absolute or relative path of the file where the logger gets called. + func_name: Function inside which the logger gets called. + line_num: Line number at which the logger gets called. + """ + if not path_name: + path_name = "path_name not provided" + if not func_name: + func_name = "func_name not provided" + if not isinstance(level, int): + if logging.raiseExceptions: + raise TypeError("level must be an integer") + else: + return + if self.logger.isEnabledFor(level): + record = self.logger.makeRecord( + self.logger.name, + level, + path_name, + line_num, + msg, + args, + None, + func_name, + **kwargs, + ) + self.logger.handle(record) diff --git a/sqlstate.py b/src/snowflake/connector/sqlstate.py similarity index 79% rename from sqlstate.py rename to src/snowflake/connector/sqlstate.py index 1b684cd38..dcc86203e 100644 --- a/sqlstate.py +++ b/src/snowflake/connector/sqlstate.py @@ -1,7 +1,6 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. # SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED = "08001" diff --git a/ssd_internal_keys.py b/src/snowflake/connector/ssd_internal_keys.py similarity index 71% rename from ssd_internal_keys.py rename to src/snowflake/connector/ssd_internal_keys.py index e715c1373..6ecb4b77d 100644 --- a/ssd_internal_keys.py +++ b/src/snowflake/connector/ssd_internal_keys.py @@ -1,9 +1,10 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
 #
+
+from __future__ import annotations
+
 from binascii import unhexlify
 
 # key version
@@ -26,8 +27,8 @@ def ret_int_pub_key_ver(issuer):
 
 
 def ret_wildcard_hkey():
-    issuer_name_hash = unhexlify('040130')
-    issuer_key_hash = unhexlify('040130')
-    serial_number = unhexlify('020100')
+    issuer_name_hash = unhexlify("040130")
+    issuer_key_hash = unhexlify("040130")
+    serial_number = unhexlify("020100")
     hkey = (issuer_name_hash, issuer_key_hash, serial_number)
     return hkey
diff --git a/src/snowflake/connector/ssl_wrap_socket.py b/src/snowflake/connector/ssl_wrap_socket.py
new file mode 100644
index 000000000..f861fc3d5
--- /dev/null
+++ b/src/snowflake/connector/ssl_wrap_socket.py
@@ -0,0 +1,138 @@
+#
+# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved.
+#
+
+from __future__ import annotations
+
+#
+# SSL wrap socket for PyOpenSSL.
+# Mostly copied from
+#
+# https://github.com/shazow/urllib3/blob/master/urllib3/contrib/pyopenssl.py
+#
+# and added OCSP validator on the top.
+import logging
+import time
+from functools import wraps
+from inspect import getfullargspec as get_args
+from socket import socket
+
+import certifi
+import OpenSSL.SSL
+
+from .constants import OCSPMode
+from .errorcode import ER_OCSP_RESPONSE_CERT_STATUS_REVOKED
+from .errors import OperationalError
+from .vendored.urllib3 import connection as connection_
+from .vendored.urllib3.contrib.pyopenssl import PyOpenSSLContext
+from .vendored.urllib3.util import ssl_ as ssl_
+
+DEFAULT_OCSP_MODE: OCSPMode = OCSPMode.FAIL_OPEN
+FEATURE_OCSP_MODE: OCSPMode = DEFAULT_OCSP_MODE
+
+"""
+OCSP Response cache file name
+"""
+FEATURE_OCSP_RESPONSE_CACHE_FILE_NAME: str | None = None
+
+log = logging.getLogger(__name__)
+
+
+def inject_into_urllib3():
+    """Monkey-patch urllib3 with PyOpenSSL-backed SSL-support and OCSP."""
+    log.debug("Injecting ssl_wrap_socket_with_ocsp")
+    connection_.ssl_wrap_socket = ssl_wrap_socket_with_ocsp
+
+
+@wraps(ssl_.ssl_wrap_socket)
+def ssl_wrap_socket_with_ocsp(*args, **kwargs):
+    # Extract host_name
+    hostname_index = get_args(ssl_.ssl_wrap_socket).args.index("server_hostname")
+    server_hostname = (
+        args[hostname_index]
+        if len(args) > hostname_index
+        else kwargs.get("server_hostname", None)
+    )
+    # Remove context if present
+    ssl_context_index = get_args(ssl_.ssl_wrap_socket).args.index("ssl_context")
+    context_in_args = len(args) > ssl_context_index
+    ssl_context = (
+        args[ssl_context_index] if context_in_args else kwargs.get("ssl_context", None)
+    )
+    if not isinstance(ssl_context, PyOpenSSLContext):
+        # Create new default context
+        if context_in_args:
+            new_args = list(args)
+            new_args[ssl_context_index] = None
+            args = tuple(new_args)
+        else:
+            del kwargs["ssl_context"]
+    # Fix ca certs location
+    ca_certs_index = get_args(ssl_.ssl_wrap_socket).args.index("ca_certs")
+    ca_certs_in_args = len(args) > ca_certs_index
+    if not ca_certs_in_args and not kwargs.get("ca_certs"):
+        kwargs["ca_certs"] = certifi.where()
+
+    ret = ssl_.ssl_wrap_socket(*args, **kwargs)
+
+    from .ocsp_asn1crypto import SnowflakeOCSPAsn1Crypto as SFOCSP
+
+    log.debug(
+        "OCSP Mode: %s, " "OCSP response cache file name: %s",
+        FEATURE_OCSP_MODE.name,
+        FEATURE_OCSP_RESPONSE_CACHE_FILE_NAME,
+    )
+    if FEATURE_OCSP_MODE != OCSPMode.INSECURE:
+        v = SFOCSP(
+            ocsp_response_cache_uri=FEATURE_OCSP_RESPONSE_CACHE_FILE_NAME,
+            use_fail_open=FEATURE_OCSP_MODE == OCSPMode.FAIL_OPEN,
+        ).validate(server_hostname, ret.connection)
+        if not v:
+            raise OperationalError(
+                msg=(
+                    "The certificate is revoked or "
+                    "could not 
be validated: hostname={}".format(server_hostname) + ), + errno=ER_OCSP_RESPONSE_CERT_STATUS_REVOKED, + ) + else: + log.info( + "THIS CONNECTION IS IN INSECURE " + "MODE. IT MEANS THE CERTIFICATE WILL BE " + "VALIDATED BUT THE CERTIFICATE REVOCATION " + "STATUS WILL NOT BE CHECKED." + ) + + return ret + + +def _openssl_connect( + hostname: str, port: int = 443, max_retry: int = 20, timeout: int | None = None +) -> OpenSSL.SSL.Connection: + """The OpenSSL connection without validating certificates. + + This is used to diagnose SSL issues. + """ + err = None + sleeping_time = 1 + for _ in range(max_retry): + try: + client = socket() + client.connect((hostname, port)) + context = OpenSSL.SSL.Context(OpenSSL.SSL.SSLv23_METHOD) + if timeout is not None: + context.set_timeout(timeout) + client_ssl = OpenSSL.SSL.Connection(context, client) + client_ssl.set_connect_state() + client_ssl.set_tlsext_host_name(hostname.encode("utf-8")) + client_ssl.do_handshake() + return client_ssl + except ( + OpenSSL.SSL.SysCallError, + OSError, + ) as ex: + err = ex + sleeping_time = min(sleeping_time * 2, 16) + time.sleep(sleeping_time) + if err: + raise err diff --git a/src/snowflake/connector/storage_client.py b/src/snowflake/connector/storage_client.py new file mode 100644 index 000000000..887812258 --- /dev/null +++ b/src/snowflake/connector/storage_client.py @@ -0,0 +1,437 @@ +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +import os +import shutil +import tempfile +import threading +import time +from abc import ABC, abstractmethod +from collections import defaultdict +from io import BytesIO +from logging import getLogger +from math import ceil +from pathlib import Path +from typing import TYPE_CHECKING, Any, Callable, NamedTuple + +import OpenSSL + +from .constants import FileHeader, ResultStatus +from .encryption_util import EncryptionMetadata, SnowflakeEncryptionUtil +from .errors import RequestExceedMaxRetryError +from .file_util import SnowflakeFileUtil +from .vendored import requests +from .vendored.requests import ConnectionError, Timeout + +if TYPE_CHECKING: # pragma: no cover + from .file_transfer_agent import SnowflakeFileMeta, StorageCredential + +logger = getLogger(__name__) + + +class SnowflakeFileEncryptionMaterial(NamedTuple): + query_stage_master_key: str # query stage master key + query_id: str # query id + smk_id: int # SMK id + + +METHODS = { + "GET": requests.get, + "PUT": requests.put, + "POST": requests.post, + "HEAD": requests.head, + "DELETE": requests.delete, +} + + +class SnowflakeStorageClient(ABC): + TRANSIENT_HTTP_ERR = (408, 429, 500, 502, 503, 504) + + TRANSIENT_ERRORS = (OpenSSL.SSL.SysCallError, Timeout, ConnectionError) + SLEEP_MAX = 16.0 + SLEEP_UNIT = 1.0 + + def __init__( + self, + meta: SnowflakeFileMeta, + stage_info: dict[str, Any], + chunk_size: int, + chunked_transfer: bool | None = True, + credentials: StorageCredential | None = None, + max_retry: int = 5, + ) -> None: + self.meta = meta + self.stage_info = stage_info + self.retry_count: dict[int | str, int] = defaultdict(lambda: 0) + self.tmp_dir = tempfile.mkdtemp() + self.data_file: str | None = None + self.encryption_metadata: EncryptionMetadata | None = None + + self.max_retry = max_retry # TODO + self.credentials = credentials + # UPLOAD + meta.real_src_file_name = meta.src_file_name + meta.upload_size = meta.src_file_size + self.preprocessed = ( + False # so we don't repeat compression/file digest when re-encrypting + ) + # DOWNLOAD + 
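+        # The two paths below: full_dst_file_name is the final local target,
+        # and intermediate_dst_path adds a ".part" suffix so partially
+        # downloaded data never lands at the final path; finish_download()
+        # decrypts if needed and then moves the completed file into place.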
self.full_dst_file_name: str | None = ( + os.path.realpath( + os.path.join( + self.meta.local_location, os.path.basename(self.meta.dst_file_name) + ) + ) + if self.meta.local_location + else None + ) + self.intermediate_dst_path: Path | None = ( + Path(self.full_dst_file_name + ".part") + if self.meta.local_location + else None + ) + # CHUNK + self.chunked_transfer = chunked_transfer # only true for GCS + self.chunk_size = chunk_size + self.num_of_chunks = 0 + self.lock = threading.Lock() + self.successful_transfers: int = 0 + self.failed_transfers: int = 0 + # only used when PRESIGNED_URL expires + self.last_err_is_presigned_url = False + + def compress(self) -> None: + if self.meta.require_compress: + meta = self.meta + logger.debug(f"compressing file={meta.src_file_name}") + if meta.intermediate_stream: + ( + meta.src_stream, + upload_size, + ) = SnowflakeFileUtil.compress_with_gzip_from_stream( + meta.intermediate_stream + ) + else: + ( + meta.real_src_file_name, + upload_size, + ) = SnowflakeFileUtil.compress_file_with_gzip( + meta.src_file_name, self.tmp_dir + ) + + def get_digest(self) -> None: + meta = self.meta + logger.debug(f"getting digest file={meta.real_src_file_name}") + if meta.intermediate_stream is None: + ( + meta.sha256_digest, + meta.upload_size, + ) = SnowflakeFileUtil.get_digest_and_size_for_file(meta.real_src_file_name) + else: + ( + meta.sha256_digest, + meta.upload_size, + ) = SnowflakeFileUtil.get_digest_and_size_for_stream( + meta.src_stream or meta.intermediate_stream + ) + + def encrypt(self) -> None: + meta = self.meta + logger.debug(f"encrypting file={meta.real_src_file_name}") + if meta.intermediate_stream is None: + ( + self.encryption_metadata, + self.data_file, + ) = SnowflakeEncryptionUtil.encrypt_file( + meta.encryption_material, + meta.real_src_file_name, + tmp_dir=self.tmp_dir, + ) + meta.upload_size = os.path.getsize(self.data_file) + else: + encrypted_stream = BytesIO() + src_stream = meta.src_stream or meta.intermediate_stream + src_stream.seek(0) + self.encryption_metadata = SnowflakeEncryptionUtil.encrypt_stream( + meta.encryption_material, src_stream, encrypted_stream + ) + src_stream.seek(0) + meta.upload_size = encrypted_stream.seek(0, os.SEEK_END) + encrypted_stream.seek(0) + if meta.src_stream is not None: + meta.src_stream.close() + meta.src_stream = encrypted_stream + self.data_file = meta.real_src_file_name + + @abstractmethod + def get_file_header(self, filename: str) -> FileHeader | None: + """Check if file exists in target location and obtain file metadata if exists. + + Notes: + Updates meta.result_status. 
+ """ + pass + + def preprocess(self) -> None: + meta = self.meta + logger.debug(f"Preprocessing {meta.src_file_name}") + + if not meta.overwrite: + self.get_file_header(meta.dst_file_name) # Check if file exists on remote + if meta.result_status == ResultStatus.UPLOADED: + # Skipped + logger.debug( + f'file already exists location="{self.stage_info["location"]}", ' + f'file_name="{meta.dst_file_name}"' + ) + meta.dst_file_size = 0 + meta.result_status = ResultStatus.SKIPPED + self.preprocessed = True + return + # Uploading + if meta.require_compress: + self.compress() + self.get_digest() + + self.preprocessed = True + + def prepare_upload(self) -> None: + meta = self.meta + + if not self.preprocessed: + self.preprocess() + elif meta.encryption_material: + # need to clean up previous encrypted file + os.remove(self.data_file) + + logger.debug(f"Preparing to upload {meta.src_file_name}") + + if meta.encryption_material: + self.encrypt() + else: + self.data_file = meta.real_src_file_name + logger.debug("finished preprocessing") + if meta.upload_size < meta.multipart_threshold or not self.chunked_transfer: + self.num_of_chunks = 1 + else: + self.num_of_chunks = ceil(meta.upload_size / self.chunk_size) + logger.debug(f"number of chunks {self.num_of_chunks}") + # clean up + self.retry_count = {} + + for chunk_id in range(self.num_of_chunks): + self.retry_count[chunk_id] = 0 + if self.chunked_transfer and self.num_of_chunks > 1: + self._initiate_multipart_upload() + + def finish_upload(self) -> None: + meta = self.meta + if self.successful_transfers == self.num_of_chunks: + if self.num_of_chunks > 1: + self._complete_multipart_upload() + meta.result_status = ResultStatus.UPLOADED + meta.dst_file_size = meta.upload_size + logger.debug(f"{meta.src_file_name} upload is completed.") + else: + # TODO: add more error details to result/meta + meta.dst_file_size = 0 + logger.debug(f"{meta.src_file_name} upload is aborted.") + if self.num_of_chunks > 1: + self._abort_multipart_upload() + meta.result_status = ResultStatus.ERROR + + @abstractmethod + def _has_expired_token(self, response: requests.Response) -> bool: + pass + + def _send_request_with_retry( + self, + verb: str, + get_request_args: Callable[[], tuple[bytes, dict[str, Any]]], + retry_id: int, + ) -> requests.Response: + rest_call = METHODS[verb] + url = b"" + conn = None + if self.meta.self and self.meta.self._cursor.connection: + conn = self.meta.self._cursor.connection + + while self.retry_count[retry_id] < self.max_retry: + cur_timestamp = self.credentials.timestamp + url, rest_kwargs = get_request_args() + try: + if conn: + with conn._rest._use_requests_session(url) as session: + logger.debug(f"storage client request with session {session}") + response = session.request(verb, url, **rest_kwargs) + else: + logger.debug("storage client request with new session") + response = rest_call(url, **rest_kwargs) + + if self._has_expired_presigned_url(response): + self._update_presigned_url() + else: + self.last_err_is_presigned_url = False + if response.status_code in self.TRANSIENT_HTTP_ERR: + time.sleep( + min( + # TODO should SLEEP_UNIT come from the parent + # SnowflakeConnection and be customizable by users? 
+ (2 ** self.retry_count[retry_id]) * self.SLEEP_UNIT, + self.SLEEP_MAX, + ) + ) + self.retry_count[retry_id] += 1 + elif self._has_expired_token(response): + self.credentials.update(cur_timestamp) + else: + return response + except self.TRANSIENT_ERRORS as e: + self.last_err_is_presigned_url = False + time.sleep( + min( + (2 ** self.retry_count[retry_id]) * self.SLEEP_UNIT, + self.SLEEP_MAX, + ) + ) + logger.warning(f"{verb} with url {url} failed for transient error: {e}") + self.retry_count[retry_id] += 1 + else: + raise RequestExceedMaxRetryError( + f"{verb} with url {url} failed for exceeding maximum retries." + ) + + def prepare_download(self) -> None: + # TODO: add nicer error message for when target directory is not writeable + # but this should be done before we get here + base_dir = os.path.dirname(self.full_dst_file_name) + if not os.path.exists(base_dir): + os.makedirs(base_dir) + + # HEAD + file_header = self.get_file_header(self.meta.real_src_file_name) + + if file_header and file_header.encryption_metadata: + self.encryption_metadata = file_header.encryption_metadata + + self.num_of_chunks = 1 + if file_header and file_header.content_length: + self.meta.src_file_size = file_header.content_length + if ( + self.chunked_transfer + and self.meta.src_file_size > self.meta.multipart_threshold + ): + self.num_of_chunks = ceil(file_header.content_length / self.chunk_size) + + # Preallocate encrypted file. + with self.intermediate_dst_path.open("wb+") as fd: + fd.truncate(self.meta.src_file_size) + + def write_downloaded_chunk(self, chunk_id: int, data: bytes) -> None: + """Writes given data to the temp location starting at chunk_id * chunk_size.""" + # TODO: should we use chunking and write content in smaller chunks? + with self.intermediate_dst_path.open("rb+") as fd: + fd.seek(self.chunk_size * chunk_id) + fd.write(data) + + def finish_download(self) -> None: + meta = self.meta + if self.num_of_chunks != 0 and self.successful_transfers == self.num_of_chunks: + meta.result_status = ResultStatus.DOWNLOADED + if meta.encryption_material: + logger.debug(f"encrypted data file={self.full_dst_file_name}") + # For storage utils that do not have the privilege of + # getting the metadata early, both object and metadata + # are downloaded at once. In which case, the file meta will + # be updated with all the metadata that we need and + # then we can call get_file_header to get just that and also + # preserve the idea of getting metadata in the first place. + # One example of this is the utils that use presigned url + # for upload/download and not the storage client library. 
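+                # In short: when a presigned URL was used, the file header
+                # (and with it the encryption metadata) is only fetched here,
+                # right before decryption needs it.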
+ if meta.presigned_url is not None: + file_header = self.get_file_header(meta.src_file_name) + self.encryption_metadata = file_header.encryption_metadata + + tmp_dst_file_name = SnowflakeEncryptionUtil.decrypt_file( + self.encryption_metadata, + meta.encryption_material, + str(self.intermediate_dst_path), + tmp_dir=self.tmp_dir, + ) + shutil.move(tmp_dst_file_name, self.full_dst_file_name) + self.intermediate_dst_path.unlink() + else: + logger.debug(f"not encrypted data file={self.full_dst_file_name}") + shutil.move(str(self.intermediate_dst_path), self.full_dst_file_name) + stat_info = os.stat(self.full_dst_file_name) + meta.dst_file_size = stat_info.st_size + else: + # TODO: add more error details to result/meta + if os.path.isfile(self.full_dst_file_name): + os.unlink(self.full_dst_file_name) + logger.exception(f"Failed to download a file: {self.full_dst_file_name}") + meta.dst_file_size = -1 + meta.result_status = ResultStatus.ERROR + + def upload_chunk(self, chunk_id: int) -> None: + new_stream = not bool(self.meta.src_stream or self.meta.intermediate_stream) + fd = ( + self.meta.src_stream + or self.meta.intermediate_stream + or open(self.data_file, "rb") + ) + try: + if self.num_of_chunks == 1: + _data = fd.read() + else: + fd.seek(chunk_id * self.chunk_size) + _data = fd.read(self.chunk_size) + finally: + if new_stream: + fd.close() + logger.debug(f"Uploading chunk {chunk_id} of file {self.data_file}") + self._upload_chunk(chunk_id, _data) + logger.debug(f"Successfully uploaded chunk {chunk_id} of file {self.data_file}") + + @abstractmethod + def _upload_chunk(self, chunk_id: int, chunk: bytes) -> None: + pass + + @abstractmethod + def download_chunk(self, chunk_id: int) -> None: + pass + + # Override in GCS + def _has_expired_presigned_url(self, response: requests.Response) -> bool: + return False + + # Override in GCS + def _update_presigned_url(self) -> None: + pass + + # Override in S3 + def _initiate_multipart_upload(self) -> None: + pass + + # Override in S3 + def _complete_multipart_upload(self) -> None: + pass + + # Override in S3 + def _abort_multipart_upload(self) -> None: + pass + + def delete_client_data(self) -> None: + """Deletes the tmp_dir and closes the source stream belonging to this client. + This function is idempotent.""" + if os.path.exists(self.tmp_dir): + logger.debug(f"cleaning up tmp dir: {self.tmp_dir}") + shutil.rmtree(self.tmp_dir) + if self.meta.src_stream and not self.meta.src_stream.closed: + self.meta.src_stream.close() + + def __del__(self) -> None: + self.delete_client_data() diff --git a/src/snowflake/connector/telemetry.py b/src/snowflake/connector/telemetry.py new file mode 100644 index 000000000..c29ec1a7b --- /dev/null +++ b/src/snowflake/connector/telemetry.py @@ -0,0 +1,163 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
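An aside on the retry loop in _send_request_with_retry above: this standalone sketch reproduces its backoff schedule; the constants mirror SnowflakeStorageClient.SLEEP_UNIT and SLEEP_MAX.

```python
SLEEP_UNIT = 1.0  # mirrors SnowflakeStorageClient.SLEEP_UNIT
SLEEP_MAX = 16.0  # mirrors SnowflakeStorageClient.SLEEP_MAX


def sleep_for_attempt(retry_count: int) -> float:
    """Seconds slept before retrying a transient HTTP failure."""
    return min((2 ** retry_count) * SLEEP_UNIT, SLEEP_MAX)


print([sleep_for_attempt(n) for n in range(6)])  # [1.0, 2.0, 4.0, 8.0, 16.0, 16.0]
```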
+# + +from __future__ import annotations + +import logging +from enum import Enum, unique +from threading import Lock +from typing import TYPE_CHECKING + +from .secret_detector import SecretDetector +from .test_util import ENABLE_TELEMETRY_LOG, rt_plain_logger + +if TYPE_CHECKING: + from .network import SnowflakeRestful + +logger = logging.getLogger(__name__) + + +@unique +class TelemetryField(Enum): + # Fields which can be logged to telemetry + TIME_CONSUME_FIRST_RESULT = "client_time_consume_first_result" + TIME_CONSUME_LAST_RESULT = "client_time_consume_last_result" + TIME_DOWNLOADING_CHUNKS = "client_time_downloading_chunks" + TIME_PARSING_CHUNKS = "client_time_parsing_chunks" + SQL_EXCEPTION = "client_sql_exception" + GET_PARTITIONS_USED = "client_get_partitions_used" + EMPTY_SEQ_INTERPOLATION = "client_pyformat_empty_seq_interpolation" + # fetch_pandas_* usage + PANDAS_FETCH_ALL = "client_fetch_pandas_all" + PANDAS_FETCH_BATCHES = "client_fetch_pandas_batches" + # fetch_arrow_* usage + ARROW_FETCH_ALL = "client_fetch_arrow_all" + ARROW_FETCH_BATCHES = "client_fetch_arrow_batches" + # Keys for telemetry data sent through either in-band or out-of-band telemetry + KEY_TYPE = "type" + KEY_SOURCE = "source" + KEY_SFQID = "QueryID" + KEY_SQLSTATE = "SQLState" + KEY_DRIVER_TYPE = "DriverType" + KEY_DRIVER_VERSION = "DriverVersion" + KEY_REASON = "reason" + KEY_ERROR_NUMBER = "ErrorNumber" + KEY_STACKTRACE = "Stacktrace" + KEY_EXCEPTION = "Exception" + + +class TelemetryData: + """An instance of telemetry data which can be sent to the server.""" + + TRUE = 1 + FALSE = 0 + + def __init__(self, message, timestamp): + self.message = message + self.timestamp = timestamp + + def to_dict(self): + return {"message": self.message, "timestamp": str(self.timestamp)} + + def __repr__(self): + return str(self.to_dict()) + + +class TelemetryClient: + """Client to enqueue and send metrics to the telemetry endpoint in batch.""" + + SF_PATH_TELEMETRY = "/telemetry/send" + DEFAULT_FORCE_FLUSH_SIZE = 100 + + def __init__(self, rest: SnowflakeRestful, flush_size=None): + self._rest: SnowflakeRestful | None = rest + self._log_batch = [] + self._flush_size = flush_size or TelemetryClient.DEFAULT_FORCE_FLUSH_SIZE + self._lock = Lock() + self._enabled = True + + def add_log_to_batch(self, telemetry_data: TelemetryData) -> None: + if self.is_closed: + raise Exception("Attempted to add log when TelemetryClient is closed") + elif not self._enabled: + logger.debug("TelemetryClient disabled. Ignoring log.") + return + + with self._lock: + self._log_batch.append(telemetry_data) + + if len(self._log_batch) >= self._flush_size: + self.send_batch() + + def try_add_log_to_batch(self, telemetry_data: TelemetryData) -> None: + try: + self.add_log_to_batch(telemetry_data) + except Exception: + logger.warning("Failed to add log to telemetry.", exc_info=True) + + def send_batch(self): + if self.is_closed: + raise Exception("Attempted to send batch when TelemetryClient is closed") + elif not self._enabled: + logger.debug("TelemetryClient disabled. Not sending logs.") + return + + with self._lock: + to_send = self._log_batch + self._log_batch = [] + + if not to_send: + logger.debug("Nothing to send to telemetry.") + return + + body = {"logs": [x.to_dict() for x in to_send]} + logger.debug( + "Sending %d logs to telemetry. Data is %s.", + len(body), + SecretDetector.mask_secrets(str(body))[1], + ) + if ENABLE_TELEMETRY_LOG: + # This logger guarantees the payload won't be masked. Testing purpose. 
+ rt_plain_logger.debug(f"Inband telemetry data being sent is {body}") + try: + ret = self._rest.request( + TelemetryClient.SF_PATH_TELEMETRY, + body=body, + method="post", + client=None, + timeout=5, + ) + if not ret["success"]: + logger.info( + "Non-success response from telemetry server: %s. " + "Disabling telemetry.", + str(ret), + ) + self._enabled = False + else: + logger.debug("Successfully uploading metrics to telemetry.") + except Exception: + self._enabled = False + logger.debug("Failed to upload metrics to telemetry.", exc_info=True) + + @property + def is_closed(self): + return self._rest is None + + def close(self, send_on_close=True): + if not self.is_closed: + logger.debug("Closing telemetry client.") + if send_on_close: + self.send_batch() + self._rest = None + + def disable(self): + self._enabled = False + + def is_enabled(self): + return self._enabled + + def buffer_size(self): + return len(self._log_batch) diff --git a/src/snowflake/connector/telemetry_oob.py b/src/snowflake/connector/telemetry_oob.py new file mode 100644 index 000000000..e98054da3 --- /dev/null +++ b/src/snowflake/connector/telemetry_oob.py @@ -0,0 +1,511 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +import datetime +import json +import logging +import uuid +from collections import namedtuple +from queue import Queue + +from .compat import OK +from .description import CLIENT_NAME, SNOWFLAKE_CONNECTOR_VERSION +from .secret_detector import SecretDetector +from .test_util import ENABLE_TELEMETRY_LOG, rt_plain_logger +from .vendored import requests + +logger = logging.getLogger(__name__) + +DEFAULT_BATCH_SIZE = 10 +DEFAULT_NUM_OF_RETRY_TO_TRIGGER_TELEMETRY = 10 +REQUEST_TIMEOUT = 3 + +TelemetryAPI = namedtuple("TelemetryAPI", ["url", "api_key"]) +TelemetryServer = namedtuple("TelemetryServer", ["name", "url", "api_key"]) +TelemetryEventBase = namedtuple( + "TelemetryEventBase", ["name", "tags", "urgent", "value"] +) + + +class TelemetryAPIEndpoint: + SFCTEST = TelemetryAPI( + url="https://sfctest.client-telemetry.snowflakecomputing.com/enqueue", + api_key="rRNY3EPNsB4U89XYuqsZKa7TSxb9QVX93yNM4tS6", + ) + SFCDEV = TelemetryAPI( + url="https://sfcdev.client-telemetry.snowflakecomputing.com/enqueue", + api_key="kyTKLWpEZSaJnrzTZ63I96QXZHKsgfqbaGmAaIWf", + ) + PROD = TelemetryAPI( + url="https://client-telemetry.snowflakecomputing.com/enqueue", + api_key="wLpEKqnLOW9tGNwTjab5N611YQApOb3t9xOnE1rX", + ) + + +class TelemetryServerDeployments: + DEV = TelemetryServer( + "dev", TelemetryAPIEndpoint.SFCTEST.url, TelemetryAPIEndpoint.SFCTEST.api_key + ) + REG = TelemetryServer( + "reg", TelemetryAPIEndpoint.SFCTEST.url, TelemetryAPIEndpoint.SFCTEST.api_key + ) + QA1 = TelemetryServer( + "qa1", TelemetryAPIEndpoint.SFCDEV.url, TelemetryAPIEndpoint.SFCDEV.api_key + ) + PREPROD3 = TelemetryServer( + "preprod3", TelemetryAPIEndpoint.SFCDEV.url, TelemetryAPIEndpoint.SFCDEV.api_key + ) + PROD = TelemetryServer( + "prod", TelemetryAPIEndpoint.PROD.url, TelemetryAPIEndpoint.PROD.api_key + ) + + +ENABLED_DEPLOYMENTS = ( + TelemetryServerDeployments.DEV.name, + TelemetryServerDeployments.REG.name, + TelemetryServerDeployments.QA1.name, + TelemetryServerDeployments.PREPROD3.name, + TelemetryServerDeployments.PROD.name, +) + + +class TelemetryEvent(TelemetryEventBase): + """Base class for log and metric telemetry events. + + This class has all of the logic except for the 'type' of the telemetry event. 
+ That must be defined by the child class. + """ + + def get_type(self): + """Gets the telemetry event type.""" + raise NotImplementedError + + def to_dict(self): + """Transform this event into a dictionary.""" + event = dict() + event["Name"] = self.name + event["Urgent"] = self.urgent + event["Value"] = self.value + event["Tags"] = self.generate_tags() + event.update( + { + "UUID": str(uuid.uuid4()), + "Created_On": datetime.datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S"), + "Type": self.get_type(), + "SchemaVersion": 1, + } + ) + return event + + def get_deployment(self): + """Gets the deployment field specified in tags if it exists.""" + tags = self.tags + if tags: + for tag in tags: + if tag.get("Name", None) == "deployment": + return tag.get("Value") + + return "Unknown" + + def generate_tags(self): + """Generates the tags to send as part of the telemetry event. Parts of the tags are user defined.""" + tags = dict() + # Add in tags that were added to the event + if self.tags and len(self.tags) > 0: + for k, v in self.tags.items(): + if v is not None: + tags[str(k).lower()] = str(v) + + telemetry = TelemetryService.get_instance() + # Add telemetry service generated tags + tags["driver"] = CLIENT_NAME + tags["version"] = str(SNOWFLAKE_CONNECTOR_VERSION) + tags["telemetryServerDeployment"] = telemetry.deployment.name + tags["connectionString"] = telemetry.get_connection_string() + if telemetry.context and len(telemetry.context) > 0: + for k, v in telemetry.context.items(): + if v is not None: + tags["ctx_" + str(k).lower()] = str(v) + + return tags + + +class TelemetryLogEvent(TelemetryEvent): + def get_type(self): + return "Log" + + +class TelemetryMetricEvent(TelemetryEvent): + def get_type(self): + return "Metric" + + +class TelemetryService: + __instance = None + + @staticmethod + def get_instance(): + """Static access method.""" + if TelemetryService.__instance is None: + TelemetryService() + return TelemetryService.__instance + + def __init__(self): + """Virtually private constructor.""" + if TelemetryService.__instance is not None: + raise Exception("This class is a singleton!") + else: + TelemetryService.__instance = self + self._enabled = True + self._queue = Queue() + self.batch_size = DEFAULT_BATCH_SIZE + self.num_of_retry_to_trigger_telemetry = ( + DEFAULT_NUM_OF_RETRY_TO_TRIGGER_TELEMETRY + ) + self.context = dict() + self.connection_params = dict() + self.deployment = TelemetryServerDeployments.PROD + + def __del__(self): + """Tries to flush all events left in the queue. 
Ignores all exceptions.""" + try: + self.close() + except Exception: + pass + + @property + def enabled(self): + """Whether the Telemetry service is enabled or not.""" + return self._enabled + + def enable(self): + """Enable Telemetry Service.""" + self._enabled = True + + def disable(self): + """Disable Telemetry Service.""" + self._enabled = False + + @property + def queue(self): + """Get the queue that holds all of the telemetry events.""" + return self._queue + + @property + def context(self): + """Returns the context of the current connection.""" + return self._context + + @context.setter + def context(self, value): + """Sets the context of the current connection.""" + self._context = value + + @property + def connection_params(self): + """Returns the connection parameters from the current connection.""" + return self._connection_params + + @connection_params.setter + def connection_params(self, value): + """Sets the connection parameters from the current connection.""" + self._connection_params = value + + @property + def batch_size(self): + """Returns the batch size for uploading results.""" + return self._batch_size + + @batch_size.setter + def batch_size(self, value): + """Sets the batch size for uploading results.""" + self._batch_size = value + + @property + def num_of_retry_to_trigger_telemetry(self): + """Returns the number of HTTP retries before we submit a telemetry event.""" + return self._num_of_retry_to_trigger_telemetry + + @num_of_retry_to_trigger_telemetry.setter + def num_of_retry_to_trigger_telemetry(self, value): + """Sets the number of HTTP retries before we submit a telemetry event.""" + self._num_of_retry_to_trigger_telemetry = value + + @property + def deployment(self): + """Returns the deployment that we are sending the telemetry information to.""" + return self._deployment + + @deployment.setter + def deployment(self, value): + """Sets the deployment that we are sending the telemetry information to.""" + self._deployment = value + + def is_deployment_enabled(self): + """Returns whether or not this deployment is enabled.""" + return self.deployment.name in ENABLED_DEPLOYMENTS + + def get_connection_string(self): + """Returns the URL used to connect to Snowflake.""" + return ( + self.connection_params.get("protocol", "") + + "://" + + self.connection_params.get("host", "") + + ":" + + str(self.connection_params.get("port", "")) + ) + + def add(self, event): + """Adds a telemetry event to the queue. If the event is urgent, upload all telemetry events right away.""" + if not self.enabled: + return + + self.queue.put(event) + if self.queue.qsize() > self.batch_size or event.urgent: + payload = self.export_queue_to_string() + if payload is None: + return + self._upload_payload(payload) + + def flush(self): + """Flushes all telemetry events in the queue and submit them to the back-end.""" + if not self.enabled: + return + + if not self.queue.empty(): + payload = self.export_queue_to_string() + if payload is None: + return + self._upload_payload(payload) + + def update_context(self, connection_params): + """Updates the telemetry service context. 
Remove any passwords or credentials."""
+        self.configure_deployment(connection_params)
+        self.context = dict()
+
+        for key, value in connection_params.items():
+            if (
+                "password" not in key
+                and "passcode" not in key
+                and "privateKey" not in key
+            ):
+                self.context[key] = value
+
+    def configure_deployment(self, connection_params):
+        """Determines which deployment we are sending Telemetry OOB messages to."""
+        self.connection_params = connection_params
+        account = (
+            self.connection_params.get("account")
+            if self.connection_params.get("account")
+            else ""
+        )
+        host = (
+            self.connection_params.get("host")
+            if self.connection_params.get("host")
+            else ""
+        )
+        port = self.connection_params.get("port", None)
+
+        # Set as PROD by default
+        deployment = TelemetryServerDeployments.PROD
+        if "reg" in host or "local" in host:
+            deployment = TelemetryServerDeployments.REG
+            if port == 8080:
+                deployment = TelemetryServerDeployments.DEV
+        elif "qa1" in host or "qa1" in account:
+            deployment = TelemetryServerDeployments.QA1
+        elif "preprod3" in host:
+            deployment = TelemetryServerDeployments.PREPROD3
+
+        self.deployment = deployment
+
+    def log_ocsp_exception(
+        self,
+        event_type,
+        telemetry_data,
+        exception=None,
+        stack_trace=None,
+        tags=None,
+        urgent=False,
+    ):
+        """Logs an OCSP Exception and adds it to the queue to be uploaded."""
+        if tags is None:
+            tags = dict()
+        try:
+            if self.enabled:
+                event_name = "OCSPException"
+                if exception is not None:
+                    telemetry_data["exceptionMessage"] = str(exception)
+                if stack_trace is not None:
+                    telemetry_data["exceptionStackTrace"] = stack_trace
+
+                if tags is None:
+                    tags = dict()
+
+                tags["eventType"] = event_type
+
+                log_event = TelemetryLogEvent(
+                    name=event_name, tags=tags, urgent=urgent, value=telemetry_data
+                )
+
+                self.add(log_event)
+        except Exception:
+            # Do nothing on exception, just log
+            logger.debug("Failed to log OCSP exception", exc_info=True)
+
+    def log_http_request_error(
+        self,
+        event_name,
+        url,
+        method,
+        sqlstate,
+        errno,
+        response=None,
+        retry_timeout=None,
+        retry_count=None,
+        exception=None,
+        stack_trace=None,
+        tags=None,
+        urgent=False,
+    ):
+        """Logs an HTTP Request error and adds it to the queue to be uploaded."""
+        if tags is None:
+            tags = dict()
+        try:
+            if self.enabled:
+                telemetry_data = dict()
+                response_status_code = -1
+                # This mimics the output of HttpRequestBase.toString() from JDBC
+                telemetry_data["request"] = f"{method} {url}"
+                telemetry_data["sqlState"] = sqlstate
+                telemetry_data["errorCode"] = errno
+                if response:
+                    telemetry_data["response"] = response.json()
+                    telemetry_data["responseStatusLine"] = str(response.reason)
+                    if response.status_code:
+                        response_status_code = str(response.status_code)
+                        telemetry_data["responseStatusCode"] = response_status_code
+                if retry_timeout:
+                    telemetry_data["retryTimeout"] = str(retry_timeout)
+                if retry_count:
+                    telemetry_data["retryCount"] = str(retry_count)
+                if exception:
+                    telemetry_data["exceptionMessage"] = str(exception)
+                if stack_trace:
+                    telemetry_data["exceptionStackTrace"] = stack_trace
+
+                if tags is None:
+                    tags = dict()
+
+                tags["responseStatusCode"] = response_status_code
+                tags["sqlState"] = str(sqlstate)
+                tags["errorCode"] = errno
+
+                log_event = TelemetryLogEvent(
+                    name=event_name, tags=tags, value=telemetry_data, urgent=urgent
+                )
+
+                self.add(log_event)
+        except Exception:
+            # Do nothing on exception, just log
+            logger.debug("Failed to log HTTP request error", exc_info=True)
+
+    def log_general_exception(
+        self,
+        event_name: 
str, + telemetry_data: dict, + tags: dict | None = None, + urgent: bool | None = False, + ): + """Sends any type of exception through OOB telemetry.""" + if tags is None: + tags = dict() + try: + if self.enabled: + log_event = TelemetryLogEvent( + name=event_name, tags=tags, value=telemetry_data, urgent=urgent + ) + self.add(log_event) + except Exception: + # Do nothing on exception, just log + logger.debug("Failed to log general exception", exc_info=True) + + def _upload_payload(self, payload): + """Uploads the JSON-formatted string payload to the telemetry backend. + + Ignore any exceptions that may arise. + """ + success = True + response = None + try: + if not self.is_deployment_enabled(): + logger.debug("Skip the disabled deployment: %s", self.deployment.name) + return + logger.debug(f"Sending OOB telemetry data. Payload: {payload}") + if ENABLE_TELEMETRY_LOG: + # This logger guarantees the payload won't be masked. Testing purpose. + rt_plain_logger.debug(f"OOB telemetry data being sent is {payload}") + + with requests.Session() as session: + headers = { + "Content-type": "application/json", + "x-api-key": self.deployment.api_key, + } + response = session.post( + self.deployment.url, + data=payload, + headers=headers, + timeout=REQUEST_TIMEOUT, + ) + if ( + response.status_code == OK + and json.loads(response.text).get("statusCode", 0) == OK + ): + logger.debug( + "telemetry server request success: %d", response.status_code + ) + else: + logger.debug( + "telemetry server request error: %d", response.status_code + ) + success = False + except Exception as e: + logger.debug( + "Telemetry request failed, Exception response: %s, exception: %s", + response, + str(e), + ) + success = False + finally: + logger.debug("Telemetry request success=%s", success) + + def export_queue_to_string(self): + """Exports all events in the queue into a JSON formatted string with secrets masked.""" + logs = list() + while not self._queue.empty(): + logs.append(self._queue.get().to_dict()) + # We may get an exception trying to serialize a python object to JSON + try: + payload = json.dumps(logs) + except Exception: + logger.debug( + "Failed to generate a JSON dump from the passed in telemetry OOB events. String representation of logs: %s" + % str(logs), + exc_info=True, + ) + payload = None + _, masked_text, _ = SecretDetector.mask_secrets(payload) + return masked_text + + def close(self): + """Closes the telemetry service.""" + self.flush() + self.disable() + + def size(self): + """Returns the size of the queue.""" + return self.queue.qsize() diff --git a/src/snowflake/connector/test_util.py b/src/snowflake/connector/test_util.py new file mode 100644 index 000000000..9037ce570 --- /dev/null +++ b/src/snowflake/connector/test_util.py @@ -0,0 +1,31 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
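A minimal sketch of driving the TelemetryService singleton above; the connection parameters are placeholder values, and the payload is rendered locally with export_queue_to_string() instead of being posted to a deployment:

```python
from snowflake.connector.telemetry_oob import TelemetryService

service = TelemetryService.get_instance()
service.update_context(
    {
        "account": "myaccount",  # placeholder account/endpoint values
        "host": "myaccount.snowflakecomputing.com",
        "protocol": "https",
        "port": 443,
        "password": "hunter2",  # filtered out of the telemetry context
    }
)
service.log_general_exception(
    "ExampleEvent", telemetry_data={"exceptionMessage": "boom"}
)
print(service.export_queue_to_string())  # masked JSON; drains the queue
```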
+# + +from __future__ import annotations + +import logging +import os + +from .compat import IS_LINUX + +RUNNING_ON_JENKINS = os.getenv("JENKINS_HOME") is not None +REGRESSION_TEST_LOG_DIR = os.getenv("CLIENT_LOG_DIR_PATH_DOCKER", "/tmp") +ENABLE_TELEMETRY_LOG = RUNNING_ON_JENKINS and IS_LINUX +rt_plain_logger = None + + +if ENABLE_TELEMETRY_LOG: + rt_plain_logger = logging.getLogger("regression.test.plain.logger") + rt_plain_logger.setLevel(logging.DEBUG) + ch = logging.FileHandler( + os.path.join(REGRESSION_TEST_LOG_DIR, "snowflake_ssm_rt_telemetry.log") + ) + ch.setLevel(logging.DEBUG) + ch.setFormatter( + logging.Formatter( + "%(asctime)s - %(threadName)s %(filename)s:%(lineno)d - %(funcName)s() - %(levelname)s - %(message)s" + ) + ) + rt_plain_logger.addHandler(ch) diff --git a/src/snowflake/connector/time_util.py b/src/snowflake/connector/time_util.py new file mode 100644 index 000000000..a087b7372 --- /dev/null +++ b/src/snowflake/connector/time_util.py @@ -0,0 +1,90 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +import random +import time +from logging import getLogger +from typing import Any, Callable + +logger = getLogger(__name__) + +try: + from threading import _Timer as Timer +except ImportError: + from threading import Timer + +DEFAULT_MASTER_VALIDITY_IN_SECONDS = 4 * 60 * 60 # seconds + + +class HeartBeatTimer(Timer): + """A thread which executes a function every client_session_keep_alive_heartbeat_frequency seconds.""" + + def __init__( + self, client_session_keep_alive_heartbeat_frequency: int, f: Callable + ) -> None: + interval = client_session_keep_alive_heartbeat_frequency + super().__init__(interval, f) + # Mark this as a daemon thread, so that it won't prevent Python from exiting. + self.daemon = True + + def run(self) -> None: + while not self.finished.is_set(): + self.finished.wait(self.interval) + if not self.finished.is_set(): + try: + self.function() + except Exception as e: + logger.debug("failed to heartbeat: %s", e) + + +def get_time_millis() -> int: + """Returns the current time in milliseconds.""" + return int(time.time() * 1000) + + +class DecorrelateJitterBackoff: + # Decorrelate Jitter backoff + # https://www.awsarchitectureblog.com/2015/03/backoff.html + def __init__(self, base: int, cap: int) -> None: + self._base = base + self._cap = cap + + def next_sleep(self, _: Any, sleep: int) -> int: + return min(self._cap, random.randint(self._base, sleep * 3)) + + +class TimerContextManager: + """Context manager class to easily measure execution of a code block. + + Once the context manager finishes, the class should be cast into an int to retrieve + result. 
+ + Example: + + with TimerContextManager() as measured_time: + pass + download_metric = measured_time.get_timing_millis() + """ + + def __init__(self) -> None: + self._start: int | None = None + self._end: int | None = None + + def __enter__(self) -> TimerContextManager: + self._start = get_time_millis() + return self + + def __exit__(self, exc_type, exc_val, exc_tb) -> None: + self._end = get_time_millis() + + def get_timing_millis(self) -> int: + """Get measured timing in milliseconds.""" + if self._start is None or self._end is None: + raise Exception( + "Trying to get timing before TimerContextManager has finished" + ) + return self._end - self._start diff --git a/src/snowflake/connector/tool/__init__.py b/src/snowflake/connector/tool/__init__.py new file mode 100644 index 000000000..d689e9c10 --- /dev/null +++ b/src/snowflake/connector/tool/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# diff --git a/tool/dump_certs.py b/src/snowflake/connector/tool/dump_certs.py similarity index 59% rename from tool/dump_certs.py rename to src/snowflake/connector/tool/dump_certs.py index 638e6f6eb..7da2cdbae 100644 --- a/tool/dump_certs.py +++ b/src/snowflake/connector/tool/dump_certs.py @@ -1,8 +1,10 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. # + +from __future__ import annotations + import os import sys from os import path @@ -11,18 +13,21 @@ def main(): - """ - Internal Tool: Extract certificate files in PEM - """ + """Internal Tool: Extract certificate files in PEM.""" def help(): print( "Extract certificate file. The target file can be a single file " "or a directory including multiple certificates. The certificate " - "file format should be PEM.") - print(""" -Usage: {0} -""".format(path.basename(sys.argv[0]))) + "file format should be PEM." + ) + print( + """ +Usage: {} +""".format( + path.basename(sys.argv[0]) + ) + ) sys.exit(2) if len(sys.argv) < 2: @@ -30,8 +35,7 @@ def help(): input_filename = sys.argv[1] if path.isdir(input_filename): - files = [path.join(input_filename, f) for f in - os.listdir(input_filename)] + files = [path.join(input_filename, f) for f in os.listdir(input_filename)] else: files = [input_filename] @@ -46,10 +50,8 @@ def extract_certificate_file(input_filename): ocsp.read_cert_bundle(input_filename, cert_map) for cert in cert_map.values(): - print("serial #: {}, name: {}".format( - cert.serial_number, - cert.subject.native)) + print(f"serial #: {cert.serial_number}, name: {cert.subject.native}") -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/src/snowflake/connector/tool/dump_ocsp_response.py b/src/snowflake/connector/tool/dump_ocsp_response.py new file mode 100644 index 000000000..73c76dd84 --- /dev/null +++ b/src/snowflake/connector/tool/dump_ocsp_response.py @@ -0,0 +1,126 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
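A minimal sketch exercising the two time_util helpers above; the sleep is a stand-in for real work, and the printed backoff values are random by design:

```python
import time

from snowflake.connector.time_util import (
    DecorrelateJitterBackoff,
    TimerContextManager,
)

with TimerContextManager() as measured_time:
    time.sleep(0.05)  # stand-in for real work
print(measured_time.get_timing_millis())  # roughly 50 (milliseconds)

backoff = DecorrelateJitterBackoff(base=1, cap=16)
sleep = 1
for attempt in range(4):
    sleep = backoff.next_sleep(attempt, sleep)  # first argument is unused
    print(f"attempt {attempt}: next sleep {sleep}s")
```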
+# + +from __future__ import annotations + +import time +from os import path +from time import gmtime, strftime + +from asn1crypto import ocsp as asn1crypto_ocsp + +from snowflake.connector.compat import urlsplit +from snowflake.connector.ocsp_asn1crypto import SnowflakeOCSPAsn1Crypto as SFOCSP +from snowflake.connector.ssl_wrap_socket import _openssl_connect + + +def main(): + """Internal Tool: OCSP response dumper.""" + + def help(): + print("Dump OCSP Response for the URL. ") + print( + """ +Usage: {} [ ...] +""".format( + path.basename(sys.argv[0]) + ) + ) + sys.exit(2) + + import sys + + if len(sys.argv) < 2: + help() + + urls = sys.argv[1:] + dump_ocsp_response(urls, output_filename=None) + + +def dump_good_status(current_time, single_response): + print("This Update: {}".format(single_response["this_update"].native)) + print("Next Update: {}".format(single_response["next_update"].native)) + this_update = ( + single_response["this_update"].native.replace(tzinfo=None) - SFOCSP.ZERO_EPOCH + ).total_seconds() + next_update = ( + single_response["next_update"].native.replace(tzinfo=None) - SFOCSP.ZERO_EPOCH + ).total_seconds() + + tolerable_validity = SFOCSP._calculate_tolerable_validity(this_update, next_update) + print( + "Tolerable Update: {}".format( + strftime("%Y%m%d%H%M%SZ", gmtime(next_update + tolerable_validity)) + ) + ) + if SFOCSP._is_validaity_range(current_time, this_update, next_update): + print("OK") + else: + print(SFOCSP._validity_error_message(current_time, this_update, next_update)) + + +def dump_revoked_status(single_response): + revoked_info = single_response["cert_status"] + revocation_time = revoked_info.native["revocation_time"] + revocation_reason = revoked_info.native["revocation_reason"] + print( + "Revoked Time: {}".format( + revocation_time.strftime(SFOCSP.OUTPUT_TIMESTAMP_FORMAT) + ) + ) + print(f"Revoked Reason: {revocation_reason}") + + +def dump_ocsp_response(urls, output_filename): + ocsp = SFOCSP() + for url in urls: + if not url.startswith("http"): + url = "https://" + url + parsed_url = urlsplit(url) + hostname = parsed_url.hostname + port = parsed_url.port or 443 + connection = _openssl_connect(hostname, port) + cert_data = ocsp.extract_certificate_chain(connection) + current_time = int(time.time()) + print(f"Target URL: {url}") + print( + "Current Time: {}".format(strftime("%Y%m%d%H%M%SZ", gmtime(current_time))) + ) + for issuer, subject in cert_data: + _, _ = ocsp.create_ocsp_request(issuer, subject) + _, _, _, cert_id, ocsp_response_der = ocsp.validate_by_direct_connection( + issuer, subject + ) + ocsp_response = asn1crypto_ocsp.OCSPResponse.load(ocsp_response_der) + print("------------------------------------------------------------") + print(f"Subject Name: {subject.subject.native}") + print(f"Issuer Name: {issuer.subject.native}") + print(f"OCSP URI: {subject.ocsp_urls}") + print(f"CRL URI: {subject.crl_distribution_points[0].native}") + print(f"Issuer Name Hash: {subject.issuer.sha1}") + print(f"Issuer Key Hash: {issuer.public_key.sha1}") + print(f"Serial Number: {subject.serial_number}") + print("Response Status: {}".format(ocsp_response["response_status"].native)) + basic_ocsp_response = ocsp_response.basic_ocsp_response + tbs_response_data = basic_ocsp_response["tbs_response_data"] + print("Responder ID: {}".format(tbs_response_data["responder_id"].name)) + current_time = int(time.time()) + for single_response in tbs_response_data["responses"]: + cert_status = single_response["cert_status"].name + if cert_status == "good": + 
dump_good_status(current_time, single_response) + elif cert_status == "revoked": + dump_revoked_status(single_response) + else: + print("Unknown") + print("") + + if output_filename: + SFOCSP.OCSP_CACHE.write_ocsp_response_cache_file(ocsp, output_filename) + return SFOCSP.OCSP_CACHE.CACHE + + +if __name__ == "__main__": + main() diff --git a/tool/dump_ocsp_response_cache.py b/src/snowflake/connector/tool/dump_ocsp_response_cache.py similarity index 54% rename from tool/dump_ocsp_response_cache.py rename to src/snowflake/connector/tool/dump_ocsp_response_cache.py index c5c74c33e..c92b04f8c 100644 --- a/tool/dump_ocsp_response_cache.py +++ b/src/snowflake/connector/tool/dump_ocsp_response_cache.py @@ -1,8 +1,10 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. # + +from __future__ import annotations + import json import sys from datetime import datetime @@ -10,12 +12,11 @@ from os import path from time import gmtime, strftime, time -from OpenSSL.crypto import dump_certificate, FILETYPE_ASN1 from asn1crypto import core, ocsp from asn1crypto.x509 import Certificate +from OpenSSL.crypto import FILETYPE_ASN1, dump_certificate -from snowflake.connector.ocsp_asn1crypto \ - import SnowflakeOCSPAsn1Crypto as SFOCSP +from snowflake.connector.ocsp_asn1crypto import SnowflakeOCSPAsn1Crypto as SFOCSP from snowflake.connector.ssl_wrap_socket import _openssl_connect ZERO_EPOCH = datetime.utcfromtimestamp(0) @@ -24,19 +25,22 @@ def main(): - """ - Internal Tool: Dump OCSP response cache file. - """ + """Internal Tool: Dump OCSP response cache file.""" def help(): print( "Dump OCSP Response cache. This tools extracts OCSP response " "cache file, i.e., ~/.cache/snowflake/ocsp_response_cache. " "Note the subject name shows up if the certificate exists in " - "the certs directory.") - print(""" -Usage: {0} -""".format(path.basename(sys.argv[0]))) + "the certs directory." 
+ ) + print( + """ +Usage: {} +""".format( + path.basename(sys.argv[0]) + ) + ) sys.exit(2) if len(sys.argv) < 4: @@ -50,8 +54,7 @@ def help(): hostname_file = sys.argv[2] cert_glob_pattern = sys.argv[3] - dump_ocsp_response_cache( - ocsp_response_cache_file, hostname_file, cert_glob_pattern) + dump_ocsp_response_cache(ocsp_response_cache_file, hostname_file, cert_glob_pattern) def raise_old_cache_exception(current_time, created_on, name, serial_number): @@ -60,44 +63,49 @@ def raise_old_cache_exception(current_time, created_on, name, serial_number): "should be newer than {}: " "name: {}, serial_number: {}, " "current_time: {}, created_on: {}".format( - strftime(SFOCSP.OUTPUT_TIMESTAMP_FORMAT, gmtime( - current_time - OCSP_CACHE_SERVER_INTERVAL)), - name, serial_number, - strftime( - SFOCSP.OUTPUT_TIMESTAMP_FORMAT, gmtime(current_time)), strftime( - SFOCSP.OUTPUT_TIMESTAMP_FORMAT, gmtime(created_on)))) + SFOCSP.OUTPUT_TIMESTAMP_FORMAT, + gmtime(current_time - OCSP_CACHE_SERVER_INTERVAL), + ), + name, + serial_number, + strftime(SFOCSP.OUTPUT_TIMESTAMP_FORMAT, gmtime(current_time)), + strftime(SFOCSP.OUTPUT_TIMESTAMP_FORMAT, gmtime(created_on)), + ) + ) def raise_outdated_validity_exception( - current_time, name, serial_number, this_update, next_update): - raise Exception("ERROR: OCSP response cache include " - "outdated data: " - "name: {}, serial_number: {}, " - "current_time: {}, this_update: {}, " - "next_update: {}".format( - name, serial_number, - strftime( - SFOCSP.OUTPUT_TIMESTAMP_FORMAT, gmtime(current_time)), - this_update.strftime( - SFOCSP.OUTPUT_TIMESTAMP_FORMAT), - next_update.strftime( - SFOCSP.OUTPUT_TIMESTAMP_FORMAT))) + current_time, name, serial_number, this_update, next_update +): + raise Exception( + "ERROR: OCSP response cache include " + "outdated data: " + "name: {}, serial_number: {}, " + "current_time: {}, this_update: {}, " + "next_update: {}".format( + name, + serial_number, + strftime(SFOCSP.OUTPUT_TIMESTAMP_FORMAT, gmtime(current_time)), + this_update.strftime(SFOCSP.OUTPUT_TIMESTAMP_FORMAT), + next_update.strftime(SFOCSP.OUTPUT_TIMESTAMP_FORMAT), + ) + ) def dump_ocsp_response_cache( - ocsp_response_cache_file, hostname_file, cert_glob_pattern): - """ - Dump OCSP response cache contents. Show the subject name as well if - the subject is included in the certificate files. + ocsp_response_cache_file, hostname_file, cert_glob_pattern +): + """Dump OCSP response cache contents. + + Show the subject name as well if the subject is included in the certificate files. 
""" sfocsp = SFOCSP() s_to_n = _fetch_certs(hostname_file) s_to_n1 = _serial_to_name(sfocsp, cert_glob_pattern) s_to_n.update(s_to_n1) - SFOCSP.OCSP_CACHE.read_ocsp_response_cache_file( - sfocsp, ocsp_response_cache_file) + SFOCSP.OCSP_CACHE.read_ocsp_response_cache_file(sfocsp, ocsp_response_cache_file) def custom_key(k): # third element is Serial Number for the subject @@ -115,57 +123,55 @@ def custom_key(k): else: name = "Unknown" output[json_key] = { - 'serial_number': format(serial_number,'d'), - 'name': name, + "serial_number": format(serial_number, "d"), + "name": name, } value = ocsp_validation_cache[hkey] cache = value[1] ocsp_response = ocsp.OCSPResponse.load(cache) basic_ocsp_response = ocsp_response.basic_ocsp_response - tbs_response_data = basic_ocsp_response['tbs_response_data'] + tbs_response_data = basic_ocsp_response["tbs_response_data"] current_time = int(time()) - for single_response in tbs_response_data['responses']: + for single_response in tbs_response_data["responses"]: created_on = int(value[0]) - produce_at = tbs_response_data['produced_at'].native - this_update = single_response['this_update'].native - next_update = single_response['next_update'].native + produce_at = tbs_response_data["produced_at"].native + this_update = single_response["this_update"].native + next_update = single_response["next_update"].native if current_time - OCSP_CACHE_SERVER_INTERVAL > created_on: - raise_old_cache_exception(current_time, created_on, name, - serial_number) + raise_old_cache_exception(current_time, created_on, name, serial_number) next_update_utc = ( - next_update.replace( - tzinfo=None) - ZERO_EPOCH).total_seconds() + next_update.replace(tzinfo=None) - ZERO_EPOCH + ).total_seconds() this_update_utc = ( - this_update.replace( - tzinfo=None) - ZERO_EPOCH).total_seconds() + this_update.replace(tzinfo=None) - ZERO_EPOCH + ).total_seconds() - if current_time > next_update_utc or \ - current_time < this_update_utc: + if current_time > next_update_utc or current_time < this_update_utc: raise_outdated_validity_exception( - current_time, name, serial_number, this_update, next_update) - - output[json_key]['created_on'] = strftime( - SFOCSP.OUTPUT_TIMESTAMP_FORMAT, gmtime(created_on)) - output[json_key]['produce_at'] = str(produce_at) - output[json_key]['this_update'] = str(this_update) - output[json_key]['next_update'] = str(next_update) + current_time, name, serial_number, this_update, next_update + ) + + output[json_key]["created_on"] = strftime( + SFOCSP.OUTPUT_TIMESTAMP_FORMAT, gmtime(created_on) + ) + output[json_key]["produce_at"] = str(produce_at) + output[json_key]["this_update"] = str(this_update) + output[json_key]["next_update"] = str(next_update) print(json.dumps(output)) def _serial_to_name(sfocsp, cert_glob_pattern): - """ - Create a map table from serial number to name - """ + """Creates a map table from serial number to name.""" map_serial_to_name = {} for cert_file in glob(cert_glob_pattern): cert_map = {} sfocsp.read_cert_bundle(cert_file, cert_map) cert_data = sfocsp.create_pair_issuer_subject(cert_map) - for issuer, subject in cert_data: + for _, subject in cert_data: map_serial_to_name[subject.serial_number] = subject.subject.native return map_serial_to_name @@ -173,7 +179,7 @@ def _serial_to_name(sfocsp, cert_glob_pattern): def _fetch_certs(hostname_file): with open(hostname_file) as f: - hostnames = f.read().split('\n') + hostnames = f.read().split("\n") map_serial_to_name = {} for h in hostnames: @@ -188,5 +194,5 @@ def _fetch_certs(hostname_file): return 
map_serial_to_name -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/tool/probe_connection.py b/src/snowflake/connector/tool/probe_connection.py similarity index 50% rename from tool/probe_connection.py rename to src/snowflake/connector/tool/probe_connection.py index e31daa343..e742d7601 100644 --- a/tool/probe_connection.py +++ b/src/snowflake/connector/tool/probe_connection.py @@ -1,10 +1,13 @@ -# -*- coding: utf-8 -*- # +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations -from socket import (gaierror, gethostbyname_ex) +from socket import gaierror, gethostbyname_ex -from OpenSSL.crypto import dump_certificate, FILETYPE_ASN1 from asn1crypto import ocsp +from OpenSSL.crypto import FILETYPE_ASN1, dump_certificate from ..compat import urlsplit from ..ssl_wrap_socket import _openssl_connect @@ -17,14 +20,14 @@ def probe_connection(url): try: actual_hostname, aliases, ips = gethostbyname_ex(parsed_url.hostname) ret = { - 'url': url, - 'input_hostname': parsed_url.hostname, - 'actual_hostname': actual_hostname, - 'aliases': aliases, - 'ips': ips, + "url": url, + "input_hostname": parsed_url.hostname, + "actual_hostname": actual_hostname, + "aliases": aliases, + "ips": ips, } except gaierror as e: - return {'err:': e} + return {"err:": e} connection = _openssl_connect(parsed_url.hostname, parsed_url.port) # certificates @@ -40,28 +43,31 @@ def probe_connection(url): # DNS lookup for OCSP server try: actual_hostname, aliases, ips = gethostbyname_ex( - parsed_ocsp_url.hostname) + parsed_ocsp_url.hostname + ) ocsp_status = { - 'input_url': ocsp_uris[0], - 'actual_hostname': actual_hostname, - 'aliases': aliases, - 'ips': ips, + "input_url": ocsp_uris[0], + "actual_hostname": actual_hostname, + "aliases": aliases, + "ips": ips, } except gaierror as e: ocsp_status = { - 'input_url': ocsp_uris[0], - 'error': e, + "input_url": ocsp_uris[0], + "error": e, } else: ocsp_status = {} certificates.append( - {'hash': cert.subject.sha1, - 'name': cert.subject.native, - 'issuer': cert.issuer.native, - 'serial_number': cert.serial_number, - 'ocsp': ocsp_status, - }) + { + "hash": cert.subject.sha1, + "name": cert.subject.native, + "issuer": cert.issuer.native, + "serial_number": cert.serial_number, + "ocsp": ocsp_status, + } + ) - ret['certificates'] = certificates + ret["certificates"] = certificates return ret diff --git a/src/snowflake/connector/util_text.py b/src/snowflake/connector/util_text.py new file mode 100644 index 000000000..2c3f405c5 --- /dev/null +++ b/src/snowflake/connector/util_text.py @@ -0,0 +1,256 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +import logging +import re +from io import StringIO + +COMMENT_PATTERN_RE = re.compile(r"^\s*\-\-") +EMPTY_LINE_RE = re.compile(r"^\s*$") + +_logger = logging.getLogger(__name__) + + +class SQLDelimiter: + """Class that wraps a SQL delimiter string. + + Since split_statements is a generator this mutable object will allow it change while executing. + """ + + def __str__(self): + return self.sql_delimiter + + def __init__(self, sql_delimiter: str = ";"): + """Initializes SQLDelimiter with a string.""" + self.sql_delimiter = sql_delimiter + + +def split_statements( + buf: StringIO, + remove_comments: bool = False, + delimiter: SQLDelimiter | None = None, +): + """Splits a stream into SQL statements (ends with a semicolon) or commands (!...). 
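+
+    For example, ``StringIO("select 1; select 2;")`` yields
+    ``("select 1;", False)`` and then ``("select 2;", False)``.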
+ + Args: + buf: Unicode data stream. + remove_comments: Whether or not to remove all comments (Default value = False). + delimiter: The delimiter string that separates SQL commands from each other. + + Yields: + A SQL statement or a command. + """ + if delimiter is None: + delimiter = SQLDelimiter() # Use default delimiter if none was given. + in_quote = False + ch_quote = None + in_comment = False + in_double_dollars = False + previous_delimiter = None + + line = buf.readline() + if isinstance(line, bytes): + raise TypeError("Input data must not be binary type.") + + statement = [] + while line != "": + col = 0 + col0 = 0 + len_line = len(line) + sql_delimiter = delimiter.sql_delimiter + if not previous_delimiter or sql_delimiter != previous_delimiter: + # Only (re)compile new Regexes if they should be + escaped_delim = re.escape(sql_delimiter) + # Special characters possible in the sql delimiter are '_', '/' and ';'. If a delimiter does not end, or + # start with a special character then look for word separation with \b regex. + if re.match(r"\w", sql_delimiter[0]): + RE_START = re.compile(rf"^[^\w$]?{escaped_delim}") + else: + RE_START = re.compile(rf"^.?{escaped_delim}") + if re.match(r"\w", sql_delimiter[-1]): + RE_END = re.compile(rf"{escaped_delim}[^\w$]?$") + else: + RE_END = re.compile(rf"{escaped_delim}.?$") + previous_delimiter = sql_delimiter + while True: + if col >= len_line: + if col0 < col: + if not in_comment and not in_quote and not in_double_dollars: + statement.append((line[col0:col], True)) + if len(statement) == 1 and statement[0][0] == "": + statement = [] + break + elif not in_comment and (in_quote or in_double_dollars): + statement.append((line[col0:col], True)) + elif not remove_comments: + statement.append((line[col0:col], False)) + break + elif in_comment: + if line[col:].startswith("*/"): + in_comment = False + if not remove_comments: + statement.append((line[col0 : col + 2], False)) + col += 2 + col0 = col + else: + col += 1 + elif in_double_dollars: + if line[col:].startswith("$$"): + in_double_dollars = False + statement.append((line[col0 : col + 2], False)) + col += 2 + col0 = col + else: + col += 1 + elif in_quote: + if ( + line[col] == "\\" + and col < len_line - 1 + and line[col + 1] in (ch_quote, "\\") + ): + col += 2 + elif line[col] == ch_quote: + if ( + col < len_line - 1 + and line[col + 1] != ch_quote + or col == len_line - 1 + ): + # exits quote + in_quote = False + statement.append((line[col0 : col + 1], True)) + col += 1 + col0 = col + else: + # escaped quote and still in quote + col += 2 + else: + col += 1 + else: + if line[col] in ("'", '"'): + in_quote = True + ch_quote = line[col] + col += 1 + elif line[col] in (" ", "\t"): + statement.append((line[col0 : col + 1], True)) + col += 1 + col0 = col + elif line[col:].startswith("--"): + statement.append((line[col0:col], True)) + if not remove_comments: + # keep the comment + statement.append((line[col:], False)) + col = len_line + 1 + col0 = col + elif line[col:].startswith("/*") and not line[col0:].startswith( + "file://" + ): + if not remove_comments: + statement.append((line[col0 : col + 2], False)) + else: + statement.append((line[col0:col], False)) + col += 2 + col0 = col + in_comment = True + elif line[col:].startswith("$$"): + statement.append((line[col0 : col + 2], True)) + col += 2 + col0 = col + in_double_dollars = True + elif ( + RE_START.match(line[col - 1 : col + len(sql_delimiter)]) + if col > 0 + else (RE_START.match(line[col : col + len(sql_delimiter)])) + ) and 
(RE_END.match(line[col : col + len(sql_delimiter) + 1])): + statement.append((line[col0:col] + ";", True)) + col += len(sql_delimiter) + try: + if line[col] == ">": + col += 1 + statement[-1] = (statement[-1][0] + ">", statement[-1][1]) + except IndexError: + pass + if COMMENT_PATTERN_RE.match(line[col:]) or EMPTY_LINE_RE.match( + line[col:] + ): + if not remove_comments: + # keep the comment + statement.append((line[col:], False)) + col = len_line + while col < len_line and line[col] in (" ", "\t"): + col += 1 + yield _concatenate_statements(statement) + col0 = col + statement = [] + elif col == 0 and line[col] == "!": # command + if len(statement) > 0: + yield _concatenate_statements(statement) + statement = [] + yield ( + line.strip()[: -len(sql_delimiter)] + if line.strip().endswith(sql_delimiter) + else line.strip() + ).strip(), False + break + else: + col += 1 + line = buf.readline() + + if len(statement) > 0: + yield _concatenate_statements(statement) + + +def _concatenate_statements(statement_list): + """Concatenate statements. + + Each statement should be a tuple of statement and is_put_or_get. + + The is_put_or_get is set to True if the statement is PUT or GET otherwise False for valid statement. + None is set if the statement is empty or comment only. + + Args: + statement_list: List of statement parts. + + Returns: + Tuple of statements and whether they are PUT or GET. + """ + valid_statement_list = [] + is_put_or_get = None + for text, is_statement in statement_list: + valid_statement_list.append(text) + if is_put_or_get is None and is_statement and len(text.strip()) >= 3: + is_put_or_get = text[:3].upper() in ("PUT", "GET") + return "".join(valid_statement_list).strip(), is_put_or_get + + +def construct_hostname(region, account): + """Constructs hostname from region and account.""" + if region == "us-west-2": + region = "" + if region: + if account.find(".") > 0: + account = account[0 : account.find(".")] + host = f"{account}.{region}.snowflakecomputing.com" + else: + host = f"{account}.snowflakecomputing.com" + return host + + +def parse_account(account): + url_parts = account.split(".") + # if this condition is true, then we have some extra + # stuff in the account field. + if len(url_parts) > 1: + if url_parts[1] == "global": + # remove external ID from account + parsed_account = url_parts[0][0 : url_parts[0].rfind("-")] + else: + # remove region subdomain + parsed_account = url_parts[0] + else: + parsed_account = account + + return parsed_account diff --git a/src/snowflake/connector/vendored/__init__.py b/src/snowflake/connector/vendored/__init__.py new file mode 100644 index 000000000..d689e9c10 --- /dev/null +++ b/src/snowflake/connector/vendored/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# diff --git a/src/snowflake/connector/vendored/requests/LICENSE b/src/snowflake/connector/vendored/requests/LICENSE new file mode 100644 index 000000000..67db85882 --- /dev/null +++ b/src/snowflake/connector/vendored/requests/LICENSE @@ -0,0 +1,175 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
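(Reviewer's note, interrupting the vendored LICENSE briefly: the two small helpers at the end of util_text.py above are easiest to review against concrete inputs. The account and region values below are made up; the expected results follow directly from the code in this patch.)

```python
# construct_hostname / parse_account as added in util_text.py above.
assert construct_hostname("eu-central-1", "myaccount") == "myaccount.eu-central-1.snowflakecomputing.com"
assert construct_hostname("us-west-2", "myaccount") == "myaccount.snowflakecomputing.com"  # us-west-2 is elided
assert parse_account("myaccount.eu-central-1") == "myaccount"  # region subdomain dropped
assert parse_account("myaccount-ext.global") == "myaccount"    # external ID dropped
```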
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. diff --git a/src/snowflake/connector/vendored/requests/__init__.py b/src/snowflake/connector/vendored/requests/__init__.py new file mode 100644 index 000000000..188858b85 --- /dev/null +++ b/src/snowflake/connector/vendored/requests/__init__.py @@ -0,0 +1,143 @@ +# -*- coding: utf-8 -*- + +# __ +# /__) _ _ _ _ _/ _ +# / ( (- (/ (/ (- _) / _) +# / + +""" +Requests HTTP Library +~~~~~~~~~~~~~~~~~~~~~ + +Requests is an HTTP library, written in Python, for human beings. +Basic GET usage: + + >>> import requests + >>> r = requests.get('https://www.python.org') + >>> r.status_code + 200 + >>> b'Python is a programming language' in r.content + True + +... or POST: + + >>> payload = dict(key1='value1', key2='value2') + >>> r = requests.post('https://httpbin.org/post', data=payload) + >>> print(r.text) + { + ... + "form": { + "key1": "value1", + "key2": "value2" + }, + ... + } + +The other HTTP methods are supported - see `requests.api`. Full documentation +is at . + +:copyright: (c) 2017 by Kenneth Reitz. +:license: Apache 2.0, see LICENSE for more details. +""" + +from .. 
import urllib3 +import warnings +from .exceptions import RequestsDependencyWarning + +try: + from charset_normalizer import __version__ as charset_normalizer_version +except ImportError: + charset_normalizer_version = None + +try: + from chardet import __version__ as chardet_version +except ImportError: + chardet_version = None + +def check_compatibility(urllib3_version, chardet_version, charset_normalizer_version): + urllib3_version = urllib3_version.split('.') + assert urllib3_version != ['dev'] # Verify urllib3 isn't installed from git. + + # Sometimes, urllib3 only reports its version as 16.1. + if len(urllib3_version) == 2: + urllib3_version.append('0') + + # Check urllib3 for compatibility. + major, minor, patch = urllib3_version # noqa: F811 + major, minor, patch = int(major), int(minor), int(patch) + # urllib3 >= 1.21.1, <= 1.26 + assert major == 1 + assert minor >= 21 + assert minor <= 26 + + # Check charset_normalizer for compatibility. + if chardet_version: + major, minor, patch = chardet_version.split('.')[:3] + major, minor, patch = int(major), int(minor), int(patch) + # chardet_version >= 3.0.2, < 5.0.0 + assert (3, 0, 2) <= (major, minor, patch) < (5, 0, 0) + elif charset_normalizer_version: + major, minor, patch = charset_normalizer_version.split('.')[:3] + major, minor, patch = int(major), int(minor), int(patch) + # charset_normalizer >= 2.0.0 < 3.0.0 + assert (2, 0, 0) <= (major, minor, patch) < (3, 0, 0) + else: + raise Exception("You need either charset_normalizer or chardet installed") + +def _check_cryptography(cryptography_version): + # cryptography < 1.3.4 + try: + cryptography_version = list(map(int, cryptography_version.split('.'))) + except ValueError: + return + + if cryptography_version < [1, 3, 4]: + warning = 'Old version of cryptography ({}) may cause slowdown.'.format(cryptography_version) + warnings.warn(warning, RequestsDependencyWarning) + +# Check imported dependencies for compatibility. +try: + check_compatibility(urllib3.__version__, chardet_version, charset_normalizer_version) +except (AssertionError, ValueError): + warnings.warn("urllib3 ({}) or chardet ({})/charset_normalizer ({}) doesn't match a supported " + "version!".format(urllib3.__version__, chardet_version, charset_normalizer_version), + RequestsDependencyWarning) + +# Attempt to enable urllib3's SNI support, if possible +try: + from ..urllib3.contrib import pyopenssl + pyopenssl.inject_into_urllib3() + + # Check cryptography version + from cryptography import __version__ as cryptography_version + _check_cryptography(cryptography_version) +except ImportError: + pass + +# urllib3's DependencyWarnings should be silenced. +from ..urllib3.exceptions import DependencyWarning +warnings.simplefilter('ignore', DependencyWarning) + +from .__version__ import __title__, __description__, __url__, __version__ +from .__version__ import __build__, __author__, __author_email__, __license__ +from .__version__ import __copyright__, __cake__ + +from . import utils +from .models import Request, Response, PreparedRequest +from .api import request, get, head, post, patch, put, delete, options +from .sessions import session, Session +from .status_codes import codes +from .exceptions import ( + RequestException, Timeout, URLRequired, + TooManyRedirects, HTTPError, ConnectionError, + FileModeWarning, ConnectTimeout, ReadTimeout +) + +# Set default logging handler to avoid "No handler found" warnings. 
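(Reviewer's note: concrete version triples may make the assertions in check_compatibility() above easier to review; the values below are illustrative.)

```python
# Version combinations accepted by check_compatibility() as vendored above.
check_compatibility("1.26.6", None, "2.0.4")   # urllib3 1.26.x + charset_normalizer 2.x
check_compatibility("1.25.11", "4.0.0", None)  # urllib3 1.25.x + chardet 4.x
# Anything outside urllib3 1.21-1.26 fails an assert, which the module-level
# try/except above turns into a RequestsDependencyWarning.
```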
+import logging +from logging import NullHandler + +logging.getLogger(__name__).addHandler(NullHandler()) + +# FileModeWarnings go off per the default. +warnings.simplefilter('default', FileModeWarning, append=True) diff --git a/src/snowflake/connector/vendored/requests/__version__.py b/src/snowflake/connector/vendored/requests/__version__.py new file mode 100644 index 000000000..0d7cde1df --- /dev/null +++ b/src/snowflake/connector/vendored/requests/__version__.py @@ -0,0 +1,14 @@ +# .-. .-. .-. . . .-. .-. .-. .-. +# |( |- |.| | | |- `-. | `-. +# ' ' `-' `-`.`-' `-' `-' ' `-' + +__title__ = 'requests' +__description__ = 'Python HTTP for Humans.' +__url__ = 'https://requests.readthedocs.io' +__version__ = '2.26.0' +__build__ = 0x022600 +__author__ = 'Kenneth Reitz' +__author_email__ = 'me@kennethreitz.org' +__license__ = 'Apache 2.0' +__copyright__ = 'Copyright 2020 Kenneth Reitz' +__cake__ = u'\u2728 \U0001f370 \u2728' diff --git a/src/snowflake/connector/vendored/requests/_internal_utils.py b/src/snowflake/connector/vendored/requests/_internal_utils.py new file mode 100644 index 000000000..759d9a56b --- /dev/null +++ b/src/snowflake/connector/vendored/requests/_internal_utils.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- + +""" +requests._internal_utils +~~~~~~~~~~~~~~ + +Provides utility functions that are consumed internally by Requests +which depend on extremely few external helpers (such as compat) +""" + +from .compat import is_py2, builtin_str, str + + +def to_native_string(string, encoding='ascii'): + """Given a string object, regardless of type, returns a representation of + that string in the native string type, encoding and decoding where + necessary. This assumes ASCII unless told otherwise. + """ + if isinstance(string, builtin_str): + out = string + else: + if is_py2: + out = string.encode(encoding) + else: + out = string.decode(encoding) + + return out + + +def unicode_is_ascii(u_string): + """Determine if unicode string only contains ASCII characters. + + :param str u_string: unicode string to check. Must be unicode + and not Python 2 `str`. + :rtype: bool + """ + assert isinstance(u_string, str) + try: + u_string.encode('ascii') + return True + except UnicodeEncodeError: + return False diff --git a/src/snowflake/connector/vendored/requests/adapters.py b/src/snowflake/connector/vendored/requests/adapters.py new file mode 100644 index 000000000..c87a620b2 --- /dev/null +++ b/src/snowflake/connector/vendored/requests/adapters.py @@ -0,0 +1,533 @@ +# -*- coding: utf-8 -*- + +""" +requests.adapters +~~~~~~~~~~~~~~~~~ + +This module contains the transport adapters that Requests uses to define +and maintain connections. 
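(Reviewer's aside on to_native_string() from _internal_utils.py above, since the py2/py3 branch is easy to misread: on Python 3 it only ever decodes.)

```python
# Behavior of the vendored to_native_string() on Python 3.
to_native_string(b"abc")                          # bytes are decoded -> 'abc'
to_native_string("abc")                           # already native str -> unchanged
to_native_string(b"caf\xe9", encoding="latin-1")  # explicit encoding -> 'café'
```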
+""" + +import os.path +import socket + +from ..urllib3.poolmanager import PoolManager, proxy_from_url +from ..urllib3.response import HTTPResponse +from ..urllib3.util import parse_url +from ..urllib3.util import Timeout as TimeoutSauce +from ..urllib3.util.retry import Retry +from ..urllib3.exceptions import ClosedPoolError +from ..urllib3.exceptions import ConnectTimeoutError +from ..urllib3.exceptions import HTTPError as _HTTPError +from ..urllib3.exceptions import MaxRetryError +from ..urllib3.exceptions import NewConnectionError +from ..urllib3.exceptions import ProxyError as _ProxyError +from ..urllib3.exceptions import ProtocolError +from ..urllib3.exceptions import ReadTimeoutError +from ..urllib3.exceptions import SSLError as _SSLError +from ..urllib3.exceptions import ResponseError +from ..urllib3.exceptions import LocationValueError + +from .models import Response +from .compat import urlparse, basestring +from .utils import (DEFAULT_CA_BUNDLE_PATH, extract_zipped_paths, + get_encoding_from_headers, prepend_scheme_if_needed, + get_auth_from_url, urldefragauth, select_proxy) +from .structures import CaseInsensitiveDict +from .cookies import extract_cookies_to_jar +from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError, + ProxyError, RetryError, InvalidSchema, InvalidProxyURL, + InvalidURL) +from .auth import _basic_auth_str + +try: + from ..urllib3.contrib.socks import SOCKSProxyManager +except ImportError: + def SOCKSProxyManager(*args, **kwargs): + raise InvalidSchema("Missing dependencies for SOCKS support.") + +DEFAULT_POOLBLOCK = False +DEFAULT_POOLSIZE = 10 +DEFAULT_RETRIES = 0 +DEFAULT_POOL_TIMEOUT = None + + +class BaseAdapter(object): + """The Base Transport Adapter""" + + def __init__(self): + super(BaseAdapter, self).__init__() + + def send(self, request, stream=False, timeout=None, verify=True, + cert=None, proxies=None): + """Sends PreparedRequest object. Returns Response object. + + :param request: The :class:`PreparedRequest ` being sent. + :param stream: (optional) Whether to stream the request content. + :param timeout: (optional) How long to wait for the server to send + data before giving up, as a float, or a :ref:`(connect timeout, + read timeout) ` tuple. + :type timeout: float or tuple + :param verify: (optional) Either a boolean, in which case it controls whether we verify + the server's TLS certificate, or a string, in which case it must be a path + to a CA bundle to use + :param cert: (optional) Any user-provided SSL certificate to be trusted. + :param proxies: (optional) The proxies dictionary to apply to the request. + """ + raise NotImplementedError + + def close(self): + """Cleans up adapter specific items.""" + raise NotImplementedError + + +class HTTPAdapter(BaseAdapter): + """The built-in HTTP Adapter for urllib3. + + Provides a general-case interface for Requests sessions to contact HTTP and + HTTPS urls by implementing the Transport Adapter interface. This class will + usually be created by the :class:`Session ` class under the + covers. + + :param pool_connections: The number of urllib3 connection pools to cache. + :param pool_maxsize: The maximum number of connections to save in the pool. + :param max_retries: The maximum number of retries each connection + should attempt. Note, this applies only to failed DNS lookups, socket + connections and connection timeouts, never to requests where data has + made it to the server. By default, Requests does not retry failed + connections. 
If you need granular control over the conditions under + which we retry a request, import urllib3's ``Retry`` class and pass + that instead. + :param pool_block: Whether the connection pool should block for connections. + + Usage:: + + >>> import requests + >>> s = requests.Session() + >>> a = requests.adapters.HTTPAdapter(max_retries=3) + >>> s.mount('http://', a) + """ + __attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize', + '_pool_block'] + + def __init__(self, pool_connections=DEFAULT_POOLSIZE, + pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES, + pool_block=DEFAULT_POOLBLOCK): + if max_retries == DEFAULT_RETRIES: + self.max_retries = Retry(0, read=False) + else: + self.max_retries = Retry.from_int(max_retries) + self.config = {} + self.proxy_manager = {} + + super(HTTPAdapter, self).__init__() + + self._pool_connections = pool_connections + self._pool_maxsize = pool_maxsize + self._pool_block = pool_block + + self.init_poolmanager(pool_connections, pool_maxsize, block=pool_block) + + def __getstate__(self): + return {attr: getattr(self, attr, None) for attr in self.__attrs__} + + def __setstate__(self, state): + # Can't handle by adding 'proxy_manager' to self.__attrs__ because + # self.poolmanager uses a lambda function, which isn't pickleable. + self.proxy_manager = {} + self.config = {} + + for attr, value in state.items(): + setattr(self, attr, value) + + self.init_poolmanager(self._pool_connections, self._pool_maxsize, + block=self._pool_block) + + def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs): + """Initializes a urllib3 PoolManager. + + This method should not be called from user code, and is only + exposed for use when subclassing the + :class:`HTTPAdapter `. + + :param connections: The number of urllib3 connection pools to cache. + :param maxsize: The maximum number of connections to save in the pool. + :param block: Block when no free connections are available. + :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager. + """ + # save these values for pickling + self._pool_connections = connections + self._pool_maxsize = maxsize + self._pool_block = block + + self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize, + block=block, strict=True, **pool_kwargs) + + def proxy_manager_for(self, proxy, **proxy_kwargs): + """Return urllib3 ProxyManager for the given proxy. + + This method should not be called from user code, and is only + exposed for use when subclassing the + :class:`HTTPAdapter `. + + :param proxy: The proxy to return a urllib3 ProxyManager for. + :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager. + :returns: ProxyManager + :rtype: urllib3.ProxyManager + """ + if proxy in self.proxy_manager: + manager = self.proxy_manager[proxy] + elif proxy.lower().startswith('socks'): + username, password = get_auth_from_url(proxy) + manager = self.proxy_manager[proxy] = SOCKSProxyManager( + proxy, + username=username, + password=password, + num_pools=self._pool_connections, + maxsize=self._pool_maxsize, + block=self._pool_block, + **proxy_kwargs + ) + else: + proxy_headers = self.proxy_headers(proxy) + manager = self.proxy_manager[proxy] = proxy_from_url( + proxy, + proxy_headers=proxy_headers, + num_pools=self._pool_connections, + maxsize=self._pool_maxsize, + block=self._pool_block, + **proxy_kwargs) + + return manager + + def cert_verify(self, conn, url, verify, cert): + """Verify a SSL certificate. 
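(Reviewer's note on the caller-facing shapes of the verify/cert arguments this method consumes; the host and paths below are hypothetical placeholders.)

```python
import requests

# verify: bool, or a path to a CA bundle; cert: a single file, or a (cert, key) pair.
requests.get("https://httpbin.org/get", verify="/etc/ssl/certs/ca-bundle.crt")
requests.get("https://httpbin.org/get", cert=("/tmp/client.pem", "/tmp/client.key"))
```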
This method should not be called from user + code, and is only exposed for use when subclassing the + :class:`HTTPAdapter `. + + :param conn: The urllib3 connection object associated with the cert. + :param url: The requested URL. + :param verify: Either a boolean, in which case it controls whether we verify + the server's TLS certificate, or a string, in which case it must be a path + to a CA bundle to use + :param cert: The SSL certificate to verify. + """ + if url.lower().startswith('https') and verify: + + cert_loc = None + + # Allow self-specified cert location. + if verify is not True: + cert_loc = verify + + if not cert_loc: + cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH) + + if not cert_loc or not os.path.exists(cert_loc): + raise IOError("Could not find a suitable TLS CA certificate bundle, " + "invalid path: {}".format(cert_loc)) + + conn.cert_reqs = 'CERT_REQUIRED' + + if not os.path.isdir(cert_loc): + conn.ca_certs = cert_loc + else: + conn.ca_cert_dir = cert_loc + else: + conn.cert_reqs = 'CERT_NONE' + conn.ca_certs = None + conn.ca_cert_dir = None + + if cert: + if not isinstance(cert, basestring): + conn.cert_file = cert[0] + conn.key_file = cert[1] + else: + conn.cert_file = cert + conn.key_file = None + if conn.cert_file and not os.path.exists(conn.cert_file): + raise IOError("Could not find the TLS certificate file, " + "invalid path: {}".format(conn.cert_file)) + if conn.key_file and not os.path.exists(conn.key_file): + raise IOError("Could not find the TLS key file, " + "invalid path: {}".format(conn.key_file)) + + def build_response(self, req, resp): + """Builds a :class:`Response ` object from a urllib3 + response. This should not be called from user code, and is only exposed + for use when subclassing the + :class:`HTTPAdapter ` + + :param req: The :class:`PreparedRequest ` used to generate the response. + :param resp: The urllib3 response object. + :rtype: requests.Response + """ + response = Response() + + # Fallback to None if there's no status_code, for whatever reason. + response.status_code = getattr(resp, 'status', None) + + # Make headers case-insensitive. + response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {})) + + # Set encoding. + response.encoding = get_encoding_from_headers(response.headers) + response.raw = resp + response.reason = response.raw.reason + + if isinstance(req.url, bytes): + response.url = req.url.decode('utf-8') + else: + response.url = req.url + + # Add new cookies from the server. + extract_cookies_to_jar(response.cookies, req, resp) + + # Give the Response some context. + response.request = req + response.connection = self + + return response + + def get_connection(self, url, proxies=None): + """Returns a urllib3 connection for the given URL. This should not be + called from user code, and is only exposed for use when subclassing the + :class:`HTTPAdapter `. + + :param url: The URL to connect to. + :param proxies: (optional) A Requests-style dictionary of proxies used on this request. + :rtype: urllib3.ConnectionPool + """ + proxy = select_proxy(url, proxies) + + if proxy: + proxy = prepend_scheme_if_needed(proxy, 'http') + proxy_url = parse_url(proxy) + if not proxy_url.host: + raise InvalidProxyURL("Please check proxy URL. 
It is malformed" + " and could be missing the host.") + proxy_manager = self.proxy_manager_for(proxy) + conn = proxy_manager.connection_from_url(url) + else: + # Only scheme should be lower case + parsed = urlparse(url) + url = parsed.geturl() + conn = self.poolmanager.connection_from_url(url) + + return conn + + def close(self): + """Disposes of any internal state. + + Currently, this closes the PoolManager and any active ProxyManager, + which closes any pooled connections. + """ + self.poolmanager.clear() + for proxy in self.proxy_manager.values(): + proxy.clear() + + def request_url(self, request, proxies): + """Obtain the url to use when making the final request. + + If the message is being sent through a HTTP proxy, the full URL has to + be used. Otherwise, we should only use the path portion of the URL. + + This should not be called from user code, and is only exposed for use + when subclassing the + :class:`HTTPAdapter `. + + :param request: The :class:`PreparedRequest ` being sent. + :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs. + :rtype: str + """ + proxy = select_proxy(request.url, proxies) + scheme = urlparse(request.url).scheme + + is_proxied_http_request = (proxy and scheme != 'https') + using_socks_proxy = False + if proxy: + proxy_scheme = urlparse(proxy).scheme.lower() + using_socks_proxy = proxy_scheme.startswith('socks') + + url = request.path_url + if is_proxied_http_request and not using_socks_proxy: + url = urldefragauth(request.url) + + return url + + def add_headers(self, request, **kwargs): + """Add any headers needed by the connection. As of v2.0 this does + nothing by default, but is left for overriding by users that subclass + the :class:`HTTPAdapter `. + + This should not be called from user code, and is only exposed for use + when subclassing the + :class:`HTTPAdapter `. + + :param request: The :class:`PreparedRequest ` to add headers to. + :param kwargs: The keyword arguments from the call to send(). + """ + pass + + def proxy_headers(self, proxy): + """Returns a dictionary of the headers to add to any request sent + through a proxy. This works with urllib3 magic to ensure that they are + correctly sent to the proxy, rather than in a tunnelled request if + CONNECT is being used. + + This should not be called from user code, and is only exposed for use + when subclassing the + :class:`HTTPAdapter `. + + :param proxy: The url of the proxy being used for this request. + :rtype: dict + """ + headers = {} + username, password = get_auth_from_url(proxy) + + if username: + headers['Proxy-Authorization'] = _basic_auth_str(username, + password) + + return headers + + def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None): + """Sends PreparedRequest object. Returns Response object. + + :param request: The :class:`PreparedRequest ` being sent. + :param stream: (optional) Whether to stream the request content. + :param timeout: (optional) How long to wait for the server to send + data before giving up, as a float, or a :ref:`(connect timeout, + read timeout) ` tuple. + :type timeout: float or tuple or urllib3 Timeout object + :param verify: (optional) Either a boolean, in which case it controls whether + we verify the server's TLS certificate, or a string, in which case it + must be a path to a CA bundle to use + :param cert: (optional) Any user-provided SSL certificate to be trusted. + :param proxies: (optional) The proxies dictionary to apply to the request. 
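(Reviewer's note: a concrete expectation for proxy_headers() above; the proxy URL is made up, and the base64 value is just "user:pass" encoded.)

```python
HTTPAdapter().proxy_headers("http://user:pass@proxy.internal:8080")
# -> {'Proxy-Authorization': 'Basic dXNlcjpwYXNz'}
HTTPAdapter().proxy_headers("http://proxy.internal:8080")
# -> {}  (no credentials in the URL, so no header is added)
```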
+ :rtype: requests.Response + """ + + try: + conn = self.get_connection(request.url, proxies) + except LocationValueError as e: + raise InvalidURL(e, request=request) + + self.cert_verify(conn, request.url, verify, cert) + url = self.request_url(request, proxies) + self.add_headers(request, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies) + + chunked = not (request.body is None or 'Content-Length' in request.headers) + + if isinstance(timeout, tuple): + try: + connect, read = timeout + timeout = TimeoutSauce(connect=connect, read=read) + except ValueError as e: + # this may raise a string formatting error. + err = ("Invalid timeout {}. Pass a (connect, read) " + "timeout tuple, or a single float to set " + "both timeouts to the same value".format(timeout)) + raise ValueError(err) + elif isinstance(timeout, TimeoutSauce): + pass + else: + timeout = TimeoutSauce(connect=timeout, read=timeout) + + try: + if not chunked: + resp = conn.urlopen( + method=request.method, + url=url, + body=request.body, + headers=request.headers, + redirect=False, + assert_same_host=False, + preload_content=False, + decode_content=False, + retries=self.max_retries, + timeout=timeout + ) + + # Send the request. + else: + if hasattr(conn, 'proxy_pool'): + conn = conn.proxy_pool + + low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT) + + try: + low_conn.putrequest(request.method, + url, + skip_accept_encoding=True) + + for header, value in request.headers.items(): + low_conn.putheader(header, value) + + low_conn.endheaders() + + for i in request.body: + low_conn.send(hex(len(i))[2:].encode('utf-8')) + low_conn.send(b'\r\n') + low_conn.send(i) + low_conn.send(b'\r\n') + low_conn.send(b'0\r\n\r\n') + + # Receive the response from the server + try: + # For Python 2.7, use buffering of HTTP responses + r = low_conn.getresponse(buffering=True) + except TypeError: + # For compatibility with Python 3.3+ + r = low_conn.getresponse() + + resp = HTTPResponse.from_httplib( + r, + pool=conn, + connection=low_conn, + preload_content=False, + decode_content=False + ) + except: + # If we hit any problems here, clean up the connection. + # Then, reraise so that we can handle the actual exception. + low_conn.close() + raise + + except (ProtocolError, socket.error) as err: + raise ConnectionError(err, request=request) + + except MaxRetryError as e: + if isinstance(e.reason, ConnectTimeoutError): + # TODO: Remove this in 3.0.0: see #2811 + if not isinstance(e.reason, NewConnectionError): + raise ConnectTimeout(e, request=request) + + if isinstance(e.reason, ResponseError): + raise RetryError(e, request=request) + + if isinstance(e.reason, _ProxyError): + raise ProxyError(e, request=request) + + if isinstance(e.reason, _SSLError): + # This branch is for urllib3 v1.22 and later. 
+ raise SSLError(e, request=request) + + raise ConnectionError(e, request=request) + + except ClosedPoolError as e: + raise ConnectionError(e, request=request) + + except _ProxyError as e: + raise ProxyError(e) + + except (_SSLError, _HTTPError) as e: + if isinstance(e, _SSLError): + # This branch is for urllib3 versions earlier than v1.22 + raise SSLError(e, request=request) + elif isinstance(e, ReadTimeoutError): + raise ReadTimeout(e, request=request) + else: + raise + + return self.build_response(request, resp) diff --git a/src/snowflake/connector/vendored/requests/api.py b/src/snowflake/connector/vendored/requests/api.py new file mode 100644 index 000000000..4cba90eef --- /dev/null +++ b/src/snowflake/connector/vendored/requests/api.py @@ -0,0 +1,159 @@ +# -*- coding: utf-8 -*- + +""" +requests.api +~~~~~~~~~~~~ + +This module implements the Requests API. + +:copyright: (c) 2012 by Kenneth Reitz. +:license: Apache2, see LICENSE for more details. +""" + +from . import sessions + + +def request(method, url, **kwargs): + """Constructs and sends a :class:`Request `. + + :param method: method for the new :class:`Request` object: ``GET``, ``OPTIONS``, ``HEAD``, ``POST``, ``PUT``, ``PATCH``, or ``DELETE``. + :param url: URL for the new :class:`Request` object. + :param params: (optional) Dictionary, list of tuples or bytes to send + in the query string for the :class:`Request`. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`. + :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`. + :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. + :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload. + ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')`` + or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a string + defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers + to add for the file. + :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth. + :param timeout: (optional) How many seconds to wait for the server to send data + before giving up, as a float, or a :ref:`(connect timeout, read + timeout) ` tuple. + :type timeout: float or tuple + :param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to ``True``. + :type allow_redirects: bool + :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy. + :param verify: (optional) Either a boolean, in which case it controls whether we verify + the server's TLS certificate, or a string, in which case it must be a path + to a CA bundle to use. Defaults to ``True``. + :param stream: (optional) if ``False``, the response content will be immediately downloaded. + :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair. 
+ :return: :class:`Response ` object + :rtype: requests.Response + + Usage:: + + >>> import requests + >>> req = requests.request('GET', 'https://httpbin.org/get') + >>> req + + """ + + # By using the 'with' statement we are sure the session is closed, thus we + # avoid leaving sockets open which can trigger a ResourceWarning in some + # cases, and look like a memory leak in others. + with sessions.Session() as session: + return session.request(method=method, url=url, **kwargs) + + +def get(url, params=None, **kwargs): + r"""Sends a GET request. + + :param url: URL for the new :class:`Request` object. + :param params: (optional) Dictionary, list of tuples or bytes to send + in the query string for the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request('get', url, params=params, **kwargs) + + +def options(url, **kwargs): + r"""Sends an OPTIONS request. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request('options', url, **kwargs) + + +def head(url, **kwargs): + r"""Sends a HEAD request. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. If + `allow_redirects` is not provided, it will be set to `False` (as + opposed to the default :meth:`request` behavior). + :return: :class:`Response ` object + :rtype: requests.Response + """ + + kwargs.setdefault('allow_redirects', False) + return request('head', url, **kwargs) + + +def post(url, data=None, json=None, **kwargs): + r"""Sends a POST request. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) json data to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request('post', url, data=data, json=json, **kwargs) + + +def put(url, data=None, **kwargs): + r"""Sends a PUT request. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) json data to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request('put', url, data=data, **kwargs) + + +def patch(url, data=None, **kwargs): + r"""Sends a PATCH request. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) json data to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request('patch', url, data=data, **kwargs) + + +def delete(url, **kwargs): + r"""Sends a DELETE request. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. 
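(Reviewer's note: every helper in this module is a thin wrapper over request(); for example, the two calls below are equivalent. head() is the only helper that changes a default, setting allow_redirects=False.)

```python
import requests

requests.post("https://httpbin.org/post", json={"key": "value"})
requests.request("post", "https://httpbin.org/post", json={"key": "value"})
```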
+ :return: :class:`Response ` object + :rtype: requests.Response + """ + + return request('delete', url, **kwargs) diff --git a/src/snowflake/connector/vendored/requests/auth.py b/src/snowflake/connector/vendored/requests/auth.py new file mode 100644 index 000000000..eeface39a --- /dev/null +++ b/src/snowflake/connector/vendored/requests/auth.py @@ -0,0 +1,305 @@ +# -*- coding: utf-8 -*- + +""" +requests.auth +~~~~~~~~~~~~~ + +This module contains the authentication handlers for Requests. +""" + +import os +import re +import time +import hashlib +import threading +import warnings + +from base64 import b64encode + +from .compat import urlparse, str, basestring +from .cookies import extract_cookies_to_jar +from ._internal_utils import to_native_string +from .utils import parse_dict_header + +CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded' +CONTENT_TYPE_MULTI_PART = 'multipart/form-data' + + +def _basic_auth_str(username, password): + """Returns a Basic Auth string.""" + + # "I want us to put a big-ol' comment on top of it that + # says that this behaviour is dumb but we need to preserve + # it because people are relying on it." + # - Lukasa + # + # These are here solely to maintain backwards compatibility + # for things like ints. This will be removed in 3.0.0. + if not isinstance(username, basestring): + warnings.warn( + "Non-string usernames will no longer be supported in Requests " + "3.0.0. Please convert the object you've passed in ({!r}) to " + "a string or bytes object in the near future to avoid " + "problems.".format(username), + category=DeprecationWarning, + ) + username = str(username) + + if not isinstance(password, basestring): + warnings.warn( + "Non-string passwords will no longer be supported in Requests " + "3.0.0. 
Please convert the object you've passed in ({!r}) to " + "a string or bytes object in the near future to avoid " + "problems.".format(type(password)), + category=DeprecationWarning, + ) + password = str(password) + # -- End Removal -- + + if isinstance(username, str): + username = username.encode('latin1') + + if isinstance(password, str): + password = password.encode('latin1') + + authstr = 'Basic ' + to_native_string( + b64encode(b':'.join((username, password))).strip() + ) + + return authstr + + +class AuthBase(object): + """Base class that all auth implementations derive from""" + + def __call__(self, r): + raise NotImplementedError('Auth hooks must be callable.') + + +class HTTPBasicAuth(AuthBase): + """Attaches HTTP Basic Authentication to the given Request object.""" + + def __init__(self, username, password): + self.username = username + self.password = password + + def __eq__(self, other): + return all([ + self.username == getattr(other, 'username', None), + self.password == getattr(other, 'password', None) + ]) + + def __ne__(self, other): + return not self == other + + def __call__(self, r): + r.headers['Authorization'] = _basic_auth_str(self.username, self.password) + return r + + +class HTTPProxyAuth(HTTPBasicAuth): + """Attaches HTTP Proxy Authentication to a given Request object.""" + + def __call__(self, r): + r.headers['Proxy-Authorization'] = _basic_auth_str(self.username, self.password) + return r + + +class HTTPDigestAuth(AuthBase): + """Attaches HTTP Digest Authentication to the given Request object.""" + + def __init__(self, username, password): + self.username = username + self.password = password + # Keep state in per-thread local storage + self._thread_local = threading.local() + + def init_per_thread_state(self): + # Ensure state is initialized just once per-thread + if not hasattr(self._thread_local, 'init'): + self._thread_local.init = True + self._thread_local.last_nonce = '' + self._thread_local.nonce_count = 0 + self._thread_local.chal = {} + self._thread_local.pos = None + self._thread_local.num_401_calls = None + + def build_digest_header(self, method, url): + """ + :rtype: str + """ + + realm = self._thread_local.chal['realm'] + nonce = self._thread_local.chal['nonce'] + qop = self._thread_local.chal.get('qop') + algorithm = self._thread_local.chal.get('algorithm') + opaque = self._thread_local.chal.get('opaque') + hash_utf8 = None + + if algorithm is None: + _algorithm = 'MD5' + else: + _algorithm = algorithm.upper() + # lambdas assume digest modules are imported at the top level + if _algorithm == 'MD5' or _algorithm == 'MD5-SESS': + def md5_utf8(x): + if isinstance(x, str): + x = x.encode('utf-8') + return hashlib.md5(x).hexdigest() + hash_utf8 = md5_utf8 + elif _algorithm == 'SHA': + def sha_utf8(x): + if isinstance(x, str): + x = x.encode('utf-8') + return hashlib.sha1(x).hexdigest() + hash_utf8 = sha_utf8 + elif _algorithm == 'SHA-256': + def sha256_utf8(x): + if isinstance(x, str): + x = x.encode('utf-8') + return hashlib.sha256(x).hexdigest() + hash_utf8 = sha256_utf8 + elif _algorithm == 'SHA-512': + def sha512_utf8(x): + if isinstance(x, str): + x = x.encode('utf-8') + return hashlib.sha512(x).hexdigest() + hash_utf8 = sha512_utf8 + + KD = lambda s, d: hash_utf8("%s:%s" % (s, d)) + + if hash_utf8 is None: + return None + + # XXX not implemented yet + entdig = None + p_parsed = urlparse(url) + #: path is request-uri defined in RFC 2616 which should not be empty + path = p_parsed.path or "/" + if p_parsed.query: + path += '?' 
+ p_parsed.query + + A1 = '%s:%s:%s' % (self.username, realm, self.password) + A2 = '%s:%s' % (method, path) + + HA1 = hash_utf8(A1) + HA2 = hash_utf8(A2) + + if nonce == self._thread_local.last_nonce: + self._thread_local.nonce_count += 1 + else: + self._thread_local.nonce_count = 1 + ncvalue = '%08x' % self._thread_local.nonce_count + s = str(self._thread_local.nonce_count).encode('utf-8') + s += nonce.encode('utf-8') + s += time.ctime().encode('utf-8') + s += os.urandom(8) + + cnonce = (hashlib.sha1(s).hexdigest()[:16]) + if _algorithm == 'MD5-SESS': + HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce)) + + if not qop: + respdig = KD(HA1, "%s:%s" % (nonce, HA2)) + elif qop == 'auth' or 'auth' in qop.split(','): + noncebit = "%s:%s:%s:%s:%s" % ( + nonce, ncvalue, cnonce, 'auth', HA2 + ) + respdig = KD(HA1, noncebit) + else: + # XXX handle auth-int. + return None + + self._thread_local.last_nonce = nonce + + # XXX should the partial digests be encoded too? + base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \ + 'response="%s"' % (self.username, realm, nonce, path, respdig) + if opaque: + base += ', opaque="%s"' % opaque + if algorithm: + base += ', algorithm="%s"' % algorithm + if entdig: + base += ', digest="%s"' % entdig + if qop: + base += ', qop="auth", nc=%s, cnonce="%s"' % (ncvalue, cnonce) + + return 'Digest %s' % (base) + + def handle_redirect(self, r, **kwargs): + """Reset num_401_calls counter on redirects.""" + if r.is_redirect: + self._thread_local.num_401_calls = 1 + + def handle_401(self, r, **kwargs): + """ + Takes the given response and tries digest-auth, if needed. + + :rtype: requests.Response + """ + + # If response is not 4xx, do not auth + # See https://github.com/psf/requests/issues/3772 + if not 400 <= r.status_code < 500: + self._thread_local.num_401_calls = 1 + return r + + if self._thread_local.pos is not None: + # Rewind the file position indicator of the body to where + # it was to resend the request. + r.request.body.seek(self._thread_local.pos) + s_auth = r.headers.get('www-authenticate', '') + + if 'digest' in s_auth.lower() and self._thread_local.num_401_calls < 2: + + self._thread_local.num_401_calls += 1 + pat = re.compile(r'digest ', flags=re.IGNORECASE) + self._thread_local.chal = parse_dict_header(pat.sub('', s_auth, count=1)) + + # Consume content and release the original connection + # to allow our new request to reuse the same one. + r.content + r.close() + prep = r.request.copy() + extract_cookies_to_jar(prep._cookies, r.request, r.raw) + prep.prepare_cookies(prep._cookies) + + prep.headers['Authorization'] = self.build_digest_header( + prep.method, prep.url) + _r = r.connection.send(prep, **kwargs) + _r.history.append(r) + _r.request = prep + + return _r + + self._thread_local.num_401_calls = 1 + return r + + def __call__(self, r): + # Initialize per-thread state, if needed + self.init_per_thread_state() + # If we have a saved nonce, skip the 401 + if self._thread_local.last_nonce: + r.headers['Authorization'] = self.build_digest_header(r.method, r.url) + try: + self._thread_local.pos = r.body.tell() + except AttributeError: + # In the case of HTTPDigestAuth being reused and the body of + # the previous request was a file-like object, pos has the + # file position of the previous body. Ensure it's set to + # None. 
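(Reviewer's note, stepping outside the hunk briefly: standard usage of the HTTPDigestAuth class defined above, via the public API; httpbin's digest endpoint is a common smoke test.)

```python
import requests
from requests.auth import HTTPDigestAuth

r = requests.get(
    "https://httpbin.org/digest-auth/auth/user/pass",
    auth=HTTPDigestAuth("user", "pass"),
)
assert r.status_code == 200
```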
+ self._thread_local.pos = None + r.register_hook('response', self.handle_401) + r.register_hook('response', self.handle_redirect) + self._thread_local.num_401_calls = 1 + + return r + + def __eq__(self, other): + return all([ + self.username == getattr(other, 'username', None), + self.password == getattr(other, 'password', None) + ]) + + def __ne__(self, other): + return not self == other diff --git a/src/snowflake/connector/vendored/requests/certs.py b/src/snowflake/connector/vendored/requests/certs.py new file mode 100644 index 000000000..d1a378d78 --- /dev/null +++ b/src/snowflake/connector/vendored/requests/certs.py @@ -0,0 +1,18 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +""" +requests.certs +~~~~~~~~~~~~~~ + +This module returns the preferred default CA certificate bundle. There is +only one — the one from the certifi package. + +If you are packaging Requests, e.g., for a Linux distribution or a managed +environment, you can change the definition of where() to return a separately +packaged CA bundle. +""" +from certifi import where + +if __name__ == '__main__': + print(where()) diff --git a/src/snowflake/connector/vendored/requests/compat.py b/src/snowflake/connector/vendored/requests/compat.py new file mode 100644 index 000000000..0b14f5015 --- /dev/null +++ b/src/snowflake/connector/vendored/requests/compat.py @@ -0,0 +1,75 @@ +# -*- coding: utf-8 -*- + +""" +requests.compat +~~~~~~~~~~~~~~~ + +This module handles import compatibility issues between Python 2 and +Python 3. +""" + +try: + import chardet +except ImportError: + import charset_normalizer as chardet + +import sys + +# ------- +# Pythons +# ------- + +# Syntax sugar. +_ver = sys.version_info + +#: Python 2.x? +is_py2 = (_ver[0] == 2) + +#: Python 3.x? +is_py3 = (_ver[0] == 3) + +try: + import simplejson as json +except ImportError: + import json + +# --------- +# Specifics +# --------- + +if is_py2: + from urllib import ( + quote, unquote, quote_plus, unquote_plus, urlencode, getproxies, + proxy_bypass, proxy_bypass_environment, getproxies_environment) + from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag + from urllib2 import parse_http_list + import cookielib + from Cookie import Morsel + from StringIO import StringIO + # Keep OrderedDict for backwards compatibility. + from collections import Callable, Mapping, MutableMapping, OrderedDict + + + builtin_str = str + bytes = str + str = unicode + basestring = basestring + numeric_types = (int, long, float) + integer_types = (int, long) + +elif is_py3: + from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag + from urllib.request import parse_http_list, getproxies, proxy_bypass, proxy_bypass_environment, getproxies_environment + from http import cookiejar as cookielib + from http.cookies import Morsel + from io import StringIO + # Keep OrderedDict for backwards compatibility. 
+ from collections import OrderedDict + from collections.abc import Callable, Mapping, MutableMapping + + builtin_str = str + str = str + bytes = bytes + basestring = (str, bytes) + numeric_types = (int, float) + integer_types = (int,) diff --git a/src/snowflake/connector/vendored/requests/cookies.py b/src/snowflake/connector/vendored/requests/cookies.py new file mode 100644 index 000000000..56fccd9c2 --- /dev/null +++ b/src/snowflake/connector/vendored/requests/cookies.py @@ -0,0 +1,549 @@ +# -*- coding: utf-8 -*- + +""" +requests.cookies +~~~~~~~~~~~~~~~~ + +Compatibility code to be able to use `cookielib.CookieJar` with requests. + +requests.utils imports from here, so be careful with imports. +""" + +import copy +import time +import calendar + +from ._internal_utils import to_native_string +from .compat import cookielib, urlparse, urlunparse, Morsel, MutableMapping + +try: + import threading +except ImportError: + import dummy_threading as threading + + +class MockRequest(object): + """Wraps a `requests.Request` to mimic a `urllib2.Request`. + + The code in `cookielib.CookieJar` expects this interface in order to correctly + manage cookie policies, i.e., determine whether a cookie can be set, given the + domains of the request and the cookie. + + The original request object is read-only. The client is responsible for collecting + the new headers via `get_new_headers()` and interpreting them appropriately. You + probably want `get_cookie_header`, defined below. + """ + + def __init__(self, request): + self._r = request + self._new_headers = {} + self.type = urlparse(self._r.url).scheme + + def get_type(self): + return self.type + + def get_host(self): + return urlparse(self._r.url).netloc + + def get_origin_req_host(self): + return self.get_host() + + def get_full_url(self): + # Only return the response's URL if the user hadn't set the Host + # header + if not self._r.headers.get('Host'): + return self._r.url + # If they did set it, retrieve it and reconstruct the expected domain + host = to_native_string(self._r.headers['Host'], encoding='utf-8') + parsed = urlparse(self._r.url) + # Reconstruct the URL as we expect it + return urlunparse([ + parsed.scheme, host, parsed.path, parsed.params, parsed.query, + parsed.fragment + ]) + + def is_unverifiable(self): + return True + + def has_header(self, name): + return name in self._r.headers or name in self._new_headers + + def get_header(self, name, default=None): + return self._r.headers.get(name, self._new_headers.get(name, default)) + + def add_header(self, key, val): + """cookielib has no legitimate use for this method; add it back if you find one.""" + raise NotImplementedError("Cookie headers should be added with add_unredirected_header()") + + def add_unredirected_header(self, name, value): + self._new_headers[name] = value + + def get_new_headers(self): + return self._new_headers + + @property + def unverifiable(self): + return self.is_unverifiable() + + @property + def origin_req_host(self): + return self.get_origin_req_host() + + @property + def host(self): + return self.get_host() + + +class MockResponse(object): + """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`. + + ...what? Basically, expose the parsed HTTP headers from the server response + the way `cookielib` expects to see them. + """ + + def __init__(self, headers): + """Make a MockResponse for `cookielib` to read. 
+ + :param headers: a httplib.HTTPMessage or analogous carrying the headers + """ + self._headers = headers + + def info(self): + return self._headers + + def getheaders(self, name): + self._headers.getheaders(name) + + +def extract_cookies_to_jar(jar, request, response): + """Extract the cookies from the response into a CookieJar. + + :param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar) + :param request: our own requests.Request object + :param response: urllib3.HTTPResponse object + """ + if not (hasattr(response, '_original_response') and + response._original_response): + return + # the _original_response field is the wrapped httplib.HTTPResponse object, + req = MockRequest(request) + # pull out the HTTPMessage with the headers and put it in the mock: + res = MockResponse(response._original_response.msg) + jar.extract_cookies(res, req) + + +def get_cookie_header(jar, request): + """ + Produce an appropriate Cookie header string to be sent with `request`, or None. + + :rtype: str + """ + r = MockRequest(request) + jar.add_cookie_header(r) + return r.get_new_headers().get('Cookie') + + +def remove_cookie_by_name(cookiejar, name, domain=None, path=None): + """Unsets a cookie by name, by default over all domains and paths. + + Wraps CookieJar.clear(), is O(n). + """ + clearables = [] + for cookie in cookiejar: + if cookie.name != name: + continue + if domain is not None and domain != cookie.domain: + continue + if path is not None and path != cookie.path: + continue + clearables.append((cookie.domain, cookie.path, cookie.name)) + + for domain, path, name in clearables: + cookiejar.clear(domain, path, name) + + +class CookieConflictError(RuntimeError): + """There are two cookies that meet the criteria specified in the cookie jar. + Use .get and .set and include domain and path args in order to be more specific. + """ + + +class RequestsCookieJar(cookielib.CookieJar, MutableMapping): + """Compatibility class; is a cookielib.CookieJar, but exposes a dict + interface. + + This is the CookieJar we create by default for requests and sessions that + don't specify one, since some clients may expect response.cookies and + session.cookies to support dict operations. + + Requests does not use the dict interface internally; it's just for + compatibility with external client code. All requests code should work + out of the box with externally provided instances of ``CookieJar``, e.g. + ``LWPCookieJar`` and ``FileCookieJar``. + + Unlike a regular CookieJar, this class is pickleable. + + .. warning:: dictionary operations that are normally O(1) may be O(n). + """ + + def get(self, name, default=None, domain=None, path=None): + """Dict-like get() that also supports optional domain and path args in + order to resolve naming collisions from using one cookie jar over + multiple domains. + + .. warning:: operation is O(n), not O(1). + """ + try: + return self._find_no_duplicates(name, domain, path) + except KeyError: + return default + + def set(self, name, value, **kwargs): + """Dict-like set() that also supports optional domain and path args in + order to resolve naming collisions from using one cookie jar over + multiple domains. 
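+
+        Example (illustrative values)::
+
+            >>> jar = RequestsCookieJar()
+            >>> _ = jar.set('token', 'abc', domain='example.com', path='/')
+            >>> jar.get('token', domain='example.com')
+            'abc'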
+ """ + # support client code that unsets cookies by assignment of a None value: + if value is None: + remove_cookie_by_name(self, name, domain=kwargs.get('domain'), path=kwargs.get('path')) + return + + if isinstance(value, Morsel): + c = morsel_to_cookie(value) + else: + c = create_cookie(name, value, **kwargs) + self.set_cookie(c) + return c + + def iterkeys(self): + """Dict-like iterkeys() that returns an iterator of names of cookies + from the jar. + + .. seealso:: itervalues() and iteritems(). + """ + for cookie in iter(self): + yield cookie.name + + def keys(self): + """Dict-like keys() that returns a list of names of cookies from the + jar. + + .. seealso:: values() and items(). + """ + return list(self.iterkeys()) + + def itervalues(self): + """Dict-like itervalues() that returns an iterator of values of cookies + from the jar. + + .. seealso:: iterkeys() and iteritems(). + """ + for cookie in iter(self): + yield cookie.value + + def values(self): + """Dict-like values() that returns a list of values of cookies from the + jar. + + .. seealso:: keys() and items(). + """ + return list(self.itervalues()) + + def iteritems(self): + """Dict-like iteritems() that returns an iterator of name-value tuples + from the jar. + + .. seealso:: iterkeys() and itervalues(). + """ + for cookie in iter(self): + yield cookie.name, cookie.value + + def items(self): + """Dict-like items() that returns a list of name-value tuples from the + jar. Allows client-code to call ``dict(RequestsCookieJar)`` and get a + vanilla python dict of key value pairs. + + .. seealso:: keys() and values(). + """ + return list(self.iteritems()) + + def list_domains(self): + """Utility method to list all the domains in the jar.""" + domains = [] + for cookie in iter(self): + if cookie.domain not in domains: + domains.append(cookie.domain) + return domains + + def list_paths(self): + """Utility method to list all the paths in the jar.""" + paths = [] + for cookie in iter(self): + if cookie.path not in paths: + paths.append(cookie.path) + return paths + + def multiple_domains(self): + """Returns True if there are multiple domains in the jar. + Returns False otherwise. + + :rtype: bool + """ + domains = [] + for cookie in iter(self): + if cookie.domain is not None and cookie.domain in domains: + return True + domains.append(cookie.domain) + return False # there is only one domain in jar + + def get_dict(self, domain=None, path=None): + """Takes as an argument an optional domain and path and returns a plain + old Python dict of name-value pairs of cookies that meet the + requirements. + + :rtype: dict + """ + dictionary = {} + for cookie in iter(self): + if ( + (domain is None or cookie.domain == domain) and + (path is None or cookie.path == path) + ): + dictionary[cookie.name] = cookie.value + return dictionary + + def __contains__(self, name): + try: + return super(RequestsCookieJar, self).__contains__(name) + except CookieConflictError: + return True + + def __getitem__(self, name): + """Dict-like __getitem__() for compatibility with client code. Throws + exception if there are more than one cookie with name. In that case, + use the more explicit get() method instead. + + .. warning:: operation is O(n), not O(1). + """ + return self._find_no_duplicates(name) + + def __setitem__(self, name, value): + """Dict-like __setitem__ for compatibility with client code. Throws + exception if there is already a cookie of that name in the jar. In that + case, use the more explicit set() method instead. 
+ """ + self.set(name, value) + + def __delitem__(self, name): + """Deletes a cookie given a name. Wraps ``cookielib.CookieJar``'s + ``remove_cookie_by_name()``. + """ + remove_cookie_by_name(self, name) + + def set_cookie(self, cookie, *args, **kwargs): + if hasattr(cookie.value, 'startswith') and cookie.value.startswith('"') and cookie.value.endswith('"'): + cookie.value = cookie.value.replace('\\"', '') + return super(RequestsCookieJar, self).set_cookie(cookie, *args, **kwargs) + + def update(self, other): + """Updates this jar with cookies from another CookieJar or dict-like""" + if isinstance(other, cookielib.CookieJar): + for cookie in other: + self.set_cookie(copy.copy(cookie)) + else: + super(RequestsCookieJar, self).update(other) + + def _find(self, name, domain=None, path=None): + """Requests uses this method internally to get cookie values. + + If there are conflicting cookies, _find arbitrarily chooses one. + See _find_no_duplicates if you want an exception thrown if there are + conflicting cookies. + + :param name: a string containing name of cookie + :param domain: (optional) string containing domain of cookie + :param path: (optional) string containing path of cookie + :return: cookie.value + """ + for cookie in iter(self): + if cookie.name == name: + if domain is None or cookie.domain == domain: + if path is None or cookie.path == path: + return cookie.value + + raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path)) + + def _find_no_duplicates(self, name, domain=None, path=None): + """Both ``__get_item__`` and ``get`` call this function: it's never + used elsewhere in Requests. + + :param name: a string containing name of cookie + :param domain: (optional) string containing domain of cookie + :param path: (optional) string containing path of cookie + :raises KeyError: if cookie is not found + :raises CookieConflictError: if there are multiple cookies + that match name and optionally domain and path + :return: cookie.value + """ + toReturn = None + for cookie in iter(self): + if cookie.name == name: + if domain is None or cookie.domain == domain: + if path is None or cookie.path == path: + if toReturn is not None: # if there are multiple cookies that meet passed in criteria + raise CookieConflictError('There are multiple cookies with name, %r' % (name)) + toReturn = cookie.value # we will eventually return this as long as no cookie conflict + + if toReturn: + return toReturn + raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path)) + + def __getstate__(self): + """Unlike a normal CookieJar, this class is pickleable.""" + state = self.__dict__.copy() + # remove the unpickleable RLock object + state.pop('_cookies_lock') + return state + + def __setstate__(self, state): + """Unlike a normal CookieJar, this class is pickleable.""" + self.__dict__.update(state) + if '_cookies_lock' not in self.__dict__: + self._cookies_lock = threading.RLock() + + def copy(self): + """Return a copy of this RequestsCookieJar.""" + new_cj = RequestsCookieJar() + new_cj.set_policy(self.get_policy()) + new_cj.update(self) + return new_cj + + def get_policy(self): + """Return the CookiePolicy instance used.""" + return self._policy + + +def _copy_cookie_jar(jar): + if jar is None: + return None + + if hasattr(jar, 'copy'): + # We're dealing with an instance of RequestsCookieJar + return jar.copy() + # We're dealing with a generic CookieJar instance + new_jar = copy.copy(jar) + new_jar.clear() + for cookie in jar: + new_jar.set_cookie(copy.copy(cookie)) + return new_jar + + 
+def create_cookie(name, value, **kwargs): + """Make a cookie from underspecified parameters. + + By default, the pair of `name` and `value` will be set for the domain '' + and sent on every request (this is sometimes called a "supercookie"). + """ + result = { + 'version': 0, + 'name': name, + 'value': value, + 'port': None, + 'domain': '', + 'path': '/', + 'secure': False, + 'expires': None, + 'discard': True, + 'comment': None, + 'comment_url': None, + 'rest': {'HttpOnly': None}, + 'rfc2109': False, + } + + badargs = set(kwargs) - set(result) + if badargs: + err = 'create_cookie() got unexpected keyword arguments: %s' + raise TypeError(err % list(badargs)) + + result.update(kwargs) + result['port_specified'] = bool(result['port']) + result['domain_specified'] = bool(result['domain']) + result['domain_initial_dot'] = result['domain'].startswith('.') + result['path_specified'] = bool(result['path']) + + return cookielib.Cookie(**result) + + +def morsel_to_cookie(morsel): + """Convert a Morsel object into a Cookie containing the one k/v pair.""" + + expires = None + if morsel['max-age']: + try: + expires = int(time.time() + int(morsel['max-age'])) + except ValueError: + raise TypeError('max-age: %s must be integer' % morsel['max-age']) + elif morsel['expires']: + time_template = '%a, %d-%b-%Y %H:%M:%S GMT' + expires = calendar.timegm( + time.strptime(morsel['expires'], time_template) + ) + return create_cookie( + comment=morsel['comment'], + comment_url=bool(morsel['comment']), + discard=False, + domain=morsel['domain'], + expires=expires, + name=morsel.key, + path=morsel['path'], + port=None, + rest={'HttpOnly': morsel['httponly']}, + rfc2109=False, + secure=bool(morsel['secure']), + value=morsel.value, + version=morsel['version'] or 0, + ) + + +def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True): + """Returns a CookieJar from a key/value dictionary. + + :param cookie_dict: Dict of key/values to insert into CookieJar. + :param cookiejar: (optional) A cookiejar to add the cookies to. + :param overwrite: (optional) If False, will not replace cookies + already in the jar with new ones. + :rtype: CookieJar + """ + if cookiejar is None: + cookiejar = RequestsCookieJar() + + if cookie_dict is not None: + names_from_jar = [cookie.name for cookie in cookiejar] + for name in cookie_dict: + if overwrite or (name not in names_from_jar): + cookiejar.set_cookie(create_cookie(name, cookie_dict[name])) + + return cookiejar + + +def merge_cookies(cookiejar, cookies): + """Add cookies to cookiejar and returns a merged CookieJar. + + :param cookiejar: CookieJar object to add the cookies to. + :param cookies: Dictionary or CookieJar object to be added. + :rtype: CookieJar + """ + if not isinstance(cookiejar, cookielib.CookieJar): + raise ValueError('You can only merge into CookieJar') + + if isinstance(cookies, dict): + cookiejar = cookiejar_from_dict( + cookies, cookiejar=cookiejar, overwrite=False) + elif isinstance(cookies, cookielib.CookieJar): + try: + cookiejar.update(cookies) + except AttributeError: + for cookie_in_jar in cookies: + cookiejar.set_cookie(cookie_in_jar) + + return cookiejar diff --git a/src/snowflake/connector/vendored/requests/exceptions.py b/src/snowflake/connector/vendored/requests/exceptions.py new file mode 100644 index 000000000..9a09a22df --- /dev/null +++ b/src/snowflake/connector/vendored/requests/exceptions.py @@ -0,0 +1,127 @@ +# -*- coding: utf-8 -*- + +""" +requests.exceptions +~~~~~~~~~~~~~~~~~~~ + +This module contains the set of Requests' exceptions. 
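+
+Every exception class here derives from RequestException, so callers can
+catch that single base class; the warning classes at the bottom derive
+from the built-in Warning instead.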
+""" +from ..urllib3.exceptions import HTTPError as BaseHTTPError + + +class RequestException(IOError): + """There was an ambiguous exception that occurred while handling your + request. + """ + + def __init__(self, *args, **kwargs): + """Initialize RequestException with `request` and `response` objects.""" + response = kwargs.pop('response', None) + self.response = response + self.request = kwargs.pop('request', None) + if (response is not None and not self.request and + hasattr(response, 'request')): + self.request = self.response.request + super(RequestException, self).__init__(*args, **kwargs) + + +class InvalidJSONError(RequestException): + """A JSON error occurred.""" + + +class HTTPError(RequestException): + """An HTTP error occurred.""" + + +class ConnectionError(RequestException): + """A Connection error occurred.""" + + +class ProxyError(ConnectionError): + """A proxy error occurred.""" + + +class SSLError(ConnectionError): + """An SSL error occurred.""" + + +class Timeout(RequestException): + """The request timed out. + + Catching this error will catch both + :exc:`~requests.exceptions.ConnectTimeout` and + :exc:`~requests.exceptions.ReadTimeout` errors. + """ + + +class ConnectTimeout(ConnectionError, Timeout): + """The request timed out while trying to connect to the remote server. + + Requests that produced this error are safe to retry. + """ + + +class ReadTimeout(Timeout): + """The server did not send any data in the allotted amount of time.""" + + +class URLRequired(RequestException): + """A valid URL is required to make a request.""" + + +class TooManyRedirects(RequestException): + """Too many redirects.""" + + +class MissingSchema(RequestException, ValueError): + """The URL schema (e.g. http or https) is missing.""" + + +class InvalidSchema(RequestException, ValueError): + """See defaults.py for valid schemas.""" + + +class InvalidURL(RequestException, ValueError): + """The URL provided was somehow invalid.""" + + +class InvalidHeader(RequestException, ValueError): + """The header value provided was somehow invalid.""" + + +class InvalidProxyURL(InvalidURL): + """The proxy URL provided is invalid.""" + + +class ChunkedEncodingError(RequestException): + """The server declared chunked encoding but sent an invalid chunk.""" + + +class ContentDecodingError(RequestException, BaseHTTPError): + """Failed to decode response content.""" + + +class StreamConsumedError(RequestException, TypeError): + """The content for this response was already consumed.""" + + +class RetryError(RequestException): + """Custom retries logic failed""" + + +class UnrewindableBodyError(RequestException): + """Requests encountered an error when trying to rewind a body.""" + +# Warnings + + +class RequestsWarning(Warning): + """Base warning for Requests.""" + + +class FileModeWarning(RequestsWarning, DeprecationWarning): + """A file was opened in text mode, but Requests determined its binary length.""" + + +class RequestsDependencyWarning(RequestsWarning): + """An imported dependency doesn't match the expected version range.""" diff --git a/src/snowflake/connector/vendored/requests/help.py b/src/snowflake/connector/vendored/requests/help.py new file mode 100644 index 000000000..6ee4c01c3 --- /dev/null +++ b/src/snowflake/connector/vendored/requests/help.py @@ -0,0 +1,135 @@ +"""Module containing bug report helper(s).""" +from __future__ import print_function + +import json +import platform +import sys +import ssl + +import idna +from .. import urllib3 + +from . 
import __version__ as requests_version + +try: + import charset_normalizer +except ImportError: + charset_normalizer = None + +try: + import chardet +except ImportError: + chardet = None + +try: + from ..urllib3.contrib import pyopenssl +except ImportError: + pyopenssl = None + OpenSSL = None + cryptography = None +else: + import OpenSSL + import cryptography + + +def _implementation(): + """Return a dict with the Python implementation and version. + + Provide both the name and the version of the Python implementation + currently running. For example, on CPython 2.7.5 it will return + {'name': 'CPython', 'version': '2.7.5'}. + + This function works best on CPython and PyPy: in particular, it probably + doesn't work for Jython or IronPython. Future investigation should be done + to work out the correct shape of the code for those platforms. + """ + implementation = platform.python_implementation() + + if implementation == 'CPython': + implementation_version = platform.python_version() + elif implementation == 'PyPy': + implementation_version = '%s.%s.%s' % (sys.pypy_version_info.major, + sys.pypy_version_info.minor, + sys.pypy_version_info.micro) + if sys.pypy_version_info.releaselevel != 'final': + implementation_version = ''.join([ + implementation_version, sys.pypy_version_info.releaselevel + ]) + elif implementation == 'Jython': + implementation_version = platform.python_version() # Complete Guess + elif implementation == 'IronPython': + implementation_version = platform.python_version() # Complete Guess + else: + implementation_version = 'Unknown' + + return {'name': implementation, 'version': implementation_version} + + +def info(): + """Generate information for a bug report.""" + try: + platform_info = { + 'system': platform.system(), + 'release': platform.release(), + } + except IOError: + platform_info = { + 'system': 'Unknown', + 'release': 'Unknown', + } + + implementation_info = _implementation() + urllib3_info = {'version': urllib3.__version__} + charset_normalizer_info = {'version': None} + chardet_info = {'version': None} + if charset_normalizer: + charset_normalizer_info = {'version': charset_normalizer.__version__} + if chardet: + chardet_info = {'version': chardet.__version__} + + pyopenssl_info = { + 'version': None, + 'openssl_version': '', + } + if OpenSSL: + pyopenssl_info = { + 'version': OpenSSL.__version__, + 'openssl_version': '%x' % OpenSSL.SSL.OPENSSL_VERSION_NUMBER, + } + cryptography_info = { + 'version': getattr(cryptography, '__version__', ''), + } + idna_info = { + 'version': getattr(idna, '__version__', ''), + } + + system_ssl = ssl.OPENSSL_VERSION_NUMBER + system_ssl_info = { + 'version': '%x' % system_ssl if system_ssl is not None else '' + } + + return { + 'platform': platform_info, + 'implementation': implementation_info, + 'system_ssl': system_ssl_info, + 'using_pyopenssl': pyopenssl is not None, + 'using_charset_normalizer': chardet is None, + 'pyOpenSSL': pyopenssl_info, + 'urllib3': urllib3_info, + 'chardet': chardet_info, + 'charset_normalizer': charset_normalizer_info, + 'cryptography': cryptography_info, + 'idna': idna_info, + 'requests': { + 'version': requests_version, + }, + } + + +def main(): + """Pretty-print the bug information as JSON.""" + print(json.dumps(info(), sort_keys=True, indent=2)) + + +if __name__ == '__main__': + main() diff --git a/src/snowflake/connector/vendored/requests/hooks.py b/src/snowflake/connector/vendored/requests/hooks.py new file mode 100644 index 000000000..7a51f212c --- /dev/null +++ 
b/src/snowflake/connector/vendored/requests/hooks.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- + +""" +requests.hooks +~~~~~~~~~~~~~~ + +This module provides the capabilities for the Requests hooks system. + +Available hooks: + +``response``: + The response generated from a Request. +""" +HOOKS = ['response'] + + +def default_hooks(): + return {event: [] for event in HOOKS} + +# TODO: response is the only one + + +def dispatch_hook(key, hooks, hook_data, **kwargs): + """Dispatches a hook dictionary on a given piece of data.""" + hooks = hooks or {} + hooks = hooks.get(key) + if hooks: + if hasattr(hooks, '__call__'): + hooks = [hooks] + for hook in hooks: + _hook_data = hook(hook_data, **kwargs) + if _hook_data is not None: + hook_data = _hook_data + return hook_data diff --git a/src/snowflake/connector/vendored/requests/models.py b/src/snowflake/connector/vendored/requests/models.py new file mode 100644 index 000000000..56a2413c6 --- /dev/null +++ b/src/snowflake/connector/vendored/requests/models.py @@ -0,0 +1,966 @@ +# -*- coding: utf-8 -*- + +""" +requests.models +~~~~~~~~~~~~~~~ + +This module contains the primary objects that power Requests. +""" + +import datetime +import sys + +# Import encoding now, to avoid implicit import later. +# Implicit import within threads may cause LookupError when standard library is in a ZIP, +# such as in Embedded Python. See https://github.com/psf/requests/issues/3578. +import encodings.idna + +from ..urllib3.fields import RequestField +from ..urllib3.filepost import encode_multipart_formdata +from ..urllib3.util import parse_url +from ..urllib3.exceptions import ( + DecodeError, ReadTimeoutError, ProtocolError, LocationParseError) + +from io import UnsupportedOperation +from .hooks import default_hooks +from .structures import CaseInsensitiveDict + +from .auth import HTTPBasicAuth +from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar +from .exceptions import ( + HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError, + ContentDecodingError, ConnectionError, StreamConsumedError, InvalidJSONError) +from ._internal_utils import to_native_string, unicode_is_ascii +from .utils import ( + guess_filename, get_auth_from_url, requote_uri, + stream_decode_response_unicode, to_key_val_list, parse_header_links, + iter_slices, guess_json_utf, super_len, check_header_validity) +from .compat import ( + Callable, Mapping, + cookielib, urlunparse, urlsplit, urlencode, str, bytes, + is_py2, chardet, builtin_str, basestring) +from .compat import json as complexjson +from .status_codes import codes + +#: The set of HTTP status codes that indicate an automatically +#: processable redirect. +REDIRECT_STATI = ( + codes.moved, # 301 + codes.found, # 302 + codes.other, # 303 + codes.temporary_redirect, # 307 + codes.permanent_redirect, # 308 +) + +DEFAULT_REDIRECT_LIMIT = 30 +CONTENT_CHUNK_SIZE = 10 * 1024 +ITER_CHUNK_SIZE = 512 + + +class RequestEncodingMixin(object): + @property + def path_url(self): + """Build the path URL to use.""" + + url = [] + + p = urlsplit(self.url) + + path = p.path + if not path: + path = '/' + + url.append(path) + + query = p.query + if query: + url.append('?') + url.append(query) + + return ''.join(url) + + @staticmethod + def _encode_params(data): + """Encode parameters in a piece of data. + + Will successfully encode parameters when passed as a dict or a list of + 2-tuples. Order is retained if data is a list of 2-tuples but arbitrary + if parameters are supplied as a dict. 
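+
+        Example (illustrative)::
+
+            >>> RequestEncodingMixin._encode_params([('a', '1'), ('b', ['2', '3'])])
+            'a=1&b=2&b=3'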
+ """ + + if isinstance(data, (str, bytes)): + return data + elif hasattr(data, 'read'): + return data + elif hasattr(data, '__iter__'): + result = [] + for k, vs in to_key_val_list(data): + if isinstance(vs, basestring) or not hasattr(vs, '__iter__'): + vs = [vs] + for v in vs: + if v is not None: + result.append( + (k.encode('utf-8') if isinstance(k, str) else k, + v.encode('utf-8') if isinstance(v, str) else v)) + return urlencode(result, doseq=True) + else: + return data + + @staticmethod + def _encode_files(files, data): + """Build the body for a multipart/form-data request. + + Will successfully encode files when passed as a dict or a list of + tuples. Order is retained if data is a list of tuples but arbitrary + if parameters are supplied as a dict. + The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype) + or 4-tuples (filename, fileobj, contentype, custom_headers). + """ + if (not files): + raise ValueError("Files must be provided.") + elif isinstance(data, basestring): + raise ValueError("Data must not be a string.") + + new_fields = [] + fields = to_key_val_list(data or {}) + files = to_key_val_list(files or {}) + + for field, val in fields: + if isinstance(val, basestring) or not hasattr(val, '__iter__'): + val = [val] + for v in val: + if v is not None: + # Don't call str() on bytestrings: in Py3 it all goes wrong. + if not isinstance(v, bytes): + v = str(v) + + new_fields.append( + (field.decode('utf-8') if isinstance(field, bytes) else field, + v.encode('utf-8') if isinstance(v, str) else v)) + + for (k, v) in files: + # support for explicit filename + ft = None + fh = None + if isinstance(v, (tuple, list)): + if len(v) == 2: + fn, fp = v + elif len(v) == 3: + fn, fp, ft = v + else: + fn, fp, ft, fh = v + else: + fn = guess_filename(v) or k + fp = v + + if isinstance(fp, (str, bytes, bytearray)): + fdata = fp + elif hasattr(fp, 'read'): + fdata = fp.read() + elif fp is None: + continue + else: + fdata = fp + + rf = RequestField(name=k, data=fdata, filename=fn, headers=fh) + rf.make_multipart(content_type=ft) + new_fields.append(rf) + + body, content_type = encode_multipart_formdata(new_fields) + + return body, content_type + + +class RequestHooksMixin(object): + def register_hook(self, event, hook): + """Properly register a hook.""" + + if event not in self.hooks: + raise ValueError('Unsupported event specified, with event name "%s"' % (event)) + + if isinstance(hook, Callable): + self.hooks[event].append(hook) + elif hasattr(hook, '__iter__'): + self.hooks[event].extend(h for h in hook if isinstance(h, Callable)) + + def deregister_hook(self, event, hook): + """Deregister a previously registered hook. + Returns True if the hook existed, False if not. + """ + + try: + self.hooks[event].remove(hook) + return True + except ValueError: + return False + + +class Request(RequestHooksMixin): + """A user-created :class:`Request ` object. + + Used to prepare a :class:`PreparedRequest `, which is sent to the server. + + :param method: HTTP method to use. + :param url: URL to send. + :param headers: dictionary of headers to send. + :param files: dictionary of {filename: fileobject} files to multipart upload. + :param data: the body to attach to the request. If a dictionary or + list of tuples ``[(key, value)]`` is provided, form-encoding will + take place. + :param json: json for the body to attach to the request (if files or data is not specified). + :param params: URL parameters to append to the URL. 
If a dictionary or + list of tuples ``[(key, value)]`` is provided, form-encoding will + take place. + :param auth: Auth handler or (user, pass) tuple. + :param cookies: dictionary or CookieJar of cookies to attach to this request. + :param hooks: dictionary of callback hooks, for internal usage. + + Usage:: + + >>> import requests + >>> req = requests.Request('GET', 'https://httpbin.org/get') + >>> req.prepare() + + """ + + def __init__(self, + method=None, url=None, headers=None, files=None, data=None, + params=None, auth=None, cookies=None, hooks=None, json=None): + + # Default empty dicts for dict params. + data = [] if data is None else data + files = [] if files is None else files + headers = {} if headers is None else headers + params = {} if params is None else params + hooks = {} if hooks is None else hooks + + self.hooks = default_hooks() + for (k, v) in list(hooks.items()): + self.register_hook(event=k, hook=v) + + self.method = method + self.url = url + self.headers = headers + self.files = files + self.data = data + self.json = json + self.params = params + self.auth = auth + self.cookies = cookies + + def __repr__(self): + return '' % (self.method) + + def prepare(self): + """Constructs a :class:`PreparedRequest ` for transmission and returns it.""" + p = PreparedRequest() + p.prepare( + method=self.method, + url=self.url, + headers=self.headers, + files=self.files, + data=self.data, + json=self.json, + params=self.params, + auth=self.auth, + cookies=self.cookies, + hooks=self.hooks, + ) + return p + + +class PreparedRequest(RequestEncodingMixin, RequestHooksMixin): + """The fully mutable :class:`PreparedRequest ` object, + containing the exact bytes that will be sent to the server. + + Instances are generated from a :class:`Request ` object, and + should not be instantiated manually; doing so may produce undesirable + effects. + + Usage:: + + >>> import requests + >>> req = requests.Request('GET', 'https://httpbin.org/get') + >>> r = req.prepare() + >>> r + + + >>> s = requests.Session() + >>> s.send(r) + + """ + + def __init__(self): + #: HTTP verb to send to the server. + self.method = None + #: HTTP URL to send the request to. + self.url = None + #: dictionary of HTTP headers. + self.headers = None + # The `CookieJar` used to create the Cookie header will be stored here + # after prepare_cookies is called + self._cookies = None + #: request body to send to the server. + self.body = None + #: dictionary of callback hooks, for internal usage. + self.hooks = default_hooks() + #: integer denoting starting position of a readable file-like body. + self._body_position = None + + def prepare(self, + method=None, url=None, headers=None, files=None, data=None, + params=None, auth=None, cookies=None, hooks=None, json=None): + """Prepares the entire request with the given parameters.""" + + self.prepare_method(method) + self.prepare_url(url, params) + self.prepare_headers(headers) + self.prepare_cookies(cookies) + self.prepare_body(data, files, json) + self.prepare_auth(auth, url) + + # Note that prepare_auth must be last to enable authentication schemes + # such as OAuth to work on a fully prepared request. + + # This MUST go after prepare_auth. 
Authenticators could add a hook + self.prepare_hooks(hooks) + + def __repr__(self): + return '' % (self.method) + + def copy(self): + p = PreparedRequest() + p.method = self.method + p.url = self.url + p.headers = self.headers.copy() if self.headers is not None else None + p._cookies = _copy_cookie_jar(self._cookies) + p.body = self.body + p.hooks = self.hooks + p._body_position = self._body_position + return p + + def prepare_method(self, method): + """Prepares the given HTTP method.""" + self.method = method + if self.method is not None: + self.method = to_native_string(self.method.upper()) + + @staticmethod + def _get_idna_encoded_host(host): + import idna + + try: + host = idna.encode(host, uts46=True).decode('utf-8') + except idna.IDNAError: + raise UnicodeError + return host + + def prepare_url(self, url, params): + """Prepares the given HTTP URL.""" + #: Accept objects that have string representations. + #: We're unable to blindly call unicode/str functions + #: as this will include the bytestring indicator (b'') + #: on python 3.x. + #: https://github.com/psf/requests/pull/2238 + if isinstance(url, bytes): + url = url.decode('utf8') + else: + url = unicode(url) if is_py2 else str(url) + + # Remove leading whitespaces from url + url = url.lstrip() + + # Don't do any URL preparation for non-HTTP schemes like `mailto`, + # `data` etc to work around exceptions from `url_parse`, which + # handles RFC 3986 only. + if ':' in url and not url.lower().startswith('http'): + self.url = url + return + + # Support for unicode domain names and paths. + try: + scheme, auth, host, port, path, query, fragment = parse_url(url) + except LocationParseError as e: + raise InvalidURL(*e.args) + + if not scheme: + error = ("Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?") + error = error.format(to_native_string(url, 'utf8')) + + raise MissingSchema(error) + + if not host: + raise InvalidURL("Invalid URL %r: No host supplied" % url) + + # In general, we want to try IDNA encoding the hostname if the string contains + # non-ASCII characters. This allows users to automatically get the correct IDNA + # behaviour. For strings containing only ASCII characters, we need to also verify + # it doesn't start with a wildcard (*), before allowing the unencoded hostname. + if not unicode_is_ascii(host): + try: + host = self._get_idna_encoded_host(host) + except UnicodeError: + raise InvalidURL('URL has an invalid label.') + elif host.startswith(u'*'): + raise InvalidURL('URL has an invalid label.') + + # Carefully reconstruct the network location + netloc = auth or '' + if netloc: + netloc += '@' + netloc += host + if port: + netloc += ':' + str(port) + + # Bare domains aren't valid URLs. 
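+        # e.g. 'http://example.com' is normalized to 'http://example.com/'.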
+ if not path: + path = '/' + + if is_py2: + if isinstance(scheme, str): + scheme = scheme.encode('utf-8') + if isinstance(netloc, str): + netloc = netloc.encode('utf-8') + if isinstance(path, str): + path = path.encode('utf-8') + if isinstance(query, str): + query = query.encode('utf-8') + if isinstance(fragment, str): + fragment = fragment.encode('utf-8') + + if isinstance(params, (str, bytes)): + params = to_native_string(params) + + enc_params = self._encode_params(params) + if enc_params: + if query: + query = '%s&%s' % (query, enc_params) + else: + query = enc_params + + url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment])) + self.url = url + + def prepare_headers(self, headers): + """Prepares the given HTTP headers.""" + + self.headers = CaseInsensitiveDict() + if headers: + for header in headers.items(): + # Raise exception on invalid header value. + check_header_validity(header) + name, value = header + self.headers[to_native_string(name)] = value + + def prepare_body(self, data, files, json=None): + """Prepares the given HTTP body data.""" + + # Check if file, fo, generator, iterator. + # If not, run through normal process. + + # Nottin' on you. + body = None + content_type = None + + if not data and json is not None: + # urllib3 requires a bytes-like body. Python 2's json.dumps + # provides this natively, but Python 3 gives a Unicode string. + content_type = 'application/json' + + try: + body = complexjson.dumps(json, allow_nan=False) + except ValueError as ve: + raise InvalidJSONError(ve, request=self) + + if not isinstance(body, bytes): + body = body.encode('utf-8') + + is_stream = all([ + hasattr(data, '__iter__'), + not isinstance(data, (basestring, list, tuple, Mapping)) + ]) + + if is_stream: + try: + length = super_len(data) + except (TypeError, AttributeError, UnsupportedOperation): + length = None + + body = data + + if getattr(body, 'tell', None) is not None: + # Record the current file position before reading. + # This will allow us to rewind a file in the event + # of a redirect. + try: + self._body_position = body.tell() + except (IOError, OSError): + # This differentiates from None, allowing us to catch + # a failed `tell()` later when trying to rewind the body + self._body_position = object() + + if files: + raise NotImplementedError('Streamed bodies and files are mutually exclusive.') + + if length: + self.headers['Content-Length'] = builtin_str(length) + else: + self.headers['Transfer-Encoding'] = 'chunked' + else: + # Multi-part file uploads. + if files: + (body, content_type) = self._encode_files(files, data) + else: + if data: + body = self._encode_params(data) + if isinstance(data, basestring) or hasattr(data, 'read'): + content_type = None + else: + content_type = 'application/x-www-form-urlencoded' + + self.prepare_content_length(body) + + # Add content-type if it wasn't explicitly provided. + if content_type and ('content-type' not in self.headers): + self.headers['Content-Type'] = content_type + + self.body = body + + def prepare_content_length(self, body): + """Prepare Content-Length header based on request method and body""" + if body is not None: + length = super_len(body) + if length: + # If length exists, set it. Otherwise, we fallback + # to Transfer-Encoding: chunked. + self.headers['Content-Length'] = builtin_str(length) + elif self.method not in ('GET', 'HEAD') and self.headers.get('Content-Length') is None: + # Set Content-Length to 0 for methods that can have a body + # but don't provide one. (i.e. 
not GET or HEAD) + self.headers['Content-Length'] = '0' + + def prepare_auth(self, auth, url=''): + """Prepares the given HTTP auth data.""" + + # If no Auth is explicitly provided, extract it from the URL first. + if auth is None: + url_auth = get_auth_from_url(self.url) + auth = url_auth if any(url_auth) else None + + if auth: + if isinstance(auth, tuple) and len(auth) == 2: + # special-case basic HTTP auth + auth = HTTPBasicAuth(*auth) + + # Allow auth to make its changes. + r = auth(self) + + # Update self to reflect the auth changes. + self.__dict__.update(r.__dict__) + + # Recompute Content-Length + self.prepare_content_length(self.body) + + def prepare_cookies(self, cookies): + """Prepares the given HTTP cookie data. + + This function eventually generates a ``Cookie`` header from the + given cookies using cookielib. Due to cookielib's design, the header + will not be regenerated if it already exists, meaning this function + can only be called once for the life of the + :class:`PreparedRequest ` object. Any subsequent calls + to ``prepare_cookies`` will have no actual effect, unless the "Cookie" + header is removed beforehand. + """ + if isinstance(cookies, cookielib.CookieJar): + self._cookies = cookies + else: + self._cookies = cookiejar_from_dict(cookies) + + cookie_header = get_cookie_header(self._cookies, self) + if cookie_header is not None: + self.headers['Cookie'] = cookie_header + + def prepare_hooks(self, hooks): + """Prepares the given hooks.""" + # hooks can be passed as None to the prepare method and to this + # method. To prevent iterating over None, simply use an empty list + # if hooks is False-y + hooks = hooks or [] + for event in hooks: + self.register_hook(event, hooks[event]) + + +class Response(object): + """The :class:`Response ` object, which contains a + server's response to an HTTP request. + """ + + __attrs__ = [ + '_content', 'status_code', 'headers', 'url', 'history', + 'encoding', 'reason', 'cookies', 'elapsed', 'request' + ] + + def __init__(self): + self._content = False + self._content_consumed = False + self._next = None + + #: Integer Code of responded HTTP Status, e.g. 404 or 200. + self.status_code = None + + #: Case-insensitive Dictionary of Response Headers. + #: For example, ``headers['content-encoding']`` will return the + #: value of a ``'Content-Encoding'`` response header. + self.headers = CaseInsensitiveDict() + + #: File-like object representation of response (for advanced usage). + #: Use of ``raw`` requires that ``stream=True`` be set on the request. + #: This requirement does not apply for use internally to Requests. + self.raw = None + + #: Final URL location of Response. + self.url = None + + #: Encoding to decode with when accessing r.text. + self.encoding = None + + #: A list of :class:`Response ` objects from + #: the history of the Request. Any redirect responses will end + #: up here. The list is sorted from the oldest to the most recent request. + self.history = [] + + #: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK". + self.reason = None + + #: A CookieJar of Cookies the server sent back. + self.cookies = cookiejar_from_dict({}) + + #: The amount of time elapsed between sending the request + #: and the arrival of the response (as a timedelta). + #: This property specifically measures the time taken between sending + #: the first byte of the request and finishing parsing the headers. It + #: is therefore unaffected by consuming the response content or the + #: value of the ``stream`` keyword argument. 
+ self.elapsed = datetime.timedelta(0) + + #: The :class:`PreparedRequest ` object to which this + #: is a response. + self.request = None + + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + + def __getstate__(self): + # Consume everything; accessing the content attribute makes + # sure the content has been fully read. + if not self._content_consumed: + self.content + + return {attr: getattr(self, attr, None) for attr in self.__attrs__} + + def __setstate__(self, state): + for name, value in state.items(): + setattr(self, name, value) + + # pickled objects do not have .raw + setattr(self, '_content_consumed', True) + setattr(self, 'raw', None) + + def __repr__(self): + return '' % (self.status_code) + + def __bool__(self): + """Returns True if :attr:`status_code` is less than 400. + + This attribute checks if the status code of the response is between + 400 and 600 to see if there was a client error or a server error. If + the status code, is between 200 and 400, this will return True. This + is **not** a check to see if the response code is ``200 OK``. + """ + return self.ok + + def __nonzero__(self): + """Returns True if :attr:`status_code` is less than 400. + + This attribute checks if the status code of the response is between + 400 and 600 to see if there was a client error or a server error. If + the status code, is between 200 and 400, this will return True. This + is **not** a check to see if the response code is ``200 OK``. + """ + return self.ok + + def __iter__(self): + """Allows you to use a response as an iterator.""" + return self.iter_content(128) + + @property + def ok(self): + """Returns True if :attr:`status_code` is less than 400, False if not. + + This attribute checks if the status code of the response is between + 400 and 600 to see if there was a client error or a server error. If + the status code is between 200 and 400, this will return True. This + is **not** a check to see if the response code is ``200 OK``. + """ + try: + self.raise_for_status() + except HTTPError: + return False + return True + + @property + def is_redirect(self): + """True if this Response is a well-formed HTTP redirect that could have + been processed automatically (by :meth:`Session.resolve_redirects`). + """ + return ('location' in self.headers and self.status_code in REDIRECT_STATI) + + @property + def is_permanent_redirect(self): + """True if this Response one of the permanent versions of redirect.""" + return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect)) + + @property + def next(self): + """Returns a PreparedRequest for the next request in a redirect chain, if there is one.""" + return self._next + + @property + def apparent_encoding(self): + """The apparent encoding, provided by the charset_normalizer or chardet libraries.""" + return chardet.detect(self.content)['encoding'] + + def iter_content(self, chunk_size=1, decode_unicode=False): + """Iterates over the response data. When stream=True is set on the + request, this avoids reading the content at once into memory for + large responses. The chunk size is the number of bytes it should + read into memory. This is not necessarily the length of each item + returned as decoding can take place. + + chunk_size must be of type int or None. A value of None will + function differently depending on the value of `stream`. + stream=True will read data as it arrives in whatever size the + chunks are received. 
If stream=False, data is returned as + a single chunk. + + If decode_unicode is True, content will be decoded using the best + available encoding based on the response. + """ + + def generate(): + # Special case for urllib3. + if hasattr(self.raw, 'stream'): + try: + for chunk in self.raw.stream(chunk_size, decode_content=True): + yield chunk + except ProtocolError as e: + raise ChunkedEncodingError(e) + except DecodeError as e: + raise ContentDecodingError(e) + except ReadTimeoutError as e: + raise ConnectionError(e) + else: + # Standard file-like object. + while True: + chunk = self.raw.read(chunk_size) + if not chunk: + break + yield chunk + + self._content_consumed = True + + if self._content_consumed and isinstance(self._content, bool): + raise StreamConsumedError() + elif chunk_size is not None and not isinstance(chunk_size, int): + raise TypeError("chunk_size must be an int, it is instead a %s." % type(chunk_size)) + # simulate reading small chunks of the content + reused_chunks = iter_slices(self._content, chunk_size) + + stream_chunks = generate() + + chunks = reused_chunks if self._content_consumed else stream_chunks + + if decode_unicode: + chunks = stream_decode_response_unicode(chunks, self) + + return chunks + + def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter=None): + """Iterates over the response data, one line at a time. When + stream=True is set on the request, this avoids reading the + content at once into memory for large responses. + + .. note:: This method is not reentrant safe. + """ + + pending = None + + for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode): + + if pending is not None: + chunk = pending + chunk + + if delimiter: + lines = chunk.split(delimiter) + else: + lines = chunk.splitlines() + + if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]: + pending = lines.pop() + else: + pending = None + + for line in lines: + yield line + + if pending is not None: + yield pending + + @property + def content(self): + """Content of the response, in bytes.""" + + if self._content is False: + # Read the contents. + if self._content_consumed: + raise RuntimeError( + 'The content for this response was already consumed') + + if self.status_code == 0 or self.raw is None: + self._content = None + else: + self._content = b''.join(self.iter_content(CONTENT_CHUNK_SIZE)) or b'' + + self._content_consumed = True + # don't need to release the connection; that's been handled by urllib3 + # since we exhausted the data. + return self._content + + @property + def text(self): + """Content of the response, in unicode. + + If Response.encoding is None, encoding will be guessed using + ``charset_normalizer`` or ``chardet``. + + The encoding of the response content is determined based solely on HTTP + headers, following RFC 2616 to the letter. If you can take advantage of + non-HTTP knowledge to make a better guess at the encoding, you should + set ``r.encoding`` appropriately before accessing this property. + """ + + # Try charset from content-type + content = None + encoding = self.encoding + + if not self.content: + return str('') + + # Fallback to auto-detected encoding. + if self.encoding is None: + encoding = self.apparent_encoding + + # Decode unicode from given encoding. + try: + content = str(self.content, encoding, errors='replace') + except (LookupError, TypeError): + # A LookupError is raised if the encoding was not found which could + # indicate a misspelling or similar mistake. 
+ # + # A TypeError can be raised if encoding is None + # + # So we try blindly encoding. + content = str(self.content, errors='replace') + + return content + + def json(self, **kwargs): + r"""Returns the json-encoded content of a response, if any. + + :param \*\*kwargs: Optional arguments that ``json.loads`` takes. + :raises simplejson.JSONDecodeError: If the response body does not + contain valid json and simplejson is installed. + :raises json.JSONDecodeError: If the response body does not contain + valid json and simplejson is not installed on Python 3. + :raises ValueError: If the response body does not contain valid + json and simplejson is not installed on Python 2. + """ + + if not self.encoding and self.content and len(self.content) > 3: + # No encoding set. JSON RFC 4627 section 3 states we should expect + # UTF-8, -16 or -32. Detect which one to use; If the detection or + # decoding fails, fall back to `self.text` (using charset_normalizer to make + # a best guess). + encoding = guess_json_utf(self.content) + if encoding is not None: + try: + return complexjson.loads( + self.content.decode(encoding), **kwargs + ) + except UnicodeDecodeError: + # Wrong UTF codec detected; usually because it's not UTF-8 + # but some other 8-bit codec. This is an RFC violation, + # and the server didn't bother to tell us what codec *was* + # used. + pass + return complexjson.loads(self.text, **kwargs) + + @property + def links(self): + """Returns the parsed header links of the response, if any.""" + + header = self.headers.get('link') + + # l = MultiDict() + l = {} + + if header: + links = parse_header_links(header) + + for link in links: + key = link.get('rel') or link.get('url') + l[key] = link + + return l + + def raise_for_status(self): + """Raises :class:`HTTPError`, if one occurred.""" + + http_error_msg = '' + if isinstance(self.reason, bytes): + # We attempt to decode utf-8 first because some servers + # choose to localize their reason strings. If the string + # isn't utf-8, we fall back to iso-8859-1 for all other + # encodings. (See PR #3538) + try: + reason = self.reason.decode('utf-8') + except UnicodeDecodeError: + reason = self.reason.decode('iso-8859-1') + else: + reason = self.reason + + if 400 <= self.status_code < 500: + http_error_msg = u'%s Client Error: %s for url: %s' % (self.status_code, reason, self.url) + + elif 500 <= self.status_code < 600: + http_error_msg = u'%s Server Error: %s for url: %s' % (self.status_code, reason, self.url) + + if http_error_msg: + raise HTTPError(http_error_msg, response=self) + + def close(self): + """Releases the connection back to the pool. Once this method has been + called the underlying ``raw`` object must not be accessed again. + + *Note: Should not normally need to be called explicitly.* + """ + if not self._content_consumed: + self.raw.close() + + release_conn = getattr(self.raw, 'release_conn', None) + if release_conn is not None: + release_conn() diff --git a/src/snowflake/connector/vendored/requests/sessions.py b/src/snowflake/connector/vendored/requests/sessions.py new file mode 100644 index 000000000..ae4bcc8e7 --- /dev/null +++ b/src/snowflake/connector/vendored/requests/sessions.py @@ -0,0 +1,781 @@ +# -*- coding: utf-8 -*- + +""" +requests.sessions +~~~~~~~~~~~~~~~~~ + +This module provides a Session object to manage and persist settings across +requests (cookies, auth, proxies). 
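+
+Usage (illustrative)::
+
+    >>> import requests
+    >>> s = requests.Session()
+    >>> s.get('https://httpbin.org/get')
+    <Response [200]>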
+""" +import os +import sys +import time +from datetime import timedelta +from collections import OrderedDict + +from .auth import _basic_auth_str +from .compat import cookielib, is_py3, urljoin, urlparse, Mapping +from .cookies import ( + cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies) +from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT +from .hooks import default_hooks, dispatch_hook +from ._internal_utils import to_native_string +from .utils import to_key_val_list, default_headers, DEFAULT_PORTS +from .exceptions import ( + TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError) + +from .structures import CaseInsensitiveDict +from .adapters import HTTPAdapter + +from .utils import ( + requote_uri, get_environ_proxies, get_netrc_auth, should_bypass_proxies, + get_auth_from_url, rewind_body +) + +from .status_codes import codes + +# formerly defined here, reexposed here for backward compatibility +from .models import REDIRECT_STATI + +# Preferred clock, based on which one is more accurate on a given system. +if sys.platform == 'win32': + try: # Python 3.4+ + preferred_clock = time.perf_counter + except AttributeError: # Earlier than Python 3. + preferred_clock = time.clock +else: + preferred_clock = time.time + + +def merge_setting(request_setting, session_setting, dict_class=OrderedDict): + """Determines appropriate setting for a given request, taking into account + the explicit setting on that request, and the setting in the session. If a + setting is a dictionary, they will be merged together using `dict_class` + """ + + if session_setting is None: + return request_setting + + if request_setting is None: + return session_setting + + # Bypass if not a dictionary (e.g. verify) + if not ( + isinstance(session_setting, Mapping) and + isinstance(request_setting, Mapping) + ): + return request_setting + + merged_setting = dict_class(to_key_val_list(session_setting)) + merged_setting.update(to_key_val_list(request_setting)) + + # Remove keys that are set to None. Extract keys first to avoid altering + # the dictionary during iteration. + none_keys = [k for (k, v) in merged_setting.items() if v is None] + for key in none_keys: + del merged_setting[key] + + return merged_setting + + +def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict): + """Properly merges both requests and session hooks. + + This is necessary because when request_hooks == {'response': []}, the + merge breaks Session hooks entirely. + """ + if session_hooks is None or session_hooks.get('response') == []: + return request_hooks + + if request_hooks is None or request_hooks.get('response') == []: + return session_hooks + + return merge_setting(request_hooks, session_hooks, dict_class) + + +class SessionRedirectMixin(object): + + def get_redirect_target(self, resp): + """Receives a Response. Returns a redirect URI or ``None``""" + # Due to the nature of how requests processes redirects this method will + # be called at least once upon the original response and at least twice + # on each subsequent redirect response (if any). + # If a custom mixin is used to handle this logic, it may be advantageous + # to cache the redirect location onto the response object as a private + # attribute. + if resp.is_redirect: + location = resp.headers['location'] + # Currently the underlying http module on py3 decode headers + # in latin1, but empirical evidence suggests that latin1 is very + # rarely used with non-ASCII characters in HTTP headers. 
+ # It is more likely to get UTF8 header rather than latin1. + # This causes incorrect handling of UTF8 encoded location headers. + # To solve this, we re-encode the location in latin1. + if is_py3: + location = location.encode('latin1') + return to_native_string(location, 'utf8') + return None + + def should_strip_auth(self, old_url, new_url): + """Decide whether Authorization header should be removed when redirecting""" + old_parsed = urlparse(old_url) + new_parsed = urlparse(new_url) + if old_parsed.hostname != new_parsed.hostname: + return True + # Special case: allow http -> https redirect when using the standard + # ports. This isn't specified by RFC 7235, but is kept to avoid + # breaking backwards compatibility with older versions of requests + # that allowed any redirects on the same host. + if (old_parsed.scheme == 'http' and old_parsed.port in (80, None) + and new_parsed.scheme == 'https' and new_parsed.port in (443, None)): + return False + + # Handle default port usage corresponding to scheme. + changed_port = old_parsed.port != new_parsed.port + changed_scheme = old_parsed.scheme != new_parsed.scheme + default_port = (DEFAULT_PORTS.get(old_parsed.scheme, None), None) + if (not changed_scheme and old_parsed.port in default_port + and new_parsed.port in default_port): + return False + + # Standard case: root URI must match + return changed_port or changed_scheme + + def resolve_redirects(self, resp, req, stream=False, timeout=None, + verify=True, cert=None, proxies=None, yield_requests=False, **adapter_kwargs): + """Receives a Response. Returns a generator of Responses or Requests.""" + + hist = [] # keep track of history + + url = self.get_redirect_target(resp) + previous_fragment = urlparse(req.url).fragment + while url: + prepared_request = req.copy() + + # Update history and keep track of redirects. + # resp.history must ignore the original request in this loop + hist.append(resp) + resp.history = hist[1:] + + try: + resp.content # Consume socket so it can be released + except (ChunkedEncodingError, ContentDecodingError, RuntimeError): + resp.raw.read(decode_content=False) + + if len(resp.history) >= self.max_redirects: + raise TooManyRedirects('Exceeded {} redirects.'.format(self.max_redirects), response=resp) + + # Release the connection back into the pool. + resp.close() + + # Handle redirection without scheme (see: RFC 1808 Section 4) + if url.startswith('//'): + parsed_rurl = urlparse(resp.url) + url = ':'.join([to_native_string(parsed_rurl.scheme), url]) + + # Normalize url case and attach previous fragment if needed (RFC 7231 7.1.2) + parsed = urlparse(url) + if parsed.fragment == '' and previous_fragment: + parsed = parsed._replace(fragment=previous_fragment) + elif parsed.fragment: + previous_fragment = parsed.fragment + url = parsed.geturl() + + # Facilitate relative 'location' headers, as allowed by RFC 7231. + # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource') + # Compliant with RFC3986, we percent encode the url. 
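The auth-stripping rules above can be exercised directly on the mixin (a sketch; the hostnames are hypothetical):

```
from snowflake.connector.vendored.requests.sessions import SessionRedirectMixin

m = SessionRedirectMixin()
# http -> https on the standard ports keeps the Authorization header:
print(m.should_strip_auth('http://example.com/', 'https://example.com/'))       # False
# A change of host always strips it:
print(m.should_strip_auth('https://example.com/', 'https://other.example/'))    # True
# An explicit default port compares equal to the implied one:
print(m.should_strip_auth('https://example.com:443/', 'https://example.com/'))  # False
```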
+ if not parsed.netloc: + url = urljoin(resp.url, requote_uri(url)) + else: + url = requote_uri(url) + + prepared_request.url = to_native_string(url) + + self.rebuild_method(prepared_request, resp) + + # https://github.com/psf/requests/issues/1084 + if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect): + # https://github.com/psf/requests/issues/3490 + purged_headers = ('Content-Length', 'Content-Type', 'Transfer-Encoding') + for header in purged_headers: + prepared_request.headers.pop(header, None) + prepared_request.body = None + + headers = prepared_request.headers + headers.pop('Cookie', None) + + # Extract any cookies sent on the response to the cookiejar + # in the new request. Because we've mutated our copied prepared + # request, use the old one that we haven't yet touched. + extract_cookies_to_jar(prepared_request._cookies, req, resp.raw) + merge_cookies(prepared_request._cookies, self.cookies) + prepared_request.prepare_cookies(prepared_request._cookies) + + # Rebuild auth and proxy information. + proxies = self.rebuild_proxies(prepared_request, proxies) + self.rebuild_auth(prepared_request, resp) + + # A failed tell() sets `_body_position` to `object()`. This non-None + # value ensures `rewindable` will be True, allowing us to raise an + # UnrewindableBodyError, instead of hanging the connection. + rewindable = ( + prepared_request._body_position is not None and + ('Content-Length' in headers or 'Transfer-Encoding' in headers) + ) + + # Attempt to rewind consumed file-like object. + if rewindable: + rewind_body(prepared_request) + + # Override the original request. + req = prepared_request + + if yield_requests: + yield req + else: + + resp = self.send( + req, + stream=stream, + timeout=timeout, + verify=verify, + cert=cert, + proxies=proxies, + allow_redirects=False, + **adapter_kwargs + ) + + extract_cookies_to_jar(self.cookies, prepared_request, resp.raw) + + # extract redirect url, if any, for the next loop + url = self.get_redirect_target(resp) + yield resp + + def rebuild_auth(self, prepared_request, response): + """When being redirected we may want to strip authentication from the + request to avoid leaking credentials. This method intelligently removes + and reapplies authentication where possible to avoid credential loss. + """ + headers = prepared_request.headers + url = prepared_request.url + + if 'Authorization' in headers and self.should_strip_auth(response.request.url, url): + # If we get redirected to a new host, we should strip out any + # authentication headers. + del headers['Authorization'] + + # .netrc might have more auth for us on our new host. + new_auth = get_netrc_auth(url) if self.trust_env else None + if new_auth is not None: + prepared_request.prepare_auth(new_auth) + + + def rebuild_proxies(self, prepared_request, proxies): + """This method re-evaluates the proxy configuration by considering the + environment variables. If we are redirected to a URL covered by + NO_PROXY, we strip the proxy configuration. Otherwise, we set missing + proxy keys for this URL (in case they were stripped by a previous + redirect). + + This method also replaces the Proxy-Authorization header where + necessary. 
+
+ :rtype: dict
+ """
+ proxies = proxies if proxies is not None else {}
+ headers = prepared_request.headers
+ url = prepared_request.url
+ scheme = urlparse(url).scheme
+ new_proxies = proxies.copy()
+ no_proxy = proxies.get('no_proxy')
+
+ bypass_proxy = should_bypass_proxies(url, no_proxy=no_proxy)
+ if self.trust_env and not bypass_proxy:
+ environ_proxies = get_environ_proxies(url, no_proxy=no_proxy)
+
+ proxy = environ_proxies.get(scheme, environ_proxies.get('all'))
+
+ if proxy:
+ new_proxies.setdefault(scheme, proxy)
+
+ if 'Proxy-Authorization' in headers:
+ del headers['Proxy-Authorization']
+
+ try:
+ username, password = get_auth_from_url(new_proxies[scheme])
+ except KeyError:
+ username, password = None, None
+
+ if username and password:
+ headers['Proxy-Authorization'] = _basic_auth_str(username, password)
+
+ return new_proxies
+
+ def rebuild_method(self, prepared_request, response):
+ """When being redirected we may want to change the method of the request
+ based on certain specs or browser behavior.
+ """
+ method = prepared_request.method
+
+ # https://tools.ietf.org/html/rfc7231#section-6.4.4
+ if response.status_code == codes.see_other and method != 'HEAD':
+ method = 'GET'
+
+ # Do what the browsers do, despite standards...
+ # First, turn 302s into GETs.
+ if response.status_code == codes.found and method != 'HEAD':
+ method = 'GET'
+
+ # Second, if a POST is responded to with a 301, turn it into a GET.
+ # This bizarre behaviour is explained in Issue 1704.
+ if response.status_code == codes.moved and method == 'POST':
+ method = 'GET'
+
+ prepared_request.method = method
+
+
+class Session(SessionRedirectMixin):
+ """A Requests session.
+
+ Provides cookie persistence, connection-pooling, and configuration.
+
+ Basic Usage::
+
+ >>> import requests
+ >>> s = requests.Session()
+ >>> s.get('https://httpbin.org/get')
+ <Response [200]>
+
+ Or as a context manager::
+
+ >>> with requests.Session() as s:
+ ... s.get('https://httpbin.org/get')
+ <Response [200]>
+ """
+
+ __attrs__ = [
+ 'headers', 'cookies', 'auth', 'proxies', 'hooks', 'params', 'verify',
+ 'cert', 'adapters', 'stream', 'trust_env',
+ 'max_redirects',
+ ]
+
+ def __init__(self):
+
+ #: A case-insensitive dictionary of headers to be sent on each
+ #: :class:`Request <Request>` sent from this
+ #: :class:`Session <Session>`.
+ self.headers = default_headers()
+
+ #: Default Authentication tuple or object to attach to
+ #: :class:`Request <Request>`.
+ self.auth = None
+
+ #: Dictionary mapping protocol or protocol and host to the URL of the proxy
+ #: (e.g. {'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) to
+ #: be used on each :class:`Request <Request>`.
+ self.proxies = {}
+
+ #: Event-handling hooks.
+ self.hooks = default_hooks()
+
+ #: Dictionary of querystring data to attach to each
+ #: :class:`Request <Request>`. The dictionary values may be lists for
+ #: representing multivalued query parameters.
+ self.params = {}
+
+ #: Stream response content default.
+ self.stream = False
+
+ #: SSL Verification default.
+ #: Defaults to `True`, requiring requests to verify the TLS certificate at the
+ #: remote end.
+ #: If verify is set to `False`, requests will accept any TLS certificate
+ #: presented by the server, and will ignore hostname mismatches and/or
+ #: expired certificates, which will make your application vulnerable to
+ #: man-in-the-middle (MitM) attacks.
+ #: Only set this to `False` for testing.
+ self.verify = True
+
+ #: SSL client certificate default, if String, path to ssl client
+ #: cert file (.pem). If Tuple, ('cert', 'key') pair.
+ self.cert = None + + #: Maximum number of redirects allowed. If the request exceeds this + #: limit, a :class:`TooManyRedirects` exception is raised. + #: This defaults to requests.models.DEFAULT_REDIRECT_LIMIT, which is + #: 30. + self.max_redirects = DEFAULT_REDIRECT_LIMIT + + #: Trust environment settings for proxy configuration, default + #: authentication and similar. + self.trust_env = True + + #: A CookieJar containing all currently outstanding cookies set on this + #: session. By default it is a + #: :class:`RequestsCookieJar `, but + #: may be any other ``cookielib.CookieJar`` compatible object. + self.cookies = cookiejar_from_dict({}) + + # Default connection adapters. + self.adapters = OrderedDict() + self.mount('https://', HTTPAdapter()) + self.mount('http://', HTTPAdapter()) + + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + + def prepare_request(self, request): + """Constructs a :class:`PreparedRequest ` for + transmission and returns it. The :class:`PreparedRequest` has settings + merged from the :class:`Request ` instance and those of the + :class:`Session`. + + :param request: :class:`Request` instance to prepare with this + session's settings. + :rtype: requests.PreparedRequest + """ + cookies = request.cookies or {} + + # Bootstrap CookieJar. + if not isinstance(cookies, cookielib.CookieJar): + cookies = cookiejar_from_dict(cookies) + + # Merge with session cookies + merged_cookies = merge_cookies( + merge_cookies(RequestsCookieJar(), self.cookies), cookies) + + # Set environment's basic authentication if not explicitly set. + auth = request.auth + if self.trust_env and not auth and not self.auth: + auth = get_netrc_auth(request.url) + + p = PreparedRequest() + p.prepare( + method=request.method.upper(), + url=request.url, + files=request.files, + data=request.data, + json=request.json, + headers=merge_setting(request.headers, self.headers, dict_class=CaseInsensitiveDict), + params=merge_setting(request.params, self.params), + auth=merge_setting(auth, self.auth), + cookies=merged_cookies, + hooks=merge_hooks(request.hooks, self.hooks), + ) + return p + + def request(self, method, url, + params=None, data=None, headers=None, cookies=None, files=None, + auth=None, timeout=None, allow_redirects=True, proxies=None, + hooks=None, stream=None, verify=None, cert=None, json=None): + """Constructs a :class:`Request `, prepares it and sends it. + Returns :class:`Response ` object. + + :param method: method for the new :class:`Request` object. + :param url: URL for the new :class:`Request` object. + :param params: (optional) Dictionary or bytes to be sent in the query + string for the :class:`Request`. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) json to send in the body of the + :class:`Request`. + :param headers: (optional) Dictionary of HTTP Headers to send with the + :class:`Request`. + :param cookies: (optional) Dict or CookieJar object to send with the + :class:`Request`. + :param files: (optional) Dictionary of ``'filename': file-like-objects`` + for multipart encoding upload. + :param auth: (optional) Auth tuple or callable to enable + Basic/Digest/Custom HTTP Auth. + :param timeout: (optional) How long to wait for the server to send + data before giving up, as a float, or a :ref:`(connect timeout, + read timeout) ` tuple. + :type timeout: float or tuple + :param allow_redirects: (optional) Set to True by default. 
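A sketch of the session/request merging that `prepare_request` performs (import paths per this diff; preparing a request involves no network traffic, and the header values are hypothetical):

```
from snowflake.connector.vendored.requests.models import Request
from snowflake.connector.vendored.requests.sessions import Session

s = Session()
s.headers['X-App'] = 'demo'  # hypothetical session-level default
req = Request(method='get', url='https://example.com/',
              headers={'Accept': 'application/json'})
prep = s.prepare_request(req)
print(prep.method)             # 'GET' -- upper-cased during preparation
print(prep.headers['x-app'])   # 'demo' -- merged in, looked up case-insensitively
print(prep.headers['Accept'])  # 'application/json' -- the request-level value wins
```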
+ :type allow_redirects: bool + :param proxies: (optional) Dictionary mapping protocol or protocol and + hostname to the URL of the proxy. + :param stream: (optional) whether to immediately download the response + content. Defaults to ``False``. + :param verify: (optional) Either a boolean, in which case it controls whether we verify + the server's TLS certificate, or a string, in which case it must be a path + to a CA bundle to use. Defaults to ``True``. When set to + ``False``, requests will accept any TLS certificate presented by + the server, and will ignore hostname mismatches and/or expired + certificates, which will make your application vulnerable to + man-in-the-middle (MitM) attacks. Setting verify to ``False`` + may be useful during local development or testing. + :param cert: (optional) if String, path to ssl client cert file (.pem). + If Tuple, ('cert', 'key') pair. + :rtype: requests.Response + """ + # Create the Request. + req = Request( + method=method.upper(), + url=url, + headers=headers, + files=files, + data=data or {}, + json=json, + params=params or {}, + auth=auth, + cookies=cookies, + hooks=hooks, + ) + prep = self.prepare_request(req) + + proxies = proxies or {} + + settings = self.merge_environment_settings( + prep.url, proxies, stream, verify, cert + ) + + # Send the request. + send_kwargs = { + 'timeout': timeout, + 'allow_redirects': allow_redirects, + } + send_kwargs.update(settings) + resp = self.send(prep, **send_kwargs) + + return resp + + def get(self, url, **kwargs): + r"""Sends a GET request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + kwargs.setdefault('allow_redirects', True) + return self.request('GET', url, **kwargs) + + def options(self, url, **kwargs): + r"""Sends a OPTIONS request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + kwargs.setdefault('allow_redirects', True) + return self.request('OPTIONS', url, **kwargs) + + def head(self, url, **kwargs): + r"""Sends a HEAD request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + kwargs.setdefault('allow_redirects', False) + return self.request('HEAD', url, **kwargs) + + def post(self, url, data=None, json=None, **kwargs): + r"""Sends a POST request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param json: (optional) json to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + return self.request('POST', url, data=data, json=json, **kwargs) + + def put(self, url, data=None, **kwargs): + r"""Sends a PUT request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. 
+ :rtype: requests.Response + """ + + return self.request('PUT', url, data=data, **kwargs) + + def patch(self, url, data=None, **kwargs): + r"""Sends a PATCH request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param data: (optional) Dictionary, list of tuples, bytes, or file-like + object to send in the body of the :class:`Request`. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + return self.request('PATCH', url, data=data, **kwargs) + + def delete(self, url, **kwargs): + r"""Sends a DELETE request. Returns :class:`Response` object. + + :param url: URL for the new :class:`Request` object. + :param \*\*kwargs: Optional arguments that ``request`` takes. + :rtype: requests.Response + """ + + return self.request('DELETE', url, **kwargs) + + def send(self, request, **kwargs): + """Send a given PreparedRequest. + + :rtype: requests.Response + """ + # Set defaults that the hooks can utilize to ensure they always have + # the correct parameters to reproduce the previous request. + kwargs.setdefault('stream', self.stream) + kwargs.setdefault('verify', self.verify) + kwargs.setdefault('cert', self.cert) + kwargs.setdefault('proxies', self.rebuild_proxies(request, self.proxies)) + + # It's possible that users might accidentally send a Request object. + # Guard against that specific failure case. + if isinstance(request, Request): + raise ValueError('You can only send PreparedRequests.') + + # Set up variables needed for resolve_redirects and dispatching of hooks + allow_redirects = kwargs.pop('allow_redirects', True) + stream = kwargs.get('stream') + hooks = request.hooks + + # Get the appropriate adapter to use + adapter = self.get_adapter(url=request.url) + + # Start time (approximately) of the request + start = preferred_clock() + + # Send the request + r = adapter.send(request, **kwargs) + + # Total elapsed time of the request (approximately) + elapsed = preferred_clock() - start + r.elapsed = timedelta(seconds=elapsed) + + # Response manipulation hooks + r = dispatch_hook('response', hooks, r, **kwargs) + + # Persist cookies + if r.history: + + # If the hooks create history then we want those cookies too + for resp in r.history: + extract_cookies_to_jar(self.cookies, resp.request, resp.raw) + + extract_cookies_to_jar(self.cookies, request, r.raw) + + # Resolve redirects if allowed. + if allow_redirects: + # Redirect resolving generator. + gen = self.resolve_redirects(r, request, **kwargs) + history = [resp for resp in gen] + else: + history = [] + + # Shuffle things around if there's history. + if history: + # Insert the first (original) request at the start + history.insert(0, r) + # Get the last request made + r = history.pop() + r.history = history + + # If redirects aren't being followed, store the response on the Request for Response.next(). + if not allow_redirects: + try: + r._next = next(self.resolve_redirects(r, request, yield_requests=True, **kwargs)) + except StopIteration: + pass + + if not stream: + r.content + + return r + + def merge_environment_settings(self, url, proxies, stream, verify, cert): + """ + Check the environment and merge it with some settings. + + :rtype: dict + """ + # Gather clues from the surrounding environment. + if self.trust_env: + # Set environment's proxies. 
+ no_proxy = proxies.get('no_proxy') if proxies is not None else None + env_proxies = get_environ_proxies(url, no_proxy=no_proxy) + for (k, v) in env_proxies.items(): + proxies.setdefault(k, v) + + # Look for requests environment configuration and be compatible + # with cURL. + if verify is True or verify is None: + verify = (os.environ.get('REQUESTS_CA_BUNDLE') or + os.environ.get('CURL_CA_BUNDLE')) + + # Merge all the kwargs. + proxies = merge_setting(proxies, self.proxies) + stream = merge_setting(stream, self.stream) + verify = merge_setting(verify, self.verify) + cert = merge_setting(cert, self.cert) + + return {'verify': verify, 'proxies': proxies, 'stream': stream, + 'cert': cert} + + def get_adapter(self, url): + """ + Returns the appropriate connection adapter for the given URL. + + :rtype: requests.adapters.BaseAdapter + """ + for (prefix, adapter) in self.adapters.items(): + + if url.lower().startswith(prefix.lower()): + return adapter + + # Nothing matches :-/ + raise InvalidSchema("No connection adapters were found for {!r}".format(url)) + + def close(self): + """Closes all adapters and as such the session""" + for v in self.adapters.values(): + v.close() + + def mount(self, prefix, adapter): + """Registers a connection adapter to a prefix. + + Adapters are sorted in descending order by prefix length. + """ + self.adapters[prefix] = adapter + keys_to_move = [k for k in self.adapters if len(k) < len(prefix)] + + for key in keys_to_move: + self.adapters[key] = self.adapters.pop(key) + + def __getstate__(self): + state = {attr: getattr(self, attr, None) for attr in self.__attrs__} + return state + + def __setstate__(self, state): + for attr, value in state.items(): + setattr(self, attr, value) + + +def session(): + """ + Returns a :class:`Session` for context-management. + + .. deprecated:: 1.0.0 + + This method has been deprecated since version 1.0.0 and is only kept for + backwards compatibility. New code should use :class:`~requests.sessions.Session` + to create a session. This may be removed at a future date. + + :rtype: Session + """ + return Session() diff --git a/src/snowflake/connector/vendored/requests/status_codes.py b/src/snowflake/connector/vendored/requests/status_codes.py new file mode 100644 index 000000000..d80a7cd4d --- /dev/null +++ b/src/snowflake/connector/vendored/requests/status_codes.py @@ -0,0 +1,123 @@ +# -*- coding: utf-8 -*- + +r""" +The ``codes`` object defines a mapping from common names for HTTP statuses +to their numerical codes, accessible either as attributes or as dictionary +items. + +Example:: + + >>> import requests + >>> requests.codes['temporary_redirect'] + 307 + >>> requests.codes.teapot + 418 + >>> requests.codes['\o/'] + 200 + +Some codes have multiple names, and both upper- and lower-case versions of +the names are allowed. For example, ``codes.ok``, ``codes.OK``, and +``codes.okay`` all correspond to the HTTP status code 200. +""" + +from .structures import LookupDict + +_codes = { + + # Informational. 
+ 100: ('continue',), + 101: ('switching_protocols',), + 102: ('processing',), + 103: ('checkpoint',), + 122: ('uri_too_long', 'request_uri_too_long'), + 200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\o/', '✓'), + 201: ('created',), + 202: ('accepted',), + 203: ('non_authoritative_info', 'non_authoritative_information'), + 204: ('no_content',), + 205: ('reset_content', 'reset'), + 206: ('partial_content', 'partial'), + 207: ('multi_status', 'multiple_status', 'multi_stati', 'multiple_stati'), + 208: ('already_reported',), + 226: ('im_used',), + + # Redirection. + 300: ('multiple_choices',), + 301: ('moved_permanently', 'moved', '\\o-'), + 302: ('found',), + 303: ('see_other', 'other'), + 304: ('not_modified',), + 305: ('use_proxy',), + 306: ('switch_proxy',), + 307: ('temporary_redirect', 'temporary_moved', 'temporary'), + 308: ('permanent_redirect', + 'resume_incomplete', 'resume',), # These 2 to be removed in 3.0 + + # Client Error. + 400: ('bad_request', 'bad'), + 401: ('unauthorized',), + 402: ('payment_required', 'payment'), + 403: ('forbidden',), + 404: ('not_found', '-o-'), + 405: ('method_not_allowed', 'not_allowed'), + 406: ('not_acceptable',), + 407: ('proxy_authentication_required', 'proxy_auth', 'proxy_authentication'), + 408: ('request_timeout', 'timeout'), + 409: ('conflict',), + 410: ('gone',), + 411: ('length_required',), + 412: ('precondition_failed', 'precondition'), + 413: ('request_entity_too_large',), + 414: ('request_uri_too_large',), + 415: ('unsupported_media_type', 'unsupported_media', 'media_type'), + 416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'), + 417: ('expectation_failed',), + 418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'), + 421: ('misdirected_request',), + 422: ('unprocessable_entity', 'unprocessable'), + 423: ('locked',), + 424: ('failed_dependency', 'dependency'), + 425: ('unordered_collection', 'unordered'), + 426: ('upgrade_required', 'upgrade'), + 428: ('precondition_required', 'precondition'), + 429: ('too_many_requests', 'too_many'), + 431: ('header_fields_too_large', 'fields_too_large'), + 444: ('no_response', 'none'), + 449: ('retry_with', 'retry'), + 450: ('blocked_by_windows_parental_controls', 'parental_controls'), + 451: ('unavailable_for_legal_reasons', 'legal_reasons'), + 499: ('client_closed_request',), + + # Server Error. 
+ 500: ('internal_server_error', 'server_error', '/o\\', '✗'), + 501: ('not_implemented',), + 502: ('bad_gateway',), + 503: ('service_unavailable', 'unavailable'), + 504: ('gateway_timeout',), + 505: ('http_version_not_supported', 'http_version'), + 506: ('variant_also_negotiates',), + 507: ('insufficient_storage',), + 509: ('bandwidth_limit_exceeded', 'bandwidth'), + 510: ('not_extended',), + 511: ('network_authentication_required', 'network_auth', 'network_authentication'), +} + +codes = LookupDict(name='status_codes') + +def _init(): + for code, titles in _codes.items(): + for title in titles: + setattr(codes, title, code) + if not title.startswith(('\\', '/')): + setattr(codes, title.upper(), code) + + def doc(code): + names = ', '.join('``%s``' % n for n in _codes[code]) + return '* %d: %s' % (code, names) + + global __doc__ + __doc__ = (__doc__ + '\n' + + '\n'.join(doc(code) for code in sorted(_codes)) + if __doc__ is not None else None) + +_init() diff --git a/src/snowflake/connector/vendored/requests/structures.py b/src/snowflake/connector/vendored/requests/structures.py new file mode 100644 index 000000000..8ee0ba7a0 --- /dev/null +++ b/src/snowflake/connector/vendored/requests/structures.py @@ -0,0 +1,105 @@ +# -*- coding: utf-8 -*- + +""" +requests.structures +~~~~~~~~~~~~~~~~~~~ + +Data structures that power Requests. +""" + +from collections import OrderedDict + +from .compat import Mapping, MutableMapping + + +class CaseInsensitiveDict(MutableMapping): + """A case-insensitive ``dict``-like object. + + Implements all methods and operations of + ``MutableMapping`` as well as dict's ``copy``. Also + provides ``lower_items``. + + All keys are expected to be strings. The structure remembers the + case of the last key to be set, and ``iter(instance)``, + ``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()`` + will contain case-sensitive keys. However, querying and contains + testing is case insensitive:: + + cid = CaseInsensitiveDict() + cid['Accept'] = 'application/json' + cid['aCCEPT'] == 'application/json' # True + list(cid) == ['Accept'] # True + + For example, ``headers['content-encoding']`` will return the + value of a ``'Content-Encoding'`` response header, regardless + of how the header name was originally stored. + + If the constructor, ``.update``, or equality comparison + operations are given keys that have equal ``.lower()``s, the + behavior is undefined. + """ + + def __init__(self, data=None, **kwargs): + self._store = OrderedDict() + if data is None: + data = {} + self.update(data, **kwargs) + + def __setitem__(self, key, value): + # Use the lowercased key for lookups, but store the actual + # key alongside the value. 
+ self._store[key.lower()] = (key, value)
+
+ def __getitem__(self, key):
+ return self._store[key.lower()][1]
+
+ def __delitem__(self, key):
+ del self._store[key.lower()]
+
+ def __iter__(self):
+ return (casedkey for casedkey, mappedvalue in self._store.values())
+
+ def __len__(self):
+ return len(self._store)
+
+ def lower_items(self):
+ """Like iteritems(), but with all lowercase keys."""
+ return (
+ (lowerkey, keyval[1])
+ for (lowerkey, keyval)
+ in self._store.items()
+ )
+
+ def __eq__(self, other):
+ if isinstance(other, Mapping):
+ other = CaseInsensitiveDict(other)
+ else:
+ return NotImplemented
+ # Compare insensitively
+ return dict(self.lower_items()) == dict(other.lower_items())
+
+ # Copy is required
+ def copy(self):
+ return CaseInsensitiveDict(self._store.values())
+
+ def __repr__(self):
+ return str(dict(self.items()))
+
+
+class LookupDict(dict):
+ """Dictionary lookup object."""
+
+ def __init__(self, name=None):
+ self.name = name
+ super(LookupDict, self).__init__()
+
+ def __repr__(self):
+ return '<lookup \'%s\'>' % (self.name)
+
+ def __getitem__(self, key):
+ # We allow fall-through here, so values default to None
+
+ return self.__dict__.get(key, None)
+
+ def get(self, key, default=None):
+ return self.__dict__.get(key, default)
diff --git a/src/snowflake/connector/vendored/requests/utils.py b/src/snowflake/connector/vendored/requests/utils.py
new file mode 100644
index 000000000..8e0306369
--- /dev/null
+++ b/src/snowflake/connector/vendored/requests/utils.py
@@ -0,0 +1,1013 @@
+# -*- coding: utf-8 -*-
+
+"""
+requests.utils
+~~~~~~~~~~~~~~
+
+This module provides utility functions that are used within Requests
+that are also useful for external consumption.
+"""
+
+import codecs
+import contextlib
+import io
+import os
+import re
+import socket
+import struct
+import sys
+import tempfile
+import warnings
+import zipfile
+from collections import OrderedDict
+from ..urllib3.util import make_headers
+
+from .__version__ import __version__
+from . import certs
+# to_native_string is unused here, but imported here for backwards compatibility
+from ._internal_utils import to_native_string
+from .compat import parse_http_list as _parse_list_header
+from .compat import (
+ quote, urlparse, bytes, str, unquote, getproxies,
+ proxy_bypass, urlunparse, basestring, integer_types, is_py3,
+ proxy_bypass_environment, getproxies_environment, Mapping)
+from .cookies import cookiejar_from_dict
+from .structures import CaseInsensitiveDict
+from .exceptions import (
+ InvalidURL, InvalidHeader, FileModeWarning, UnrewindableBodyError)
+
+NETRC_FILES = ('.netrc', '_netrc')
+
+DEFAULT_CA_BUNDLE_PATH = certs.where()
+
+DEFAULT_PORTS = {'http': 80, 'https': 443}
+
+# Ensure that ', ' is used to preserve previous delimiter behavior.
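The case-remembering behavior described in the `CaseInsensitiveDict` docstring above, as a short sketch (import path per this diff):

```
from snowflake.connector.vendored.requests.structures import CaseInsensitiveDict

cid = CaseInsensitiveDict()
cid['Content-Encoding'] = 'gzip'
print(cid['content-encoding'])  # 'gzip' -- lookups ignore case
print(list(cid))                # ['Content-Encoding'] -- the last-set casing is remembered
cid['CONTENT-ENCODING'] = 'br'
print(list(cid))                # ['CONTENT-ENCODING'] -- re-setting replaces the casing too
```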
+DEFAULT_ACCEPT_ENCODING = ", ".join(
+ re.split(r",\s*", make_headers(accept_encoding=True)["accept-encoding"])
+)
+
+
+if sys.platform == 'win32':
+ # provide a proxy_bypass version on Windows without DNS lookups
+
+ def proxy_bypass_registry(host):
+ try:
+ if is_py3:
+ import winreg
+ else:
+ import _winreg as winreg
+ except ImportError:
+ return False
+
+ try:
+ internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER,
+ r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
+ # ProxyEnable could be REG_SZ or REG_DWORD, normalizing it
+ proxyEnable = int(winreg.QueryValueEx(internetSettings,
+ 'ProxyEnable')[0])
+ # ProxyOverride is almost always a string
+ proxyOverride = winreg.QueryValueEx(internetSettings,
+ 'ProxyOverride')[0]
+ except OSError:
+ return False
+ if not proxyEnable or not proxyOverride:
+ return False
+
+ # make a check value list from the registry entry: replace the
+ # '<local>' string by the localhost entry and the corresponding
+ # canonical entry.
+ proxyOverride = proxyOverride.split(';')
+ # now check if we match one of the registry values.
+ for test in proxyOverride:
+ if test == '<local>':
+ if '.' not in host:
+ return True
+ test = test.replace(".", r"\.") # mask dots
+ test = test.replace("*", r".*") # change glob sequence
+ test = test.replace("?", r".") # change glob char
+ if re.match(test, host, re.I):
+ return True
+ return False
+
+ def proxy_bypass(host): # noqa
+ """Return True, if the host should be bypassed.
+
+ Checks proxy settings gathered from the environment, if specified,
+ or the registry.
+ """
+ if getproxies_environment():
+ return proxy_bypass_environment(host)
+ else:
+ return proxy_bypass_registry(host)
+
+
+def dict_to_sequence(d):
+ """Returns an internal sequence dictionary update."""
+
+ if hasattr(d, 'items'):
+ d = d.items()
+
+ return d
+
+
+def super_len(o):
+ total_length = None
+ current_position = 0
+
+ if hasattr(o, '__len__'):
+ total_length = len(o)
+
+ elif hasattr(o, 'len'):
+ total_length = o.len
+
+ elif hasattr(o, 'fileno'):
+ try:
+ fileno = o.fileno()
+ except io.UnsupportedOperation:
+ pass
+ else:
+ total_length = os.fstat(fileno).st_size
+
+ # Having used fstat to determine the file length, we need to
+ # confirm that this file was opened up in binary mode.
+ if 'b' not in o.mode:
+ warnings.warn((
+ "Requests has determined the content-length for this "
+ "request using the binary size of the file: however, the "
+ "file has been opened in text mode (i.e. without the 'b' "
+ "flag in the mode). This may lead to an incorrect "
+ "content-length. In Requests 3.0, support will be removed "
+ "for files in text mode."),
+ FileModeWarning
+ )
+
+ if hasattr(o, 'tell'):
+ try:
+ current_position = o.tell()
+ except (OSError, IOError):
+ # This can happen in some weird situations, such as when the file
+ # is actually a special file descriptor like stdin. In this
+ # instance, we don't know what the length is, so set it to zero and
+ # let requests chunk it instead.
+ if total_length is not None: + current_position = total_length + else: + if hasattr(o, 'seek') and total_length is None: + # StringIO and BytesIO have seek but no useable fileno + try: + # seek to end of file + o.seek(0, 2) + total_length = o.tell() + + # seek back to current position to support + # partially read file-like objects + o.seek(current_position or 0) + except (OSError, IOError): + total_length = 0 + + if total_length is None: + total_length = 0 + + return max(0, total_length - current_position) + + +def get_netrc_auth(url, raise_errors=False): + """Returns the Requests tuple auth for a given url from netrc.""" + + netrc_file = os.environ.get('NETRC') + if netrc_file is not None: + netrc_locations = (netrc_file,) + else: + netrc_locations = ('~/{}'.format(f) for f in NETRC_FILES) + + try: + from netrc import netrc, NetrcParseError + + netrc_path = None + + for f in netrc_locations: + try: + loc = os.path.expanduser(f) + except KeyError: + # os.path.expanduser can fail when $HOME is undefined and + # getpwuid fails. See https://bugs.python.org/issue20164 & + # https://github.com/psf/requests/issues/1846 + return + + if os.path.exists(loc): + netrc_path = loc + break + + # Abort early if there isn't one. + if netrc_path is None: + return + + ri = urlparse(url) + + # Strip port numbers from netloc. This weird `if...encode`` dance is + # used for Python 3.2, which doesn't support unicode literals. + splitstr = b':' + if isinstance(url, str): + splitstr = splitstr.decode('ascii') + host = ri.netloc.split(splitstr)[0] + + try: + _netrc = netrc(netrc_path).authenticators(host) + if _netrc: + # Return with login / password + login_i = (0 if _netrc[0] else 1) + return (_netrc[login_i], _netrc[2]) + except (NetrcParseError, IOError): + # If there was a parsing error or a permissions issue reading the file, + # we'll just skip netrc auth unless explicitly asked to raise errors. + if raise_errors: + raise + + # App Engine hackiness. + except (ImportError, AttributeError): + pass + + +def guess_filename(obj): + """Tries to guess the filename of the given object.""" + name = getattr(obj, 'name', None) + if (name and isinstance(name, basestring) and name[0] != '<' and + name[-1] != '>'): + return os.path.basename(name) + + +def extract_zipped_paths(path): + """Replace nonexistent paths that look like they refer to a member of a zip + archive with the location of an extracted copy of the target, or else + just return the provided path unchanged. 
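`super_len` above measures what is left to send for partially read file-like objects, which matters for Content-Length on rewound or retried uploads. A quick sketch:

```
import io
from snowflake.connector.vendored.requests.utils import super_len

buf = io.BytesIO(b'abcdefgh')
print(super_len(buf))  # 8 -- full length at position 0
buf.read(3)
print(super_len(buf))  # 5 -- only the bytes after the current position count
```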
+ """ + if os.path.exists(path): + # this is already a valid path, no need to do anything further + return path + + # find the first valid part of the provided path and treat that as a zip archive + # assume the rest of the path is the name of a member in the archive + archive, member = os.path.split(path) + while archive and not os.path.exists(archive): + archive, prefix = os.path.split(archive) + member = '/'.join([prefix, member]) + + if not zipfile.is_zipfile(archive): + return path + + zip_file = zipfile.ZipFile(archive) + if member not in zip_file.namelist(): + return path + + # we have a valid zip archive and a valid member of that archive + tmp = tempfile.gettempdir() + extracted_path = os.path.join(tmp, member.split('/')[-1]) + if not os.path.exists(extracted_path): + # use read + write to avoid the creating nested folders, we only want the file, avoids mkdir racing condition + with atomic_open(extracted_path) as file_handler: + file_handler.write(zip_file.read(member)) + return extracted_path + + +@contextlib.contextmanager +def atomic_open(filename): + """Write a file to the disk in an atomic fashion""" + replacer = os.rename if sys.version_info[0] == 2 else os.replace + tmp_descriptor, tmp_name = tempfile.mkstemp(dir=os.path.dirname(filename)) + try: + with os.fdopen(tmp_descriptor, 'wb') as tmp_handler: + yield tmp_handler + replacer(tmp_name, filename) + except BaseException: + os.remove(tmp_name) + raise + + +def from_key_val_list(value): + """Take an object and test to see if it can be represented as a + dictionary. Unless it can not be represented as such, return an + OrderedDict, e.g., + + :: + + >>> from_key_val_list([('key', 'val')]) + OrderedDict([('key', 'val')]) + >>> from_key_val_list('string') + Traceback (most recent call last): + ... + ValueError: cannot encode objects that are not 2-tuples + >>> from_key_val_list({'key': 'val'}) + OrderedDict([('key', 'val')]) + + :rtype: OrderedDict + """ + if value is None: + return None + + if isinstance(value, (str, bytes, bool, int)): + raise ValueError('cannot encode objects that are not 2-tuples') + + return OrderedDict(value) + + +def to_key_val_list(value): + """Take an object and test to see if it can be represented as a + dictionary. If it can be, return a list of tuples, e.g., + + :: + + >>> to_key_val_list([('key', 'val')]) + [('key', 'val')] + >>> to_key_val_list({'key': 'val'}) + [('key', 'val')] + >>> to_key_val_list('string') + Traceback (most recent call last): + ... + ValueError: cannot encode objects that are not 2-tuples + + :rtype: list + """ + if value is None: + return None + + if isinstance(value, (str, bytes, bool, int)): + raise ValueError('cannot encode objects that are not 2-tuples') + + if isinstance(value, Mapping): + value = value.items() + + return list(value) + + +# From mitsuhiko/werkzeug (used with permission). +def parse_list_header(value): + """Parse lists as described by RFC 2068 Section 2. + + In particular, parse comma-separated lists where the elements of + the list may include quoted-strings. A quoted-string could + contain a comma. A non-quoted string could have quotes in the + middle. Quotes are removed automatically after parsing. + + It basically works like :func:`parse_set_header` just that items + may appear multiple times and case sensitivity is preserved. + + The return value is a standard :class:`list`: + + >>> parse_list_header('token, "quoted value"') + ['token', 'quoted value'] + + To create a header from the :class:`list` again, use the + :func:`dump_header` function. 
+ + :param value: a string with a list header. + :return: :class:`list` + :rtype: list + """ + result = [] + for item in _parse_list_header(value): + if item[:1] == item[-1:] == '"': + item = unquote_header_value(item[1:-1]) + result.append(item) + return result + + +# From mitsuhiko/werkzeug (used with permission). +def parse_dict_header(value): + """Parse lists of key, value pairs as described by RFC 2068 Section 2 and + convert them into a python dict: + + >>> d = parse_dict_header('foo="is a fish", bar="as well"') + >>> type(d) is dict + True + >>> sorted(d.items()) + [('bar', 'as well'), ('foo', 'is a fish')] + + If there is no value for a key it will be `None`: + + >>> parse_dict_header('key_without_value') + {'key_without_value': None} + + To create a header from the :class:`dict` again, use the + :func:`dump_header` function. + + :param value: a string with a dict header. + :return: :class:`dict` + :rtype: dict + """ + result = {} + for item in _parse_list_header(value): + if '=' not in item: + result[item] = None + continue + name, value = item.split('=', 1) + if value[:1] == value[-1:] == '"': + value = unquote_header_value(value[1:-1]) + result[name] = value + return result + + +# From mitsuhiko/werkzeug (used with permission). +def unquote_header_value(value, is_filename=False): + r"""Unquotes a header value. (Reversal of :func:`quote_header_value`). + This does not use the real unquoting but what browsers are actually + using for quoting. + + :param value: the header value to unquote. + :rtype: str + """ + if value and value[0] == value[-1] == '"': + # this is not the real unquoting, but fixing this so that the + # RFC is met will result in bugs with internet explorer and + # probably some other browsers as well. IE for example is + # uploading files with "C:\foo\bar.txt" as filename + value = value[1:-1] + + # if this is a filename and the starting characters look like + # a UNC path, then just return the value without quotes. Using the + # replace sequence below on a UNC path has the effect of turning + # the leading double slash into a single slash and then + # _fix_ie_filename() doesn't work correctly. See #458. + if not is_filename or value[:2] != '\\\\': + return value.replace('\\\\', '\\').replace('\\"', '"') + return value + + +def dict_from_cookiejar(cj): + """Returns a key/value dictionary from a CookieJar. + + :param cj: CookieJar object to extract cookies from. + :rtype: dict + """ + + cookie_dict = {} + + for cookie in cj: + cookie_dict[cookie.name] = cookie.value + + return cookie_dict + + +def add_dict_to_cookiejar(cj, cookie_dict): + """Returns a CookieJar from a key/value dictionary. + + :param cj: CookieJar to insert cookies into. + :param cookie_dict: Dict of key/values to insert into CookieJar. + :rtype: CookieJar + """ + + return cookiejar_from_dict(cookie_dict, cj) + + +def get_encodings_from_content(content): + """Returns encodings from given content string. + + :param content: bytestring to extract encodings from. + """ + warnings.warn(( + 'In requests 3.0, get_encodings_from_content will be removed. For ' + 'more information, please see the discussion on issue #2266. 
(This'
+ ' warning should only appear once.)'),
+ DeprecationWarning)
+
+ charset_re = re.compile(r'<meta.*?charset=["\']*(.+?)["\'>]', flags=re.I)
+ pragma_re = re.compile(r'<meta.*?content=["\']*;?charset=(.+?)["\'>]', flags=re.I)
+ xml_re = re.compile(r'^<\?xml.*?encoding=["\']*(.+?)["\'>]')
+
+ return (charset_re.findall(content) +
+ pragma_re.findall(content) +
+ xml_re.findall(content))
+
+
+def _parse_content_type_header(header):
+ """Returns content type and parameters from given header
+
+ :param header: string
+ :return: tuple containing content type and dictionary of
+ parameters
+ """
+
+ tokens = header.split(';')
+ content_type, params = tokens[0].strip(), tokens[1:]
+ params_dict = {}
+ items_to_strip = "\"' "
+
+ for param in params:
+ param = param.strip()
+ if param:
+ key, value = param, True
+ index_of_equals = param.find("=")
+ if index_of_equals != -1:
+ key = param[:index_of_equals].strip(items_to_strip)
+ value = param[index_of_equals + 1:].strip(items_to_strip)
+ params_dict[key.lower()] = value
+ return content_type, params_dict
+
+
+def get_encoding_from_headers(headers):
+ """Returns encodings from given HTTP Header Dict.
+
+ :param headers: dictionary to extract encoding from.
+ :rtype: str
+ """
+
+ content_type = headers.get('content-type')
+
+ if not content_type:
+ return None
+
+ content_type, params = _parse_content_type_header(content_type)
+
+ if 'charset' in params:
+ return params['charset'].strip("'\"")
+
+ if 'text' in content_type:
+ return 'ISO-8859-1'
+
+ if 'application/json' in content_type:
+ # Assume UTF-8 based on RFC 4627: https://www.ietf.org/rfc/rfc4627.txt since the charset was unset
+ return 'utf-8'
+
+
+def stream_decode_response_unicode(iterator, r):
+ """Stream decodes a iterator."""
+
+ if r.encoding is None:
+ for item in iterator:
+ yield item
+ return
+
+ decoder = codecs.getincrementaldecoder(r.encoding)(errors='replace')
+ for chunk in iterator:
+ rv = decoder.decode(chunk)
+ if rv:
+ yield rv
+ rv = decoder.decode(b'', final=True)
+ if rv:
+ yield rv
+
+
+def iter_slices(string, slice_length):
+ """Iterate over slices of a string."""
+ pos = 0
+ if slice_length is None or slice_length <= 0:
+ slice_length = len(string)
+ while pos < len(string):
+ yield string[pos:pos + slice_length]
+ pos += slice_length
+
+
+def get_unicode_from_response(r):
+ """Returns the requested content back in unicode.
+
+ :param r: Response object to get unicode content from.
+
+ Tried:
+
+ 1. charset from content-type
+ 2. fall back and replace all unicode characters
+
+ :rtype: str
+ """
+ warnings.warn((
+ 'In requests 3.0, get_unicode_from_response will be removed. For '
+ 'more information, please see the discussion on issue #2266. (This'
+ ' warning should only appear once.)'),
+ DeprecationWarning)
+
+ tried_encodings = []
+
+ # Try charset from content-type
+ encoding = get_encoding_from_headers(r.headers)
+
+ if encoding:
+ try:
+ return str(r.content, encoding)
+ except UnicodeError:
+ tried_encodings.append(encoding)
+
+ # Fall back:
+ try:
+ return str(r.content, encoding, errors='replace')
+ except TypeError:
+ return r.content
+
+
+# The unreserved URI characters (RFC 3986)
+UNRESERVED_SET = frozenset(
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
+ "0123456789-._~")
+
+
+def unquote_unreserved(uri):
+ """Un-escape any percent-escape sequences in a URI that are unreserved
+ characters. This leaves all reserved, illegal and non-ASCII bytes encoded.
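The header-driven charset rules above, exercised as a sketch (import path per this diff):

```
from snowflake.connector.vendored.requests.utils import get_encoding_from_headers

print(get_encoding_from_headers({'content-type': 'text/html; charset=ISO-8859-5'}))  # 'ISO-8859-5'
print(get_encoding_from_headers({'content-type': 'text/plain'}))        # 'ISO-8859-1' (RFC 2616 text default)
print(get_encoding_from_headers({'content-type': 'application/json'}))  # 'utf-8' (RFC 4627 default)
print(get_encoding_from_headers({}))                                    # None -- no Content-Type at all
```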
+ + :rtype: str + """ + parts = uri.split('%') + for i in range(1, len(parts)): + h = parts[i][0:2] + if len(h) == 2 and h.isalnum(): + try: + c = chr(int(h, 16)) + except ValueError: + raise InvalidURL("Invalid percent-escape sequence: '%s'" % h) + + if c in UNRESERVED_SET: + parts[i] = c + parts[i][2:] + else: + parts[i] = '%' + parts[i] + else: + parts[i] = '%' + parts[i] + return ''.join(parts) + + +def requote_uri(uri): + """Re-quote the given URI. + + This function passes the given URI through an unquote/quote cycle to + ensure that it is fully and consistently quoted. + + :rtype: str + """ + safe_with_percent = "!#$%&'()*+,/:;=?@[]~" + safe_without_percent = "!#$&'()*+,/:;=?@[]~" + try: + # Unquote only the unreserved characters + # Then quote only illegal characters (do not quote reserved, + # unreserved, or '%') + return quote(unquote_unreserved(uri), safe=safe_with_percent) + except InvalidURL: + # We couldn't unquote the given URI, so let's try quoting it, but + # there may be unquoted '%'s in the URI. We need to make sure they're + # properly quoted so they do not cause issues elsewhere. + return quote(uri, safe=safe_without_percent) + + +def address_in_network(ip, net): + """This function allows you to check if an IP belongs to a network subnet + + Example: returns True if ip = 192.168.1.1 and net = 192.168.1.0/24 + returns False if ip = 192.168.1.1 and net = 192.168.100.0/24 + + :rtype: bool + """ + ipaddr = struct.unpack('=L', socket.inet_aton(ip))[0] + netaddr, bits = net.split('/') + netmask = struct.unpack('=L', socket.inet_aton(dotted_netmask(int(bits))))[0] + network = struct.unpack('=L', socket.inet_aton(netaddr))[0] & netmask + return (ipaddr & netmask) == (network & netmask) + + +def dotted_netmask(mask): + """Converts mask from /xx format to xxx.xxx.xxx.xxx + + Example: if mask is 24 function returns 255.255.255.0 + + :rtype: str + """ + bits = 0xffffffff ^ (1 << 32 - mask) - 1 + return socket.inet_ntoa(struct.pack('>I', bits)) + + +def is_ipv4_address(string_ip): + """ + :rtype: bool + """ + try: + socket.inet_aton(string_ip) + except socket.error: + return False + return True + + +def is_valid_cidr(string_network): + """ + Very simple check of the cidr format in no_proxy variable. + + :rtype: bool + """ + if string_network.count('/') == 1: + try: + mask = int(string_network.split('/')[1]) + except ValueError: + return False + + if mask < 1 or mask > 32: + return False + + try: + socket.inet_aton(string_network.split('/')[0]) + except socket.error: + return False + else: + return False + return True + + +@contextlib.contextmanager +def set_environ(env_name, value): + """Set the environment variable 'env_name' to 'value' + + Save previous value, yield, and then restore the previous value stored in + the environment variable 'env_name'. + + If 'value' is None, do nothing""" + value_changed = value is not None + if value_changed: + old_value = os.environ.get(env_name) + os.environ[env_name] = value + try: + yield + finally: + if value_changed: + if old_value is None: + del os.environ[env_name] + else: + os.environ[env_name] = old_value + + +def should_bypass_proxies(url, no_proxy): + """ + Returns whether we should bypass proxies or not. + + :rtype: bool + """ + # Prioritize lowercase environment variables over uppercase + # to keep a consistent behaviour with other http projects (curl, wget). + get_proxy = lambda k: os.environ.get(k) or os.environ.get(k.upper()) + + # First check whether no_proxy is defined. 
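The CIDR helpers above back the `no_proxy` matching in `should_bypass_proxies`; a sketch of the expected results (addresses and hostnames are hypothetical):

```
from snowflake.connector.vendored.requests.utils import (
    address_in_network, dotted_netmask, is_valid_cidr, should_bypass_proxies)

print(dotted_netmask(24))                                   # '255.255.255.0'
print(is_valid_cidr('192.168.1.0/24'))                      # True
print(is_valid_cidr('192.168.1.0'))                         # False -- no mask part
print(address_in_network('192.168.1.1', '192.168.1.0/24'))  # True
print(address_in_network('10.0.0.1', '192.168.1.0/24'))     # False
# no_proxy entries are matched against the end of the hostname:
print(should_bypass_proxies('http://internal.example.com/', no_proxy='example.com'))  # True
```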
If it is, check that the URL + # we're getting isn't in the no_proxy list. + no_proxy_arg = no_proxy + if no_proxy is None: + no_proxy = get_proxy('no_proxy') + parsed = urlparse(url) + + if parsed.hostname is None: + # URLs don't always have hostnames, e.g. file:/// urls. + return True + + if no_proxy: + # We need to check whether we match here. We need to see if we match + # the end of the hostname, both with and without the port. + no_proxy = ( + host for host in no_proxy.replace(' ', '').split(',') if host + ) + + if is_ipv4_address(parsed.hostname): + for proxy_ip in no_proxy: + if is_valid_cidr(proxy_ip): + if address_in_network(parsed.hostname, proxy_ip): + return True + elif parsed.hostname == proxy_ip: + # If no_proxy ip was defined in plain IP notation instead of cidr notation & + # matches the IP of the index + return True + else: + host_with_port = parsed.hostname + if parsed.port: + host_with_port += ':{}'.format(parsed.port) + + for host in no_proxy: + if parsed.hostname.endswith(host) or host_with_port.endswith(host): + # The URL does match something in no_proxy, so we don't want + # to apply the proxies on this URL. + return True + + with set_environ('no_proxy', no_proxy_arg): + # parsed.hostname can be `None` in cases such as a file URI. + try: + bypass = proxy_bypass(parsed.hostname) + except (TypeError, socket.gaierror): + bypass = False + + if bypass: + return True + + return False + + +def get_environ_proxies(url, no_proxy=None): + """ + Return a dict of environment proxies. + + :rtype: dict + """ + if should_bypass_proxies(url, no_proxy=no_proxy): + return {} + else: + return getproxies() + + +def select_proxy(url, proxies): + """Select a proxy for the url, if applicable. + + :param url: The url being for the request + :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs + """ + proxies = proxies or {} + urlparts = urlparse(url) + if urlparts.hostname is None: + return proxies.get(urlparts.scheme, proxies.get('all')) + + proxy_keys = [ + urlparts.scheme + '://' + urlparts.hostname, + urlparts.scheme, + 'all://' + urlparts.hostname, + 'all', + ] + proxy = None + for proxy_key in proxy_keys: + if proxy_key in proxies: + proxy = proxies[proxy_key] + break + + return proxy + + +def default_user_agent(name="python-requests"): + """ + Return a string representing the default user agent. + + :rtype: str + """ + return '%s/%s' % (name, __version__) + + +def default_headers(): + """ + :rtype: requests.structures.CaseInsensitiveDict + """ + return CaseInsensitiveDict({ + 'User-Agent': default_user_agent(), + 'Accept-Encoding': DEFAULT_ACCEPT_ENCODING, + 'Accept': '*/*', + 'Connection': 'keep-alive', + }) + + +def parse_header_links(value): + """Return a list of parsed link headers proxies. + + i.e. 
Link: <http:/.../front.jpeg>; rel=front; type="image/jpeg",<http://.../back.jpeg>; rel=back;type="image/jpeg"
+
+ :rtype: list
+ """
+
+ links = []
+
+ replace_chars = ' \'"'
+
+ value = value.strip(replace_chars)
+ if not value:
+ return links
+
+ for val in re.split(', *<', value):
+ try:
+ url, params = val.split(';', 1)
+ except ValueError:
+ url, params = val, ''
+
+ link = {'url': url.strip('<> \'"')}
+
+ for param in params.split(';'):
+ try:
+ key, value = param.split('=')
+ except ValueError:
+ break
+
+ link[key.strip(replace_chars)] = value.strip(replace_chars)
+
+ links.append(link)
+
+ return links
+
+
+# Null bytes; no need to recreate these on each call to guess_json_utf
+_null = '\x00'.encode('ascii') # encoding to ASCII for Python 3
+_null2 = _null * 2
+_null3 = _null * 3
+
+
+def guess_json_utf(data):
+ """
+ :rtype: str
+ """
+ # JSON always starts with two ASCII characters, so detection is as
+ # easy as counting the nulls and from their location and count
+ # determine the encoding. Also detect a BOM, if present.
+ sample = data[:4]
+ if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE):
+ return 'utf-32' # BOM included
+ if sample[:3] == codecs.BOM_UTF8:
+ return 'utf-8-sig' # BOM included, MS style (discouraged)
+ if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE):
+ return 'utf-16' # BOM included
+ nullcount = sample.count(_null)
+ if nullcount == 0:
+ return 'utf-8'
+ if nullcount == 2:
+ if sample[::2] == _null2: # 1st and 3rd are null
+ return 'utf-16-be'
+ if sample[1::2] == _null2: # 2nd and 4th are null
+ return 'utf-16-le'
+ # Did not detect 2 valid UTF-16 ascii-range characters
+ if nullcount == 3:
+ if sample[:3] == _null3:
+ return 'utf-32-be'
+ if sample[1:] == _null3:
+ return 'utf-32-le'
+ # Did not detect a valid UTF-32 ascii-range character
+ return None
+
+
+def prepend_scheme_if_needed(url, new_scheme):
+ """Given a URL that may or may not have a scheme, prepend the given scheme.
+ Does not replace a present scheme with the one provided as an argument.
+
+ :rtype: str
+ """
+ scheme, netloc, path, params, query, fragment = urlparse(url, new_scheme)
+
+ # urlparse is a finicky beast, and sometimes decides that there isn't a
+ # netloc present. Assume that it's being over-cautious, and switch netloc
+ # and path if urlparse decided there was no netloc.
+ if not netloc:
+ netloc, path = path, netloc
+
+ return urlunparse((scheme, netloc, path, params, query, fragment))
+
+
+def get_auth_from_url(url):
+ """Given a url with authentication components, extract them into a tuple of
+ username,password.
+
+ :rtype: (str,str)
+ """
+ parsed = urlparse(url)
+
+ try:
+ auth = (unquote(parsed.username), unquote(parsed.password))
+ except (AttributeError, TypeError):
+ auth = ('', '')
+
+ return auth
+
+
+# Moved outside of function to avoid recompile every call
+_CLEAN_HEADER_REGEX_BYTE = re.compile(b'^\\S[^\\r\\n]*$|^$')
+_CLEAN_HEADER_REGEX_STR = re.compile(r'^\S[^\r\n]*$|^$')
+
+
+def check_header_validity(header):
+ """Verifies that header value is a string which doesn't contain
+ leading whitespace or return characters. This prevents unintended
+ header injection.
+
+ :param header: tuple, in the format (name, value).
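The null-byte heuristic in `guess_json_utf` above can be checked quickly, along with `get_auth_from_url` (a sketch; the credential URL is hypothetical):

```
from snowflake.connector.vendored.requests.utils import get_auth_from_url, guess_json_utf

print(guess_json_utf('{"a": 1}'.encode('utf-8')))      # 'utf-8' -- no nulls in the first four bytes
print(guess_json_utf('{"a": 1}'.encode('utf-16-le')))  # 'utf-16-le' -- nulls at offsets 1 and 3
print(guess_json_utf('{"a": 1}'.encode('utf-16')))     # 'utf-16' -- recognized by its BOM

print(get_auth_from_url('https://user:s3cret@example.com/path'))  # ('user', 's3cret')
```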
+ """ + name, value = header + + if isinstance(value, bytes): + pat = _CLEAN_HEADER_REGEX_BYTE + else: + pat = _CLEAN_HEADER_REGEX_STR + try: + if not pat.match(value): + raise InvalidHeader("Invalid return character or leading space in header: %s" % name) + except TypeError: + raise InvalidHeader("Value for header {%s: %s} must be of type str or " + "bytes, not %s" % (name, value, type(value))) + + +def urldefragauth(url): + """ + Given a url remove the fragment and the authentication part. + + :rtype: str + """ + scheme, netloc, path, params, query, fragment = urlparse(url) + + # see func:`prepend_scheme_if_needed` + if not netloc: + netloc, path = path, netloc + + netloc = netloc.rsplit('@', 1)[-1] + + return urlunparse((scheme, netloc, path, params, query, '')) + + +def rewind_body(prepared_request): + """Move file pointer back to its recorded starting position + so it can be read again on redirect. + """ + body_seek = getattr(prepared_request.body, 'seek', None) + if body_seek is not None and isinstance(prepared_request._body_position, integer_types): + try: + body_seek(prepared_request._body_position) + except (IOError, OSError): + raise UnrewindableBodyError("An error occurred when rewinding request " + "body for redirect.") + else: + raise UnrewindableBodyError("Unable to rewind request body for redirect.") diff --git a/src/snowflake/connector/vendored/urllib3/LICENSE.txt b/src/snowflake/connector/vendored/urllib3/LICENSE.txt new file mode 100644 index 000000000..429a1767e --- /dev/null +++ b/src/snowflake/connector/vendored/urllib3/LICENSE.txt @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2008-2020 Andrey Petrov and contributors (see CONTRIBUTORS.txt) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/src/snowflake/connector/vendored/urllib3/__init__.py b/src/snowflake/connector/vendored/urllib3/__init__.py new file mode 100644 index 000000000..fe86b59d7 --- /dev/null +++ b/src/snowflake/connector/vendored/urllib3/__init__.py @@ -0,0 +1,85 @@ +""" +Python HTTP library with thread-safe connection pooling, file post support, user friendly, and more +""" +from __future__ import absolute_import + +# Set default logging handler to avoid "No handler found" warnings. +import logging +import warnings +from logging import NullHandler + +from . 
import exceptions +from ._version import __version__ +from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, connection_from_url +from .filepost import encode_multipart_formdata +from .poolmanager import PoolManager, ProxyManager, proxy_from_url +from .response import HTTPResponse +from .util.request import make_headers +from .util.retry import Retry +from .util.timeout import Timeout +from .util.url import get_host + +__author__ = "Andrey Petrov (andrey.petrov@shazow.net)" +__license__ = "MIT" +__version__ = __version__ + +__all__ = ( + "HTTPConnectionPool", + "HTTPSConnectionPool", + "PoolManager", + "ProxyManager", + "HTTPResponse", + "Retry", + "Timeout", + "add_stderr_logger", + "connection_from_url", + "disable_warnings", + "encode_multipart_formdata", + "get_host", + "make_headers", + "proxy_from_url", +) + +logging.getLogger(__name__).addHandler(NullHandler()) + + +def add_stderr_logger(level=logging.DEBUG): + """ + Helper for quickly adding a StreamHandler to the logger. Useful for + debugging. + + Returns the handler after adding it. + """ + # This method needs to be in this __init__.py to get the __name__ correct + # even if urllib3 is vendored within another package. + logger = logging.getLogger(__name__) + handler = logging.StreamHandler() + handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s")) + logger.addHandler(handler) + logger.setLevel(level) + logger.debug("Added a stderr logging handler to logger: %s", __name__) + return handler + + +# ... Clean up. +del NullHandler + + +# All warning filters *must* be appended unless you're really certain that they +# shouldn't be: otherwise, it's very hard for users to use most Python +# mechanisms to silence them. +# SecurityWarning's always go off by default. +warnings.simplefilter("always", exceptions.SecurityWarning, append=True) +# SubjectAltNameWarning's should go off once per host +warnings.simplefilter("default", exceptions.SubjectAltNameWarning, append=True) +# InsecurePlatformWarning's don't vary between requests, so we keep it default. +warnings.simplefilter("default", exceptions.InsecurePlatformWarning, append=True) +# SNIMissingWarnings should go off only once. +warnings.simplefilter("default", exceptions.SNIMissingWarning, append=True) + + +def disable_warnings(category=exceptions.HTTPWarning): + """ + Helper for quickly disabling all urllib3 warnings. 
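For context, a minimal sketch of how downstream code interacts with the logging helper and the warning filters registered above (editor's illustration; `urllib3` here is the stock import path, whereas this copy lives under `snowflake.connector.vendored.urllib3`):

```
import warnings
import urllib3

# Surface insecure-request warnings as hard errors during tests...
warnings.simplefilter("error", urllib3.exceptions.InsecureRequestWarning)

# ...or silence every urllib3 warning; they all subclass HTTPWarning,
# which is disable_warnings()'s default category.
urllib3.disable_warnings()

# Attach a stderr handler while debugging connection behaviour.
handler = urllib3.add_stderr_logger()
```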
+ """ + warnings.simplefilter("ignore", category) diff --git a/src/snowflake/connector/vendored/urllib3/_collections.py b/src/snowflake/connector/vendored/urllib3/_collections.py new file mode 100644 index 000000000..da9857e98 --- /dev/null +++ b/src/snowflake/connector/vendored/urllib3/_collections.py @@ -0,0 +1,337 @@ +from __future__ import absolute_import + +try: + from collections.abc import Mapping, MutableMapping +except ImportError: + from collections import Mapping, MutableMapping +try: + from threading import RLock +except ImportError: # Platform-specific: No threads available + + class RLock: + def __enter__(self): + pass + + def __exit__(self, exc_type, exc_value, traceback): + pass + + +from collections import OrderedDict + +from .exceptions import InvalidHeader +from .packages import six +from .packages.six import iterkeys, itervalues + +__all__ = ["RecentlyUsedContainer", "HTTPHeaderDict"] + + +_Null = object() + + +class RecentlyUsedContainer(MutableMapping): + """ + Provides a thread-safe dict-like container which maintains up to + ``maxsize`` keys while throwing away the least-recently-used keys beyond + ``maxsize``. + + :param maxsize: + Maximum number of recent elements to retain. + + :param dispose_func: + Every time an item is evicted from the container, + ``dispose_func(value)`` is called. Callback which will get called + """ + + ContainerCls = OrderedDict + + def __init__(self, maxsize=10, dispose_func=None): + self._maxsize = maxsize + self.dispose_func = dispose_func + + self._container = self.ContainerCls() + self.lock = RLock() + + def __getitem__(self, key): + # Re-insert the item, moving it to the end of the eviction line. + with self.lock: + item = self._container.pop(key) + self._container[key] = item + return item + + def __setitem__(self, key, value): + evicted_value = _Null + with self.lock: + # Possibly evict the existing value of 'key' + evicted_value = self._container.get(key, _Null) + self._container[key] = value + + # If we didn't evict an existing value, we might have to evict the + # least recently used item from the beginning of the container. + if len(self._container) > self._maxsize: + _key, evicted_value = self._container.popitem(last=False) + + if self.dispose_func and evicted_value is not _Null: + self.dispose_func(evicted_value) + + def __delitem__(self, key): + with self.lock: + value = self._container.pop(key) + + if self.dispose_func: + self.dispose_func(value) + + def __len__(self): + with self.lock: + return len(self._container) + + def __iter__(self): + raise NotImplementedError( + "Iteration over this class is unlikely to be threadsafe." + ) + + def clear(self): + with self.lock: + # Copy pointers to all values, then wipe the mapping + values = list(itervalues(self._container)) + self._container.clear() + + if self.dispose_func: + for value in values: + self.dispose_func(value) + + def keys(self): + with self.lock: + return list(iterkeys(self._container)) + + +class HTTPHeaderDict(MutableMapping): + """ + :param headers: + An iterable of field-value pairs. Must not contain multiple field names + when compared case-insensitively. + + :param kwargs: + Additional field-value pairs to pass in to ``dict.update``. + + A ``dict`` like container for storing HTTP Headers. + + Field names are stored and compared case-insensitively in compliance with + RFC 7230. Iteration provides the first case-sensitive key seen for each + case-insensitive pair. 
+ + Using ``__setitem__`` syntax overwrites fields that compare equal + case-insensitively in order to maintain ``dict``'s api. For fields that + compare equal, instead create a new ``HTTPHeaderDict`` and use ``.add`` + in a loop. + + If multiple fields that are equal case-insensitively are passed to the + constructor or ``.update``, the behavior is undefined and some will be + lost. + + >>> headers = HTTPHeaderDict() + >>> headers.add('Set-Cookie', 'foo=bar') + >>> headers.add('set-cookie', 'baz=quxx') + >>> headers['content-length'] = '7' + >>> headers['SET-cookie'] + 'foo=bar, baz=quxx' + >>> headers['Content-Length'] + '7' + """ + + def __init__(self, headers=None, **kwargs): + super(HTTPHeaderDict, self).__init__() + self._container = OrderedDict() + if headers is not None: + if isinstance(headers, HTTPHeaderDict): + self._copy_from(headers) + else: + self.extend(headers) + if kwargs: + self.extend(kwargs) + + def __setitem__(self, key, val): + self._container[key.lower()] = [key, val] + return self._container[key.lower()] + + def __getitem__(self, key): + val = self._container[key.lower()] + return ", ".join(val[1:]) + + def __delitem__(self, key): + del self._container[key.lower()] + + def __contains__(self, key): + return key.lower() in self._container + + def __eq__(self, other): + if not isinstance(other, Mapping) and not hasattr(other, "keys"): + return False + if not isinstance(other, type(self)): + other = type(self)(other) + return dict((k.lower(), v) for k, v in self.itermerged()) == dict( + (k.lower(), v) for k, v in other.itermerged() + ) + + def __ne__(self, other): + return not self.__eq__(other) + + if six.PY2: # Python 2 + iterkeys = MutableMapping.iterkeys + itervalues = MutableMapping.itervalues + + __marker = object() + + def __len__(self): + return len(self._container) + + def __iter__(self): + # Only provide the originally cased names + for vals in self._container.values(): + yield vals[0] + + def pop(self, key, default=__marker): + """D.pop(k[,d]) -> v, remove specified key and return the corresponding value. + If key is not found, d is returned if given, otherwise KeyError is raised. + """ + # Using the MutableMapping function directly fails due to the private marker. + # Using ordinary dict.pop would expose the internal structures. + # So let's reinvent the wheel. + try: + value = self[key] + except KeyError: + if default is self.__marker: + raise + return default + else: + del self[key] + return value + + def discard(self, key): + try: + del self[key] + except KeyError: + pass + + def add(self, key, val): + """Adds a (name, value) pair, doesn't overwrite the value if it already + exists. + + >>> headers = HTTPHeaderDict(foo='bar') + >>> headers.add('Foo', 'baz') + >>> headers['foo'] + 'bar, baz' + """ + key_lower = key.lower() + new_vals = [key, val] + # Keep the common case aka no item present as fast as possible + vals = self._container.setdefault(key_lower, new_vals) + if new_vals is not vals: + vals.append(val) + + def extend(self, *args, **kwargs): + """Generic import function for any type of header-like object. 
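A short sketch contrasting `__setitem__` (replaces every value that compares equal case-insensitively) with `add` (appends), per the contract described above (editor's illustration):

```
from urllib3._collections import HTTPHeaderDict

h = HTTPHeaderDict()
h.add("Set-Cookie", "a=1")
h.add("set-cookie", "b=2")
assert h["SET-COOKIE"] == "a=1, b=2"  # lookups join duplicates with ", "

h["Set-Cookie"] = "c=3"               # __setitem__ wipes both earlier values
assert h["set-cookie"] == "c=3"

# Equality is case-insensitive on field names as well.
assert h == {"SET-COOKIE": "c=3"}
```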
+ Adapted version of MutableMapping.update in order to insert items + with self.add instead of self.__setitem__ + """ + if len(args) > 1: + raise TypeError( + "extend() takes at most 1 positional " + "arguments ({0} given)".format(len(args)) + ) + other = args[0] if len(args) >= 1 else () + + if isinstance(other, HTTPHeaderDict): + for key, val in other.iteritems(): + self.add(key, val) + elif isinstance(other, Mapping): + for key in other: + self.add(key, other[key]) + elif hasattr(other, "keys"): + for key in other.keys(): + self.add(key, other[key]) + else: + for key, value in other: + self.add(key, value) + + for key, value in kwargs.items(): + self.add(key, value) + + def getlist(self, key, default=__marker): + """Returns a list of all the values for the named field. Returns an + empty list if the key doesn't exist.""" + try: + vals = self._container[key.lower()] + except KeyError: + if default is self.__marker: + return [] + return default + else: + return vals[1:] + + # Backwards compatibility for httplib + getheaders = getlist + getallmatchingheaders = getlist + iget = getlist + + # Backwards compatibility for http.cookiejar + get_all = getlist + + def __repr__(self): + return "%s(%s)" % (type(self).__name__, dict(self.itermerged())) + + def _copy_from(self, other): + for key in other: + val = other.getlist(key) + if isinstance(val, list): + # Don't need to convert tuples + val = list(val) + self._container[key.lower()] = [key] + val + + def copy(self): + clone = type(self)() + clone._copy_from(self) + return clone + + def iteritems(self): + """Iterate over all header lines, including duplicate ones.""" + for key in self: + vals = self._container[key.lower()] + for val in vals[1:]: + yield vals[0], val + + def itermerged(self): + """Iterate over all headers, merging duplicate ones together.""" + for key in self: + val = self._container[key.lower()] + yield val[0], ", ".join(val[1:]) + + def items(self): + return list(self.iteritems()) + + @classmethod + def from_httplib(cls, message): # Python 2 + """Read headers from a Python 2 httplib message object.""" + # python2.7 does not expose a proper API for exporting multiheaders + # efficiently. This function re-reads raw lines from the message + # object and extracts the multiheaders properly. + obs_fold_continued_leaders = (" ", "\t") + headers = [] + + for line in message.headers: + if line.startswith(obs_fold_continued_leaders): + if not headers: + # We received a header line that starts with OWS as described + # in RFC-7230 S3.2.4. This indicates a multiline header, but + # there exists no previous header to which we can attach it. 
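A sketch of the two iteration modes above: `iteritems` yields one pair per original header line, while `itermerged` folds duplicates into a single comma-joined value (editor's illustration):

```
from urllib3._collections import HTTPHeaderDict

h = HTTPHeaderDict()
h.add("Accept", "text/html")
h.add("Accept", "application/json")

assert list(h.iteritems()) == [
    ("Accept", "text/html"),
    ("Accept", "application/json"),
]
assert list(h.itermerged()) == [("Accept", "text/html, application/json")]
```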
+ raise InvalidHeader( + "Header continuation with no previous header: %s" % line + ) + else: + key, value = headers[-1] + headers[-1] = (key, value + " " + line.strip()) + continue + + key, value = line.split(":", 1) + headers.append((key, value.strip())) + + return cls(headers) diff --git a/src/snowflake/connector/vendored/urllib3/_version.py b/src/snowflake/connector/vendored/urllib3/_version.py new file mode 100644 index 000000000..e8ebee957 --- /dev/null +++ b/src/snowflake/connector/vendored/urllib3/_version.py @@ -0,0 +1,2 @@ +# This file is protected via CODEOWNERS +__version__ = "1.26.6" diff --git a/src/snowflake/connector/vendored/urllib3/connection.py b/src/snowflake/connector/vendored/urllib3/connection.py new file mode 100644 index 000000000..4c996659c --- /dev/null +++ b/src/snowflake/connector/vendored/urllib3/connection.py @@ -0,0 +1,539 @@ +from __future__ import absolute_import + +import datetime +import logging +import os +import re +import socket +import warnings +from socket import error as SocketError +from socket import timeout as SocketTimeout + +from .packages import six +from .packages.six.moves.http_client import HTTPConnection as _HTTPConnection +from .packages.six.moves.http_client import HTTPException # noqa: F401 +from .util.proxy import create_proxy_ssl_context + +try: # Compiled with SSL? + import ssl + + BaseSSLError = ssl.SSLError +except (ImportError, AttributeError): # Platform-specific: No SSL. + ssl = None + + class BaseSSLError(BaseException): + pass + + +try: + # Python 3: not a no-op, we're adding this to the namespace so it can be imported. + ConnectionError = ConnectionError +except NameError: + # Python 2 + class ConnectionError(Exception): + pass + + +try: # Python 3: + # Not a no-op, we're adding this to the namespace so it can be imported. + BrokenPipeError = BrokenPipeError +except NameError: # Python 2: + + class BrokenPipeError(Exception): + pass + + +from ._collections import HTTPHeaderDict # noqa (historical, removed in v2) +from ._version import __version__ +from .exceptions import ( + ConnectTimeoutError, + NewConnectionError, + SubjectAltNameWarning, + SystemTimeWarning, +) +from .packages.ssl_match_hostname import CertificateError, match_hostname +from .util import SKIP_HEADER, SKIPPABLE_HEADERS, connection +from .util.ssl_ import ( + assert_fingerprint, + create_urllib3_context, + resolve_cert_reqs, + resolve_ssl_version, + ssl_wrap_socket, +) + +log = logging.getLogger(__name__) + +port_by_scheme = {"http": 80, "https": 443} + +# When it comes time to update this value as a part of regular maintenance +# (ie test_recent_date is failing) update it to ~6 months before the current date. +RECENT_DATE = datetime.date(2020, 7, 1) + +_CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]") + + +class HTTPConnection(_HTTPConnection, object): + """ + Based on :class:`http.client.HTTPConnection` but provides an extra constructor + backwards-compatibility layer between older and newer Pythons. + + Additional keyword parameters are used to configure attributes of the connection. + Accepted parameters include: + + - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool` + - ``source_address``: Set the source address for the current connection. + - ``socket_options``: Set specific options on the underlying socket. 
If not specified, then + defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling + Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy. + + For example, if you wish to enable TCP Keep Alive in addition to the defaults, + you might pass: + + .. code-block:: python + + HTTPConnection.default_socket_options + [ + (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1), + ] + + Or you may want to disable the defaults by passing an empty list (e.g., ``[]``). + """ + + default_port = port_by_scheme["http"] + + #: Disable Nagle's algorithm by default. + #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]`` + default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)] + + #: Whether this connection verifies the host's certificate. + is_verified = False + + def __init__(self, *args, **kw): + if not six.PY2: + kw.pop("strict", None) + + # Pre-set source_address. + self.source_address = kw.get("source_address") + + #: The socket options provided by the user. If no options are + #: provided, we use the default options. + self.socket_options = kw.pop("socket_options", self.default_socket_options) + + # Proxy options provided by the user. + self.proxy = kw.pop("proxy", None) + self.proxy_config = kw.pop("proxy_config", None) + + _HTTPConnection.__init__(self, *args, **kw) + + @property + def host(self): + """ + Getter method to remove any trailing dots that indicate the hostname is an FQDN. + + In general, SSL certificates don't include the trailing dot indicating a + fully-qualified domain name, and thus, they don't validate properly when + checked against a domain name that includes the dot. In addition, some + servers may not expect to receive the trailing dot when provided. + + However, the hostname with trailing dot is critical to DNS resolution; doing a + lookup with the trailing dot will properly only resolve the appropriate FQDN, + whereas a lookup without a trailing dot will search the system's search domain + list. Thus, it's important to keep the original host around for use only in + those cases where it's appropriate (i.e., when doing DNS lookup to establish the + actual TCP connection across which we're going to send HTTP requests). + """ + return self._dns_host.rstrip(".") + + @host.setter + def host(self, value): + """ + Setter for the `host` property. + + We assume that only urllib3 uses the _dns_host attribute; httplib itself + only uses `host`, and it seems reasonable that other libraries follow suit. + """ + self._dns_host = value + + def _new_conn(self): + """Establish a socket connection and set nodelay settings on it. + + :return: New socket connection. + """ + extra_kw = {} + if self.source_address: + extra_kw["source_address"] = self.source_address + + if self.socket_options: + extra_kw["socket_options"] = self.socket_options + + try: + conn = connection.create_connection( + (self._dns_host, self.port), self.timeout, **extra_kw + ) + + except SocketTimeout: + raise ConnectTimeoutError( + self, + "Connection to %s timed out. (connect timeout=%s)" + % (self.host, self.timeout), + ) + + except SocketError as e: + raise NewConnectionError( + self, "Failed to establish a new connection: %s" % e + ) + + return conn + + def _is_using_tunnel(self): + # Google App Engine's httplib does not define _tunnel_host + return getattr(self, "_tunnel_host", None) + + def _prepare_conn(self, conn): + self.sock = conn + if self._is_using_tunnel(): + # TODO: Fix tunnel so it doesn't depend on self.sock state. 
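In practice the `socket_options` hook documented above is exercised through a pool rather than by constructing connections directly; a sketch (editor's illustration; stock import path and a placeholder host):

```
import socket
import urllib3
from urllib3.connection import HTTPConnection

# Keep the default TCP_NODELAY option and additionally enable keep-alives.
pool = urllib3.HTTPConnectionPool(
    "example.com",
    socket_options=HTTPConnection.default_socket_options
    + [(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)],
)
response = pool.request("GET", "/")
```

Extra keyword arguments to the pool constructor are collected into `conn_kw` and forwarded to every fresh connection, which is how `socket_options` reaches `_new_conn()`.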
+ self._tunnel() + # Mark this connection as not reusable + self.auto_open = 0 + + def connect(self): + conn = self._new_conn() + self._prepare_conn(conn) + + def putrequest(self, method, url, *args, **kwargs): + """ """ + # Empty docstring because the indentation of CPython's implementation + # is broken but we don't want this method in our documentation. + match = _CONTAINS_CONTROL_CHAR_RE.search(method) + if match: + raise ValueError( + "Method cannot contain non-token characters %r (found at least %r)" + % (method, match.group()) + ) + + return _HTTPConnection.putrequest(self, method, url, *args, **kwargs) + + def putheader(self, header, *values): + """ """ + if not any(isinstance(v, str) and v == SKIP_HEADER for v in values): + _HTTPConnection.putheader(self, header, *values) + elif six.ensure_str(header.lower()) not in SKIPPABLE_HEADERS: + raise ValueError( + "urllib3.util.SKIP_HEADER only supports '%s'" + % ("', '".join(map(str.title, sorted(SKIPPABLE_HEADERS))),) + ) + + def request(self, method, url, body=None, headers=None): + if headers is None: + headers = {} + else: + # Avoid modifying the headers passed into .request() + headers = headers.copy() + if "user-agent" not in (six.ensure_str(k.lower()) for k in headers): + headers["User-Agent"] = _get_default_user_agent() + super(HTTPConnection, self).request(method, url, body=body, headers=headers) + + def request_chunked(self, method, url, body=None, headers=None): + """ + Alternative to the common request method, which sends the + body with chunked encoding and not as one block + """ + headers = headers or {} + header_keys = set([six.ensure_str(k.lower()) for k in headers]) + skip_accept_encoding = "accept-encoding" in header_keys + skip_host = "host" in header_keys + self.putrequest( + method, url, skip_accept_encoding=skip_accept_encoding, skip_host=skip_host + ) + if "user-agent" not in header_keys: + self.putheader("User-Agent", _get_default_user_agent()) + for header, value in headers.items(): + self.putheader(header, value) + if "transfer-encoding" not in header_keys: + self.putheader("Transfer-Encoding", "chunked") + self.endheaders() + + if body is not None: + stringish_types = six.string_types + (bytes,) + if isinstance(body, stringish_types): + body = (body,) + for chunk in body: + if not chunk: + continue + if not isinstance(chunk, bytes): + chunk = chunk.encode("utf8") + len_str = hex(len(chunk))[2:] + to_send = bytearray(len_str.encode()) + to_send += b"\r\n" + to_send += chunk + to_send += b"\r\n" + self.send(to_send) + + # After the if clause, to always have a closed body + self.send(b"0\r\n\r\n") + + +class HTTPSConnection(HTTPConnection): + """ + Many of the parameters to this constructor are passed to the underlying SSL + socket by means of :py:func:`urllib3.util.ssl_wrap_socket`. 
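The chunked framing that `request_chunked` above writes is simple enough to sketch standalone: each chunk goes out as its hex length, CRLF, the payload, CRLF, and a zero-length chunk terminates the body (editor's illustration of the wire format, not the vendored API):

```
def frame_chunks(chunks):
    # Yield the HTTP/1.1 chunked transfer framing for an iterable of bytes,
    # mirroring the encoding loop in request_chunked.
    for chunk in chunks:
        if not chunk:
            continue
        yield hex(len(chunk))[2:].encode() + b"\r\n" + chunk + b"\r\n"
    yield b"0\r\n\r\n"  # terminating zero-length chunk


assert b"".join(frame_chunks([b"hello", b", world"])) == (
    b"5\r\nhello\r\n7\r\n, world\r\n0\r\n\r\n"
)
```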
+ """ + + default_port = port_by_scheme["https"] + + cert_reqs = None + ca_certs = None + ca_cert_dir = None + ca_cert_data = None + ssl_version = None + assert_fingerprint = None + tls_in_tls_required = False + + def __init__( + self, + host, + port=None, + key_file=None, + cert_file=None, + key_password=None, + strict=None, + timeout=socket._GLOBAL_DEFAULT_TIMEOUT, + ssl_context=None, + server_hostname=None, + **kw + ): + + HTTPConnection.__init__(self, host, port, strict=strict, timeout=timeout, **kw) + + self.key_file = key_file + self.cert_file = cert_file + self.key_password = key_password + self.ssl_context = ssl_context + self.server_hostname = server_hostname + + # Required property for Google AppEngine 1.9.0 which otherwise causes + # HTTPS requests to go out as HTTP. (See Issue #356) + self._protocol = "https" + + def set_cert( + self, + key_file=None, + cert_file=None, + cert_reqs=None, + key_password=None, + ca_certs=None, + assert_hostname=None, + assert_fingerprint=None, + ca_cert_dir=None, + ca_cert_data=None, + ): + """ + This method should only be called once, before the connection is used. + """ + # If cert_reqs is not provided we'll assume CERT_REQUIRED unless we also + # have an SSLContext object in which case we'll use its verify_mode. + if cert_reqs is None: + if self.ssl_context is not None: + cert_reqs = self.ssl_context.verify_mode + else: + cert_reqs = resolve_cert_reqs(None) + + self.key_file = key_file + self.cert_file = cert_file + self.cert_reqs = cert_reqs + self.key_password = key_password + self.assert_hostname = assert_hostname + self.assert_fingerprint = assert_fingerprint + self.ca_certs = ca_certs and os.path.expanduser(ca_certs) + self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir) + self.ca_cert_data = ca_cert_data + + def connect(self): + # Add certificate verification + conn = self._new_conn() + hostname = self.host + tls_in_tls = False + + if self._is_using_tunnel(): + if self.tls_in_tls_required: + conn = self._connect_tls_proxy(hostname, conn) + tls_in_tls = True + + self.sock = conn + + # Calls self._set_hostport(), so self.host is + # self._tunnel_host below. + self._tunnel() + # Mark this connection as not reusable + self.auto_open = 0 + + # Override the host with the one we're requesting data from. + hostname = self._tunnel_host + + server_hostname = hostname + if self.server_hostname is not None: + server_hostname = self.server_hostname + + is_time_off = datetime.date.today() < RECENT_DATE + if is_time_off: + warnings.warn( + ( + "System time is way off (before {0}). This will probably " + "lead to SSL verification errors" + ).format(RECENT_DATE), + SystemTimeWarning, + ) + + # Wrap socket using verification with the root certs in + # trusted_root_certs + default_ssl_context = False + if self.ssl_context is None: + default_ssl_context = True + self.ssl_context = create_urllib3_context( + ssl_version=resolve_ssl_version(self.ssl_version), + cert_reqs=resolve_cert_reqs(self.cert_reqs), + ) + + context = self.ssl_context + context.verify_mode = resolve_cert_reqs(self.cert_reqs) + + # Try to load OS default certs if none are given. 
+ # Works well on Windows (requires Python3.4+) + if ( + not self.ca_certs + and not self.ca_cert_dir + and not self.ca_cert_data + and default_ssl_context + and hasattr(context, "load_default_certs") + ): + context.load_default_certs() + + self.sock = ssl_wrap_socket( + sock=conn, + keyfile=self.key_file, + certfile=self.cert_file, + key_password=self.key_password, + ca_certs=self.ca_certs, + ca_cert_dir=self.ca_cert_dir, + ca_cert_data=self.ca_cert_data, + server_hostname=server_hostname, + ssl_context=context, + tls_in_tls=tls_in_tls, + ) + + # If we're using all defaults and the connection + # is TLSv1 or TLSv1.1 we throw a DeprecationWarning + # for the host. + if ( + default_ssl_context + and self.ssl_version is None + and hasattr(self.sock, "version") + and self.sock.version() in {"TLSv1", "TLSv1.1"} + ): + warnings.warn( + "Negotiating TLSv1/TLSv1.1 by default is deprecated " + "and will be disabled in urllib3 v2.0.0. Connecting to " + "'%s' with '%s' can be enabled by explicitly opting-in " + "with 'ssl_version'" % (self.host, self.sock.version()), + DeprecationWarning, + ) + + if self.assert_fingerprint: + assert_fingerprint( + self.sock.getpeercert(binary_form=True), self.assert_fingerprint + ) + elif ( + context.verify_mode != ssl.CERT_NONE + and not getattr(context, "check_hostname", False) + and self.assert_hostname is not False + ): + # While urllib3 attempts to always turn off hostname matching from + # the TLS library, this cannot always be done. So we check whether + # the TLS Library still thinks it's matching hostnames. + cert = self.sock.getpeercert() + if not cert.get("subjectAltName", ()): + warnings.warn( + ( + "Certificate for {0} has no `subjectAltName`, falling back to check for a " + "`commonName` for now. This feature is being removed by major browsers and " + "deprecated by RFC 2818. (See https://github.com/urllib3/urllib3/issues/497 " + "for details.)".format(hostname) + ), + SubjectAltNameWarning, + ) + _match_hostname(cert, self.assert_hostname or server_hostname) + + self.is_verified = ( + context.verify_mode == ssl.CERT_REQUIRED + or self.assert_fingerprint is not None + ) + + def _connect_tls_proxy(self, hostname, conn): + """ + Establish a TLS connection to the proxy using the provided SSL context. + """ + proxy_config = self.proxy_config + ssl_context = proxy_config.ssl_context + if ssl_context: + # If the user provided a proxy context, we assume CA and client + # certificates have already been set + return ssl_wrap_socket( + sock=conn, + server_hostname=hostname, + ssl_context=ssl_context, + ) + + ssl_context = create_proxy_ssl_context( + self.ssl_version, + self.cert_reqs, + self.ca_certs, + self.ca_cert_dir, + self.ca_cert_data, + ) + # By default urllib3's SSLContext disables `check_hostname` and uses + # a custom check. For proxies we're good with relying on the default + # verification. + ssl_context.check_hostname = True + + # If no cert was provided, use only the default options for server + # certificate validation + return ssl_wrap_socket( + sock=conn, + ca_certs=self.ca_certs, + ca_cert_dir=self.ca_cert_dir, + ca_cert_data=self.ca_cert_data, + server_hostname=hostname, + ssl_context=ssl_context, + ) + + +def _match_hostname(cert, asserted_hostname): + try: + match_hostname(cert, asserted_hostname) + except CertificateError as e: + log.warning( + "Certificate did not match expected hostname: %s. 
Certificate: %s", + asserted_hostname, + cert, + ) + # Add cert to exception and reraise so client code can inspect + # the cert when catching the exception, if they want to + e._peer_cert = cert + raise + + +def _get_default_user_agent(): + return "python-urllib3/%s" % __version__ + + +class DummyConnection(object): + """Used to detect a failed ConnectionCls import.""" + + pass + + +if not ssl: + HTTPSConnection = DummyConnection # noqa: F811 + + +VerifiedHTTPSConnection = HTTPSConnection diff --git a/src/snowflake/connector/vendored/urllib3/connectionpool.py b/src/snowflake/connector/vendored/urllib3/connectionpool.py new file mode 100644 index 000000000..459bbe095 --- /dev/null +++ b/src/snowflake/connector/vendored/urllib3/connectionpool.py @@ -0,0 +1,1067 @@ +from __future__ import absolute_import + +import errno +import logging +import socket +import sys +import warnings +from socket import error as SocketError +from socket import timeout as SocketTimeout + +from .connection import ( + BaseSSLError, + BrokenPipeError, + DummyConnection, + HTTPConnection, + HTTPException, + HTTPSConnection, + VerifiedHTTPSConnection, + port_by_scheme, +) +from .exceptions import ( + ClosedPoolError, + EmptyPoolError, + HeaderParsingError, + HostChangedError, + InsecureRequestWarning, + LocationValueError, + MaxRetryError, + NewConnectionError, + ProtocolError, + ProxyError, + ReadTimeoutError, + SSLError, + TimeoutError, +) +from .packages import six +from .packages.six.moves import queue +from .packages.ssl_match_hostname import CertificateError +from .request import RequestMethods +from .response import HTTPResponse +from .util.connection import is_connection_dropped +from .util.proxy import connection_requires_http_tunnel +from .util.queue import LifoQueue +from .util.request import set_file_position +from .util.response import assert_header_parsing +from .util.retry import Retry +from .util.timeout import Timeout +from .util.url import Url, _encode_target +from .util.url import _normalize_host as normalize_host +from .util.url import get_host, parse_url + +xrange = six.moves.xrange + +log = logging.getLogger(__name__) + +_Default = object() + + +# Pool objects +class ConnectionPool(object): + """ + Base class for all connection pools, such as + :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`. + + .. note:: + ConnectionPool.urlopen() does not normalize or percent-encode target URIs + which is useful if your target server doesn't support percent-encoded + target URIs. + """ + + scheme = None + QueueCls = LifoQueue + + def __init__(self, host, port=None): + if not host: + raise LocationValueError("No host specified.") + + self.host = _normalize_host(host, scheme=self.scheme) + self._proxy_host = host.lower() + self.port = port + + def __str__(self): + return "%s(host=%r, port=%r)" % (type(self).__name__, self.host, self.port) + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.close() + # Return False to re-raise any potential exceptions + return False + + def close(self): + """ + Close all pooled connections and disable the pool. + """ + pass + + +# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252 +_blocking_errnos = {errno.EAGAIN, errno.EWOULDBLOCK} + + +class HTTPConnectionPool(ConnectionPool, RequestMethods): + """ + Thread-safe connection pool for one host. + + :param host: + Host used for this HTTP Connection (e.g. "localhost"), passed into + :class:`http.client.HTTPConnection`. 
+ + :param port: + Port used for this HTTP Connection (None is equivalent to 80), passed + into :class:`http.client.HTTPConnection`. + + :param strict: + Causes BadStatusLine to be raised if the status line can't be parsed + as a valid HTTP/1.0 or 1.1 status line, passed into + :class:`http.client.HTTPConnection`. + + .. note:: + Only works in Python 2. This parameter is ignored in Python 3. + + :param timeout: + Socket timeout in seconds for each individual connection. This can + be a float or integer, which sets the timeout for the HTTP request, + or an instance of :class:`urllib3.util.Timeout` which gives you more + fine-grained control over request timeouts. After the constructor has + been parsed, this is always a `urllib3.util.Timeout` object. + + :param maxsize: + Number of connections to save that can be reused. More than 1 is useful + in multithreaded situations. If ``block`` is set to False, more + connections will be created but they will not be saved once they've + been used. + + :param block: + If set to True, no more than ``maxsize`` connections will be used at + a time. When no free connections are available, the call will block + until a connection has been released. This is a useful side effect for + particular multithreaded situations where one does not want to use more + than maxsize connections per host to prevent flooding. + + :param headers: + Headers to include with all requests, unless other headers are given + explicitly. + + :param retries: + Retry configuration to use by default with requests in this pool. + + :param _proxy: + Parsed proxy URL, should not be used directly, instead, see + :class:`urllib3.ProxyManager` + + :param _proxy_headers: + A dictionary with proxy headers, should not be used directly, + instead, see :class:`urllib3.ProxyManager` + + :param \\**conn_kw: + Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`, + :class:`urllib3.connection.HTTPSConnection` instances. + """ + + scheme = "http" + ConnectionCls = HTTPConnection + ResponseCls = HTTPResponse + + def __init__( + self, + host, + port=None, + strict=False, + timeout=Timeout.DEFAULT_TIMEOUT, + maxsize=1, + block=False, + headers=None, + retries=None, + _proxy=None, + _proxy_headers=None, + _proxy_config=None, + **conn_kw + ): + ConnectionPool.__init__(self, host, port) + RequestMethods.__init__(self, headers) + + self.strict = strict + + if not isinstance(timeout, Timeout): + timeout = Timeout.from_float(timeout) + + if retries is None: + retries = Retry.DEFAULT + + self.timeout = timeout + self.retries = retries + + self.pool = self.QueueCls(maxsize) + self.block = block + + self.proxy = _proxy + self.proxy_headers = _proxy_headers or {} + self.proxy_config = _proxy_config + + # Fill the queue up so that doing get() on it will block properly + for _ in xrange(maxsize): + self.pool.put(None) + + # These are mostly for testing and debugging purposes. + self.num_connections = 0 + self.num_requests = 0 + self.conn_kw = conn_kw + + if self.proxy: + # Enable Nagle's algorithm for proxies, to avoid packet fragmentation. + # We cannot know if the user has added default socket options, so we cannot replace the + # list. + self.conn_kw.setdefault("socket_options", []) + + self.conn_kw["proxy"] = self.proxy + self.conn_kw["proxy_config"] = self.proxy_config + + def _new_conn(self): + """ + Return a fresh :class:`HTTPConnection`. 
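A compact sketch of the constructor parameters documented above (editor's illustration; the host is a placeholder):

```
import urllib3

# Reuse up to four sockets to one host; with block=True a fifth concurrent
# request waits for a free connection instead of opening an extra one.
pool = urllib3.HTTPConnectionPool(
    "example.com",
    maxsize=4,
    block=True,
    timeout=urllib3.Timeout(connect=2.0, read=7.0),
    retries=urllib3.Retry(total=3),
    headers={"User-Agent": "sketch/0.1"},
)
r = pool.request("GET", "/")
print(r.status, pool.num_connections, pool.num_requests)
```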
+ """ + self.num_connections += 1 + log.debug( + "Starting new HTTP connection (%d): %s:%s", + self.num_connections, + self.host, + self.port or "80", + ) + + conn = self.ConnectionCls( + host=self.host, + port=self.port, + timeout=self.timeout.connect_timeout, + strict=self.strict, + **self.conn_kw + ) + return conn + + def _get_conn(self, timeout=None): + """ + Get a connection. Will return a pooled connection if one is available. + + If no connections are available and :prop:`.block` is ``False``, then a + fresh connection is returned. + + :param timeout: + Seconds to wait before giving up and raising + :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and + :prop:`.block` is ``True``. + """ + conn = None + try: + conn = self.pool.get(block=self.block, timeout=timeout) + + except AttributeError: # self.pool is None + raise ClosedPoolError(self, "Pool is closed.") + + except queue.Empty: + if self.block: + raise EmptyPoolError( + self, + "Pool reached maximum size and no more connections are allowed.", + ) + pass # Oh well, we'll create a new connection then + + # If this is a persistent connection, check if it got disconnected + if conn and is_connection_dropped(conn): + log.debug("Resetting dropped connection: %s", self.host) + conn.close() + if getattr(conn, "auto_open", 1) == 0: + # This is a proxied connection that has been mutated by + # http.client._tunnel() and cannot be reused (since it would + # attempt to bypass the proxy) + conn = None + + return conn or self._new_conn() + + def _put_conn(self, conn): + """ + Put a connection back into the pool. + + :param conn: + Connection object for the current host and port as returned by + :meth:`._new_conn` or :meth:`._get_conn`. + + If the pool is already full, the connection is closed and discarded + because we exceeded maxsize. If connections are discarded frequently, + then maxsize should be increased. + + If the pool is closed, then the connection will be closed and discarded. + """ + try: + self.pool.put(conn, block=False) + return # Everything is dandy, done. + except AttributeError: + # self.pool is None. + pass + except queue.Full: + # This should never happen if self.block == True + log.warning("Connection pool is full, discarding connection: %s", self.host) + + # Connection never got put back into the pool, close it. + if conn: + conn.close() + + def _validate_conn(self, conn): + """ + Called right before a request is made, after the socket is created. + """ + pass + + def _prepare_proxy(self, conn): + # Nothing to do for HTTP connections. + pass + + def _get_timeout(self, timeout): + """Helper that always returns a :class:`urllib3.util.Timeout`""" + if timeout is _Default: + return self.timeout.clone() + + if isinstance(timeout, Timeout): + return timeout.clone() + else: + # User passed us an int/float. This is for backwards compatibility, + # can be removed later + return Timeout.from_float(timeout) + + def _raise_timeout(self, err, url, timeout_value): + """Is the error actually a timeout? Will raise a ReadTimeout or pass""" + + if isinstance(err, SocketTimeout): + raise ReadTimeoutError( + self, url, "Read timed out. (read timeout=%s)" % timeout_value + ) + + # See the above comment about EAGAIN in Python 3. In Python 2 we have + # to specifically catch it and throw the timeout error + if hasattr(err, "errno") and err.errno in _blocking_errnos: + raise ReadTimeoutError( + self, url, "Read timed out. (read timeout=%s)" % timeout_value + ) + + # Catch possible read timeouts thrown as SSL errors. 
If not the + # case, rethrow the original. We need to do this because of: + # http://bugs.python.org/issue10272 + if "timed out" in str(err) or "did not complete (read)" in str( + err + ): # Python < 2.7.4 + raise ReadTimeoutError( + self, url, "Read timed out. (read timeout=%s)" % timeout_value + ) + + def _make_request( + self, conn, method, url, timeout=_Default, chunked=False, **httplib_request_kw + ): + """ + Perform a request on a given urllib connection object taken from our + pool. + + :param conn: + a connection from one of our connection pools + + :param timeout: + Socket timeout in seconds for the request. This can be a + float or integer, which will set the same timeout value for + the socket connect and the socket read, or an instance of + :class:`urllib3.util.Timeout`, which gives you more fine-grained + control over your timeouts. + """ + self.num_requests += 1 + + timeout_obj = self._get_timeout(timeout) + timeout_obj.start_connect() + conn.timeout = timeout_obj.connect_timeout + + # Trigger any extra validation we need to do. + try: + self._validate_conn(conn) + except (SocketTimeout, BaseSSLError) as e: + # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout. + self._raise_timeout(err=e, url=url, timeout_value=conn.timeout) + raise + + # conn.request() calls http.client.*.request, not the method in + # urllib3.request. It also calls makefile (recv) on the socket. + try: + if chunked: + conn.request_chunked(method, url, **httplib_request_kw) + else: + conn.request(method, url, **httplib_request_kw) + + # We are swallowing BrokenPipeError (errno.EPIPE) since the server is + # legitimately able to close the connection after sending a valid response. + # With this behaviour, the received response is still readable. + except BrokenPipeError: + # Python 3 + pass + except IOError as e: + # Python 2 and macOS/Linux + # EPIPE and ESHUTDOWN are BrokenPipeError on Python 2, and EPROTOTYPE is needed on macOS + # https://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/ + if e.errno not in { + errno.EPIPE, + errno.ESHUTDOWN, + errno.EPROTOTYPE, + }: + raise + + # Reset the timeout for the recv() on the socket + read_timeout = timeout_obj.read_timeout + + # App Engine doesn't have a sock attr + if getattr(conn, "sock", None): + # In Python 3 socket.py will catch EAGAIN and return None when you + # try and read into the file pointer created by http.client, which + # instead raises a BadStatusLine exception. Instead of catching + # the exception and assuming all BadStatusLine exceptions are read + # timeouts, check for a zero timeout before making the request. + if read_timeout == 0: + raise ReadTimeoutError( + self, url, "Read timed out. (read timeout=%s)" % read_timeout + ) + if read_timeout is Timeout.DEFAULT_TIMEOUT: + conn.sock.settimeout(socket.getdefaulttimeout()) + else: # None or a value + conn.sock.settimeout(read_timeout) + + # Receive the response from the server + try: + try: + # Python 2.7, use buffering of HTTP responses + httplib_response = conn.getresponse(buffering=True) + except TypeError: + # Python 3 + try: + httplib_response = conn.getresponse() + except BaseException as e: + # Remove the TypeError from the exception chain in + # Python 3 (including for exceptions like SystemExit). + # Otherwise it looks like a bug in the code. 
+ six.raise_from(e, None) + except (SocketTimeout, BaseSSLError, SocketError) as e: + self._raise_timeout(err=e, url=url, timeout_value=read_timeout) + raise + + # AppEngine doesn't have a version attr. + http_version = getattr(conn, "_http_vsn_str", "HTTP/?") + log.debug( + '%s://%s:%s "%s %s %s" %s %s', + self.scheme, + self.host, + self.port, + method, + url, + http_version, + httplib_response.status, + httplib_response.length, + ) + + try: + assert_header_parsing(httplib_response.msg) + except (HeaderParsingError, TypeError) as hpe: # Platform-specific: Python 3 + log.warning( + "Failed to parse headers (url=%s): %s", + self._absolute_url(url), + hpe, + exc_info=True, + ) + + return httplib_response + + def _absolute_url(self, path): + return Url(scheme=self.scheme, host=self.host, port=self.port, path=path).url + + def close(self): + """ + Close all pooled connections and disable the pool. + """ + if self.pool is None: + return + # Disable access to the pool + old_pool, self.pool = self.pool, None + + try: + while True: + conn = old_pool.get(block=False) + if conn: + conn.close() + + except queue.Empty: + pass # Done. + + def is_same_host(self, url): + """ + Check if the given ``url`` is a member of the same host as this + connection pool. + """ + if url.startswith("/"): + return True + + # TODO: Add optional support for socket.gethostbyname checking. + scheme, host, port = get_host(url) + if host is not None: + host = _normalize_host(host, scheme=scheme) + + # Use explicit default port for comparison when none is given + if self.port and not port: + port = port_by_scheme.get(scheme) + elif not self.port and port == port_by_scheme.get(scheme): + port = None + + return (scheme, host, port) == (self.scheme, self.host, self.port) + + def urlopen( + self, + method, + url, + body=None, + headers=None, + retries=None, + redirect=True, + assert_same_host=True, + timeout=_Default, + pool_timeout=None, + release_conn=None, + chunked=False, + body_pos=None, + **response_kw + ): + """ + Get a connection from the pool and perform an HTTP request. This is the + lowest level call for making a request, so you'll need to specify all + the raw details. + + .. note:: + + More commonly, it's appropriate to use a convenience method provided + by :class:`.RequestMethods`, such as :meth:`request`. + + .. note:: + + `release_conn` will only behave as expected if + `preload_content=False` because we want to make + `preload_content=False` the default behaviour someday soon without + breaking backwards compatibility. + + :param method: + HTTP request method (such as GET, POST, PUT, etc.) + + :param url: + The URL to perform the request on. + + :param body: + Data to send in the request body, either :class:`str`, :class:`bytes`, + an iterable of :class:`str`/:class:`bytes`, or a file-like object. + + :param headers: + Dictionary of custom headers to send, such as User-Agent, + If-None-Match, etc. If None, pool headers are used. If provided, + these headers completely replace any pool-specific headers. + + :param retries: + Configure the number of retries to allow before raising a + :class:`~urllib3.exceptions.MaxRetryError` exception. + + Pass ``None`` to retry until you receive a response. Pass a + :class:`~urllib3.util.retry.Retry` object for fine-grained control + over different types of retries. + Pass an integer number to retry connection errors that many times, + but no other types of errors. Pass zero to never retry. + + If ``False``, then retries are disabled and any exception is raised + immediately. 
Also, instead of raising a MaxRetryError on redirects, + the redirect response will be returned. + + :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int. + + :param redirect: + If True, automatically handle redirects (status codes 301, 302, + 303, 307, 308). Each redirect counts as a retry. Disabling retries + will disable redirect, too. + + :param assert_same_host: + If ``True``, will make sure that the host of the pool requests is + consistent else will raise HostChangedError. When ``False``, you can + use the pool on an HTTP proxy and request foreign hosts. + + :param timeout: + If specified, overrides the default timeout for this one + request. It may be a float (in seconds) or an instance of + :class:`urllib3.util.Timeout`. + + :param pool_timeout: + If set and the pool is set to block=True, then this method will + block for ``pool_timeout`` seconds and raise EmptyPoolError if no + connection is available within the time period. + + :param release_conn: + If False, then the urlopen call will not release the connection + back into the pool once a response is received (but will release if + you read the entire contents of the response such as when + `preload_content=True`). This is useful if you're not preloading + the response's content immediately. You will need to call + ``r.release_conn()`` on the response ``r`` to return the connection + back into the pool. If None, it takes the value of + ``response_kw.get('preload_content', True)``. + + :param chunked: + If True, urllib3 will send the body using chunked transfer + encoding. Otherwise, urllib3 will send the body using the standard + content-length form. Defaults to False. + + :param int body_pos: + Position to seek to in file-like body in the event of a retry or + redirect. Typically this won't need to be set because urllib3 will + auto-populate the value when needed. + + :param \\**response_kw: + Additional parameters are passed to + :meth:`urllib3.response.HTTPResponse.from_httplib` + """ + + parsed_url = parse_url(url) + destination_scheme = parsed_url.scheme + + if headers is None: + headers = self.headers + + if not isinstance(retries, Retry): + retries = Retry.from_int(retries, redirect=redirect, default=self.retries) + + if release_conn is None: + release_conn = response_kw.get("preload_content", True) + + # Check host + if assert_same_host and not self.is_same_host(url): + raise HostChangedError(self, url, retries) + + # Ensure that the URL we're connecting to is properly encoded + if url.startswith("/"): + url = six.ensure_str(_encode_target(url)) + else: + url = six.ensure_str(parsed_url.url) + + conn = None + + # Track whether `conn` needs to be released before + # returning/raising/recursing. Update this variable if necessary, and + # leave `release_conn` constant throughout the function. That way, if + # the function recurses, the original value of `release_conn` will be + # passed down into the recursive call, and its value will be respected. + # + # See issue #651 [1] for details. + # + # [1] + release_this_conn = release_conn + + http_tunnel_required = connection_requires_http_tunnel( + self.proxy, self.proxy_config, destination_scheme + ) + + # Merge the proxy headers. Only done when not using HTTP CONNECT. We + # have to copy the headers dict so we can safely change it without those + # changes being reflected in anyone else's copy. 
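A sketch of the `retries` values the docstring above allows (editor's illustration; the host is a placeholder and the calls perform real requests):

```
import urllib3
from urllib3.util.retry import Retry

pool = urllib3.HTTPConnectionPool("example.com")

# An int retries connection errors only; False disables retrying entirely
# and hands back redirect responses instead of following them.
r = pool.urlopen("GET", "/", retries=3)
r = pool.urlopen("GET", "/", retries=False)

# A Retry object gives per-category budgets, retry-on-status, and
# exponential backoff between attempts.
retry = Retry(
    total=5,
    connect=2,
    read=2,
    redirect=3,
    status_forcelist=[429, 500, 502, 503, 504],
    backoff_factor=0.5,
)
r = pool.urlopen("GET", "/", retries=retry)
```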
+ if not http_tunnel_required: + headers = headers.copy() + headers.update(self.proxy_headers) + + # Must keep the exception bound to a separate variable or else Python 3 + # complains about UnboundLocalError. + err = None + + # Keep track of whether we cleanly exited the except block. This + # ensures we do proper cleanup in finally. + clean_exit = False + + # Rewind body position, if needed. Record current position + # for future rewinds in the event of a redirect/retry. + body_pos = set_file_position(body, body_pos) + + try: + # Request a connection from the queue. + timeout_obj = self._get_timeout(timeout) + conn = self._get_conn(timeout=pool_timeout) + + conn.timeout = timeout_obj.connect_timeout + + is_new_proxy_conn = self.proxy is not None and not getattr( + conn, "sock", None + ) + if is_new_proxy_conn and http_tunnel_required: + self._prepare_proxy(conn) + + # Make the request on the httplib connection object. + httplib_response = self._make_request( + conn, + method, + url, + timeout=timeout_obj, + body=body, + headers=headers, + chunked=chunked, + ) + + # If we're going to release the connection in ``finally:``, then + # the response doesn't need to know about the connection. Otherwise + # it will also try to release it and we'll have a double-release + # mess. + response_conn = conn if not release_conn else None + + # Pass method to Response for length checking + response_kw["request_method"] = method + + # Import httplib's response into our own wrapper object + response = self.ResponseCls.from_httplib( + httplib_response, + pool=self, + connection=response_conn, + retries=retries, + **response_kw + ) + + # Everything went great! + clean_exit = True + + except EmptyPoolError: + # Didn't get a connection from the pool, no need to clean up + clean_exit = True + release_this_conn = False + raise + + except ( + TimeoutError, + HTTPException, + SocketError, + ProtocolError, + BaseSSLError, + SSLError, + CertificateError, + ) as e: + # Discard the connection for these exceptions. It will be + # replaced during the next _get_conn() call. + clean_exit = False + if isinstance(e, (BaseSSLError, CertificateError)): + e = SSLError(e) + elif isinstance(e, (SocketError, NewConnectionError)) and self.proxy: + e = ProxyError("Cannot connect to proxy.", e) + elif isinstance(e, (SocketError, HTTPException)): + e = ProtocolError("Connection aborted.", e) + + retries = retries.increment( + method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2] + ) + retries.sleep() + + # Keep track of the error for the retry warning. + err = e + + finally: + if not clean_exit: + # We hit some kind of exception, handled or otherwise. We need + # to throw the connection away unless explicitly told not to. + # Close the connection, set the variable to None, and make sure + # we put the None back in the pool to avoid leaking it. + conn = conn and conn.close() + release_this_conn = True + + if release_this_conn: + # Put the connection back to be reused. If the connection is + # expired then it will be None, which will get replaced with a + # fresh connection during _get_conn. + self._put_conn(conn) + + if not conn: + # Try again + log.warning( + "Retrying (%r) after connection broken by '%r': %s", retries, err, url + ) + return self.urlopen( + method, + url, + body, + headers, + retries, + redirect, + assert_same_host, + timeout=timeout, + pool_timeout=pool_timeout, + release_conn=release_conn, + chunked=chunked, + body_pos=body_pos, + **response_kw + ) + + # Handle redirect? 
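The redirect branch that follows can be observed from the caller's side like this (editor's illustration; example.com is a placeholder):

```
import urllib3

http = urllib3.PoolManager()

# redirect=False returns the 30x response itself; the Location header is
# where urlopen would otherwise have recursed to (with a 303 demoting the
# method to GET first).
r = http.request("GET", "http://example.com/old", redirect=False)
if r.status in (301, 302, 303, 307, 308):
    print("would follow to:", r.headers.get("Location"))

# With redirects enabled, each hop consumes one unit of the redirect budget.
r = http.request(
    "GET", "http://example.com/old", retries=urllib3.Retry(redirect=3)
)
```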
+ redirect_location = redirect and response.get_redirect_location() + if redirect_location: + if response.status == 303: + method = "GET" + + try: + retries = retries.increment(method, url, response=response, _pool=self) + except MaxRetryError: + if retries.raise_on_redirect: + response.drain_conn() + raise + return response + + response.drain_conn() + retries.sleep_for_retry(response) + log.debug("Redirecting %s -> %s", url, redirect_location) + return self.urlopen( + method, + redirect_location, + body, + headers, + retries=retries, + redirect=redirect, + assert_same_host=assert_same_host, + timeout=timeout, + pool_timeout=pool_timeout, + release_conn=release_conn, + chunked=chunked, + body_pos=body_pos, + **response_kw + ) + + # Check if we should retry the HTTP response. + has_retry_after = bool(response.getheader("Retry-After")) + if retries.is_retry(method, response.status, has_retry_after): + try: + retries = retries.increment(method, url, response=response, _pool=self) + except MaxRetryError: + if retries.raise_on_status: + response.drain_conn() + raise + return response + + response.drain_conn() + retries.sleep(response) + log.debug("Retry: %s", url) + return self.urlopen( + method, + url, + body, + headers, + retries=retries, + redirect=redirect, + assert_same_host=assert_same_host, + timeout=timeout, + pool_timeout=pool_timeout, + release_conn=release_conn, + chunked=chunked, + body_pos=body_pos, + **response_kw + ) + + return response + + +class HTTPSConnectionPool(HTTPConnectionPool): + """ + Same as :class:`.HTTPConnectionPool`, but HTTPS. + + :class:`.HTTPSConnection` uses one of ``assert_fingerprint``, + ``assert_hostname`` and ``host`` in this order to verify connections. + If ``assert_hostname`` is False, no verification is done. + + The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``, + ``ca_cert_dir``, ``ssl_version``, ``key_password`` are only used if :mod:`ssl` + is available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade + the connection socket into an SSL socket. + """ + + scheme = "https" + ConnectionCls = HTTPSConnection + + def __init__( + self, + host, + port=None, + strict=False, + timeout=Timeout.DEFAULT_TIMEOUT, + maxsize=1, + block=False, + headers=None, + retries=None, + _proxy=None, + _proxy_headers=None, + key_file=None, + cert_file=None, + cert_reqs=None, + key_password=None, + ca_certs=None, + ssl_version=None, + assert_hostname=None, + assert_fingerprint=None, + ca_cert_dir=None, + **conn_kw + ): + + HTTPConnectionPool.__init__( + self, + host, + port, + strict, + timeout, + maxsize, + block, + headers, + retries, + _proxy, + _proxy_headers, + **conn_kw + ) + + self.key_file = key_file + self.cert_file = cert_file + self.cert_reqs = cert_reqs + self.key_password = key_password + self.ca_certs = ca_certs + self.ca_cert_dir = ca_cert_dir + self.ssl_version = ssl_version + self.assert_hostname = assert_hostname + self.assert_fingerprint = assert_fingerprint + + def _prepare_conn(self, conn): + """ + Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket` + and establish the tunnel if proxy is used. 
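A sketch of the verification knobs whose precedence is described above (editor's illustration; the digest and bundle path are placeholders, and nothing connects until a request is issued):

```
import urllib3

# Pin the peer certificate by fingerprint; this takes precedence over
# hostname checks per the order documented above.
pinned = urllib3.HTTPSConnectionPool(
    "example.com",
    assert_fingerprint="AA:BB:...:FF",  # placeholder digest, not a real pin
)

# Or verify against a CA bundle but match a different hostname, e.g. when
# connecting to a host by IP address.
by_ip = urllib3.HTTPSConnectionPool(
    "93.184.216.34",
    assert_hostname="example.com",
    ca_certs="/etc/ssl/certs/ca-certificates.crt",  # placeholder path
)
```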
+ """ + + if isinstance(conn, VerifiedHTTPSConnection): + conn.set_cert( + key_file=self.key_file, + key_password=self.key_password, + cert_file=self.cert_file, + cert_reqs=self.cert_reqs, + ca_certs=self.ca_certs, + ca_cert_dir=self.ca_cert_dir, + assert_hostname=self.assert_hostname, + assert_fingerprint=self.assert_fingerprint, + ) + conn.ssl_version = self.ssl_version + return conn + + def _prepare_proxy(self, conn): + """ + Establishes a tunnel connection through HTTP CONNECT. + + Tunnel connection is established early because otherwise httplib would + improperly set Host: header to proxy's IP:port. + """ + + conn.set_tunnel(self._proxy_host, self.port, self.proxy_headers) + + if self.proxy.scheme == "https": + conn.tls_in_tls_required = True + + conn.connect() + + def _new_conn(self): + """ + Return a fresh :class:`http.client.HTTPSConnection`. + """ + self.num_connections += 1 + log.debug( + "Starting new HTTPS connection (%d): %s:%s", + self.num_connections, + self.host, + self.port or "443", + ) + + if not self.ConnectionCls or self.ConnectionCls is DummyConnection: + raise SSLError( + "Can't connect to HTTPS URL because the SSL module is not available." + ) + + actual_host = self.host + actual_port = self.port + if self.proxy is not None: + actual_host = self.proxy.host + actual_port = self.proxy.port + + conn = self.ConnectionCls( + host=actual_host, + port=actual_port, + timeout=self.timeout.connect_timeout, + strict=self.strict, + cert_file=self.cert_file, + key_file=self.key_file, + key_password=self.key_password, + **self.conn_kw + ) + + return self._prepare_conn(conn) + + def _validate_conn(self, conn): + """ + Called right before a request is made, after the socket is created. + """ + super(HTTPSConnectionPool, self)._validate_conn(conn) + + # Force connect early to allow us to validate the connection. + if not getattr(conn, "sock", None): # AppEngine might not have `.sock` + conn.connect() + + if not conn.is_verified: + warnings.warn( + ( + "Unverified HTTPS request is being made to host '%s'. " + "Adding certificate verification is strongly advised. See: " + "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html" + "#ssl-warnings" % conn.host + ), + InsecureRequestWarning, + ) + + +def connection_from_url(url, **kw): + """ + Given a url, return an :class:`.ConnectionPool` instance of its host. + + This is a shortcut for not having to parse out the scheme, host, and port + of the url before creating an :class:`.ConnectionPool` instance. + + :param url: + Absolute URL string that must include the scheme. Port is optional. + + :param \\**kw: + Passes additional parameters to the constructor of the appropriate + :class:`.ConnectionPool`. Useful for specifying things like + timeout, maxsize, headers, etc. + + Example:: + + >>> conn = connection_from_url('http://google.com/') + >>> r = conn.request('GET', '/') + """ + scheme, host, port = get_host(url) + port = port or port_by_scheme.get(scheme, 80) + if scheme == "https": + return HTTPSConnectionPool(host, port=port, **kw) + else: + return HTTPConnectionPool(host, port=port, **kw) + + +def _normalize_host(host, scheme): + """ + Normalize hosts for comparisons and use with sockets. + """ + + host = normalize_host(host, scheme) + + # httplib doesn't like it when we include brackets in IPv6 addresses + # Specifically, if we include brackets but also pass the port then + # httplib crazily doubles up the square brackets on the Host header. + # Instead, we need to make sure we never pass ``None`` as the port. 
+ # However, for backward compatibility reasons we can't actually + # *assert* that. See http://bugs.python.org/issue28539 + if host.startswith("[") and host.endswith("]"): + host = host[1:-1] + return host diff --git a/src/snowflake/connector/vendored/urllib3/contrib/__init__.py b/src/snowflake/connector/vendored/urllib3/contrib/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/snowflake/connector/vendored/urllib3/contrib/_appengine_environ.py b/src/snowflake/connector/vendored/urllib3/contrib/_appengine_environ.py new file mode 100644 index 000000000..8765b907d --- /dev/null +++ b/src/snowflake/connector/vendored/urllib3/contrib/_appengine_environ.py @@ -0,0 +1,36 @@ +""" +This module provides means to detect the App Engine environment. +""" + +import os + + +def is_appengine(): + return is_local_appengine() or is_prod_appengine() + + +def is_appengine_sandbox(): + """Reports if the app is running in the first generation sandbox. + + The second generation runtimes are technically still in a sandbox, but it + is much less restrictive, so generally you shouldn't need to check for it. + see https://cloud.google.com/appengine/docs/standard/runtimes + """ + return is_appengine() and os.environ["APPENGINE_RUNTIME"] == "python27" + + +def is_local_appengine(): + return "APPENGINE_RUNTIME" in os.environ and os.environ.get( + "SERVER_SOFTWARE", "" + ).startswith("Development/") + + +def is_prod_appengine(): + return "APPENGINE_RUNTIME" in os.environ and os.environ.get( + "SERVER_SOFTWARE", "" + ).startswith("Google App Engine/") + + +def is_prod_appengine_mvms(): + """Deprecated.""" + return False diff --git a/src/snowflake/connector/vendored/urllib3/contrib/_securetransport/__init__.py b/src/snowflake/connector/vendored/urllib3/contrib/_securetransport/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/snowflake/connector/vendored/urllib3/contrib/_securetransport/bindings.py b/src/snowflake/connector/vendored/urllib3/contrib/_securetransport/bindings.py new file mode 100644 index 000000000..11524d400 --- /dev/null +++ b/src/snowflake/connector/vendored/urllib3/contrib/_securetransport/bindings.py @@ -0,0 +1,519 @@ +""" +This module uses ctypes to bind a whole bunch of functions and constants from +SecureTransport. The goal here is to provide the low-level API to +SecureTransport. These are essentially the C-level functions and constants, and +they're pretty gross to work with. + +This code is a bastardised version of the code found in Will Bond's oscrypto +library. An enormous debt is owed to him for blazing this trail for us. For +that reason, this code should be considered to be covered both by urllib3's +license and by oscrypto's: + + Copyright (c) 2015-2016 Will Bond + + Permission is hereby granted, free of charge, to any person obtaining a + copy of this software and associated documentation files (the "Software"), + to deal in the Software without restriction, including without limitation + the rights to use, copy, modify, merge, publish, distribute, sublicense, + and/or sell copies of the Software, and to permit persons to whom the + Software is furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. 
+ + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS IN THE SOFTWARE. +""" +from __future__ import absolute_import + +import platform +from ctypes import ( + CDLL, + CFUNCTYPE, + POINTER, + c_bool, + c_byte, + c_char_p, + c_int32, + c_long, + c_size_t, + c_uint32, + c_ulong, + c_void_p, +) +from ctypes.util import find_library + +from urllib3.packages.six import raise_from + +if platform.system() != "Darwin": + raise ImportError("Only macOS is supported") + +version = platform.mac_ver()[0] +version_info = tuple(map(int, version.split("."))) +if version_info < (10, 8): + raise OSError( + "Only OS X 10.8 and newer are supported, not %s.%s" + % (version_info[0], version_info[1]) + ) + + +def load_cdll(name, macos10_16_path): + """Loads a CDLL by name, falling back to known path on 10.16+""" + try: + # Big Sur is technically 11 but we use 10.16 due to the Big Sur + # beta being labeled as 10.16. + if version_info >= (10, 16): + path = macos10_16_path + else: + path = find_library(name) + if not path: + raise OSError # Caught and reraised as 'ImportError' + return CDLL(path, use_errno=True) + except OSError: + raise_from(ImportError("The library %s failed to load" % name), None) + + +Security = load_cdll( + "Security", "/System/Library/Frameworks/Security.framework/Security" +) +CoreFoundation = load_cdll( + "CoreFoundation", + "/System/Library/Frameworks/CoreFoundation.framework/CoreFoundation", +) + + +Boolean = c_bool +CFIndex = c_long +CFStringEncoding = c_uint32 +CFData = c_void_p +CFString = c_void_p +CFArray = c_void_p +CFMutableArray = c_void_p +CFDictionary = c_void_p +CFError = c_void_p +CFType = c_void_p +CFTypeID = c_ulong + +CFTypeRef = POINTER(CFType) +CFAllocatorRef = c_void_p + +OSStatus = c_int32 + +CFDataRef = POINTER(CFData) +CFStringRef = POINTER(CFString) +CFArrayRef = POINTER(CFArray) +CFMutableArrayRef = POINTER(CFMutableArray) +CFDictionaryRef = POINTER(CFDictionary) +CFArrayCallBacks = c_void_p +CFDictionaryKeyCallBacks = c_void_p +CFDictionaryValueCallBacks = c_void_p + +SecCertificateRef = POINTER(c_void_p) +SecExternalFormat = c_uint32 +SecExternalItemType = c_uint32 +SecIdentityRef = POINTER(c_void_p) +SecItemImportExportFlags = c_uint32 +SecItemImportExportKeyParameters = c_void_p +SecKeychainRef = POINTER(c_void_p) +SSLProtocol = c_uint32 +SSLCipherSuite = c_uint32 +SSLContextRef = POINTER(c_void_p) +SecTrustRef = POINTER(c_void_p) +SSLConnectionRef = c_uint32 +SecTrustResultType = c_uint32 +SecTrustOptionFlags = c_uint32 +SSLProtocolSide = c_uint32 +SSLConnectionType = c_uint32 +SSLSessionOption = c_uint32 + + +try: + Security.SecItemImport.argtypes = [ + CFDataRef, + CFStringRef, + POINTER(SecExternalFormat), + POINTER(SecExternalItemType), + SecItemImportExportFlags, + POINTER(SecItemImportExportKeyParameters), + SecKeychainRef, + POINTER(CFArrayRef), + ] + Security.SecItemImport.restype = OSStatus + + Security.SecCertificateGetTypeID.argtypes = [] + Security.SecCertificateGetTypeID.restype = CFTypeID + + Security.SecIdentityGetTypeID.argtypes = [] + Security.SecIdentityGetTypeID.restype = CFTypeID + + Security.SecKeyGetTypeID.argtypes = [] + 
Security.SecKeyGetTypeID.restype = CFTypeID
+
+    Security.SecCertificateCreateWithData.argtypes = [CFAllocatorRef, CFDataRef]
+    Security.SecCertificateCreateWithData.restype = SecCertificateRef
+
+    Security.SecCertificateCopyData.argtypes = [SecCertificateRef]
+    Security.SecCertificateCopyData.restype = CFDataRef
+
+    Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p]
+    Security.SecCopyErrorMessageString.restype = CFStringRef
+
+    Security.SecIdentityCreateWithCertificate.argtypes = [
+        CFTypeRef,
+        SecCertificateRef,
+        POINTER(SecIdentityRef),
+    ]
+    Security.SecIdentityCreateWithCertificate.restype = OSStatus
+
+    Security.SecKeychainCreate.argtypes = [
+        c_char_p,
+        c_uint32,
+        c_void_p,
+        Boolean,
+        c_void_p,
+        POINTER(SecKeychainRef),
+    ]
+    Security.SecKeychainCreate.restype = OSStatus
+
+    Security.SecKeychainDelete.argtypes = [SecKeychainRef]
+    Security.SecKeychainDelete.restype = OSStatus
+
+    Security.SecPKCS12Import.argtypes = [
+        CFDataRef,
+        CFDictionaryRef,
+        POINTER(CFArrayRef),
+    ]
+    Security.SecPKCS12Import.restype = OSStatus
+
+    SSLReadFunc = CFUNCTYPE(OSStatus, SSLConnectionRef, c_void_p, POINTER(c_size_t))
+    SSLWriteFunc = CFUNCTYPE(
+        OSStatus, SSLConnectionRef, POINTER(c_byte), POINTER(c_size_t)
+    )
+
+    Security.SSLSetIOFuncs.argtypes = [SSLContextRef, SSLReadFunc, SSLWriteFunc]
+    Security.SSLSetIOFuncs.restype = OSStatus
+
+    Security.SSLSetPeerID.argtypes = [SSLContextRef, c_char_p, c_size_t]
+    Security.SSLSetPeerID.restype = OSStatus
+
+    Security.SSLSetCertificate.argtypes = [SSLContextRef, CFArrayRef]
+    Security.SSLSetCertificate.restype = OSStatus
+
+    Security.SSLSetCertificateAuthorities.argtypes = [SSLContextRef, CFTypeRef, Boolean]
+    Security.SSLSetCertificateAuthorities.restype = OSStatus
+
+    Security.SSLSetConnection.argtypes = [SSLContextRef, SSLConnectionRef]
+    Security.SSLSetConnection.restype = OSStatus
+
+    Security.SSLSetPeerDomainName.argtypes = [SSLContextRef, c_char_p, c_size_t]
+    Security.SSLSetPeerDomainName.restype = OSStatus
+
+    Security.SSLHandshake.argtypes = [SSLContextRef]
+    Security.SSLHandshake.restype = OSStatus
+
+    Security.SSLRead.argtypes = [SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t)]
+    Security.SSLRead.restype = OSStatus
+
+    Security.SSLWrite.argtypes = [SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t)]
+    Security.SSLWrite.restype = OSStatus
+
+    Security.SSLClose.argtypes = [SSLContextRef]
+    Security.SSLClose.restype = OSStatus
+
+    Security.SSLGetNumberSupportedCiphers.argtypes = [SSLContextRef, POINTER(c_size_t)]
+    Security.SSLGetNumberSupportedCiphers.restype = OSStatus
+
+    Security.SSLGetSupportedCiphers.argtypes = [
+        SSLContextRef,
+        POINTER(SSLCipherSuite),
+        POINTER(c_size_t),
+    ]
+    Security.SSLGetSupportedCiphers.restype = OSStatus
+
+    Security.SSLSetEnabledCiphers.argtypes = [
+        SSLContextRef,
+        POINTER(SSLCipherSuite),
+        c_size_t,
+    ]
+    Security.SSLSetEnabledCiphers.restype = OSStatus
+
+    Security.SSLGetNumberEnabledCiphers.argtypes = [SSLContextRef, POINTER(c_size_t)]
+    Security.SSLGetNumberEnabledCiphers.restype = OSStatus
+
+    Security.SSLGetEnabledCiphers.argtypes = [
+        SSLContextRef,
+        POINTER(SSLCipherSuite),
+        POINTER(c_size_t),
+    ]
+    Security.SSLGetEnabledCiphers.restype = OSStatus
+
+    Security.SSLGetNegotiatedCipher.argtypes = [SSLContextRef, POINTER(SSLCipherSuite)]
+    Security.SSLGetNegotiatedCipher.restype = OSStatus
+
+    Security.SSLGetNegotiatedProtocolVersion.argtypes = [
+        SSLContextRef,
+        POINTER(SSLProtocol),
+    ]
+    Security.SSLGetNegotiatedProtocolVersion.restype = OSStatus
+
+    Security.SSLCopyPeerTrust.argtypes = [SSLContextRef, POINTER(SecTrustRef)]
+    Security.SSLCopyPeerTrust.restype = OSStatus
+
+    Security.SecTrustSetAnchorCertificates.argtypes = [SecTrustRef, CFArrayRef]
+    Security.SecTrustSetAnchorCertificates.restype = OSStatus
+
+    Security.SecTrustSetAnchorCertificatesOnly.argtypes = [SecTrustRef, Boolean]
+    Security.SecTrustSetAnchorCertificatesOnly.restype = OSStatus
+
+    Security.SecTrustEvaluate.argtypes = [SecTrustRef, POINTER(SecTrustResultType)]
+    Security.SecTrustEvaluate.restype = OSStatus
+
+    Security.SecTrustGetCertificateCount.argtypes = [SecTrustRef]
+    Security.SecTrustGetCertificateCount.restype = CFIndex
+
+    Security.SecTrustGetCertificateAtIndex.argtypes = [SecTrustRef, CFIndex]
+    Security.SecTrustGetCertificateAtIndex.restype = SecCertificateRef
+
+    Security.SSLCreateContext.argtypes = [
+        CFAllocatorRef,
+        SSLProtocolSide,
+        SSLConnectionType,
+    ]
+    Security.SSLCreateContext.restype = SSLContextRef
+
+    Security.SSLSetSessionOption.argtypes = [SSLContextRef, SSLSessionOption, Boolean]
+    Security.SSLSetSessionOption.restype = OSStatus
+
+    Security.SSLSetProtocolVersionMin.argtypes = [SSLContextRef, SSLProtocol]
+    Security.SSLSetProtocolVersionMin.restype = OSStatus
+
+    Security.SSLSetProtocolVersionMax.argtypes = [SSLContextRef, SSLProtocol]
+    Security.SSLSetProtocolVersionMax.restype = OSStatus
+
+    try:
+        Security.SSLSetALPNProtocols.argtypes = [SSLContextRef, CFArrayRef]
+        Security.SSLSetALPNProtocols.restype = OSStatus
+    except AttributeError:
+        # Supported only in 10.12+
+        pass
+
+    Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p]
+    Security.SecCopyErrorMessageString.restype = CFStringRef
+
+    Security.SSLReadFunc = SSLReadFunc
+    Security.SSLWriteFunc = SSLWriteFunc
+    Security.SSLContextRef = SSLContextRef
+    Security.SSLProtocol = SSLProtocol
+    Security.SSLCipherSuite = SSLCipherSuite
+    Security.SecIdentityRef = SecIdentityRef
+    Security.SecKeychainRef = SecKeychainRef
+    Security.SecTrustRef = SecTrustRef
+    Security.SecTrustResultType = SecTrustResultType
+    Security.SecExternalFormat = SecExternalFormat
+    Security.OSStatus = OSStatus
+
+    Security.kSecImportExportPassphrase = CFStringRef.in_dll(
+        Security, "kSecImportExportPassphrase"
+    )
+    Security.kSecImportItemIdentity = CFStringRef.in_dll(
+        Security, "kSecImportItemIdentity"
+    )
+
+    # CoreFoundation time!
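+    # The CoreFoundation declarations that follow cover reference counting
+    # (CFRetain/CFRelease) plus the CFString/CFData/CFDictionary/CFArray
+    # container APIs that the _securetransport.low_level helpers build on.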
+ CoreFoundation.CFRetain.argtypes = [CFTypeRef] + CoreFoundation.CFRetain.restype = CFTypeRef + + CoreFoundation.CFRelease.argtypes = [CFTypeRef] + CoreFoundation.CFRelease.restype = None + + CoreFoundation.CFGetTypeID.argtypes = [CFTypeRef] + CoreFoundation.CFGetTypeID.restype = CFTypeID + + CoreFoundation.CFStringCreateWithCString.argtypes = [ + CFAllocatorRef, + c_char_p, + CFStringEncoding, + ] + CoreFoundation.CFStringCreateWithCString.restype = CFStringRef + + CoreFoundation.CFStringGetCStringPtr.argtypes = [CFStringRef, CFStringEncoding] + CoreFoundation.CFStringGetCStringPtr.restype = c_char_p + + CoreFoundation.CFStringGetCString.argtypes = [ + CFStringRef, + c_char_p, + CFIndex, + CFStringEncoding, + ] + CoreFoundation.CFStringGetCString.restype = c_bool + + CoreFoundation.CFDataCreate.argtypes = [CFAllocatorRef, c_char_p, CFIndex] + CoreFoundation.CFDataCreate.restype = CFDataRef + + CoreFoundation.CFDataGetLength.argtypes = [CFDataRef] + CoreFoundation.CFDataGetLength.restype = CFIndex + + CoreFoundation.CFDataGetBytePtr.argtypes = [CFDataRef] + CoreFoundation.CFDataGetBytePtr.restype = c_void_p + + CoreFoundation.CFDictionaryCreate.argtypes = [ + CFAllocatorRef, + POINTER(CFTypeRef), + POINTER(CFTypeRef), + CFIndex, + CFDictionaryKeyCallBacks, + CFDictionaryValueCallBacks, + ] + CoreFoundation.CFDictionaryCreate.restype = CFDictionaryRef + + CoreFoundation.CFDictionaryGetValue.argtypes = [CFDictionaryRef, CFTypeRef] + CoreFoundation.CFDictionaryGetValue.restype = CFTypeRef + + CoreFoundation.CFArrayCreate.argtypes = [ + CFAllocatorRef, + POINTER(CFTypeRef), + CFIndex, + CFArrayCallBacks, + ] + CoreFoundation.CFArrayCreate.restype = CFArrayRef + + CoreFoundation.CFArrayCreateMutable.argtypes = [ + CFAllocatorRef, + CFIndex, + CFArrayCallBacks, + ] + CoreFoundation.CFArrayCreateMutable.restype = CFMutableArrayRef + + CoreFoundation.CFArrayAppendValue.argtypes = [CFMutableArrayRef, c_void_p] + CoreFoundation.CFArrayAppendValue.restype = None + + CoreFoundation.CFArrayGetCount.argtypes = [CFArrayRef] + CoreFoundation.CFArrayGetCount.restype = CFIndex + + CoreFoundation.CFArrayGetValueAtIndex.argtypes = [CFArrayRef, CFIndex] + CoreFoundation.CFArrayGetValueAtIndex.restype = c_void_p + + CoreFoundation.kCFAllocatorDefault = CFAllocatorRef.in_dll( + CoreFoundation, "kCFAllocatorDefault" + ) + CoreFoundation.kCFTypeArrayCallBacks = c_void_p.in_dll( + CoreFoundation, "kCFTypeArrayCallBacks" + ) + CoreFoundation.kCFTypeDictionaryKeyCallBacks = c_void_p.in_dll( + CoreFoundation, "kCFTypeDictionaryKeyCallBacks" + ) + CoreFoundation.kCFTypeDictionaryValueCallBacks = c_void_p.in_dll( + CoreFoundation, "kCFTypeDictionaryValueCallBacks" + ) + + CoreFoundation.CFTypeRef = CFTypeRef + CoreFoundation.CFArrayRef = CFArrayRef + CoreFoundation.CFStringRef = CFStringRef + CoreFoundation.CFDictionaryRef = CFDictionaryRef + +except (AttributeError): + raise ImportError("Error initializing ctypes") + + +class CFConst(object): + """ + A class object that acts as essentially a namespace for CoreFoundation + constants. + """ + + kCFStringEncodingUTF8 = CFStringEncoding(0x08000100) + + +class SecurityConst(object): + """ + A class object that acts as essentially a namespace for Security constants. 
+ """ + + kSSLSessionOptionBreakOnServerAuth = 0 + + kSSLProtocol2 = 1 + kSSLProtocol3 = 2 + kTLSProtocol1 = 4 + kTLSProtocol11 = 7 + kTLSProtocol12 = 8 + # SecureTransport does not support TLS 1.3 even if there's a constant for it + kTLSProtocol13 = 10 + kTLSProtocolMaxSupported = 999 + + kSSLClientSide = 1 + kSSLStreamType = 0 + + kSecFormatPEMSequence = 10 + + kSecTrustResultInvalid = 0 + kSecTrustResultProceed = 1 + # This gap is present on purpose: this was kSecTrustResultConfirm, which + # is deprecated. + kSecTrustResultDeny = 3 + kSecTrustResultUnspecified = 4 + kSecTrustResultRecoverableTrustFailure = 5 + kSecTrustResultFatalTrustFailure = 6 + kSecTrustResultOtherError = 7 + + errSSLProtocol = -9800 + errSSLWouldBlock = -9803 + errSSLClosedGraceful = -9805 + errSSLClosedNoNotify = -9816 + errSSLClosedAbort = -9806 + + errSSLXCertChainInvalid = -9807 + errSSLCrypto = -9809 + errSSLInternal = -9810 + errSSLCertExpired = -9814 + errSSLCertNotYetValid = -9815 + errSSLUnknownRootCert = -9812 + errSSLNoRootCert = -9813 + errSSLHostNameMismatch = -9843 + errSSLPeerHandshakeFail = -9824 + errSSLPeerUserCancelled = -9839 + errSSLWeakPeerEphemeralDHKey = -9850 + errSSLServerAuthCompleted = -9841 + errSSLRecordOverflow = -9847 + + errSecVerifyFailed = -67808 + errSecNoTrustSettings = -25263 + errSecItemNotFound = -25300 + errSecInvalidTrustSettings = -25262 + + # Cipher suites. We only pick the ones our default cipher string allows. + # Source: https://developer.apple.com/documentation/security/1550981-ssl_cipher_suite_values + TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 = 0xC02C + TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384 = 0xC030 + TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 = 0xC02B + TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 = 0xC02F + TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCCA9 + TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCCA8 + TLS_DHE_RSA_WITH_AES_256_GCM_SHA384 = 0x009F + TLS_DHE_RSA_WITH_AES_128_GCM_SHA256 = 0x009E + TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384 = 0xC024 + TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384 = 0xC028 + TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA = 0xC00A + TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA = 0xC014 + TLS_DHE_RSA_WITH_AES_256_CBC_SHA256 = 0x006B + TLS_DHE_RSA_WITH_AES_256_CBC_SHA = 0x0039 + TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256 = 0xC023 + TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256 = 0xC027 + TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA = 0xC009 + TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA = 0xC013 + TLS_DHE_RSA_WITH_AES_128_CBC_SHA256 = 0x0067 + TLS_DHE_RSA_WITH_AES_128_CBC_SHA = 0x0033 + TLS_RSA_WITH_AES_256_GCM_SHA384 = 0x009D + TLS_RSA_WITH_AES_128_GCM_SHA256 = 0x009C + TLS_RSA_WITH_AES_256_CBC_SHA256 = 0x003D + TLS_RSA_WITH_AES_128_CBC_SHA256 = 0x003C + TLS_RSA_WITH_AES_256_CBC_SHA = 0x0035 + TLS_RSA_WITH_AES_128_CBC_SHA = 0x002F + TLS_AES_128_GCM_SHA256 = 0x1301 + TLS_AES_256_GCM_SHA384 = 0x1302 + TLS_AES_128_CCM_8_SHA256 = 0x1305 + TLS_AES_128_CCM_SHA256 = 0x1304 diff --git a/src/snowflake/connector/vendored/urllib3/contrib/_securetransport/low_level.py b/src/snowflake/connector/vendored/urllib3/contrib/_securetransport/low_level.py new file mode 100644 index 000000000..ed8120190 --- /dev/null +++ b/src/snowflake/connector/vendored/urllib3/contrib/_securetransport/low_level.py @@ -0,0 +1,396 @@ +""" +Low-level helpers for the SecureTransport bindings. + +These are Python functions that are not directly related to the high-level APIs +but are necessary to get them to work. They include a whole bunch of low-level +CoreFoundation messing about and memory management. 
The concerns in this module +are almost entirely about trying to avoid memory leaks and providing +appropriate and useful assistance to the higher-level code. +""" +import base64 +import ctypes +import itertools +import os +import re +import ssl +import struct +import tempfile + +from .bindings import CFConst, CoreFoundation, Security + +# This regular expression is used to grab PEM data out of a PEM bundle. +_PEM_CERTS_RE = re.compile( + b"-----BEGIN CERTIFICATE-----\n(.*?)\n-----END CERTIFICATE-----", re.DOTALL +) + + +def _cf_data_from_bytes(bytestring): + """ + Given a bytestring, create a CFData object from it. This CFData object must + be CFReleased by the caller. + """ + return CoreFoundation.CFDataCreate( + CoreFoundation.kCFAllocatorDefault, bytestring, len(bytestring) + ) + + +def _cf_dictionary_from_tuples(tuples): + """ + Given a list of Python tuples, create an associated CFDictionary. + """ + dictionary_size = len(tuples) + + # We need to get the dictionary keys and values out in the same order. + keys = (t[0] for t in tuples) + values = (t[1] for t in tuples) + cf_keys = (CoreFoundation.CFTypeRef * dictionary_size)(*keys) + cf_values = (CoreFoundation.CFTypeRef * dictionary_size)(*values) + + return CoreFoundation.CFDictionaryCreate( + CoreFoundation.kCFAllocatorDefault, + cf_keys, + cf_values, + dictionary_size, + CoreFoundation.kCFTypeDictionaryKeyCallBacks, + CoreFoundation.kCFTypeDictionaryValueCallBacks, + ) + + +def _cfstr(py_bstr): + """ + Given a Python binary data, create a CFString. + The string must be CFReleased by the caller. + """ + c_str = ctypes.c_char_p(py_bstr) + cf_str = CoreFoundation.CFStringCreateWithCString( + CoreFoundation.kCFAllocatorDefault, + c_str, + CFConst.kCFStringEncodingUTF8, + ) + return cf_str + + +def _create_cfstring_array(lst): + """ + Given a list of Python binary data, create an associated CFMutableArray. + The array must be CFReleased by the caller. + + Raises an ssl.SSLError on failure. + """ + cf_arr = None + try: + cf_arr = CoreFoundation.CFArrayCreateMutable( + CoreFoundation.kCFAllocatorDefault, + 0, + ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks), + ) + if not cf_arr: + raise MemoryError("Unable to allocate memory!") + for item in lst: + cf_str = _cfstr(item) + if not cf_str: + raise MemoryError("Unable to allocate memory!") + try: + CoreFoundation.CFArrayAppendValue(cf_arr, cf_str) + finally: + CoreFoundation.CFRelease(cf_str) + except BaseException as e: + if cf_arr: + CoreFoundation.CFRelease(cf_arr) + raise ssl.SSLError("Unable to allocate array: %s" % (e,)) + return cf_arr + + +def _cf_string_to_unicode(value): + """ + Creates a Unicode string from a CFString object. Used entirely for error + reporting. + + Yes, it annoys me quite a lot that this function is this complex. 
+    """
+    value_as_void_p = ctypes.cast(value, ctypes.POINTER(ctypes.c_void_p))
+
+    string = CoreFoundation.CFStringGetCStringPtr(
+        value_as_void_p, CFConst.kCFStringEncodingUTF8
+    )
+    if string is None:
+        buffer = ctypes.create_string_buffer(1024)
+        result = CoreFoundation.CFStringGetCString(
+            value_as_void_p, buffer, 1024, CFConst.kCFStringEncodingUTF8
+        )
+        if not result:
+            raise OSError("Error copying C string from CFStringRef")
+        string = buffer.value
+    if string is not None:
+        string = string.decode("utf-8")
+    return string
+
+
+def _assert_no_error(error, exception_class=None):
+    """
+    Checks the return code and raises an exception if there is an error to
+    report
+    """
+    if error == 0:
+        return
+
+    cf_error_string = Security.SecCopyErrorMessageString(error, None)
+    output = _cf_string_to_unicode(cf_error_string)
+    CoreFoundation.CFRelease(cf_error_string)
+
+    if output is None or output == u"":
+        output = u"OSStatus %s" % error
+
+    if exception_class is None:
+        exception_class = ssl.SSLError
+
+    raise exception_class(output)
+
+
+def _cert_array_from_pem(pem_bundle):
+    """
+    Given a bundle of certs in PEM format, turns them into a CFArray of certs
+    that can be used to validate a cert chain.
+    """
+    # Normalize the PEM bundle's line endings.
+    pem_bundle = pem_bundle.replace(b"\r\n", b"\n")
+
+    der_certs = [
+        base64.b64decode(match.group(1)) for match in _PEM_CERTS_RE.finditer(pem_bundle)
+    ]
+    if not der_certs:
+        raise ssl.SSLError("No root certificates specified")
+
+    cert_array = CoreFoundation.CFArrayCreateMutable(
+        CoreFoundation.kCFAllocatorDefault,
+        0,
+        ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
+    )
+    if not cert_array:
+        raise ssl.SSLError("Unable to allocate memory!")
+
+    try:
+        for der_bytes in der_certs:
+            certdata = _cf_data_from_bytes(der_bytes)
+            if not certdata:
+                raise ssl.SSLError("Unable to allocate memory!")
+            cert = Security.SecCertificateCreateWithData(
+                CoreFoundation.kCFAllocatorDefault, certdata
+            )
+            CoreFoundation.CFRelease(certdata)
+            if not cert:
+                raise ssl.SSLError("Unable to build cert object!")
+
+            CoreFoundation.CFArrayAppendValue(cert_array, cert)
+            CoreFoundation.CFRelease(cert)
+    except Exception:
+        # We need to free the array before the exception bubbles further.
+        # We only want to do that if an error occurs: otherwise, the caller
+        # should free.
+        CoreFoundation.CFRelease(cert_array)
+        raise
+
+    return cert_array
+
+
+def _is_cert(item):
+    """
+    Returns True if a given CFTypeRef is a certificate.
+    """
+    expected = Security.SecCertificateGetTypeID()
+    return CoreFoundation.CFGetTypeID(item) == expected
+
+
+def _is_identity(item):
+    """
+    Returns True if a given CFTypeRef is an identity.
+    """
+    expected = Security.SecIdentityGetTypeID()
+    return CoreFoundation.CFGetTypeID(item) == expected
+
+
+def _temporary_keychain():
+    """
+    This function creates a temporary Mac keychain that we can use to work with
+    credentials. This keychain uses a one-time password and a temporary file to
+    store the data. We expect to have one keychain per socket. The returned
+    SecKeychainRef must be freed by the caller, including calling
+    SecKeychainDelete.
+
+    Returns a tuple of the SecKeychainRef and the path to the temporary
+    directory that contains it.
+    """
+    # Unfortunately, SecKeychainCreate requires a path to a keychain. This
+    # means we cannot use mkstemp to use a generic temporary file. Instead,
+    # we're going to create a temporary directory and a filename to use there.
+    # This filename will be 8 random bytes expanded into base64.
We also need + # some random bytes to password-protect the keychain we're creating, so we + # ask for 40 random bytes. + random_bytes = os.urandom(40) + filename = base64.b16encode(random_bytes[:8]).decode("utf-8") + password = base64.b16encode(random_bytes[8:]) # Must be valid UTF-8 + tempdirectory = tempfile.mkdtemp() + + keychain_path = os.path.join(tempdirectory, filename).encode("utf-8") + + # We now want to create the keychain itself. + keychain = Security.SecKeychainRef() + status = Security.SecKeychainCreate( + keychain_path, len(password), password, False, None, ctypes.byref(keychain) + ) + _assert_no_error(status) + + # Having created the keychain, we want to pass it off to the caller. + return keychain, tempdirectory + + +def _load_items_from_file(keychain, path): + """ + Given a single file, loads all the trust objects from it into arrays and + the keychain. + Returns a tuple of lists: the first list is a list of identities, the + second a list of certs. + """ + certificates = [] + identities = [] + result_array = None + + with open(path, "rb") as f: + raw_filedata = f.read() + + try: + filedata = CoreFoundation.CFDataCreate( + CoreFoundation.kCFAllocatorDefault, raw_filedata, len(raw_filedata) + ) + result_array = CoreFoundation.CFArrayRef() + result = Security.SecItemImport( + filedata, # cert data + None, # Filename, leaving it out for now + None, # What the type of the file is, we don't care + None, # what's in the file, we don't care + 0, # import flags + None, # key params, can include passphrase in the future + keychain, # The keychain to insert into + ctypes.byref(result_array), # Results + ) + _assert_no_error(result) + + # A CFArray is not very useful to us as an intermediary + # representation, so we are going to extract the objects we want + # and then free the array. We don't need to keep hold of keys: the + # keychain already has them! + result_count = CoreFoundation.CFArrayGetCount(result_array) + for index in range(result_count): + item = CoreFoundation.CFArrayGetValueAtIndex(result_array, index) + item = ctypes.cast(item, CoreFoundation.CFTypeRef) + + if _is_cert(item): + CoreFoundation.CFRetain(item) + certificates.append(item) + elif _is_identity(item): + CoreFoundation.CFRetain(item) + identities.append(item) + finally: + if result_array: + CoreFoundation.CFRelease(result_array) + + CoreFoundation.CFRelease(filedata) + + return (identities, certificates) + + +def _load_client_cert_chain(keychain, *paths): + """ + Load certificates and maybe keys from a number of files. Has the end goal + of returning a CFArray containing one SecIdentityRef, and then zero or more + SecCertificateRef objects, suitable for use as a client certificate trust + chain. + """ + # Ok, the strategy. + # + # This relies on knowing that macOS will not give you a SecIdentityRef + # unless you have imported a key into a keychain. This is a somewhat + # artificial limitation of macOS (for example, it doesn't necessarily + # affect iOS), but there is nothing inside Security.framework that lets you + # get a SecIdentityRef without having a key in a keychain. + # + # So the policy here is we take all the files and iterate them in order. + # Each one will use SecItemImport to have one or more objects loaded from + # it. We will also point at a keychain that macOS can use to work with the + # private key. + # + # Once we have all the objects, we'll check what we actually have. If we + # already have a SecIdentityRef in hand, fab: we'll use that. 
Otherwise, + # we'll take the first certificate (which we assume to be our leaf) and + # ask the keychain to give us a SecIdentityRef with that cert's associated + # key. + # + # We'll then return a CFArray containing the trust chain: one + # SecIdentityRef and then zero-or-more SecCertificateRef objects. The + # responsibility for freeing this CFArray will be with the caller. This + # CFArray must remain alive for the entire connection, so in practice it + # will be stored with a single SSLSocket, along with the reference to the + # keychain. + certificates = [] + identities = [] + + # Filter out bad paths. + paths = (path for path in paths if path) + + try: + for file_path in paths: + new_identities, new_certs = _load_items_from_file(keychain, file_path) + identities.extend(new_identities) + certificates.extend(new_certs) + + # Ok, we have everything. The question is: do we have an identity? If + # not, we want to grab one from the first cert we have. + if not identities: + new_identity = Security.SecIdentityRef() + status = Security.SecIdentityCreateWithCertificate( + keychain, certificates[0], ctypes.byref(new_identity) + ) + _assert_no_error(status) + identities.append(new_identity) + + # We now want to release the original certificate, as we no longer + # need it. + CoreFoundation.CFRelease(certificates.pop(0)) + + # We now need to build a new CFArray that holds the trust chain. + trust_chain = CoreFoundation.CFArrayCreateMutable( + CoreFoundation.kCFAllocatorDefault, + 0, + ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks), + ) + for item in itertools.chain(identities, certificates): + # ArrayAppendValue does a CFRetain on the item. That's fine, + # because the finally block will release our other refs to them. + CoreFoundation.CFArrayAppendValue(trust_chain, item) + + return trust_chain + finally: + for obj in itertools.chain(identities, certificates): + CoreFoundation.CFRelease(obj) + + +TLS_PROTOCOL_VERSIONS = { + "SSLv2": (0, 2), + "SSLv3": (3, 0), + "TLSv1": (3, 1), + "TLSv1.1": (3, 2), + "TLSv1.2": (3, 3), +} + + +def _build_tls_unknown_ca_alert(version): + """ + Builds a TLS alert record for an unknown CA. + """ + ver_maj, ver_min = TLS_PROTOCOL_VERSIONS[version] + severity_fatal = 0x02 + description_unknown_ca = 0x30 + msg = struct.pack(">BB", severity_fatal, description_unknown_ca) + msg_len = len(msg) + record_type_alert = 0x15 + record = struct.pack(">BBBH", record_type_alert, ver_maj, ver_min, msg_len) + msg + return record diff --git a/src/snowflake/connector/vendored/urllib3/contrib/appengine.py b/src/snowflake/connector/vendored/urllib3/contrib/appengine.py new file mode 100644 index 000000000..f91bdd6e7 --- /dev/null +++ b/src/snowflake/connector/vendored/urllib3/contrib/appengine.py @@ -0,0 +1,314 @@ +""" +This module provides a pool manager that uses Google App Engine's +`URLFetch Service `_. + +Example usage:: + + from urllib3 import PoolManager + from urllib3.contrib.appengine import AppEngineManager, is_appengine_sandbox + + if is_appengine_sandbox(): + # AppEngineManager uses AppEngine's URLFetch API behind the scenes + http = AppEngineManager() + else: + # PoolManager uses a socket-level API behind the scenes + http = PoolManager() + + r = http.request('GET', 'https://google.com/') + +There are `limitations `_ to the URLFetch service and it may not be +the best choice for your application. There are three options for using +urllib3 on Google App Engine: + +1. You can use :class:`AppEngineManager` with URLFetch. 
URLFetch is + cost-effective in many circumstances as long as your usage is within the + limitations. +2. You can use a normal :class:`~urllib3.PoolManager` by enabling sockets. + Sockets also have `limitations and restrictions + `_ and have a lower free quota than URLFetch. + To use sockets, be sure to specify the following in your ``app.yaml``:: + + env_variables: + GAE_USE_SOCKETS_HTTPLIB : 'true' + +3. If you are using `App Engine Flexible +`_, you can use the standard +:class:`PoolManager` without any configuration or special environment variables. +""" + +from __future__ import absolute_import + +import io +import logging +import warnings + +from ..exceptions import ( + HTTPError, + HTTPWarning, + MaxRetryError, + ProtocolError, + SSLError, + TimeoutError, +) +from ..packages.six.moves.urllib.parse import urljoin +from ..request import RequestMethods +from ..response import HTTPResponse +from ..util.retry import Retry +from ..util.timeout import Timeout +from . import _appengine_environ + +try: + from google.appengine.api import urlfetch +except ImportError: + urlfetch = None + + +log = logging.getLogger(__name__) + + +class AppEnginePlatformWarning(HTTPWarning): + pass + + +class AppEnginePlatformError(HTTPError): + pass + + +class AppEngineManager(RequestMethods): + """ + Connection manager for Google App Engine sandbox applications. + + This manager uses the URLFetch service directly instead of using the + emulated httplib, and is subject to URLFetch limitations as described in + the App Engine documentation `here + `_. + + Notably it will raise an :class:`AppEnginePlatformError` if: + * URLFetch is not available. + * If you attempt to use this on App Engine Flexible, as full socket + support is available. + * If a request size is more than 10 megabytes. + * If a response size is more than 32 megabytes. + * If you use an unsupported request method such as OPTIONS. + + Beyond those cases, it will raise normal urllib3 errors. + """ + + def __init__( + self, + headers=None, + retries=None, + validate_certificate=True, + urlfetch_retries=True, + ): + if not urlfetch: + raise AppEnginePlatformError( + "URLFetch is not available in this environment." + ) + + warnings.warn( + "urllib3 is using URLFetch on Google App Engine sandbox instead " + "of sockets. 
To use sockets directly instead of URLFetch see " + "https://urllib3.readthedocs.io/en/1.26.x/reference/urllib3.contrib.html.", + AppEnginePlatformWarning, + ) + + RequestMethods.__init__(self, headers) + self.validate_certificate = validate_certificate + self.urlfetch_retries = urlfetch_retries + + self.retries = retries or Retry.DEFAULT + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + # Return False to re-raise any potential exceptions + return False + + def urlopen( + self, + method, + url, + body=None, + headers=None, + retries=None, + redirect=True, + timeout=Timeout.DEFAULT_TIMEOUT, + **response_kw + ): + + retries = self._get_retries(retries, redirect) + + try: + follow_redirects = redirect and retries.redirect != 0 and retries.total + response = urlfetch.fetch( + url, + payload=body, + method=method, + headers=headers or {}, + allow_truncated=False, + follow_redirects=self.urlfetch_retries and follow_redirects, + deadline=self._get_absolute_timeout(timeout), + validate_certificate=self.validate_certificate, + ) + except urlfetch.DeadlineExceededError as e: + raise TimeoutError(self, e) + + except urlfetch.InvalidURLError as e: + if "too large" in str(e): + raise AppEnginePlatformError( + "URLFetch request too large, URLFetch only " + "supports requests up to 10mb in size.", + e, + ) + raise ProtocolError(e) + + except urlfetch.DownloadError as e: + if "Too many redirects" in str(e): + raise MaxRetryError(self, url, reason=e) + raise ProtocolError(e) + + except urlfetch.ResponseTooLargeError as e: + raise AppEnginePlatformError( + "URLFetch response too large, URLFetch only supports" + "responses up to 32mb in size.", + e, + ) + + except urlfetch.SSLCertificateError as e: + raise SSLError(e) + + except urlfetch.InvalidMethodError as e: + raise AppEnginePlatformError( + "URLFetch does not support method: %s" % method, e + ) + + http_response = self._urlfetch_response_to_http_response( + response, retries=retries, **response_kw + ) + + # Handle redirect? + redirect_location = redirect and http_response.get_redirect_location() + if redirect_location: + # Check for redirect response + if self.urlfetch_retries and retries.raise_on_redirect: + raise MaxRetryError(self, url, "too many redirects") + else: + if http_response.status == 303: + method = "GET" + + try: + retries = retries.increment( + method, url, response=http_response, _pool=self + ) + except MaxRetryError: + if retries.raise_on_redirect: + raise MaxRetryError(self, url, "too many redirects") + return http_response + + retries.sleep_for_retry(http_response) + log.debug("Redirecting %s -> %s", url, redirect_location) + redirect_url = urljoin(url, redirect_location) + return self.urlopen( + method, + redirect_url, + body, + headers, + retries=retries, + redirect=redirect, + timeout=timeout, + **response_kw + ) + + # Check if we should retry the HTTP response. 
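+        # Unlike ConnectionPool.urlopen earlier in this diff, the
+        # increment() call below is not wrapped in try/except MaxRetryError:
+        # once retries are exhausted, the error propagates to the caller.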
+ has_retry_after = bool(http_response.getheader("Retry-After")) + if retries.is_retry(method, http_response.status, has_retry_after): + retries = retries.increment(method, url, response=http_response, _pool=self) + log.debug("Retry: %s", url) + retries.sleep(http_response) + return self.urlopen( + method, + url, + body=body, + headers=headers, + retries=retries, + redirect=redirect, + timeout=timeout, + **response_kw + ) + + return http_response + + def _urlfetch_response_to_http_response(self, urlfetch_resp, **response_kw): + + if is_prod_appengine(): + # Production GAE handles deflate encoding automatically, but does + # not remove the encoding header. + content_encoding = urlfetch_resp.headers.get("content-encoding") + + if content_encoding == "deflate": + del urlfetch_resp.headers["content-encoding"] + + transfer_encoding = urlfetch_resp.headers.get("transfer-encoding") + # We have a full response's content, + # so let's make sure we don't report ourselves as chunked data. + if transfer_encoding == "chunked": + encodings = transfer_encoding.split(",") + encodings.remove("chunked") + urlfetch_resp.headers["transfer-encoding"] = ",".join(encodings) + + original_response = HTTPResponse( + # In order for decoding to work, we must present the content as + # a file-like object. + body=io.BytesIO(urlfetch_resp.content), + msg=urlfetch_resp.header_msg, + headers=urlfetch_resp.headers, + status=urlfetch_resp.status_code, + **response_kw + ) + + return HTTPResponse( + body=io.BytesIO(urlfetch_resp.content), + headers=urlfetch_resp.headers, + status=urlfetch_resp.status_code, + original_response=original_response, + **response_kw + ) + + def _get_absolute_timeout(self, timeout): + if timeout is Timeout.DEFAULT_TIMEOUT: + return None # Defer to URLFetch's default. + if isinstance(timeout, Timeout): + if timeout._read is not None or timeout._connect is not None: + warnings.warn( + "URLFetch does not support granular timeout settings, " + "reverting to total or default URLFetch timeout.", + AppEnginePlatformWarning, + ) + return timeout.total + return timeout + + def _get_retries(self, retries, redirect): + if not isinstance(retries, Retry): + retries = Retry.from_int(retries, redirect=redirect, default=self.retries) + + if retries.connect or retries.read or retries.redirect: + warnings.warn( + "URLFetch only supports total retries and does not " + "recognize connect, read, or redirect retry parameters.", + AppEnginePlatformWarning, + ) + + return retries + + +# Alias methods from _appengine_environ to maintain public API interface. + +is_appengine = _appengine_environ.is_appengine +is_appengine_sandbox = _appengine_environ.is_appengine_sandbox +is_local_appengine = _appengine_environ.is_local_appengine +is_prod_appengine = _appengine_environ.is_prod_appengine +is_prod_appengine_mvms = _appengine_environ.is_prod_appengine_mvms diff --git a/src/snowflake/connector/vendored/urllib3/contrib/ntlmpool.py b/src/snowflake/connector/vendored/urllib3/contrib/ntlmpool.py new file mode 100644 index 000000000..41a8fd174 --- /dev/null +++ b/src/snowflake/connector/vendored/urllib3/contrib/ntlmpool.py @@ -0,0 +1,130 @@ +""" +NTLM authenticating pool, contributed by erikcederstran + +Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10 +""" +from __future__ import absolute_import + +import warnings +from logging import getLogger + +from ntlm import ntlm + +from .. 
import HTTPSConnectionPool +from ..packages.six.moves.http_client import HTTPSConnection + +warnings.warn( + "The 'urllib3.contrib.ntlmpool' module is deprecated and will be removed " + "in urllib3 v2.0 release, urllib3 is not able to support it properly due " + "to reasons listed in issue: https://github.com/urllib3/urllib3/issues/2282. " + "If you are a user of this module please comment in the mentioned issue.", + DeprecationWarning, +) + +log = getLogger(__name__) + + +class NTLMConnectionPool(HTTPSConnectionPool): + """ + Implements an NTLM authentication version of an urllib3 connection pool + """ + + scheme = "https" + + def __init__(self, user, pw, authurl, *args, **kwargs): + """ + authurl is a random URL on the server that is protected by NTLM. + user is the Windows user, probably in the DOMAIN\\username format. + pw is the password for the user. + """ + super(NTLMConnectionPool, self).__init__(*args, **kwargs) + self.authurl = authurl + self.rawuser = user + user_parts = user.split("\\", 1) + self.domain = user_parts[0].upper() + self.user = user_parts[1] + self.pw = pw + + def _new_conn(self): + # Performs the NTLM handshake that secures the connection. The socket + # must be kept open while requests are performed. + self.num_connections += 1 + log.debug( + "Starting NTLM HTTPS connection no. %d: https://%s%s", + self.num_connections, + self.host, + self.authurl, + ) + + headers = {"Connection": "Keep-Alive"} + req_header = "Authorization" + resp_header = "www-authenticate" + + conn = HTTPSConnection(host=self.host, port=self.port) + + # Send negotiation message + headers[req_header] = "NTLM %s" % ntlm.create_NTLM_NEGOTIATE_MESSAGE( + self.rawuser + ) + log.debug("Request headers: %s", headers) + conn.request("GET", self.authurl, None, headers) + res = conn.getresponse() + reshdr = dict(res.getheaders()) + log.debug("Response status: %s %s", res.status, res.reason) + log.debug("Response headers: %s", reshdr) + log.debug("Response data: %s [...]", res.read(100)) + + # Remove the reference to the socket, so that it can not be closed by + # the response object (we want to keep the socket open) + res.fp = None + + # Server should respond with a challenge message + auth_header_values = reshdr[resp_header].split(", ") + auth_header_value = None + for s in auth_header_values: + if s[:5] == "NTLM ": + auth_header_value = s[5:] + if auth_header_value is None: + raise Exception( + "Unexpected %s response header: %s" % (resp_header, reshdr[resp_header]) + ) + + # Send authentication message + ServerChallenge, NegotiateFlags = ntlm.parse_NTLM_CHALLENGE_MESSAGE( + auth_header_value + ) + auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE( + ServerChallenge, self.user, self.domain, self.pw, NegotiateFlags + ) + headers[req_header] = "NTLM %s" % auth_msg + log.debug("Request headers: %s", headers) + conn.request("GET", self.authurl, None, headers) + res = conn.getresponse() + log.debug("Response status: %s %s", res.status, res.reason) + log.debug("Response headers: %s", dict(res.getheaders())) + log.debug("Response data: %s [...]", res.read()[:100]) + if res.status != 200: + if res.status == 401: + raise Exception("Server rejected request: wrong username or password") + raise Exception("Wrong server response: %s %s" % (res.status, res.reason)) + + res.fp = None + log.debug("Connection established") + return conn + + def urlopen( + self, + method, + url, + body=None, + headers=None, + retries=3, + redirect=True, + assert_same_host=True, + ): + if headers is None: + headers = {} + 
headers["Connection"] = "Keep-Alive" + return super(NTLMConnectionPool, self).urlopen( + method, url, body, headers, retries, redirect, assert_same_host + ) diff --git a/src/snowflake/connector/vendored/urllib3/contrib/pyopenssl.py b/src/snowflake/connector/vendored/urllib3/contrib/pyopenssl.py new file mode 100644 index 000000000..def83afdb --- /dev/null +++ b/src/snowflake/connector/vendored/urllib3/contrib/pyopenssl.py @@ -0,0 +1,511 @@ +""" +TLS with SNI_-support for Python 2. Follow these instructions if you would +like to verify TLS certificates in Python 2. Note, the default libraries do +*not* do certificate checking; you need to do additional work to validate +certificates yourself. + +This needs the following packages installed: + +* `pyOpenSSL`_ (tested with 16.0.0) +* `cryptography`_ (minimum 1.3.4, from pyopenssl) +* `idna`_ (minimum 2.0, from cryptography) + +However, pyopenssl depends on cryptography, which depends on idna, so while we +use all three directly here we end up having relatively few packages required. + +You can install them with the following command: + +.. code-block:: bash + + $ python -m pip install pyopenssl cryptography idna + +To activate certificate checking, call +:func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code +before you begin making HTTP requests. This can be done in a ``sitecustomize`` +module, or at any other time before your application begins using ``urllib3``, +like this: + +.. code-block:: python + + try: + import urllib3.contrib.pyopenssl + urllib3.contrib.pyopenssl.inject_into_urllib3() + except ImportError: + pass + +Now you can use :mod:`urllib3` as you normally would, and it will support SNI +when the required modules are installed. + +Activating this module also has the positive side effect of disabling SSL/TLS +compression in Python 2 (see `CRIME attack`_). + +.. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication +.. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit) +.. _pyopenssl: https://www.pyopenssl.org +.. _cryptography: https://cryptography.io +.. _idna: https://github.com/kjd/idna +""" +from __future__ import absolute_import + +import OpenSSL.SSL +from cryptography import x509 +from cryptography.hazmat.backends.openssl import backend as openssl_backend +from cryptography.hazmat.backends.openssl.x509 import _Certificate + +try: + from cryptography.x509 import UnsupportedExtension +except ImportError: + # UnsupportedExtension is gone in cryptography >= 2.1.0 + class UnsupportedExtension(Exception): + pass + + +from io import BytesIO +from socket import error as SocketError +from socket import timeout + +try: # Platform-specific: Python 2 + from socket import _fileobject +except ImportError: # Platform-specific: Python 3 + _fileobject = None + from ..packages.backports.makefile import backport_makefile + +import logging +import ssl +import sys + +from .. import util +from ..packages import six +from ..util.ssl_ import PROTOCOL_TLS_CLIENT + +__all__ = ["inject_into_urllib3", "extract_from_urllib3"] + +# SNI always works. +HAS_SNI = True + +# Map from urllib3 to PyOpenSSL compatible parameter-values. 
+_openssl_versions = { + util.PROTOCOL_TLS: OpenSSL.SSL.SSLv23_METHOD, + PROTOCOL_TLS_CLIENT: OpenSSL.SSL.SSLv23_METHOD, + ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD, +} + +if hasattr(ssl, "PROTOCOL_SSLv3") and hasattr(OpenSSL.SSL, "SSLv3_METHOD"): + _openssl_versions[ssl.PROTOCOL_SSLv3] = OpenSSL.SSL.SSLv3_METHOD + +if hasattr(ssl, "PROTOCOL_TLSv1_1") and hasattr(OpenSSL.SSL, "TLSv1_1_METHOD"): + _openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD + +if hasattr(ssl, "PROTOCOL_TLSv1_2") and hasattr(OpenSSL.SSL, "TLSv1_2_METHOD"): + _openssl_versions[ssl.PROTOCOL_TLSv1_2] = OpenSSL.SSL.TLSv1_2_METHOD + + +_stdlib_to_openssl_verify = { + ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE, + ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER, + ssl.CERT_REQUIRED: OpenSSL.SSL.VERIFY_PEER + + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT, +} +_openssl_to_stdlib_verify = dict((v, k) for k, v in _stdlib_to_openssl_verify.items()) + +# OpenSSL will only write 16K at a time +SSL_WRITE_BLOCKSIZE = 16384 + +orig_util_HAS_SNI = util.HAS_SNI +orig_util_SSLContext = util.ssl_.SSLContext + + +log = logging.getLogger(__name__) + + +def inject_into_urllib3(): + "Monkey-patch urllib3 with PyOpenSSL-backed SSL-support." + + _validate_dependencies_met() + + util.SSLContext = PyOpenSSLContext + util.ssl_.SSLContext = PyOpenSSLContext + util.HAS_SNI = HAS_SNI + util.ssl_.HAS_SNI = HAS_SNI + util.IS_PYOPENSSL = True + util.ssl_.IS_PYOPENSSL = True + + +def extract_from_urllib3(): + "Undo monkey-patching by :func:`inject_into_urllib3`." + + util.SSLContext = orig_util_SSLContext + util.ssl_.SSLContext = orig_util_SSLContext + util.HAS_SNI = orig_util_HAS_SNI + util.ssl_.HAS_SNI = orig_util_HAS_SNI + util.IS_PYOPENSSL = False + util.ssl_.IS_PYOPENSSL = False + + +def _validate_dependencies_met(): + """ + Verifies that PyOpenSSL's package-level dependencies have been met. + Throws `ImportError` if they are not met. + """ + # Method added in `cryptography==1.1`; not available in older versions + from cryptography.x509.extensions import Extensions + + if getattr(Extensions, "get_extension_for_class", None) is None: + raise ImportError( + "'cryptography' module missing required functionality. " + "Try upgrading to v1.3.4 or newer." + ) + + # pyOpenSSL 0.14 and above use cryptography for OpenSSL bindings. The _x509 + # attribute is only present on those versions. + from OpenSSL.crypto import X509 + + x509 = X509() + if getattr(x509, "_x509", None) is None: + raise ImportError( + "'pyOpenSSL' module missing required functionality. " + "Try upgrading to v0.14 or newer." + ) + + +def _dnsname_to_stdlib(name): + """ + Converts a dNSName SubjectAlternativeName field to the form used by the + standard library on the given Python version. + + Cryptography produces a dNSName as a unicode string that was idna-decoded + from ASCII bytes. We need to idna-encode that string to get it back, and + then on Python 3 we also need to convert to unicode via UTF-8 (the stdlib + uses PyUnicode_FromStringAndSize on it, which decodes via UTF-8). + + If the name cannot be idna-encoded then we return None signalling that + the name given should be skipped. + """ + + def idna_encode(name): + """ + Borrowed wholesale from the Python Cryptography Project. It turns out + that we can't just safely call `idna.encode`: it can explode for + wildcard names. This avoids that problem. 
+ """ + import idna + + try: + for prefix in [u"*.", u"."]: + if name.startswith(prefix): + name = name[len(prefix) :] + return prefix.encode("ascii") + idna.encode(name) + return idna.encode(name) + except idna.core.IDNAError: + return None + + # Don't send IPv6 addresses through the IDNA encoder. + if ":" in name: + return name + + name = idna_encode(name) + if name is None: + return None + elif sys.version_info >= (3, 0): + name = name.decode("utf-8") + return name + + +def get_subj_alt_name(peer_cert): + """ + Given an PyOpenSSL certificate, provides all the subject alternative names. + """ + # Pass the cert to cryptography, which has much better APIs for this. + if hasattr(peer_cert, "to_cryptography"): + cert = peer_cert.to_cryptography() + else: + # This is technically using private APIs, but should work across all + # relevant versions before PyOpenSSL got a proper API for this. + cert = _Certificate(openssl_backend, peer_cert._x509) + + # We want to find the SAN extension. Ask Cryptography to locate it (it's + # faster than looping in Python) + try: + ext = cert.extensions.get_extension_for_class(x509.SubjectAlternativeName).value + except x509.ExtensionNotFound: + # No such extension, return the empty list. + return [] + except ( + x509.DuplicateExtension, + UnsupportedExtension, + x509.UnsupportedGeneralNameType, + UnicodeError, + ) as e: + # A problem has been found with the quality of the certificate. Assume + # no SAN field is present. + log.warning( + "A problem was encountered with the certificate that prevented " + "urllib3 from finding the SubjectAlternativeName field. This can " + "affect certificate validation. The error was %s", + e, + ) + return [] + + # We want to return dNSName and iPAddress fields. We need to cast the IPs + # back to strings because the match_hostname function wants them as + # strings. + # Sadly the DNS names need to be idna encoded and then, on Python 3, UTF-8 + # decoded. This is pretty frustrating, but that's what the standard library + # does with certificates, and so we need to attempt to do the same. + # We also want to skip over names which cannot be idna encoded. + names = [ + ("DNS", name) + for name in map(_dnsname_to_stdlib, ext.get_values_for_type(x509.DNSName)) + if name is not None + ] + names.extend( + ("IP Address", str(name)) for name in ext.get_values_for_type(x509.IPAddress) + ) + + return names + + +class WrappedSocket(object): + """API-compatibility wrapper for Python OpenSSL's Connection-class. + + Note: _makefile_refs, _drop() and _reuse() are needed for the garbage + collector of pypy. 
+ """ + + def __init__(self, connection, socket, suppress_ragged_eofs=True): + self.connection = connection + self.socket = socket + self.suppress_ragged_eofs = suppress_ragged_eofs + self._makefile_refs = 0 + self._closed = False + + def fileno(self): + return self.socket.fileno() + + # Copy-pasted from Python 3.5 source code + def _decref_socketios(self): + if self._makefile_refs > 0: + self._makefile_refs -= 1 + if self._closed: + self.close() + + def recv(self, *args, **kwargs): + try: + data = self.connection.recv(*args, **kwargs) + except OpenSSL.SSL.SysCallError as e: + if self.suppress_ragged_eofs and e.args == (-1, "Unexpected EOF"): + return b"" + else: + raise SocketError(str(e)) + except OpenSSL.SSL.ZeroReturnError: + if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN: + return b"" + else: + raise + except OpenSSL.SSL.WantReadError: + if not util.wait_for_read(self.socket, self.socket.gettimeout()): + raise timeout("The read operation timed out") + else: + return self.recv(*args, **kwargs) + + # TLS 1.3 post-handshake authentication + except OpenSSL.SSL.Error as e: + raise ssl.SSLError("read error: %r" % e) + else: + return data + + def recv_into(self, *args, **kwargs): + try: + return self.connection.recv_into(*args, **kwargs) + except OpenSSL.SSL.SysCallError as e: + if self.suppress_ragged_eofs and e.args == (-1, "Unexpected EOF"): + return 0 + else: + raise SocketError(str(e)) + except OpenSSL.SSL.ZeroReturnError: + if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN: + return 0 + else: + raise + except OpenSSL.SSL.WantReadError: + if not util.wait_for_read(self.socket, self.socket.gettimeout()): + raise timeout("The read operation timed out") + else: + return self.recv_into(*args, **kwargs) + + # TLS 1.3 post-handshake authentication + except OpenSSL.SSL.Error as e: + raise ssl.SSLError("read error: %r" % e) + + def settimeout(self, timeout): + return self.socket.settimeout(timeout) + + def _send_until_done(self, data): + while True: + try: + return self.connection.send(data) + except OpenSSL.SSL.WantWriteError: + if not util.wait_for_write(self.socket, self.socket.gettimeout()): + raise timeout() + continue + except OpenSSL.SSL.SysCallError as e: + raise SocketError(str(e)) + + def sendall(self, data): + total_sent = 0 + while total_sent < len(data): + sent = self._send_until_done( + data[total_sent : total_sent + SSL_WRITE_BLOCKSIZE] + ) + total_sent += sent + + def shutdown(self): + # FIXME rethrow compatible exceptions should we ever use this + self.connection.shutdown() + + def close(self): + if self._makefile_refs < 1: + try: + self._closed = True + return self.connection.close() + except OpenSSL.SSL.Error: + return + else: + self._makefile_refs -= 1 + + def getpeercert(self, binary_form=False): + x509 = self.connection.get_peer_certificate() + + if not x509: + return x509 + + if binary_form: + return OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_ASN1, x509) + + return { + "subject": ((("commonName", x509.get_subject().CN),),), + "subjectAltName": get_subj_alt_name(x509), + } + + def version(self): + return self.connection.get_protocol_version_name() + + def _reuse(self): + self._makefile_refs += 1 + + def _drop(self): + if self._makefile_refs < 1: + self.close() + else: + self._makefile_refs -= 1 + + +if _fileobject: # Platform-specific: Python 2 + + def makefile(self, mode, bufsize=-1): + self._makefile_refs += 1 + return _fileobject(self, mode, bufsize, close=True) + + +else: # Platform-specific: Python 3 + makefile = 
backport_makefile
+
+WrappedSocket.makefile = makefile
+
+
+class PyOpenSSLContext(object):
+    """
+    I am a wrapper class for the PyOpenSSL ``Context`` object. I am responsible
+    for translating the interface of the standard library ``SSLContext`` object
+    to calls into PyOpenSSL.
+    """
+
+    def __init__(self, protocol):
+        self.protocol = _openssl_versions[protocol]
+        self._ctx = OpenSSL.SSL.Context(self.protocol)
+        self._options = 0
+        self.check_hostname = False
+
+    @property
+    def options(self):
+        return self._options
+
+    @options.setter
+    def options(self, value):
+        self._options = value
+        self._ctx.set_options(value)
+
+    @property
+    def verify_mode(self):
+        return _openssl_to_stdlib_verify[self._ctx.get_verify_mode()]
+
+    @verify_mode.setter
+    def verify_mode(self, value):
+        self._ctx.set_verify(_stdlib_to_openssl_verify[value], _verify_callback)
+
+    def set_default_verify_paths(self):
+        self._ctx.set_default_verify_paths()
+
+    def set_ciphers(self, ciphers):
+        if isinstance(ciphers, six.text_type):
+            ciphers = ciphers.encode("utf-8")
+        self._ctx.set_cipher_list(ciphers)
+
+    def load_verify_locations(self, cafile=None, capath=None, cadata=None):
+        if cafile is not None:
+            cafile = cafile.encode("utf-8")
+        if capath is not None:
+            capath = capath.encode("utf-8")
+        try:
+            self._ctx.load_verify_locations(cafile, capath)
+            if cadata is not None:
+                self._ctx.load_verify_locations(BytesIO(cadata))
+        except OpenSSL.SSL.Error as e:
+            raise ssl.SSLError("unable to load trusted certificates: %r" % e)
+
+    def load_cert_chain(self, certfile, keyfile=None, password=None):
+        self._ctx.use_certificate_chain_file(certfile)
+        if password is not None:
+            if not isinstance(password, six.binary_type):
+                password = password.encode("utf-8")
+            self._ctx.set_passwd_cb(lambda *_: password)
+        self._ctx.use_privatekey_file(keyfile or certfile)
+
+    def set_alpn_protocols(self, protocols):
+        protocols = [six.ensure_binary(p) for p in protocols]
+        return self._ctx.set_alpn_protos(protocols)
+
+    def wrap_socket(
+        self,
+        sock,
+        server_side=False,
+        do_handshake_on_connect=True,
+        suppress_ragged_eofs=True,
+        server_hostname=None,
+    ):
+        cnx = OpenSSL.SSL.Connection(self._ctx, sock)
+
+        if isinstance(server_hostname, six.text_type):  # Platform-specific: Python 3
+            server_hostname = server_hostname.encode("utf-8")
+
+        if server_hostname is not None:
+            cnx.set_tlsext_host_name(server_hostname)
+
+        cnx.set_connect_state()
+
+        while True:
+            try:
+                cnx.do_handshake()
+            except OpenSSL.SSL.WantReadError:
+                if not util.wait_for_read(sock, sock.gettimeout()):
+                    raise timeout("select timed out")
+                continue
+            except OpenSSL.SSL.Error as e:
+                raise ssl.SSLError("bad handshake: %r" % e)
+            break
+
+        return WrappedSocket(cnx, sock)
+
+
+def _verify_callback(cnx, x509, err_no, err_depth, return_code):
+    return err_no == 0
diff --git a/src/snowflake/connector/vendored/urllib3/contrib/securetransport.py b/src/snowflake/connector/vendored/urllib3/contrib/securetransport.py
new file mode 100644
index 000000000..554c015fe
--- /dev/null
+++ b/src/snowflake/connector/vendored/urllib3/contrib/securetransport.py
@@ -0,0 +1,922 @@
+"""
+SecureTransport support for urllib3 via ctypes.
+
+This makes platform-native TLS available to urllib3 users on macOS without the
+use of a compiler. This is an important feature because the Python Package
+Index is moving to become a TLSv1.2-or-higher server, and the default OpenSSL
+that ships with macOS is not capable of doing TLSv1.2.
The only way to resolve +this is to give macOS users an alternative solution to the problem, and that +solution is to use SecureTransport. + +We use ctypes here because this solution must not require a compiler. That's +because pip is not allowed to require a compiler either. + +This is not intended to be a seriously long-term solution to this problem. +The hope is that PEP 543 will eventually solve this issue for us, at which +point we can retire this contrib module. But in the short term, we need to +solve the impending tire fire that is Python on Mac without this kind of +contrib module. So...here we are. + +To use this module, simply import and inject it:: + + import urllib3.contrib.securetransport + urllib3.contrib.securetransport.inject_into_urllib3() + +Happy TLSing! + +This code is a bastardised version of the code found in Will Bond's oscrypto +library. An enormous debt is owed to him for blazing this trail for us. For +that reason, this code should be considered to be covered both by urllib3's +license and by oscrypto's: + +.. code-block:: + + Copyright (c) 2015-2016 Will Bond + + Permission is hereby granted, free of charge, to any person obtaining a + copy of this software and associated documentation files (the "Software"), + to deal in the Software without restriction, including without limitation + the rights to use, copy, modify, merge, publish, distribute, sublicense, + and/or sell copies of the Software, and to permit persons to whom the + Software is furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS IN THE SOFTWARE. +""" +from __future__ import absolute_import + +import contextlib +import ctypes +import errno +import os.path +import shutil +import socket +import ssl +import struct +import threading +import weakref + +import six + +from .. import util +from ..util.ssl_ import PROTOCOL_TLS_CLIENT +from ._securetransport.bindings import CoreFoundation, Security, SecurityConst +from ._securetransport.low_level import ( + _assert_no_error, + _build_tls_unknown_ca_alert, + _cert_array_from_pem, + _create_cfstring_array, + _load_client_cert_chain, + _temporary_keychain, +) + +try: # Platform-specific: Python 2 + from socket import _fileobject +except ImportError: # Platform-specific: Python 3 + _fileobject = None + from ..packages.backports.makefile import backport_makefile + +__all__ = ["inject_into_urllib3", "extract_from_urllib3"] + +# SNI always works +HAS_SNI = True + +orig_util_HAS_SNI = util.HAS_SNI +orig_util_SSLContext = util.ssl_.SSLContext + +# This dictionary is used by the read callback to obtain a handle to the +# calling wrapped socket. This is a pretty silly approach, but for now it'll +# do. I feel like I should be able to smuggle a handle to the wrapped socket +# directly in the SSLConnectionRef, but for now this approach will work I +# guess. +# +# We need to lock around this structure for inserts, but we don't do it for +# reads/writes in the callbacks. 
The reasoning here goes as follows: +# +# 1. It is not possible to call into the callbacks before the dictionary is +# populated, so once in the callback the id must be in the dictionary. +# 2. The callbacks don't mutate the dictionary, they only read from it, and +# so cannot conflict with any of the insertions. +# +# This is good: if we had to lock in the callbacks we'd drastically slow down +# the performance of this code. +_connection_refs = weakref.WeakValueDictionary() +_connection_ref_lock = threading.Lock() + +# Limit writes to 16kB. This is OpenSSL's limit, but we'll cargo-cult it over +# for no better reason than we need *a* limit, and this one is right there. +SSL_WRITE_BLOCKSIZE = 16384 + +# This is our equivalent of util.ssl_.DEFAULT_CIPHERS, but expanded out to +# individual cipher suites. We need to do this because this is how +# SecureTransport wants them. +CIPHER_SUITES = [ + SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384, + SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256, + SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384, + SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256, + SecurityConst.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256, + SecurityConst.TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256, + SecurityConst.TLS_DHE_RSA_WITH_AES_256_GCM_SHA384, + SecurityConst.TLS_DHE_RSA_WITH_AES_128_GCM_SHA256, + SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384, + SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA, + SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256, + SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA, + SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384, + SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA, + SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256, + SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA, + SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA256, + SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA, + SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA256, + SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA, + SecurityConst.TLS_AES_256_GCM_SHA384, + SecurityConst.TLS_AES_128_GCM_SHA256, + SecurityConst.TLS_RSA_WITH_AES_256_GCM_SHA384, + SecurityConst.TLS_RSA_WITH_AES_128_GCM_SHA256, + SecurityConst.TLS_AES_128_CCM_8_SHA256, + SecurityConst.TLS_AES_128_CCM_SHA256, + SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA256, + SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA256, + SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA, + SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA, +] + +# Basically this is simple: for PROTOCOL_SSLv23 we turn it into a low of +# TLSv1 and a high of TLSv1.2. For everything else, we pin to that version. 
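+# For example, ssl.PROTOCOL_TLSv1_1 is mapped to the pair
+# (kTLSProtocol11, kTLSProtocol11) below, so SecureTransport will negotiate
+# TLS 1.1 and nothing else for that protocol constant.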
+# TLSv1 to 1.2 are supported on macOS 10.8+ +_protocol_to_min_max = { + util.PROTOCOL_TLS: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12), + PROTOCOL_TLS_CLIENT: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12), +} + +if hasattr(ssl, "PROTOCOL_SSLv2"): + _protocol_to_min_max[ssl.PROTOCOL_SSLv2] = ( + SecurityConst.kSSLProtocol2, + SecurityConst.kSSLProtocol2, + ) +if hasattr(ssl, "PROTOCOL_SSLv3"): + _protocol_to_min_max[ssl.PROTOCOL_SSLv3] = ( + SecurityConst.kSSLProtocol3, + SecurityConst.kSSLProtocol3, + ) +if hasattr(ssl, "PROTOCOL_TLSv1"): + _protocol_to_min_max[ssl.PROTOCOL_TLSv1] = ( + SecurityConst.kTLSProtocol1, + SecurityConst.kTLSProtocol1, + ) +if hasattr(ssl, "PROTOCOL_TLSv1_1"): + _protocol_to_min_max[ssl.PROTOCOL_TLSv1_1] = ( + SecurityConst.kTLSProtocol11, + SecurityConst.kTLSProtocol11, + ) +if hasattr(ssl, "PROTOCOL_TLSv1_2"): + _protocol_to_min_max[ssl.PROTOCOL_TLSv1_2] = ( + SecurityConst.kTLSProtocol12, + SecurityConst.kTLSProtocol12, + ) + + +def inject_into_urllib3(): + """ + Monkey-patch urllib3 with SecureTransport-backed SSL-support. + """ + util.SSLContext = SecureTransportContext + util.ssl_.SSLContext = SecureTransportContext + util.HAS_SNI = HAS_SNI + util.ssl_.HAS_SNI = HAS_SNI + util.IS_SECURETRANSPORT = True + util.ssl_.IS_SECURETRANSPORT = True + + +def extract_from_urllib3(): + """ + Undo monkey-patching by :func:`inject_into_urllib3`. + """ + util.SSLContext = orig_util_SSLContext + util.ssl_.SSLContext = orig_util_SSLContext + util.HAS_SNI = orig_util_HAS_SNI + util.ssl_.HAS_SNI = orig_util_HAS_SNI + util.IS_SECURETRANSPORT = False + util.ssl_.IS_SECURETRANSPORT = False + + +def _read_callback(connection_id, data_buffer, data_length_pointer): + """ + SecureTransport read callback. This is called by ST to request that data + be returned from the socket. + """ + wrapped_socket = None + try: + wrapped_socket = _connection_refs.get(connection_id) + if wrapped_socket is None: + return SecurityConst.errSSLInternal + base_socket = wrapped_socket.socket + + requested_length = data_length_pointer[0] + + timeout = wrapped_socket.gettimeout() + error = None + read_count = 0 + + try: + while read_count < requested_length: + if timeout is None or timeout >= 0: + if not util.wait_for_read(base_socket, timeout): + raise socket.error(errno.EAGAIN, "timed out") + + remaining = requested_length - read_count + buffer = (ctypes.c_char * remaining).from_address( + data_buffer + read_count + ) + chunk_size = base_socket.recv_into(buffer, remaining) + read_count += chunk_size + if not chunk_size: + if not read_count: + return SecurityConst.errSSLClosedGraceful + break + except (socket.error) as e: + error = e.errno + + if error is not None and error != errno.EAGAIN: + data_length_pointer[0] = read_count + if error == errno.ECONNRESET or error == errno.EPIPE: + return SecurityConst.errSSLClosedAbort + raise + + data_length_pointer[0] = read_count + + if read_count != requested_length: + return SecurityConst.errSSLWouldBlock + + return 0 + except Exception as e: + if wrapped_socket is not None: + wrapped_socket._exception = e + return SecurityConst.errSSLInternal + + +def _write_callback(connection_id, data_buffer, data_length_pointer): + """ + SecureTransport write callback. This is called by ST to request that data + actually be sent on the network. 
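+
+    Like _read_callback above, it reports progress through
+    data_length_pointer and returns 0 on success, errSSLWouldBlock for a
+    partial write, or another errSSL* status code on failure.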
+ """ + wrapped_socket = None + try: + wrapped_socket = _connection_refs.get(connection_id) + if wrapped_socket is None: + return SecurityConst.errSSLInternal + base_socket = wrapped_socket.socket + + bytes_to_write = data_length_pointer[0] + data = ctypes.string_at(data_buffer, bytes_to_write) + + timeout = wrapped_socket.gettimeout() + error = None + sent = 0 + + try: + while sent < bytes_to_write: + if timeout is None or timeout >= 0: + if not util.wait_for_write(base_socket, timeout): + raise socket.error(errno.EAGAIN, "timed out") + chunk_sent = base_socket.send(data) + sent += chunk_sent + + # This has some needless copying here, but I'm not sure there's + # much value in optimising this data path. + data = data[chunk_sent:] + except (socket.error) as e: + error = e.errno + + if error is not None and error != errno.EAGAIN: + data_length_pointer[0] = sent + if error == errno.ECONNRESET or error == errno.EPIPE: + return SecurityConst.errSSLClosedAbort + raise + + data_length_pointer[0] = sent + + if sent != bytes_to_write: + return SecurityConst.errSSLWouldBlock + + return 0 + except Exception as e: + if wrapped_socket is not None: + wrapped_socket._exception = e + return SecurityConst.errSSLInternal + + +# We need to keep these two objects references alive: if they get GC'd while +# in use then SecureTransport could attempt to call a function that is in freed +# memory. That would be...uh...bad. Yeah, that's the word. Bad. +_read_callback_pointer = Security.SSLReadFunc(_read_callback) +_write_callback_pointer = Security.SSLWriteFunc(_write_callback) + + +class WrappedSocket(object): + """ + API-compatibility wrapper for Python's OpenSSL wrapped socket object. + + Note: _makefile_refs, _drop(), and _reuse() are needed for the garbage + collector of PyPy. + """ + + def __init__(self, socket): + self.socket = socket + self.context = None + self._makefile_refs = 0 + self._closed = False + self._exception = None + self._keychain = None + self._keychain_dir = None + self._client_cert_chain = None + + # We save off the previously-configured timeout and then set it to + # zero. This is done because we use select and friends to handle the + # timeouts, but if we leave the timeout set on the lower socket then + # Python will "kindly" call select on that socket again for us. Avoid + # that by forcing the timeout to zero. + self._timeout = self.socket.gettimeout() + self.socket.settimeout(0) + + @contextlib.contextmanager + def _raise_on_error(self): + """ + A context manager that can be used to wrap calls that do I/O from + SecureTransport. If any of the I/O callbacks hit an exception, this + context manager will correctly propagate the exception after the fact. + This avoids silently swallowing those exceptions. + + It also correctly forces the socket closed. + """ + self._exception = None + + # We explicitly don't catch around this yield because in the unlikely + # event that an exception was hit in the block we don't want to swallow + # it. + yield + if self._exception is not None: + exception, self._exception = self._exception, None + self.close() + raise exception + + def _set_ciphers(self): + """ + Sets up the allowed ciphers. By default this matches the set in + util.ssl_.DEFAULT_CIPHERS, at least as supported by macOS. This is done + custom and doesn't allow changing at this time, mostly because parsing + OpenSSL cipher strings is going to be a freaking nightmare. 
+        """
+        ciphers = (Security.SSLCipherSuite * len(CIPHER_SUITES))(*CIPHER_SUITES)
+        result = Security.SSLSetEnabledCiphers(
+            self.context, ciphers, len(CIPHER_SUITES)
+        )
+        _assert_no_error(result)
+
+    def _set_alpn_protocols(self, protocols):
+        """
+        Sets up the ALPN protocols on the context.
+        """
+        if not protocols:
+            return
+        protocols_arr = _create_cfstring_array(protocols)
+        try:
+            result = Security.SSLSetALPNProtocols(self.context, protocols_arr)
+            _assert_no_error(result)
+        finally:
+            CoreFoundation.CFRelease(protocols_arr)
+
+    def _custom_validate(self, verify, trust_bundle):
+        """
+        Called when we have set custom validation. We do this in two cases:
+        first, when cert validation is entirely disabled; and second, when
+        using a custom trust DB.
+        Raises an SSLError if the connection is not trusted.
+        """
+        # If we disabled cert validation, just say: cool.
+        if not verify:
+            return
+
+        successes = (
+            SecurityConst.kSecTrustResultUnspecified,
+            SecurityConst.kSecTrustResultProceed,
+        )
+        try:
+            trust_result = self._evaluate_trust(trust_bundle)
+            if trust_result in successes:
+                return
+            reason = "error code: %d" % (trust_result,)
+        except Exception as e:
+            # Do not trust on error
+            reason = "exception: %r" % (e,)
+
+        # SecureTransport does not send an alert nor shut down the connection.
+        rec = _build_tls_unknown_ca_alert(self.version())
+        self.socket.sendall(rec)
+        # close the connection immediately
+        # l_onoff = 1, activate linger
+        # l_linger = 0, linger for 0 seconds
+        opts = struct.pack("ii", 1, 0)
+        self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, opts)
+        self.close()
+        raise ssl.SSLError("certificate verify failed, %s" % reason)
+
+    def _evaluate_trust(self, trust_bundle):
+        # We want data in memory, so load it up.
+        if os.path.isfile(trust_bundle):
+            with open(trust_bundle, "rb") as f:
+                trust_bundle = f.read()
+
+        cert_array = None
+        trust = Security.SecTrustRef()
+
+        try:
+            # Get a CFArray that contains the certs we want.
+            cert_array = _cert_array_from_pem(trust_bundle)
+
+            # Ok, now the hard part. We want to get the SecTrustRef that ST has
+            # created for this connection, shove our CAs into it, tell ST to
+            # ignore everything else it knows, and then ask if it can build a
+            # chain. This is a buuuunch of code.
+            result = Security.SSLCopyPeerTrust(self.context, ctypes.byref(trust))
+            _assert_no_error(result)
+            if not trust:
+                raise ssl.SSLError("Failed to copy trust reference")
+
+            result = Security.SecTrustSetAnchorCertificates(trust, cert_array)
+            _assert_no_error(result)
+
+            result = Security.SecTrustSetAnchorCertificatesOnly(trust, True)
+            _assert_no_error(result)
+
+            trust_result = Security.SecTrustResultType()
+            result = Security.SecTrustEvaluate(trust, ctypes.byref(trust_result))
+            _assert_no_error(result)
+        finally:
+            if trust:
+                CoreFoundation.CFRelease(trust)
+
+            if cert_array is not None:
+                CoreFoundation.CFRelease(cert_array)
+
+        return trust_result.value
+
+    def handshake(
+        self,
+        server_hostname,
+        verify,
+        trust_bundle,
+        min_version,
+        max_version,
+        client_cert,
+        client_key,
+        client_key_passphrase,
+        alpn_protocols,
+    ):
+        """
+        Actually performs the TLS handshake. This is run automatically by
+        wrapped socket, and shouldn't be needed in user code.
+        """
+        # First, we do the initial bits of connection setup. We need to create
+        # a context, set its I/O funcs, and set the connection reference.
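+        # (kSSLClientSide and kSSLStreamType below mean: always act as a TLS
+        # client over a stream, i.e. TCP, transport.)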
+ self.context = Security.SSLCreateContext( + None, SecurityConst.kSSLClientSide, SecurityConst.kSSLStreamType + ) + result = Security.SSLSetIOFuncs( + self.context, _read_callback_pointer, _write_callback_pointer + ) + _assert_no_error(result) + + # Here we need to compute the handle to use. We do this by taking the + # id of self modulo 2**31 - 1. If this is already in the dictionary, we + # just keep incrementing by one until we find a free space. + with _connection_ref_lock: + handle = id(self) % 2147483647 + while handle in _connection_refs: + handle = (handle + 1) % 2147483647 + _connection_refs[handle] = self + + result = Security.SSLSetConnection(self.context, handle) + _assert_no_error(result) + + # If we have a server hostname, we should set that too. + if server_hostname: + if not isinstance(server_hostname, bytes): + server_hostname = server_hostname.encode("utf-8") + + result = Security.SSLSetPeerDomainName( + self.context, server_hostname, len(server_hostname) + ) + _assert_no_error(result) + + # Setup the ciphers. + self._set_ciphers() + + # Setup the ALPN protocols. + self._set_alpn_protocols(alpn_protocols) + + # Set the minimum and maximum TLS versions. + result = Security.SSLSetProtocolVersionMin(self.context, min_version) + _assert_no_error(result) + + result = Security.SSLSetProtocolVersionMax(self.context, max_version) + _assert_no_error(result) + + # If there's a trust DB, we need to use it. We do that by telling + # SecureTransport to break on server auth. We also do that if we don't + # want to validate the certs at all: we just won't actually do any + # authing in that case. + if not verify or trust_bundle is not None: + result = Security.SSLSetSessionOption( + self.context, SecurityConst.kSSLSessionOptionBreakOnServerAuth, True + ) + _assert_no_error(result) + + # If there's a client cert, we need to use it. + if client_cert: + self._keychain, self._keychain_dir = _temporary_keychain() + self._client_cert_chain = _load_client_cert_chain( + self._keychain, client_cert, client_key + ) + result = Security.SSLSetCertificate(self.context, self._client_cert_chain) + _assert_no_error(result) + + while True: + with self._raise_on_error(): + result = Security.SSLHandshake(self.context) + + if result == SecurityConst.errSSLWouldBlock: + raise socket.timeout("handshake timed out") + elif result == SecurityConst.errSSLServerAuthCompleted: + self._custom_validate(verify, trust_bundle) + continue + else: + _assert_no_error(result) + break + + def fileno(self): + return self.socket.fileno() + + # Copy-pasted from Python 3.5 source code + def _decref_socketios(self): + if self._makefile_refs > 0: + self._makefile_refs -= 1 + if self._closed: + self.close() + + def recv(self, bufsiz): + buffer = ctypes.create_string_buffer(bufsiz) + bytes_read = self.recv_into(buffer, bufsiz) + data = buffer[:bytes_read] + return data + + def recv_into(self, buffer, nbytes=None): + # Read short on EOF. + if self._closed: + return 0 + + if nbytes is None: + nbytes = len(buffer) + + buffer = (ctypes.c_char * nbytes).from_buffer(buffer) + processed_bytes = ctypes.c_size_t(0) + + with self._raise_on_error(): + result = Security.SSLRead( + self.context, buffer, nbytes, ctypes.byref(processed_bytes) + ) + + # There are some result codes that we want to treat as "not always + # errors". Specifically, those are errSSLWouldBlock, + # errSSLClosedGraceful, and errSSLClosedNoNotify. + if result == SecurityConst.errSSLWouldBlock: + # If we didn't process any bytes, then this was just a time out. 
+ # However, we can get errSSLWouldBlock in situations when we *did* + # read some data, and in those cases we should just read "short" + # and return. + if processed_bytes.value == 0: + # Timed out, no data read. + raise socket.timeout("recv timed out") + elif result in ( + SecurityConst.errSSLClosedGraceful, + SecurityConst.errSSLClosedNoNotify, + ): + # The remote peer has closed this connection. We should do so as + # well. Note that we don't actually return here because in + # principle this could actually be fired along with return data. + # It's unlikely though. + self.close() + else: + _assert_no_error(result) + + # Ok, we read and probably succeeded. We should return whatever data + # was actually read. + return processed_bytes.value + + def settimeout(self, timeout): + self._timeout = timeout + + def gettimeout(self): + return self._timeout + + def send(self, data): + processed_bytes = ctypes.c_size_t(0) + + with self._raise_on_error(): + result = Security.SSLWrite( + self.context, data, len(data), ctypes.byref(processed_bytes) + ) + + if result == SecurityConst.errSSLWouldBlock and processed_bytes.value == 0: + # Timed out + raise socket.timeout("send timed out") + else: + _assert_no_error(result) + + # We sent, and probably succeeded. Tell them how much we sent. + return processed_bytes.value + + def sendall(self, data): + total_sent = 0 + while total_sent < len(data): + sent = self.send(data[total_sent : total_sent + SSL_WRITE_BLOCKSIZE]) + total_sent += sent + + def shutdown(self): + with self._raise_on_error(): + Security.SSLClose(self.context) + + def close(self): + # TODO: should I do clean shutdown here? Do I have to? + if self._makefile_refs < 1: + self._closed = True + if self.context: + CoreFoundation.CFRelease(self.context) + self.context = None + if self._client_cert_chain: + CoreFoundation.CFRelease(self._client_cert_chain) + self._client_cert_chain = None + if self._keychain: + Security.SecKeychainDelete(self._keychain) + CoreFoundation.CFRelease(self._keychain) + shutil.rmtree(self._keychain_dir) + self._keychain = self._keychain_dir = None + return self.socket.close() + else: + self._makefile_refs -= 1 + + def getpeercert(self, binary_form=False): + # Urgh, annoying. + # + # Here's how we do this: + # + # 1. Call SSLCopyPeerTrust to get hold of the trust object for this + # connection. + # 2. Call SecTrustGetCertificateAtIndex for index 0 to get the leaf. + # 3. To get the CN, call SecCertificateCopyCommonName and process that + # string so that it's of the appropriate type. + # 4. To get the SAN, we need to do something a bit more complex: + # a. Call SecCertificateCopyValues to get the data, requesting + # kSecOIDSubjectAltName. + # b. Mess about with this dictionary to try to get the SANs out. + # + # This is gross. Really gross. It's going to be a few hundred LoC extra + # just to repeat something that SecureTransport can *already do*. So my + # operating assumption at this time is that what we want to do is + # instead to just flag to urllib3 that it shouldn't do its own hostname + # validation when using SecureTransport. + if not binary_form: + raise ValueError("SecureTransport only supports dumping binary certs") + trust = Security.SecTrustRef() + certdata = None + der_bytes = None + + try: + # Grab the trust store. + result = Security.SSLCopyPeerTrust(self.context, ctypes.byref(trust)) + _assert_no_error(result) + if not trust: + # Probably we haven't done the handshake yet. No biggie. 
+ return None + + cert_count = Security.SecTrustGetCertificateCount(trust) + if not cert_count: + # Also a case that might happen if we haven't handshaked. + # Handshook? Handshaken? + return None + + leaf = Security.SecTrustGetCertificateAtIndex(trust, 0) + assert leaf + + # Ok, now we want the DER bytes. + certdata = Security.SecCertificateCopyData(leaf) + assert certdata + + data_length = CoreFoundation.CFDataGetLength(certdata) + data_buffer = CoreFoundation.CFDataGetBytePtr(certdata) + der_bytes = ctypes.string_at(data_buffer, data_length) + finally: + if certdata: + CoreFoundation.CFRelease(certdata) + if trust: + CoreFoundation.CFRelease(trust) + + return der_bytes + + def version(self): + protocol = Security.SSLProtocol() + result = Security.SSLGetNegotiatedProtocolVersion( + self.context, ctypes.byref(protocol) + ) + _assert_no_error(result) + if protocol.value == SecurityConst.kTLSProtocol13: + raise ssl.SSLError("SecureTransport does not support TLS 1.3") + elif protocol.value == SecurityConst.kTLSProtocol12: + return "TLSv1.2" + elif protocol.value == SecurityConst.kTLSProtocol11: + return "TLSv1.1" + elif protocol.value == SecurityConst.kTLSProtocol1: + return "TLSv1" + elif protocol.value == SecurityConst.kSSLProtocol3: + return "SSLv3" + elif protocol.value == SecurityConst.kSSLProtocol2: + return "SSLv2" + else: + raise ssl.SSLError("Unknown TLS version: %r" % protocol) + + def _reuse(self): + self._makefile_refs += 1 + + def _drop(self): + if self._makefile_refs < 1: + self.close() + else: + self._makefile_refs -= 1 + + +if _fileobject: # Platform-specific: Python 2 + + def makefile(self, mode, bufsize=-1): + self._makefile_refs += 1 + return _fileobject(self, mode, bufsize, close=True) + + +else: # Platform-specific: Python 3 + + def makefile(self, mode="r", buffering=None, *args, **kwargs): + # We disable buffering with SecureTransport because it conflicts with + # the buffering that ST does internally (see issue #1153 for more). + buffering = 0 + return backport_makefile(self, mode, buffering, *args, **kwargs) + + +WrappedSocket.makefile = makefile + + +class SecureTransportContext(object): + """ + I am a wrapper class for the SecureTransport library, to translate the + interface of the standard library ``SSLContext`` object to calls into + SecureTransport. + """ + + def __init__(self, protocol): + self._min_version, self._max_version = _protocol_to_min_max[protocol] + self._options = 0 + self._verify = False + self._trust_bundle = None + self._client_cert = None + self._client_key = None + self._client_key_passphrase = None + self._alpn_protocols = None + + @property + def check_hostname(self): + """ + SecureTransport cannot have its hostname checking disabled. For more, + see the comment on getpeercert() in this file. + """ + return True + + @check_hostname.setter + def check_hostname(self, value): + """ + SecureTransport cannot have its hostname checking disabled. For more, + see the comment on getpeercert() in this file. + """ + pass + + @property + def options(self): + # TODO: Well, crap. + # + # So this is the bit of the code that is the most likely to cause us + # trouble. Essentially we need to enumerate all of the SSL options that + # users might want to use and try to see if we can sensibly translate + # them, or whether we should just ignore them. + return self._options + + @options.setter + def options(self, value): + # TODO: Update in line with above. 
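+        # For now the value is only recorded so that reading ``options`` back
+        # round-trips; the flags are not translated into SecureTransport
+        # settings.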
+        self._options = value
+
+    @property
+    def verify_mode(self):
+        return ssl.CERT_REQUIRED if self._verify else ssl.CERT_NONE
+
+    @verify_mode.setter
+    def verify_mode(self, value):
+        self._verify = True if value == ssl.CERT_REQUIRED else False
+
+    def set_default_verify_paths(self):
+        # So, this has to do something a bit weird. Specifically, what it does
+        # is nothing.
+        #
+        # This means that, if we had previously had load_verify_locations
+        # called, this does not undo that. We need to do that because it turns
+        # out that the rest of the urllib3 code will attempt to load the
+        # default verify paths if it hasn't been told about any paths, even if
+        # the context itself was configured sometime earlier. We resolve that
+        # by just ignoring it.
+        pass
+
+    def load_default_certs(self):
+        return self.set_default_verify_paths()
+
+    def set_ciphers(self, ciphers):
+        # For now, we just require the default cipher string.
+        if ciphers != util.ssl_.DEFAULT_CIPHERS:
+            raise ValueError("SecureTransport doesn't support custom cipher strings")
+
+    def load_verify_locations(self, cafile=None, capath=None, cadata=None):
+        # OK, we only really support cadata and cafile.
+        if capath is not None:
+            raise ValueError("SecureTransport does not support cert directories")
+
+        # Raise if cafile does not exist.
+        if cafile is not None:
+            with open(cafile):
+                pass
+
+        self._trust_bundle = cafile or cadata
+
+    def load_cert_chain(self, certfile, keyfile=None, password=None):
+        self._client_cert = certfile
+        self._client_key = keyfile
+        self._client_key_passphrase = password
+
+    def set_alpn_protocols(self, protocols):
+        """
+        Sets the ALPN protocols that will later be set on the context.
+
+        Raises a NotImplementedError if ALPN is not supported.
+        """
+        if not hasattr(Security, "SSLSetALPNProtocols"):
+            raise NotImplementedError(
+                "SecureTransport supports ALPN only in macOS 10.12+"
+            )
+        self._alpn_protocols = [six.ensure_binary(p) for p in protocols]
+
+    def wrap_socket(
+        self,
+        sock,
+        server_side=False,
+        do_handshake_on_connect=True,
+        suppress_ragged_eofs=True,
+        server_hostname=None,
+    ):
+        # So, what do we do here? Firstly, we assert some properties. This is a
+        # stripped down shim, so there is some functionality we don't support.
+        # See PEP 543 for the real deal.
+        assert not server_side
+        assert do_handshake_on_connect
+        assert suppress_ragged_eofs
+
+        # Ok, we're good to go. Now we want to create the wrapped socket object
+        # and store it in the appropriate place.
+        wrapped_socket = WrappedSocket(sock)
+
+        # Now we can handshake
+        wrapped_socket.handshake(
+            server_hostname,
+            self._verify,
+            self._trust_bundle,
+            self._min_version,
+            self._max_version,
+            self._client_cert,
+            self._client_key,
+            self._client_key_passphrase,
+            self._alpn_protocols,
+        )
+        return wrapped_socket
diff --git a/src/snowflake/connector/vendored/urllib3/contrib/socks.py b/src/snowflake/connector/vendored/urllib3/contrib/socks.py
new file mode 100644
index 000000000..c326e80dd
--- /dev/null
+++ b/src/snowflake/connector/vendored/urllib3/contrib/socks.py
@@ -0,0 +1,216 @@
+# -*- coding: utf-8 -*-
+"""
+This module contains provisional support for SOCKS proxies from within
+urllib3. This module supports SOCKS4, SOCKS4A (an extension of SOCKS4), and
+SOCKS5. To enable its functionality, either install PySocks or install this
+module with the ``socks`` extra.
+
+The SOCKS implementation supports the full range of urllib3 features. It also
+supports the following SOCKS features:
+
+- SOCKS4A (``proxy_url='socks4a://...``)
+- SOCKS4 (``proxy_url='socks4://...``)
+- SOCKS5 with remote DNS (``proxy_url='socks5h://...``)
+- SOCKS5 with local DNS (``proxy_url='socks5://...``)
+- Usernames and passwords for the SOCKS proxy
+
+.. note::
+   It is recommended to use ``socks5h://`` or ``socks4a://`` schemes in
+   your ``proxy_url`` to ensure that DNS resolution is done from the remote
+   server instead of client-side when connecting to a domain name.
+
+SOCKS4 supports IPv4 and domain names with the SOCKS4A extension. SOCKS5
+supports IPv4, IPv6, and domain names.
+
+When connecting to a SOCKS4 proxy the ``username`` portion of the ``proxy_url``
+will be sent as the ``userid`` section of the SOCKS request:
+
+.. code-block:: python
+
+    proxy_url="socks4a://<userid>@proxy-host"
+
+When connecting to a SOCKS5 proxy the ``username`` and ``password`` portion
+of the ``proxy_url`` will be sent as the username/password to authenticate
+with the proxy:
+
+.. code-block:: python
+
+    proxy_url="socks5h://<username>:<password>@proxy-host"
+
+"""
+from __future__ import absolute_import
+
+try:
+    import socks
+except ImportError:
+    import warnings
+
+    from ..exceptions import DependencyWarning
+
+    warnings.warn(
+        (
+            "SOCKS support in urllib3 requires the installation of optional "
+            "dependencies: specifically, PySocks. For more information, see "
+            "https://urllib3.readthedocs.io/en/1.26.x/contrib.html#socks-proxies"
+        ),
+        DependencyWarning,
+    )
+    raise
+
+from socket import error as SocketError
+from socket import timeout as SocketTimeout
+
+from ..connection import HTTPConnection, HTTPSConnection
+from ..connectionpool import HTTPConnectionPool, HTTPSConnectionPool
+from ..exceptions import ConnectTimeoutError, NewConnectionError
+from ..poolmanager import PoolManager
+from ..util.url import parse_url
+
+try:
+    import ssl
+except ImportError:
+    ssl = None
+
+
+class SOCKSConnection(HTTPConnection):
+    """
+    A plain-text HTTP connection that connects via a SOCKS proxy.
+    """
+
+    def __init__(self, *args, **kwargs):
+        self._socks_options = kwargs.pop("_socks_options")
+        super(SOCKSConnection, self).__init__(*args, **kwargs)
+
+    def _new_conn(self):
+        """
+        Establish a new connection via the SOCKS proxy.
+        """
+        extra_kw = {}
+        if self.source_address:
+            extra_kw["source_address"] = self.source_address
+
+        if self.socket_options:
+            extra_kw["socket_options"] = self.socket_options
+
+        try:
+            conn = socks.create_connection(
+                (self.host, self.port),
+                proxy_type=self._socks_options["socks_version"],
+                proxy_addr=self._socks_options["proxy_host"],
+                proxy_port=self._socks_options["proxy_port"],
+                proxy_username=self._socks_options["username"],
+                proxy_password=self._socks_options["password"],
+                proxy_rdns=self._socks_options["rdns"],
+                timeout=self.timeout,
+                **extra_kw
+            )
+
+        except SocketTimeout:
+            raise ConnectTimeoutError(
+                self,
+                "Connection to %s timed out. (connect timeout=%s)"
+                % (self.host, self.timeout),
+            )
+
+        except socks.ProxyError as e:
+            # This is fragile as hell, but it seems to be the only way to raise
+            # useful errors here.
+            if e.socket_err:
+                error = e.socket_err
+                if isinstance(error, SocketTimeout):
+                    raise ConnectTimeoutError(
+                        self,
+                        "Connection to %s timed out. (connect timeout=%s)"
+                        % (self.host, self.timeout),
+                    )
+                else:
+                    raise NewConnectionError(
+                        self, "Failed to establish a new connection: %s" % error
+                    )
+            else:
+                raise NewConnectionError(
+                    self, "Failed to establish a new connection: %s" % e
+                )
+
+        except SocketError as e:  # Defensive: PySocks should catch all these.
+            raise NewConnectionError(
+                self, "Failed to establish a new connection: %s" % e
+            )
+
+        return conn
+
+
+# We don't need to duplicate the Verified/Unverified distinction from
+# urllib3/connection.py here because the HTTPSConnection will already have been
+# correctly set to either the Verified or Unverified form by that module. This
+# means the SOCKSHTTPSConnection will automatically be the correct type.
+class SOCKSHTTPSConnection(SOCKSConnection, HTTPSConnection):
+    pass
+
+
+class SOCKSHTTPConnectionPool(HTTPConnectionPool):
+    ConnectionCls = SOCKSConnection
+
+
+class SOCKSHTTPSConnectionPool(HTTPSConnectionPool):
+    ConnectionCls = SOCKSHTTPSConnection
+
+
+class SOCKSProxyManager(PoolManager):
+    """
+    A version of the urllib3 ProxyManager that routes connections via the
+    defined SOCKS proxy.
+    """
+
+    pool_classes_by_scheme = {
+        "http": SOCKSHTTPConnectionPool,
+        "https": SOCKSHTTPSConnectionPool,
+    }
+
+    def __init__(
+        self,
+        proxy_url,
+        username=None,
+        password=None,
+        num_pools=10,
+        headers=None,
+        **connection_pool_kw
+    ):
+        parsed = parse_url(proxy_url)
+
+        if username is None and password is None and parsed.auth is not None:
+            split = parsed.auth.split(":")
+            if len(split) == 2:
+                username, password = split
+        if parsed.scheme == "socks5":
+            socks_version = socks.PROXY_TYPE_SOCKS5
+            rdns = False
+        elif parsed.scheme == "socks5h":
+            socks_version = socks.PROXY_TYPE_SOCKS5
+            rdns = True
+        elif parsed.scheme == "socks4":
+            socks_version = socks.PROXY_TYPE_SOCKS4
+            rdns = False
+        elif parsed.scheme == "socks4a":
+            socks_version = socks.PROXY_TYPE_SOCKS4
+            rdns = True
+        else:
+            raise ValueError("Unable to determine SOCKS version from %s" % proxy_url)
+
+        self.proxy_url = proxy_url
+
+        socks_options = {
+            "socks_version": socks_version,
+            "proxy_host": parsed.host,
+            "proxy_port": parsed.port,
+            "username": username,
+            "password": password,
+            "rdns": rdns,
+        }
+        connection_pool_kw["_socks_options"] = socks_options
+
+        super(SOCKSProxyManager, self).__init__(
+            num_pools, headers, **connection_pool_kw
+        )
+
+        self.pool_classes_by_scheme = SOCKSProxyManager.pool_classes_by_scheme
diff --git a/src/snowflake/connector/vendored/urllib3/exceptions.py b/src/snowflake/connector/vendored/urllib3/exceptions.py
new file mode 100644
index 000000000..cba6f3f56
--- /dev/null
+++ b/src/snowflake/connector/vendored/urllib3/exceptions.py
@@ -0,0 +1,323 @@
+from __future__ import absolute_import
+
+from .packages.six.moves.http_client import IncompleteRead as httplib_IncompleteRead
+
+# Base Exceptions
+
+
+class HTTPError(Exception):
+    """Base exception used by this module."""
+
+    pass
+
+
+class HTTPWarning(Warning):
+    """Base warning used by this module."""
+
+    pass
+
+
+class PoolError(HTTPError):
+    """Base exception for errors caused within a pool."""
+
+    def __init__(self, pool, message):
+        self.pool = pool
+        HTTPError.__init__(self, "%s: %s" % (pool, message))
+
+    def __reduce__(self):
+        # For pickling purposes.
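+        # The pool object is not picklable, so both constructor arguments are
+        # replaced with None when the exception round-trips through pickle.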
+        return self.__class__, (None, None)
+
+
+class RequestError(PoolError):
+    """Base exception for PoolErrors that have associated URLs."""
+
+    def __init__(self, pool, url, message):
+        self.url = url
+        PoolError.__init__(self, pool, message)
+
+    def __reduce__(self):
+        # For pickling purposes.
+        return self.__class__, (None, self.url, None)
+
+
+class SSLError(HTTPError):
+    """Raised when SSL certificate fails in an HTTPS connection."""
+
+    pass
+
+
+class ProxyError(HTTPError):
+    """Raised when the connection to a proxy fails."""
+
+    def __init__(self, message, error, *args):
+        super(ProxyError, self).__init__(message, error, *args)
+        self.original_error = error
+
+
+class DecodeError(HTTPError):
+    """Raised when automatic decoding based on Content-Type fails."""
+
+    pass
+
+
+class ProtocolError(HTTPError):
+    """Raised when something unexpected happens mid-request/response."""
+
+    pass
+
+
+#: Renamed to ProtocolError but aliased for backwards compatibility.
+ConnectionError = ProtocolError
+
+
+# Leaf Exceptions
+
+
+class MaxRetryError(RequestError):
+    """Raised when the maximum number of retries is exceeded.
+
+    :param pool: The connection pool
+    :type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool`
+    :param string url: The requested URL
+    :param exceptions.Exception reason: The underlying error
+
+    """
+
+    def __init__(self, pool, url, reason=None):
+        self.reason = reason
+
+        message = "Max retries exceeded with url: %s (Caused by %r)" % (url, reason)
+
+        RequestError.__init__(self, pool, url, message)
+
+
+class HostChangedError(RequestError):
+    """Raised when an existing pool gets a request for a foreign host."""
+
+    def __init__(self, pool, url, retries=3):
+        message = "Tried to open a foreign host with url: %s" % url
+        RequestError.__init__(self, pool, url, message)
+        self.retries = retries
+
+
+class TimeoutStateError(HTTPError):
+    """Raised when passing an invalid state to a timeout"""
+
+    pass
+
+
+class TimeoutError(HTTPError):
+    """Raised when a socket timeout error occurs.
+
+    Catching this error will catch both :exc:`ReadTimeoutErrors
+    <ReadTimeoutError>` and :exc:`ConnectTimeoutErrors <ConnectTimeoutError>`.
+    """
+
+    pass
+
+
+class ReadTimeoutError(TimeoutError, RequestError):
+    """Raised when a socket timeout occurs while receiving data from a server"""
+
+    pass
+
+
+# This timeout error does not have a URL attached and needs to inherit from the
+# base HTTPError
+class ConnectTimeoutError(TimeoutError):
+    """Raised when a socket timeout occurs while connecting to a server"""
+
+    pass
+
+
+class NewConnectionError(ConnectTimeoutError, PoolError):
+    """Raised when we fail to establish a new connection. Usually ECONNREFUSED."""
+
+    pass
+
+
+class EmptyPoolError(PoolError):
+    """Raised when a pool runs out of connections and no more are allowed."""
+
+    pass
+
+
+class ClosedPoolError(PoolError):
+    """Raised when a request enters a pool after the pool has been closed."""
+
+    pass
+
+
+class LocationValueError(ValueError, HTTPError):
+    """Raised when there is something wrong with a given URL input."""
+
+    pass
+
+
+class LocationParseError(LocationValueError):
+    """Raised when get_host or similar fails to parse the URL input."""
+
+    def __init__(self, location):
+        message = "Failed to parse: %s" % location
+        HTTPError.__init__(self, message)
+
+        self.location = location
+
+
+class URLSchemeUnknown(LocationValueError):
+    """Raised when a URL input has an unsupported scheme."""
+
+    def __init__(self, scheme):
+        message = "Not supported URL scheme %s" % scheme
+        super(URLSchemeUnknown, self).__init__(message)
+
+        self.scheme = scheme
+
+
+class ResponseError(HTTPError):
+    """Used as a container for an error reason supplied in a MaxRetryError."""
+
+    GENERIC_ERROR = "too many error responses"
+    SPECIFIC_ERROR = "too many {status_code} error responses"
+
+
+class SecurityWarning(HTTPWarning):
+    """Warned when performing security reducing actions"""
+
+    pass
+
+
+class SubjectAltNameWarning(SecurityWarning):
+    """Warned when connecting to a host with a certificate missing a SAN."""
+
+    pass
+
+
+class InsecureRequestWarning(SecurityWarning):
+    """Warned when making an unverified HTTPS request."""
+
+    pass
+
+
+class SystemTimeWarning(SecurityWarning):
+    """Warned when system time is suspected to be wrong"""
+
+    pass
+
+
+class InsecurePlatformWarning(SecurityWarning):
+    """Warned when certain TLS/SSL configuration is not available on a platform."""
+
+    pass
+
+
+class SNIMissingWarning(HTTPWarning):
+    """Warned when making an HTTPS request without SNI available."""
+
+    pass
+
+
+class DependencyWarning(HTTPWarning):
+    """
+    Warned when an attempt is made to import a module with missing optional
+    dependencies.
+    """
+
+    pass
+
+
+class ResponseNotChunked(ProtocolError, ValueError):
+    """Response needs to be chunked in order to read it as chunks."""
+
+    pass
+
+
+class BodyNotHttplibCompatible(HTTPError):
+    """
+    Body should be :class:`http.client.HTTPResponse` like
+    (have an fp attribute which returns raw chunks) for read_chunked().
+    """
+
+    pass
+
+
+class IncompleteRead(HTTPError, httplib_IncompleteRead):
+    """
+    Response length doesn't match expected Content-Length
+
+    Subclass of :class:`http.client.IncompleteRead` to allow int value
+    for ``partial`` to avoid creating large objects on streamed reads.
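+
+    For example::
+
+        >>> repr(IncompleteRead(5, 10))
+        'IncompleteRead(5 bytes read, 10 more expected)'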
+    """
+
+    def __init__(self, partial, expected):
+        super(IncompleteRead, self).__init__(partial, expected)
+
+    def __repr__(self):
+        return "IncompleteRead(%i bytes read, %i more expected)" % (
+            self.partial,
+            self.expected,
+        )
+
+
+class InvalidChunkLength(HTTPError, httplib_IncompleteRead):
+    """Invalid chunk length in a chunked response."""
+
+    def __init__(self, response, length):
+        super(InvalidChunkLength, self).__init__(
+            response.tell(), response.length_remaining
+        )
+        self.response = response
+        self.length = length
+
+    def __repr__(self):
+        return "InvalidChunkLength(got length %r, %i bytes read)" % (
+            self.length,
+            self.partial,
+        )
+
+
+class InvalidHeader(HTTPError):
+    """The header provided was somehow invalid."""
+
+    pass
+
+
+class ProxySchemeUnknown(AssertionError, URLSchemeUnknown):
+    """ProxyManager does not support the supplied scheme"""
+
+    # TODO(t-8ch): Stop inheriting from AssertionError in v2.0.
+
+    def __init__(self, scheme):
+        # 'localhost' is here because our URL parser parses
+        # localhost:8080 -> scheme=localhost, remove if we fix this.
+        if scheme == "localhost":
+            scheme = None
+        if scheme is None:
+            message = "Proxy URL had no scheme, should start with http:// or https://"
+        else:
+            message = (
+                "Proxy URL had unsupported scheme %s, should use http:// or https://"
+                % scheme
+            )
+        super(ProxySchemeUnknown, self).__init__(message)
+
+
+class ProxySchemeUnsupported(ValueError):
+    """Fetching HTTPS resources through HTTPS proxies is unsupported"""
+
+    pass
+
+
+class HeaderParsingError(HTTPError):
+    """Raised by assert_header_parsing, but we convert it to a log.warning statement."""
+
+    def __init__(self, defects, unparsed_data):
+        message = "%s, unparsed data: %r" % (defects or "Unknown", unparsed_data)
+        super(HeaderParsingError, self).__init__(message)
+
+
+class UnrewindableBodyError(HTTPError):
+    """urllib3 encountered an error when trying to rewind a body"""
+
+    pass
diff --git a/src/snowflake/connector/vendored/urllib3/fields.py b/src/snowflake/connector/vendored/urllib3/fields.py
new file mode 100644
index 000000000..9d630f491
--- /dev/null
+++ b/src/snowflake/connector/vendored/urllib3/fields.py
@@ -0,0 +1,274 @@
+from __future__ import absolute_import
+
+import email.utils
+import mimetypes
+import re
+
+from .packages import six
+
+
+def guess_content_type(filename, default="application/octet-stream"):
+    """
+    Guess the "Content-Type" of a file.
+
+    :param filename:
+        The filename to guess the "Content-Type" of using :mod:`mimetypes`.
+    :param default:
+        If no "Content-Type" can be guessed, default to `default`.
+    """
+    if filename:
+        return mimetypes.guess_type(filename)[0] or default
+    return default
+
+
+def format_header_param_rfc2231(name, value):
+    """
+    Helper function to format and quote a single header parameter using the
+    strategy defined in RFC 2231.
+
+    Particularly useful for header parameters which might contain
+    non-ASCII values, like file names. This follows
+    `RFC 2388 Section 4.4 <https://tools.ietf.org/html/rfc2388#section-4.4>`_.
+
+    :param name:
+        The name of the parameter, a string expected to be ASCII only.
+    :param value:
+        The value of the parameter, provided as ``bytes`` or ``str``.
+    :ret:
+        An RFC-2231-formatted unicode string.
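+
+    For example, ``format_header_param_rfc2231("filename", u"résumé.txt")``
+    returns ``filename*=utf-8''r%C3%A9sum%C3%A9.txt``.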
+    """
+    if isinstance(value, six.binary_type):
+        value = value.decode("utf-8")
+
+    if not any(ch in value for ch in '"\\\r\n'):
+        result = u'%s="%s"' % (name, value)
+        try:
+            result.encode("ascii")
+        except (UnicodeEncodeError, UnicodeDecodeError):
+            pass
+        else:
+            return result
+
+    if six.PY2:  # Python 2:
+        value = value.encode("utf-8")
+
+    # encode_rfc2231 accepts an encoded string and returns an ascii-encoded
+    # string in Python 2 but accepts and returns unicode strings in Python 3
+    value = email.utils.encode_rfc2231(value, "utf-8")
+    value = "%s*=%s" % (name, value)
+
+    if six.PY2:  # Python 2:
+        value = value.decode("utf-8")
+
+    return value
+
+
+_HTML5_REPLACEMENTS = {
+    u"\u0022": u"%22",
+    # Replace "\" with "\\".
+    u"\u005C": u"\u005C\u005C",
+}
+
+# All control characters from 0x00 to 0x1F *except* 0x1B.
+_HTML5_REPLACEMENTS.update(
+    {
+        six.unichr(cc): u"%{:02X}".format(cc)
+        for cc in range(0x00, 0x1F + 1)
+        if cc not in (0x1B,)
+    }
+)
+
+
+def _replace_multiple(value, needles_and_replacements):
+    def replacer(match):
+        return needles_and_replacements[match.group(0)]
+
+    pattern = re.compile(
+        r"|".join([re.escape(needle) for needle in needles_and_replacements.keys()])
+    )
+
+    result = pattern.sub(replacer, value)
+
+    return result
+
+
+def format_header_param_html5(name, value):
+    """
+    Helper function to format and quote a single header parameter using the
+    HTML5 strategy.
+
+    Particularly useful for header parameters which might contain
+    non-ASCII values, like file names. This follows the `HTML5 Working Draft
+    Section 4.10.22.7`_ and matches the behavior of curl and modern browsers.
+
+    .. _HTML5 Working Draft Section 4.10.22.7:
+        https://w3c.github.io/html/sec-forms.html#multipart-form-data
+
+    :param name:
+        The name of the parameter, a string expected to be ASCII only.
+    :param value:
+        The value of the parameter, provided as ``bytes`` or ``str``.
+    :ret:
+        A unicode string, stripped of troublesome characters.
+    """
+    if isinstance(value, six.binary_type):
+        value = value.decode("utf-8")
+
+    value = _replace_multiple(value, _HTML5_REPLACEMENTS)
+
+    return u'%s="%s"' % (name, value)
+
+
+# For backwards-compatibility.
+format_header_param = format_header_param_html5
+
+
+class RequestField(object):
+    """
+    A data container for request body parameters.
+
+    :param name:
+        The name of this request field. Must be unicode.
+    :param data:
+        The data/value body.
+    :param filename:
+        An optional filename of the request field. Must be unicode.
+    :param headers:
+        An optional dict-like object of headers to initially use for the field.
+    :param header_formatter:
+        An optional callable that is used to encode and format the headers. By
+        default, this is :func:`format_header_param_html5`.
+    """
+
+    def __init__(
+        self,
+        name,
+        data,
+        filename=None,
+        headers=None,
+        header_formatter=format_header_param_html5,
+    ):
+        self._name = name
+        self._filename = filename
+        self.data = data
+        self.headers = {}
+        if headers:
+            self.headers = dict(headers)
+        self.header_formatter = header_formatter
+
+    @classmethod
+    def from_tuples(cls, fieldname, value, header_formatter=format_header_param_html5):
+        """
+        A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters.
+
+        Supports constructing :class:`~urllib3.fields.RequestField` from
+        parameter of key/value strings AND key/filetuple. A filetuple is a
+        (filename, data, MIME type) tuple where the MIME type is optional.
+ For example:: + + 'foo': 'bar', + 'fakefile': ('foofile.txt', 'contents of foofile'), + 'realfile': ('barfile.txt', open('realfile').read()), + 'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'), + 'nonamefile': 'contents of nonamefile field', + + Field names and filenames must be unicode. + """ + if isinstance(value, tuple): + if len(value) == 3: + filename, data, content_type = value + else: + filename, data = value + content_type = guess_content_type(filename) + else: + filename = None + content_type = None + data = value + + request_param = cls( + fieldname, data, filename=filename, header_formatter=header_formatter + ) + request_param.make_multipart(content_type=content_type) + + return request_param + + def _render_part(self, name, value): + """ + Overridable helper function to format a single header parameter. By + default, this calls ``self.header_formatter``. + + :param name: + The name of the parameter, a string expected to be ASCII only. + :param value: + The value of the parameter, provided as a unicode string. + """ + + return self.header_formatter(name, value) + + def _render_parts(self, header_parts): + """ + Helper function to format and quote a single header. + + Useful for single headers that are composed of multiple items. E.g., + 'Content-Disposition' fields. + + :param header_parts: + A sequence of (k, v) tuples or a :class:`dict` of (k, v) to format + as `k1="v1"; k2="v2"; ...`. + """ + parts = [] + iterable = header_parts + if isinstance(header_parts, dict): + iterable = header_parts.items() + + for name, value in iterable: + if value is not None: + parts.append(self._render_part(name, value)) + + return u"; ".join(parts) + + def render_headers(self): + """ + Renders the headers for this request field. + """ + lines = [] + + sort_keys = ["Content-Disposition", "Content-Type", "Content-Location"] + for sort_key in sort_keys: + if self.headers.get(sort_key, False): + lines.append(u"%s: %s" % (sort_key, self.headers[sort_key])) + + for header_name, header_value in self.headers.items(): + if header_name not in sort_keys: + if header_value: + lines.append(u"%s: %s" % (header_name, header_value)) + + lines.append(u"\r\n") + return u"\r\n".join(lines) + + def make_multipart( + self, content_disposition=None, content_type=None, content_location=None + ): + """ + Makes this request field into a multipart request field. + + This method overrides "Content-Disposition", "Content-Type" and + "Content-Location" headers to the request parameter. + + :param content_type: + The 'Content-Type' of the request body. + :param content_location: + The 'Content-Location' of the request body. 
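+        :param content_disposition:
+            The 'Content-Disposition' of the request body, defaulting to
+            "form-data".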
+ + """ + self.headers["Content-Disposition"] = content_disposition or u"form-data" + self.headers["Content-Disposition"] += u"; ".join( + [ + u"", + self._render_parts( + ((u"name", self._name), (u"filename", self._filename)) + ), + ] + ) + self.headers["Content-Type"] = content_type + self.headers["Content-Location"] = content_location diff --git a/src/snowflake/connector/vendored/urllib3/filepost.py b/src/snowflake/connector/vendored/urllib3/filepost.py new file mode 100644 index 000000000..36c9252c6 --- /dev/null +++ b/src/snowflake/connector/vendored/urllib3/filepost.py @@ -0,0 +1,98 @@ +from __future__ import absolute_import + +import binascii +import codecs +import os +from io import BytesIO + +from .fields import RequestField +from .packages import six +from .packages.six import b + +writer = codecs.lookup("utf-8")[3] + + +def choose_boundary(): + """ + Our embarrassingly-simple replacement for mimetools.choose_boundary. + """ + boundary = binascii.hexlify(os.urandom(16)) + if not six.PY2: + boundary = boundary.decode("ascii") + return boundary + + +def iter_field_objects(fields): + """ + Iterate over fields. + + Supports list of (k, v) tuples and dicts, and lists of + :class:`~urllib3.fields.RequestField`. + + """ + if isinstance(fields, dict): + i = six.iteritems(fields) + else: + i = iter(fields) + + for field in i: + if isinstance(field, RequestField): + yield field + else: + yield RequestField.from_tuples(*field) + + +def iter_fields(fields): + """ + .. deprecated:: 1.6 + + Iterate over fields. + + The addition of :class:`~urllib3.fields.RequestField` makes this function + obsolete. Instead, use :func:`iter_field_objects`, which returns + :class:`~urllib3.fields.RequestField` objects. + + Supports list of (k, v) tuples and dicts. + """ + if isinstance(fields, dict): + return ((k, v) for k, v in six.iteritems(fields)) + + return ((k, v) for k, v in fields) + + +def encode_multipart_formdata(fields, boundary=None): + """ + Encode a dictionary of ``fields`` using the multipart/form-data MIME format. + + :param fields: + Dictionary of fields or list of (key, :class:`~urllib3.fields.RequestField`). + + :param boundary: + If not specified, then a random boundary will be generated using + :func:`urllib3.filepost.choose_boundary`. + """ + body = BytesIO() + if boundary is None: + boundary = choose_boundary() + + for field in iter_field_objects(fields): + body.write(b("--%s\r\n" % (boundary))) + + writer(body).write(field.render_headers()) + data = field.data + + if isinstance(data, int): + data = str(data) # Backwards compatibility + + if isinstance(data, six.text_type): + writer(body).write(data) + else: + body.write(data) + + body.write(b"\r\n") + + body.write(b("--%s--\r\n" % (boundary))) + + content_type = str("multipart/form-data; boundary=%s" % boundary) + + return body.getvalue(), content_type diff --git a/src/snowflake/connector/vendored/urllib3/packages/__init__.py b/src/snowflake/connector/vendored/urllib3/packages/__init__.py new file mode 100644 index 000000000..fce4caa65 --- /dev/null +++ b/src/snowflake/connector/vendored/urllib3/packages/__init__.py @@ -0,0 +1,5 @@ +from __future__ import absolute_import + +from . 
import ssl_match_hostname + +__all__ = ("ssl_match_hostname",) diff --git a/src/snowflake/connector/vendored/urllib3/packages/backports/__init__.py b/src/snowflake/connector/vendored/urllib3/packages/backports/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/backport_makefile.py b/src/snowflake/connector/vendored/urllib3/packages/backports/makefile.py similarity index 75% rename from backport_makefile.py rename to src/snowflake/connector/vendored/urllib3/packages/backports/makefile.py index bab792dfd..b8fb2154b 100644 --- a/backport_makefile.py +++ b/src/snowflake/connector/vendored/urllib3/packages/backports/makefile.py @@ -2,26 +2,22 @@ """ backports.makefile ~~~~~~~~~~~~~~~~~~ + Backports the Python 3 ``socket.makefile`` method for use with anything that wants to create a "fake" socket object. - -Copied from: -https://github.com/kennethreitz/requests/blob/master/requests/packages/urllib3/packages/backports/makefile.py """ import io - from socket import SocketIO -def backport_makefile(self, mode="r", buffering=None, encoding=None, - errors=None, newline=None): +def backport_makefile( + self, mode="r", buffering=None, encoding=None, errors=None, newline=None +): """ Backport of ``socket.makefile`` from Python 3.5. """ - if not set(mode) <= set(["r", "w", "b"]): - raise ValueError( - "invalid mode %r (only r, w, b allowed)" % (mode,) - ) + if not set(mode) <= {"r", "w", "b"}: + raise ValueError("invalid mode %r (only r, w, b allowed)" % (mode,)) writing = "w" in mode reading = "r" in mode or not writing assert reading or writing diff --git a/src/snowflake/connector/vendored/urllib3/packages/six.py b/src/snowflake/connector/vendored/urllib3/packages/six.py new file mode 100644 index 000000000..ba50acb06 --- /dev/null +++ b/src/snowflake/connector/vendored/urllib3/packages/six.py @@ -0,0 +1,1077 @@ +# Copyright (c) 2010-2020 Benjamin Peterson +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +"""Utilities for writing code that runs on Python 2 and 3""" + +from __future__ import absolute_import + +import functools +import itertools +import operator +import sys +import types + +__author__ = "Benjamin Peterson " +__version__ = "1.16.0" + + +# Useful for very coarse version differentiation. 
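+# For example, callers can branch once on these flags:
+#
+#     if six.PY2:
+#         text = unicode(raw, "utf-8")
+#     else:
+#         text = raw.decode("utf-8")
+#
+# or reach for the version-neutral aliases defined below (``text_type``,
+# ``binary_type``) instead of testing ``sys.version_info`` at every call site.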
+PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 +PY34 = sys.version_info[0:2] >= (3, 4) + +if PY3: + string_types = (str,) + integer_types = (int,) + class_types = (type,) + text_type = str + binary_type = bytes + + MAXSIZE = sys.maxsize +else: + string_types = (basestring,) + integer_types = (int, long) + class_types = (type, types.ClassType) + text_type = unicode + binary_type = str + + if sys.platform.startswith("java"): + # Jython always uses 32 bits. + MAXSIZE = int((1 << 31) - 1) + else: + # It's possible to have sizeof(long) != sizeof(Py_ssize_t). + class X(object): + def __len__(self): + return 1 << 31 + + try: + len(X()) + except OverflowError: + # 32-bit + MAXSIZE = int((1 << 31) - 1) + else: + # 64-bit + MAXSIZE = int((1 << 63) - 1) + del X + +if PY34: + from importlib.util import spec_from_loader +else: + spec_from_loader = None + + +def _add_doc(func, doc): + """Add documentation to a function.""" + func.__doc__ = doc + + +def _import_module(name): + """Import module, returning the module after the last dot.""" + __import__(name) + return sys.modules[name] + + +class _LazyDescr(object): + def __init__(self, name): + self.name = name + + def __get__(self, obj, tp): + result = self._resolve() + setattr(obj, self.name, result) # Invokes __set__. + try: + # This is a bit ugly, but it avoids running this again by + # removing this descriptor. + delattr(obj.__class__, self.name) + except AttributeError: + pass + return result + + +class MovedModule(_LazyDescr): + def __init__(self, name, old, new=None): + super(MovedModule, self).__init__(name) + if PY3: + if new is None: + new = name + self.mod = new + else: + self.mod = old + + def _resolve(self): + return _import_module(self.mod) + + def __getattr__(self, attr): + _module = self._resolve() + value = getattr(_module, attr) + setattr(self, attr, value) + return value + + +class _LazyModule(types.ModuleType): + def __init__(self, name): + super(_LazyModule, self).__init__(name) + self.__doc__ = self.__class__.__doc__ + + def __dir__(self): + attrs = ["__doc__", "__name__"] + attrs += [attr.name for attr in self._moved_attributes] + return attrs + + # Subclasses should override this + _moved_attributes = [] + + +class MovedAttribute(_LazyDescr): + def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): + super(MovedAttribute, self).__init__(name) + if PY3: + if new_mod is None: + new_mod = name + self.mod = new_mod + if new_attr is None: + if old_attr is None: + new_attr = name + else: + new_attr = old_attr + self.attr = new_attr + else: + self.mod = old_mod + if old_attr is None: + old_attr = name + self.attr = old_attr + + def _resolve(self): + module = _import_module(self.mod) + return getattr(module, self.attr) + + +class _SixMetaPathImporter(object): + + """ + A meta path importer to import six.moves and its submodules. + + This class implements a PEP302 finder and loader. It should be compatible + with Python 2.5 and all existing versions of Python3 + """ + + def __init__(self, six_module_name): + self.name = six_module_name + self.known_modules = {} + + def _add_module(self, mod, *fullnames): + for fullname in fullnames: + self.known_modules[self.name + "." + fullname] = mod + + def _get_module(self, fullname): + return self.known_modules[self.name + "." 
+ fullname] + + def find_module(self, fullname, path=None): + if fullname in self.known_modules: + return self + return None + + def find_spec(self, fullname, path, target=None): + if fullname in self.known_modules: + return spec_from_loader(fullname, self) + return None + + def __get_module(self, fullname): + try: + return self.known_modules[fullname] + except KeyError: + raise ImportError("This loader does not know module " + fullname) + + def load_module(self, fullname): + try: + # in case of a reload + return sys.modules[fullname] + except KeyError: + pass + mod = self.__get_module(fullname) + if isinstance(mod, MovedModule): + mod = mod._resolve() + else: + mod.__loader__ = self + sys.modules[fullname] = mod + return mod + + def is_package(self, fullname): + """ + Return true, if the named module is a package. + + We need this method to get correct spec objects with + Python 3.4 (see PEP451) + """ + return hasattr(self.__get_module(fullname), "__path__") + + def get_code(self, fullname): + """Return None + + Required, if is_package is implemented""" + self.__get_module(fullname) # eventually raises ImportError + return None + + get_source = get_code # same as get_code + + def create_module(self, spec): + return self.load_module(spec.name) + + def exec_module(self, module): + pass + + +_importer = _SixMetaPathImporter(__name__) + + +class _MovedItems(_LazyModule): + + """Lazy loading of moved objects""" + + __path__ = [] # mark as package + + +_moved_attributes = [ + MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), + MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), + MovedAttribute( + "filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse" + ), + MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), + MovedAttribute("intern", "__builtin__", "sys"), + MovedAttribute("map", "itertools", "builtins", "imap", "map"), + MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), + MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), + MovedAttribute("getoutput", "commands", "subprocess"), + MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute( + "reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload" + ), + MovedAttribute("reduce", "__builtin__", "functools"), + MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), + MovedAttribute("StringIO", "StringIO", "io"), + MovedAttribute("UserDict", "UserDict", "collections"), + MovedAttribute("UserList", "UserList", "collections"), + MovedAttribute("UserString", "UserString", "collections"), + MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), + MovedAttribute( + "zip_longest", "itertools", "itertools", "izip_longest", "zip_longest" + ), + MovedModule("builtins", "__builtin__"), + MovedModule("configparser", "ConfigParser"), + MovedModule( + "collections_abc", + "collections", + "collections.abc" if sys.version_info >= (3, 3) else "collections", + ), + MovedModule("copyreg", "copy_reg"), + MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), + MovedModule("dbm_ndbm", "dbm", "dbm.ndbm"), + MovedModule( + "_dummy_thread", + "dummy_thread", + "_dummy_thread" if sys.version_info < (3, 9) else "_thread", + ), + MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), + MovedModule("http_cookies", "Cookie", "http.cookies"), + MovedModule("html_entities", "htmlentitydefs", "html.entities"), + MovedModule("html_parser", "HTMLParser", "html.parser"), 
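+    # Each MovedModule maps a ``six.moves`` name to its Python 2 module and,
+    # optionally, its Python 3 module; e.g. the next entry makes
+    # ``six.moves.http_client`` resolve to ``httplib`` on Python 2 and to
+    # ``http.client`` on Python 3.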
+ MovedModule("http_client", "httplib", "http.client"), + MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), + MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"), + MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), + MovedModule( + "email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart" + ), + MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), + MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), + MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), + MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), + MovedModule("cPickle", "cPickle", "pickle"), + MovedModule("queue", "Queue"), + MovedModule("reprlib", "repr"), + MovedModule("socketserver", "SocketServer"), + MovedModule("_thread", "thread", "_thread"), + MovedModule("tkinter", "Tkinter"), + MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), + MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), + MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), + MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), + MovedModule("tkinter_tix", "Tix", "tkinter.tix"), + MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), + MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), + MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), + MovedModule("tkinter_colorchooser", "tkColorChooser", "tkinter.colorchooser"), + MovedModule("tkinter_commondialog", "tkCommonDialog", "tkinter.commondialog"), + MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), + MovedModule("tkinter_font", "tkFont", "tkinter.font"), + MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), + MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", "tkinter.simpledialog"), + MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), + MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), + MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), + MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), + MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), + MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), +] +# Add windows specific modules. +if sys.platform == "win32": + _moved_attributes += [ + MovedModule("winreg", "_winreg"), + ] + +for attr in _moved_attributes: + setattr(_MovedItems, attr.name, attr) + if isinstance(attr, MovedModule): + _importer._add_module(attr, "moves." 
+ attr.name) +del attr + +_MovedItems._moved_attributes = _moved_attributes + +moves = _MovedItems(__name__ + ".moves") +_importer._add_module(moves, "moves") + + +class Module_six_moves_urllib_parse(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_parse""" + + +_urllib_parse_moved_attributes = [ + MovedAttribute("ParseResult", "urlparse", "urllib.parse"), + MovedAttribute("SplitResult", "urlparse", "urllib.parse"), + MovedAttribute("parse_qs", "urlparse", "urllib.parse"), + MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), + MovedAttribute("urldefrag", "urlparse", "urllib.parse"), + MovedAttribute("urljoin", "urlparse", "urllib.parse"), + MovedAttribute("urlparse", "urlparse", "urllib.parse"), + MovedAttribute("urlsplit", "urlparse", "urllib.parse"), + MovedAttribute("urlunparse", "urlparse", "urllib.parse"), + MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), + MovedAttribute("quote", "urllib", "urllib.parse"), + MovedAttribute("quote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote", "urllib", "urllib.parse"), + MovedAttribute("unquote_plus", "urllib", "urllib.parse"), + MovedAttribute( + "unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes" + ), + MovedAttribute("urlencode", "urllib", "urllib.parse"), + MovedAttribute("splitquery", "urllib", "urllib.parse"), + MovedAttribute("splittag", "urllib", "urllib.parse"), + MovedAttribute("splituser", "urllib", "urllib.parse"), + MovedAttribute("splitvalue", "urllib", "urllib.parse"), + MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), + MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), + MovedAttribute("uses_params", "urlparse", "urllib.parse"), + MovedAttribute("uses_query", "urlparse", "urllib.parse"), + MovedAttribute("uses_relative", "urlparse", "urllib.parse"), +] +for attr in _urllib_parse_moved_attributes: + setattr(Module_six_moves_urllib_parse, attr.name, attr) +del attr + +Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes + +_importer._add_module( + Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), + "moves.urllib_parse", + "moves.urllib.parse", +) + + +class Module_six_moves_urllib_error(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_error""" + + +_urllib_error_moved_attributes = [ + MovedAttribute("URLError", "urllib2", "urllib.error"), + MovedAttribute("HTTPError", "urllib2", "urllib.error"), + MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), +] +for attr in _urllib_error_moved_attributes: + setattr(Module_six_moves_urllib_error, attr.name, attr) +del attr + +Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes + +_importer._add_module( + Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), + "moves.urllib_error", + "moves.urllib.error", +) + + +class Module_six_moves_urllib_request(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_request""" + + +_urllib_request_moved_attributes = [ + MovedAttribute("urlopen", "urllib2", "urllib.request"), + MovedAttribute("install_opener", "urllib2", "urllib.request"), + MovedAttribute("build_opener", "urllib2", "urllib.request"), + MovedAttribute("pathname2url", "urllib", "urllib.request"), + MovedAttribute("url2pathname", "urllib", "urllib.request"), + MovedAttribute("getproxies", "urllib", "urllib.request"), + MovedAttribute("Request", "urllib2", "urllib.request"), + MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), + 
MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), + MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), + MovedAttribute("BaseHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), + MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), + MovedAttribute("FileHandler", "urllib2", "urllib.request"), + MovedAttribute("FTPHandler", "urllib2", "urllib.request"), + MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), + MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), + MovedAttribute("urlretrieve", "urllib", "urllib.request"), + MovedAttribute("urlcleanup", "urllib", "urllib.request"), + MovedAttribute("URLopener", "urllib", "urllib.request"), + MovedAttribute("FancyURLopener", "urllib", "urllib.request"), + MovedAttribute("proxy_bypass", "urllib", "urllib.request"), + MovedAttribute("parse_http_list", "urllib2", "urllib.request"), + MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"), +] +for attr in _urllib_request_moved_attributes: + setattr(Module_six_moves_urllib_request, attr.name, attr) +del attr + +Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes + +_importer._add_module( + Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), + "moves.urllib_request", + "moves.urllib.request", +) + + +class Module_six_moves_urllib_response(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_response""" + + +_urllib_response_moved_attributes = [ + MovedAttribute("addbase", "urllib", "urllib.response"), + MovedAttribute("addclosehook", "urllib", "urllib.response"), + MovedAttribute("addinfo", "urllib", "urllib.response"), + MovedAttribute("addinfourl", "urllib", "urllib.response"), +] +for attr in _urllib_response_moved_attributes: + setattr(Module_six_moves_urllib_response, attr.name, attr) +del attr + +Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes + +_importer._add_module( + Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), + "moves.urllib_response", + "moves.urllib.response", +) + + +class Module_six_moves_urllib_robotparser(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_robotparser""" + + +_urllib_robotparser_moved_attributes = [ + MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), +] +for attr in _urllib_robotparser_moved_attributes: + setattr(Module_six_moves_urllib_robotparser, attr.name, attr) +del attr + +Module_six_moves_urllib_robotparser._moved_attributes = ( + _urllib_robotparser_moved_attributes +) + +_importer._add_module( + Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), + 
"moves.urllib_robotparser", + "moves.urllib.robotparser", +) + + +class Module_six_moves_urllib(types.ModuleType): + + """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" + + __path__ = [] # mark as package + parse = _importer._get_module("moves.urllib_parse") + error = _importer._get_module("moves.urllib_error") + request = _importer._get_module("moves.urllib_request") + response = _importer._get_module("moves.urllib_response") + robotparser = _importer._get_module("moves.urllib_robotparser") + + def __dir__(self): + return ["parse", "error", "request", "response", "robotparser"] + + +_importer._add_module( + Module_six_moves_urllib(__name__ + ".moves.urllib"), "moves.urllib" +) + + +def add_move(move): + """Add an item to six.moves.""" + setattr(_MovedItems, move.name, move) + + +def remove_move(name): + """Remove item from six.moves.""" + try: + delattr(_MovedItems, name) + except AttributeError: + try: + del moves.__dict__[name] + except KeyError: + raise AttributeError("no such move, %r" % (name,)) + + +if PY3: + _meth_func = "__func__" + _meth_self = "__self__" + + _func_closure = "__closure__" + _func_code = "__code__" + _func_defaults = "__defaults__" + _func_globals = "__globals__" +else: + _meth_func = "im_func" + _meth_self = "im_self" + + _func_closure = "func_closure" + _func_code = "func_code" + _func_defaults = "func_defaults" + _func_globals = "func_globals" + + +try: + advance_iterator = next +except NameError: + + def advance_iterator(it): + return it.next() + + +next = advance_iterator + + +try: + callable = callable +except NameError: + + def callable(obj): + return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) + + +if PY3: + + def get_unbound_function(unbound): + return unbound + + create_bound_method = types.MethodType + + def create_unbound_method(func, cls): + return func + + Iterator = object +else: + + def get_unbound_function(unbound): + return unbound.im_func + + def create_bound_method(func, obj): + return types.MethodType(func, obj, obj.__class__) + + def create_unbound_method(func, cls): + return types.MethodType(func, None, cls) + + class Iterator(object): + def next(self): + return type(self).__next__(self) + + callable = callable +_add_doc( + get_unbound_function, """Get the function out of a possibly unbound function""" +) + + +get_method_function = operator.attrgetter(_meth_func) +get_method_self = operator.attrgetter(_meth_self) +get_function_closure = operator.attrgetter(_func_closure) +get_function_code = operator.attrgetter(_func_code) +get_function_defaults = operator.attrgetter(_func_defaults) +get_function_globals = operator.attrgetter(_func_globals) + + +if PY3: + + def iterkeys(d, **kw): + return iter(d.keys(**kw)) + + def itervalues(d, **kw): + return iter(d.values(**kw)) + + def iteritems(d, **kw): + return iter(d.items(**kw)) + + def iterlists(d, **kw): + return iter(d.lists(**kw)) + + viewkeys = operator.methodcaller("keys") + + viewvalues = operator.methodcaller("values") + + viewitems = operator.methodcaller("items") +else: + + def iterkeys(d, **kw): + return d.iterkeys(**kw) + + def itervalues(d, **kw): + return d.itervalues(**kw) + + def iteritems(d, **kw): + return d.iteritems(**kw) + + def iterlists(d, **kw): + return d.iterlists(**kw) + + viewkeys = operator.methodcaller("viewkeys") + + viewvalues = operator.methodcaller("viewvalues") + + viewitems = operator.methodcaller("viewitems") + +_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") +_add_doc(itervalues, "Return 
an iterator over the values of a dictionary.") +_add_doc(iteritems, "Return an iterator over the (key, value) pairs of a dictionary.") +_add_doc( + iterlists, "Return an iterator over the (key, [values]) pairs of a dictionary." +) + + +if PY3: + + def b(s): + return s.encode("latin-1") + + def u(s): + return s + + unichr = chr + import struct + + int2byte = struct.Struct(">B").pack + del struct + byte2int = operator.itemgetter(0) + indexbytes = operator.getitem + iterbytes = iter + import io + + StringIO = io.StringIO + BytesIO = io.BytesIO + del io + _assertCountEqual = "assertCountEqual" + if sys.version_info[1] <= 1: + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" + _assertNotRegex = "assertNotRegexpMatches" + else: + _assertRaisesRegex = "assertRaisesRegex" + _assertRegex = "assertRegex" + _assertNotRegex = "assertNotRegex" +else: + + def b(s): + return s + + # Workaround for standalone backslash + + def u(s): + return unicode(s.replace(r"\\", r"\\\\"), "unicode_escape") + + unichr = unichr + int2byte = chr + + def byte2int(bs): + return ord(bs[0]) + + def indexbytes(buf, i): + return ord(buf[i]) + + iterbytes = functools.partial(itertools.imap, ord) + import StringIO + + StringIO = BytesIO = StringIO.StringIO + _assertCountEqual = "assertItemsEqual" + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" + _assertNotRegex = "assertNotRegexpMatches" +_add_doc(b, """Byte literal""") +_add_doc(u, """Text literal""") + + +def assertCountEqual(self, *args, **kwargs): + return getattr(self, _assertCountEqual)(*args, **kwargs) + + +def assertRaisesRegex(self, *args, **kwargs): + return getattr(self, _assertRaisesRegex)(*args, **kwargs) + + +def assertRegex(self, *args, **kwargs): + return getattr(self, _assertRegex)(*args, **kwargs) + + +def assertNotRegex(self, *args, **kwargs): + return getattr(self, _assertNotRegex)(*args, **kwargs) + + +if PY3: + exec_ = getattr(moves.builtins, "exec") + + def reraise(tp, value, tb=None): + try: + if value is None: + value = tp() + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + finally: + value = None + tb = None + + +else: + + def exec_(_code_, _globs_=None, _locs_=None): + """Execute code in a namespace.""" + if _globs_ is None: + frame = sys._getframe(1) + _globs_ = frame.f_globals + if _locs_ is None: + _locs_ = frame.f_locals + del frame + elif _locs_ is None: + _locs_ = _globs_ + exec ("""exec _code_ in _globs_, _locs_""") + + exec_( + """def reraise(tp, value, tb=None): + try: + raise tp, value, tb + finally: + tb = None +""" + ) + + +if sys.version_info[:2] > (3,): + exec_( + """def raise_from(value, from_value): + try: + raise value from from_value + finally: + value = None +""" + ) +else: + + def raise_from(value, from_value): + raise value + + +print_ = getattr(moves.builtins, "print", None) +if print_ is None: + + def print_(*args, **kwargs): + """The new-style print function for Python 2.4 and 2.5.""" + fp = kwargs.pop("file", sys.stdout) + if fp is None: + return + + def write(data): + if not isinstance(data, basestring): + data = str(data) + # If the file has an encoding, encode unicode with it. 
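+            # (``file`` and ``unicode`` are Python 2 builtins; this whole
+            # fallback only runs on Python 2.4/2.5, where no builtin print
+            # function exists, so referencing them here is safe.)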
+ if ( + isinstance(fp, file) + and isinstance(data, unicode) + and fp.encoding is not None + ): + errors = getattr(fp, "errors", None) + if errors is None: + errors = "strict" + data = data.encode(fp.encoding, errors) + fp.write(data) + + want_unicode = False + sep = kwargs.pop("sep", None) + if sep is not None: + if isinstance(sep, unicode): + want_unicode = True + elif not isinstance(sep, str): + raise TypeError("sep must be None or a string") + end = kwargs.pop("end", None) + if end is not None: + if isinstance(end, unicode): + want_unicode = True + elif not isinstance(end, str): + raise TypeError("end must be None or a string") + if kwargs: + raise TypeError("invalid keyword arguments to print()") + if not want_unicode: + for arg in args: + if isinstance(arg, unicode): + want_unicode = True + break + if want_unicode: + newline = unicode("\n") + space = unicode(" ") + else: + newline = "\n" + space = " " + if sep is None: + sep = space + if end is None: + end = newline + for i, arg in enumerate(args): + if i: + write(sep) + write(arg) + write(end) + + +if sys.version_info[:2] < (3, 3): + _print = print_ + + def print_(*args, **kwargs): + fp = kwargs.get("file", sys.stdout) + flush = kwargs.pop("flush", False) + _print(*args, **kwargs) + if flush and fp is not None: + fp.flush() + + +_add_doc(reraise, """Reraise an exception.""") + +if sys.version_info[0:2] < (3, 4): + # This does exactly the same what the :func:`py3:functools.update_wrapper` + # function does on Python versions after 3.2. It sets the ``__wrapped__`` + # attribute on ``wrapper`` object and it doesn't raise an error if any of + # the attributes mentioned in ``assigned`` and ``updated`` are missing on + # ``wrapped`` object. + def _update_wrapper( + wrapper, + wrapped, + assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES, + ): + for attr in assigned: + try: + value = getattr(wrapped, attr) + except AttributeError: + continue + else: + setattr(wrapper, attr, value) + for attr in updated: + getattr(wrapper, attr).update(getattr(wrapped, attr, {})) + wrapper.__wrapped__ = wrapped + return wrapper + + _update_wrapper.__doc__ = functools.update_wrapper.__doc__ + + def wraps( + wrapped, + assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES, + ): + return functools.partial( + _update_wrapper, wrapped=wrapped, assigned=assigned, updated=updated + ) + + wraps.__doc__ = functools.wraps.__doc__ + +else: + wraps = functools.wraps + + +def with_metaclass(meta, *bases): + """Create a base class with a metaclass.""" + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the actual metaclass. + class metaclass(type): + def __new__(cls, name, this_bases, d): + if sys.version_info[:2] >= (3, 7): + # This version introduced PEP 560 that requires a bit + # of extra care (we mimic what is done by __build_class__). 
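+                # types.resolve_bases() (added in Python 3.7 by PEP 560)
+                # substitutes any entry defining __mro_entries__ with the
+                # real base classes before the metaclass call below.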
+ resolved_bases = types.resolve_bases(bases) + if resolved_bases is not bases: + d["__orig_bases__"] = bases + else: + resolved_bases = bases + return meta(name, resolved_bases, d) + + @classmethod + def __prepare__(cls, name, this_bases): + return meta.__prepare__(name, bases) + + return type.__new__(metaclass, "temporary_class", (), {}) + + +def add_metaclass(metaclass): + """Class decorator for creating a class with a metaclass.""" + + def wrapper(cls): + orig_vars = cls.__dict__.copy() + slots = orig_vars.get("__slots__") + if slots is not None: + if isinstance(slots, str): + slots = [slots] + for slots_var in slots: + orig_vars.pop(slots_var) + orig_vars.pop("__dict__", None) + orig_vars.pop("__weakref__", None) + if hasattr(cls, "__qualname__"): + orig_vars["__qualname__"] = cls.__qualname__ + return metaclass(cls.__name__, cls.__bases__, orig_vars) + + return wrapper + + +def ensure_binary(s, encoding="utf-8", errors="strict"): + """Coerce **s** to six.binary_type. + + For Python 2: + - `unicode` -> encoded to `str` + - `str` -> `str` + + For Python 3: + - `str` -> encoded to `bytes` + - `bytes` -> `bytes` + """ + if isinstance(s, binary_type): + return s + if isinstance(s, text_type): + return s.encode(encoding, errors) + raise TypeError("not expecting type '%s'" % type(s)) + + +def ensure_str(s, encoding="utf-8", errors="strict"): + """Coerce *s* to `str`. + + For Python 2: + - `unicode` -> encoded to `str` + - `str` -> `str` + + For Python 3: + - `str` -> `str` + - `bytes` -> decoded to `str` + """ + # Optimization: Fast return for the common case. + if type(s) is str: + return s + if PY2 and isinstance(s, text_type): + return s.encode(encoding, errors) + elif PY3 and isinstance(s, binary_type): + return s.decode(encoding, errors) + elif not isinstance(s, (text_type, binary_type)): + raise TypeError("not expecting type '%s'" % type(s)) + return s + + +def ensure_text(s, encoding="utf-8", errors="strict"): + """Coerce *s* to six.text_type. + + For Python 2: + - `unicode` -> `unicode` + - `str` -> `unicode` + + For Python 3: + - `str` -> `str` + - `bytes` -> decoded to `str` + """ + if isinstance(s, binary_type): + return s.decode(encoding, errors) + elif isinstance(s, text_type): + return s + else: + raise TypeError("not expecting type '%s'" % type(s)) + + +def python_2_unicode_compatible(klass): + """ + A class decorator that defines __unicode__ and __str__ methods under Python 2. + Under Python 3 it does nothing. + + To support Python 2 and 3 with a single code base, define a __str__ method + returning text and apply this decorator to the class. + """ + if PY2: + if "__str__" not in klass.__dict__: + raise ValueError( + "@python_2_unicode_compatible cannot be applied " + "to %s because it doesn't define __str__()." % klass.__name__ + ) + klass.__unicode__ = klass.__str__ + klass.__str__ = lambda self: self.__unicode__().encode("utf-8") + return klass + + +# Complete the moves implementation. +# This code is at the end of this module to speed up module loading. +# Turn this module into a package. +__path__ = [] # required for PEP 302 and PEP 451 +__package__ = __name__ # see PEP 366 @ReservedAssignment +if globals().get("__spec__") is not None: + __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable +# Remove other six meta path importers, since they cause problems. This can +# happen if six is removed from sys.modules and then reloaded. (Setuptools does +# this for some reason.) 
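+# (A leftover importer could otherwise keep answering imports of six.moves
+# submodules from the stale module instance instead of this one.)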
+if sys.meta_path: + for i, importer in enumerate(sys.meta_path): + # Here's some real nastiness: Another "instance" of the six module might + # be floating around. Therefore, we can't use isinstance() to check for + # the six meta path importer, since the other six instance will have + # inserted an importer with different class. + if ( + type(importer).__name__ == "_SixMetaPathImporter" + and importer.name == __name__ + ): + del sys.meta_path[i] + break + del i, importer +# Finally, add the importer to the meta path import hook. +sys.meta_path.append(_importer) diff --git a/src/snowflake/connector/vendored/urllib3/packages/ssl_match_hostname/__init__.py b/src/snowflake/connector/vendored/urllib3/packages/ssl_match_hostname/__init__.py new file mode 100644 index 000000000..ef3fde520 --- /dev/null +++ b/src/snowflake/connector/vendored/urllib3/packages/ssl_match_hostname/__init__.py @@ -0,0 +1,24 @@ +import sys + +try: + # Our match_hostname function is the same as 3.10's, so we only want to + # import the match_hostname function if it's at least that good. + # We also fallback on Python 3.10+ because our code doesn't emit + # deprecation warnings and is the same as Python 3.10 otherwise. + if sys.version_info < (3, 5) or sys.version_info >= (3, 10): + raise ImportError("Fallback to vendored code") + + from ssl import CertificateError, match_hostname +except ImportError: + try: + # Backport of the function from a pypi module + from backports.ssl_match_hostname import ( # type: ignore + CertificateError, + match_hostname, + ) + except ImportError: + # Our vendored copy + from ._implementation import CertificateError, match_hostname # type: ignore + +# Not needed, but documenting what we provide. +__all__ = ("CertificateError", "match_hostname") diff --git a/src/snowflake/connector/vendored/urllib3/packages/ssl_match_hostname/_implementation.py b/src/snowflake/connector/vendored/urllib3/packages/ssl_match_hostname/_implementation.py new file mode 100644 index 000000000..689208d3c --- /dev/null +++ b/src/snowflake/connector/vendored/urllib3/packages/ssl_match_hostname/_implementation.py @@ -0,0 +1,160 @@ +"""The match_hostname() function from Python 3.3.3, essential when using SSL.""" + +# Note: This file is under the PSF license as the code comes from the python +# stdlib. http://docs.python.org/3/license.html + +import re +import sys + +# ipaddress has been backported to 2.6+ in pypi. If it is installed on the +# system, use it to handle IPAddress ServerAltnames (this was added in +# python-3.5) otherwise only do DNS matching. This allows +# backports.ssl_match_hostname to continue to be used in Python 2.7. +try: + import ipaddress +except ImportError: + ipaddress = None + +__version__ = "3.5.0.1" + + +class CertificateError(ValueError): + pass + + +def _dnsname_match(dn, hostname, max_wildcards=1): + """Matching according to RFC 6125, section 6.4.3 + + http://tools.ietf.org/html/rfc6125#section-6.4.3 + """ + pats = [] + if not dn: + return False + + # Ported from python3-syntax: + # leftmost, *remainder = dn.split(r'.') + parts = dn.split(r".") + leftmost = parts[0] + remainder = parts[1:] + + wildcards = leftmost.count("*") + if wildcards > max_wildcards: + # Issue #17980: avoid denials of service by refusing more + # than one wildcard per fragment. A survey of established + # policy among SSL implementations showed it to be a + # reasonable choice. 
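+        # For example, a presented name such as "a*b*.example.com" carries
+        # two wildcards in its leftmost label and is rejected under the
+        # default max_wildcards=1.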
+ raise CertificateError( + "too many wildcards in certificate DNS name: " + repr(dn) + ) + + # speed up common case w/o wildcards + if not wildcards: + return dn.lower() == hostname.lower() + + # RFC 6125, section 6.4.3, subitem 1. + # The client SHOULD NOT attempt to match a presented identifier in which + # the wildcard character comprises a label other than the left-most label. + if leftmost == "*": + # When '*' is a fragment by itself, it matches a non-empty dotless + # fragment. + pats.append("[^.]+") + elif leftmost.startswith("xn--") or hostname.startswith("xn--"): + # RFC 6125, section 6.4.3, subitem 3. + # The client SHOULD NOT attempt to match a presented identifier + # where the wildcard character is embedded within an A-label or + # U-label of an internationalized domain name. + pats.append(re.escape(leftmost)) + else: + # Otherwise, '*' matches any dotless string, e.g. www* + pats.append(re.escape(leftmost).replace(r"\*", "[^.]*")) + + # add the remaining fragments, ignore any wildcards + for frag in remainder: + pats.append(re.escape(frag)) + + pat = re.compile(r"\A" + r"\.".join(pats) + r"\Z", re.IGNORECASE) + return pat.match(hostname) + + +def _to_unicode(obj): + if isinstance(obj, str) and sys.version_info < (3,): + obj = unicode(obj, encoding="ascii", errors="strict") + return obj + + +def _ipaddress_match(ipname, host_ip): + """Exact matching of IP addresses. + + RFC 6125 explicitly doesn't define an algorithm for this + (section 1.7.2 - "Out of Scope"). + """ + # OpenSSL may add a trailing newline to a subjectAltName's IP address + # Divergence from upstream: ipaddress can't handle byte str + ip = ipaddress.ip_address(_to_unicode(ipname).rstrip()) + return ip == host_ip + + +def match_hostname(cert, hostname): + """Verify that *cert* (in decoded format as returned by + SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 + rules are followed, but IP addresses are not accepted for *hostname*. + + CertificateError is raised on failure. On success, the function + returns nothing. + """ + if not cert: + raise ValueError( + "empty or no certificate, match_hostname needs a " + "SSL socket or SSL context with either " + "CERT_OPTIONAL or CERT_REQUIRED" + ) + try: + # Divergence from upstream: ipaddress can't handle byte str + host_ip = ipaddress.ip_address(_to_unicode(hostname)) + except ValueError: + # Not an IP address (common case) + host_ip = None + except UnicodeError: + # Divergence from upstream: Have to deal with ipaddress not taking + # byte strings. addresses should be all ascii, so we consider it not + # an ipaddress in this case + host_ip = None + except AttributeError: + # Divergence from upstream: Make ipaddress library optional + if ipaddress is None: + host_ip = None + else: + raise + dnsnames = [] + san = cert.get("subjectAltName", ()) + for key, value in san: + if key == "DNS": + if host_ip is None and _dnsname_match(value, hostname): + return + dnsnames.append(value) + elif key == "IP Address": + if host_ip is not None and _ipaddress_match(value, host_ip): + return + dnsnames.append(value) + if not dnsnames: + # The subject is only checked when there is no dNSName entry + # in subjectAltName + for sub in cert.get("subject", ()): + for key, value in sub: + # XXX according to RFC 2818, the most specific Common Name + # must be used. 
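+                # (In practice this loop tries every commonName it finds,
+                # not only the most specific one.)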
+ if key == "commonName": + if _dnsname_match(value, hostname): + return + dnsnames.append(value) + if len(dnsnames) > 1: + raise CertificateError( + "hostname %r " + "doesn't match either of %s" % (hostname, ", ".join(map(repr, dnsnames))) + ) + elif len(dnsnames) == 1: + raise CertificateError("hostname %r doesn't match %r" % (hostname, dnsnames[0])) + else: + raise CertificateError( + "no appropriate commonName or subjectAltName fields were found" + ) diff --git a/src/snowflake/connector/vendored/urllib3/poolmanager.py b/src/snowflake/connector/vendored/urllib3/poolmanager.py new file mode 100644 index 000000000..3a31a285b --- /dev/null +++ b/src/snowflake/connector/vendored/urllib3/poolmanager.py @@ -0,0 +1,536 @@ +from __future__ import absolute_import + +import collections +import functools +import logging + +from ._collections import RecentlyUsedContainer +from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, port_by_scheme +from .exceptions import ( + LocationValueError, + MaxRetryError, + ProxySchemeUnknown, + ProxySchemeUnsupported, + URLSchemeUnknown, +) +from .packages import six +from .packages.six.moves.urllib.parse import urljoin +from .request import RequestMethods +from .util.proxy import connection_requires_http_tunnel +from .util.retry import Retry +from .util.url import parse_url + +__all__ = ["PoolManager", "ProxyManager", "proxy_from_url"] + + +log = logging.getLogger(__name__) + +SSL_KEYWORDS = ( + "key_file", + "cert_file", + "cert_reqs", + "ca_certs", + "ssl_version", + "ca_cert_dir", + "ssl_context", + "key_password", +) + +# All known keyword arguments that could be provided to the pool manager, its +# pools, or the underlying connections. This is used to construct a pool key. +_key_fields = ( + "key_scheme", # str + "key_host", # str + "key_port", # int + "key_timeout", # int or float or Timeout + "key_retries", # int or Retry + "key_strict", # bool + "key_block", # bool + "key_source_address", # str + "key_key_file", # str + "key_key_password", # str + "key_cert_file", # str + "key_cert_reqs", # str + "key_ca_certs", # str + "key_ssl_version", # str + "key_ca_cert_dir", # str + "key_ssl_context", # instance of ssl.SSLContext or urllib3.util.ssl_.SSLContext + "key_maxsize", # int + "key_headers", # dict + "key__proxy", # parsed proxy url + "key__proxy_headers", # dict + "key__proxy_config", # class + "key_socket_options", # list of (level (int), optname (int), value (int or str)) tuples + "key__socks_options", # dict + "key_assert_hostname", # bool or string + "key_assert_fingerprint", # str + "key_server_hostname", # str +) + +#: The namedtuple class used to construct keys for the connection pool. +#: All custom key schemes should include the fields in this key at a minimum. +PoolKey = collections.namedtuple("PoolKey", _key_fields) + +_proxy_config_fields = ("ssl_context", "use_forwarding_for_https") +ProxyConfig = collections.namedtuple("ProxyConfig", _proxy_config_fields) + + +def _default_key_normalizer(key_class, request_context): + """ + Create a pool key out of a request context dictionary. + + According to RFC 3986, both the scheme and host are case-insensitive. + Therefore, this function normalizes both before constructing the pool + key for an HTTPS request. If you wish to change this behaviour, provide + alternate callables to ``key_fn_by_scheme``. + + :param key_class: + The class to use when constructing the key. This should be a namedtuple + with the ``scheme`` and ``host`` keys at a minimum. 
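+        (By default this is the ``PoolKey`` namedtuple defined above.)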
+ :type key_class: namedtuple + :param request_context: + A dictionary-like object that contain the context for a request. + :type request_context: dict + + :return: A namedtuple that can be used as a connection pool key. + :rtype: PoolKey + """ + # Since we mutate the dictionary, make a copy first + context = request_context.copy() + context["scheme"] = context["scheme"].lower() + context["host"] = context["host"].lower() + + # These are both dictionaries and need to be transformed into frozensets + for key in ("headers", "_proxy_headers", "_socks_options"): + if key in context and context[key] is not None: + context[key] = frozenset(context[key].items()) + + # The socket_options key may be a list and needs to be transformed into a + # tuple. + socket_opts = context.get("socket_options") + if socket_opts is not None: + context["socket_options"] = tuple(socket_opts) + + # Map the kwargs to the names in the namedtuple - this is necessary since + # namedtuples can't have fields starting with '_'. + for key in list(context.keys()): + context["key_" + key] = context.pop(key) + + # Default to ``None`` for keys missing from the context + for field in key_class._fields: + if field not in context: + context[field] = None + + return key_class(**context) + + +#: A dictionary that maps a scheme to a callable that creates a pool key. +#: This can be used to alter the way pool keys are constructed, if desired. +#: Each PoolManager makes a copy of this dictionary so they can be configured +#: globally here, or individually on the instance. +key_fn_by_scheme = { + "http": functools.partial(_default_key_normalizer, PoolKey), + "https": functools.partial(_default_key_normalizer, PoolKey), +} + +pool_classes_by_scheme = {"http": HTTPConnectionPool, "https": HTTPSConnectionPool} + + +class PoolManager(RequestMethods): + """ + Allows for arbitrary requests while transparently keeping track of + necessary connection pools for you. + + :param num_pools: + Number of connection pools to cache before discarding the least + recently used pool. + + :param headers: + Headers to include with all requests, unless other headers are given + explicitly. + + :param \\**connection_pool_kw: + Additional parameters are used to create fresh + :class:`urllib3.connectionpool.ConnectionPool` instances. + + Example:: + + >>> manager = PoolManager(num_pools=2) + >>> r = manager.request('GET', 'http://google.com/') + >>> r = manager.request('GET', 'http://google.com/mail') + >>> r = manager.request('GET', 'http://yahoo.com/') + >>> len(manager.pools) + 2 + + """ + + proxy = None + proxy_config = None + + def __init__(self, num_pools=10, headers=None, **connection_pool_kw): + RequestMethods.__init__(self, headers) + self.connection_pool_kw = connection_pool_kw + self.pools = RecentlyUsedContainer(num_pools, dispose_func=lambda p: p.close()) + + # Locally set the pool classes and keys so other PoolManagers can + # override them. + self.pool_classes_by_scheme = pool_classes_by_scheme + self.key_fn_by_scheme = key_fn_by_scheme.copy() + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + self.clear() + # Return False to re-raise any potential exceptions + return False + + def _new_pool(self, scheme, host, port, request_context=None): + """ + Create a new :class:`urllib3.connectionpool.ConnectionPool` based on host, port, scheme, and + any additional pool keyword arguments. + + If ``request_context`` is provided, it is provided as keyword arguments + to the pool class used. 
This method is used to actually create the + connection pools handed out by :meth:`connection_from_url` and + companion methods. It is intended to be overridden for customization. + """ + pool_cls = self.pool_classes_by_scheme[scheme] + if request_context is None: + request_context = self.connection_pool_kw.copy() + + # Although the context has everything necessary to create the pool, + # this function has historically only used the scheme, host, and port + # in the positional args. When an API change is acceptable these can + # be removed. + for key in ("scheme", "host", "port"): + request_context.pop(key, None) + + if scheme == "http": + for kw in SSL_KEYWORDS: + request_context.pop(kw, None) + + return pool_cls(host, port, **request_context) + + def clear(self): + """ + Empty our store of pools and direct them all to close. + + This will not affect in-flight connections, but they will not be + re-used after completion. + """ + self.pools.clear() + + def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None): + """ + Get a :class:`urllib3.connectionpool.ConnectionPool` based on the host, port, and scheme. + + If ``port`` isn't given, it will be derived from the ``scheme`` using + ``urllib3.connectionpool.port_by_scheme``. If ``pool_kwargs`` is + provided, it is merged with the instance's ``connection_pool_kw`` + variable and used to create the new connection pool, if one is + needed. + """ + + if not host: + raise LocationValueError("No host specified.") + + request_context = self._merge_pool_kwargs(pool_kwargs) + request_context["scheme"] = scheme or "http" + if not port: + port = port_by_scheme.get(request_context["scheme"].lower(), 80) + request_context["port"] = port + request_context["host"] = host + + return self.connection_from_context(request_context) + + def connection_from_context(self, request_context): + """ + Get a :class:`urllib3.connectionpool.ConnectionPool` based on the request context. + + ``request_context`` must at least contain the ``scheme`` key and its + value must be a key in ``key_fn_by_scheme`` instance variable. + """ + scheme = request_context["scheme"].lower() + pool_key_constructor = self.key_fn_by_scheme.get(scheme) + if not pool_key_constructor: + raise URLSchemeUnknown(scheme) + pool_key = pool_key_constructor(request_context) + + return self.connection_from_pool_key(pool_key, request_context=request_context) + + def connection_from_pool_key(self, pool_key, request_context=None): + """ + Get a :class:`urllib3.connectionpool.ConnectionPool` based on the provided pool key. + + ``pool_key`` should be a namedtuple that only contains immutable + objects. At a minimum it must have the ``scheme``, ``host``, and + ``port`` fields. + """ + with self.pools.lock: + # If the scheme, host, or port doesn't match existing open + # connections, open a new ConnectionPool. + pool = self.pools.get(pool_key) + if pool: + return pool + + # Make a fresh ConnectionPool of the desired type + scheme = request_context["scheme"] + host = request_context["host"] + port = request_context["port"] + pool = self._new_pool(scheme, host, port, request_context=request_context) + self.pools[pool_key] = pool + + return pool + + def connection_from_url(self, url, pool_kwargs=None): + """ + Similar to :func:`urllib3.connectionpool.connection_from_url`. + + If ``pool_kwargs`` is not provided and a new pool needs to be + constructed, ``self.connection_pool_kw`` is used to initialize + the :class:`urllib3.connectionpool.ConnectionPool`. 
If ``pool_kwargs`` + is provided, it is used instead. Note that if a new pool does not + need to be created for the request, the provided ``pool_kwargs`` are + not used. + """ + u = parse_url(url) + return self.connection_from_host( + u.host, port=u.port, scheme=u.scheme, pool_kwargs=pool_kwargs + ) + + def _merge_pool_kwargs(self, override): + """ + Merge a dictionary of override values for self.connection_pool_kw. + + This does not modify self.connection_pool_kw and returns a new dict. + Any keys in the override dictionary with a value of ``None`` are + removed from the merged dictionary. + """ + base_pool_kwargs = self.connection_pool_kw.copy() + if override: + for key, value in override.items(): + if value is None: + try: + del base_pool_kwargs[key] + except KeyError: + pass + else: + base_pool_kwargs[key] = value + return base_pool_kwargs + + def _proxy_requires_url_absolute_form(self, parsed_url): + """ + Indicates if the proxy requires the complete destination URL in the + request. Normally this is only needed when not using an HTTP CONNECT + tunnel. + """ + if self.proxy is None: + return False + + return not connection_requires_http_tunnel( + self.proxy, self.proxy_config, parsed_url.scheme + ) + + def _validate_proxy_scheme_url_selection(self, url_scheme): + """ + Validates that were not attempting to do TLS in TLS connections on + Python2 or with unsupported SSL implementations. + """ + if self.proxy is None or url_scheme != "https": + return + + if self.proxy.scheme != "https": + return + + if six.PY2 and not self.proxy_config.use_forwarding_for_https: + raise ProxySchemeUnsupported( + "Contacting HTTPS destinations through HTTPS proxies " + "'via CONNECT tunnels' is not supported in Python 2" + ) + + def urlopen(self, method, url, redirect=True, **kw): + """ + Same as :meth:`urllib3.HTTPConnectionPool.urlopen` + with custom cross-host redirect logic and only sends the request-uri + portion of the ``url``. + + The given ``url`` parameter must be absolute, such that an appropriate + :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it. + """ + u = parse_url(url) + self._validate_proxy_scheme_url_selection(u.scheme) + + conn = self.connection_from_host(u.host, port=u.port, scheme=u.scheme) + + kw["assert_same_host"] = False + kw["redirect"] = False + + if "headers" not in kw: + kw["headers"] = self.headers.copy() + + if self._proxy_requires_url_absolute_form(u): + response = conn.urlopen(method, url, **kw) + else: + response = conn.urlopen(method, u.request_uri, **kw) + + redirect_location = redirect and response.get_redirect_location() + if not redirect_location: + return response + + # Support relative URLs for redirecting. + redirect_location = urljoin(url, redirect_location) + + # RFC 7231, Section 6.4.4 + if response.status == 303: + method = "GET" + + retries = kw.get("retries") + if not isinstance(retries, Retry): + retries = Retry.from_int(retries, redirect=redirect) + + # Strip headers marked as unsafe to forward to the redirected location. + # Check remove_headers_on_redirect to avoid a potential network call within + # conn.is_same_host() which may use socket.gethostbyname() in the future. 
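+        # (By default ``Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT`` lists only
+        # the ``Authorization`` header, so credentials are not forwarded to a
+        # different host.)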
+ if retries.remove_headers_on_redirect and not conn.is_same_host( + redirect_location + ): + headers = list(six.iterkeys(kw["headers"])) + for header in headers: + if header.lower() in retries.remove_headers_on_redirect: + kw["headers"].pop(header, None) + + try: + retries = retries.increment(method, url, response=response, _pool=conn) + except MaxRetryError: + if retries.raise_on_redirect: + response.drain_conn() + raise + return response + + kw["retries"] = retries + kw["redirect"] = redirect + + log.info("Redirecting %s -> %s", url, redirect_location) + + response.drain_conn() + return self.urlopen(method, redirect_location, **kw) + + +class ProxyManager(PoolManager): + """ + Behaves just like :class:`PoolManager`, but sends all requests through + the defined proxy, using the CONNECT method for HTTPS URLs. + + :param proxy_url: + The URL of the proxy to be used. + + :param proxy_headers: + A dictionary containing headers that will be sent to the proxy. In case + of HTTP they are being sent with each request, while in the + HTTPS/CONNECT case they are sent only once. Could be used for proxy + authentication. + + :param proxy_ssl_context: + The proxy SSL context is used to establish the TLS connection to the + proxy when using HTTPS proxies. + + :param use_forwarding_for_https: + (Defaults to False) If set to True will forward requests to the HTTPS + proxy to be made on behalf of the client instead of creating a TLS + tunnel via the CONNECT method. **Enabling this flag means that request + and response headers and content will be visible from the HTTPS proxy** + whereas tunneling keeps request and response headers and content + private. IP address, target hostname, SNI, and port are always visible + to an HTTPS proxy even when this flag is disabled. 
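+
+        On Python 2, contacting HTTPS destinations through an HTTPS proxy
+        requires this flag; otherwise ``ProxySchemeUnsupported`` is raised
+        (see ``_validate_proxy_scheme_url_selection``).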
+ + Example: + >>> proxy = urllib3.ProxyManager('http://localhost:3128/') + >>> r1 = proxy.request('GET', 'http://google.com/') + >>> r2 = proxy.request('GET', 'http://httpbin.org/') + >>> len(proxy.pools) + 1 + >>> r3 = proxy.request('GET', 'https://httpbin.org/') + >>> r4 = proxy.request('GET', 'https://twitter.com/') + >>> len(proxy.pools) + 3 + + """ + + def __init__( + self, + proxy_url, + num_pools=10, + headers=None, + proxy_headers=None, + proxy_ssl_context=None, + use_forwarding_for_https=False, + **connection_pool_kw + ): + + if isinstance(proxy_url, HTTPConnectionPool): + proxy_url = "%s://%s:%i" % ( + proxy_url.scheme, + proxy_url.host, + proxy_url.port, + ) + proxy = parse_url(proxy_url) + + if proxy.scheme not in ("http", "https"): + raise ProxySchemeUnknown(proxy.scheme) + + if not proxy.port: + port = port_by_scheme.get(proxy.scheme, 80) + proxy = proxy._replace(port=port) + + self.proxy = proxy + self.proxy_headers = proxy_headers or {} + self.proxy_ssl_context = proxy_ssl_context + self.proxy_config = ProxyConfig(proxy_ssl_context, use_forwarding_for_https) + + connection_pool_kw["_proxy"] = self.proxy + connection_pool_kw["_proxy_headers"] = self.proxy_headers + connection_pool_kw["_proxy_config"] = self.proxy_config + + super(ProxyManager, self).__init__(num_pools, headers, **connection_pool_kw) + + def connection_from_host(self, host, port=None, scheme="http", pool_kwargs=None): + if scheme == "https": + return super(ProxyManager, self).connection_from_host( + host, port, scheme, pool_kwargs=pool_kwargs + ) + + return super(ProxyManager, self).connection_from_host( + self.proxy.host, self.proxy.port, self.proxy.scheme, pool_kwargs=pool_kwargs + ) + + def _set_proxy_headers(self, url, headers=None): + """ + Sets headers needed by proxies: specifically, the Accept and Host + headers. Only sets headers not provided by the user. + """ + headers_ = {"Accept": "*/*"} + + netloc = parse_url(url).netloc + if netloc: + headers_["Host"] = netloc + + if headers: + headers_.update(headers) + return headers_ + + def urlopen(self, method, url, redirect=True, **kw): + "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute." + u = parse_url(url) + if not connection_requires_http_tunnel(self.proxy, self.proxy_config, u.scheme): + # For connections using HTTP CONNECT, httplib sets the necessary + # headers on the CONNECT to the proxy. If we're not using CONNECT, + # we'll definitely need to set 'Host' at the very least. + headers = kw.get("headers", self.headers) + kw["headers"] = self._set_proxy_headers(url, headers) + + return super(ProxyManager, self).urlopen(method, url, redirect=redirect, **kw) + + +def proxy_from_url(url, **kw): + return ProxyManager(proxy_url=url, **kw) diff --git a/src/snowflake/connector/vendored/urllib3/request.py b/src/snowflake/connector/vendored/urllib3/request.py new file mode 100644 index 000000000..398386a5b --- /dev/null +++ b/src/snowflake/connector/vendored/urllib3/request.py @@ -0,0 +1,170 @@ +from __future__ import absolute_import + +from .filepost import encode_multipart_formdata +from .packages.six.moves.urllib.parse import urlencode + +__all__ = ["RequestMethods"] + + +class RequestMethods(object): + """ + Convenience mixin for classes who implement a :meth:`urlopen` method, such + as :class:`urllib3.HTTPConnectionPool` and + :class:`urllib3.PoolManager`. + + Provides behavior for making common types of HTTP request methods and + decides which type of request field encoding to use. 
+ + Specifically, + + :meth:`.request_encode_url` is for sending requests whose fields are + encoded in the URL (such as GET, HEAD, DELETE). + + :meth:`.request_encode_body` is for sending requests whose fields are + encoded in the *body* of the request using multipart or www-form-urlencoded + (such as for POST, PUT, PATCH). + + :meth:`.request` is for making any kind of request, it will look up the + appropriate encoding format and use one of the above two methods to make + the request. + + Initializer parameters: + + :param headers: + Headers to include with all requests, unless other headers are given + explicitly. + """ + + _encode_url_methods = {"DELETE", "GET", "HEAD", "OPTIONS"} + + def __init__(self, headers=None): + self.headers = headers or {} + + def urlopen( + self, + method, + url, + body=None, + headers=None, + encode_multipart=True, + multipart_boundary=None, + **kw + ): # Abstract + raise NotImplementedError( + "Classes extending RequestMethods must implement " + "their own ``urlopen`` method." + ) + + def request(self, method, url, fields=None, headers=None, **urlopen_kw): + """ + Make a request using :meth:`urlopen` with the appropriate encoding of + ``fields`` based on the ``method`` used. + + This is a convenience method that requires the least amount of manual + effort. It can be used in most situations, while still having the + option to drop down to more specific methods when necessary, such as + :meth:`request_encode_url`, :meth:`request_encode_body`, + or even the lowest level :meth:`urlopen`. + """ + method = method.upper() + + urlopen_kw["request_url"] = url + + if method in self._encode_url_methods: + return self.request_encode_url( + method, url, fields=fields, headers=headers, **urlopen_kw + ) + else: + return self.request_encode_body( + method, url, fields=fields, headers=headers, **urlopen_kw + ) + + def request_encode_url(self, method, url, fields=None, headers=None, **urlopen_kw): + """ + Make a request using :meth:`urlopen` with the ``fields`` encoded in + the url. This is useful for request methods like GET, HEAD, DELETE, etc. + """ + if headers is None: + headers = self.headers + + extra_kw = {"headers": headers} + extra_kw.update(urlopen_kw) + + if fields: + url += "?" + urlencode(fields) + + return self.urlopen(method, url, **extra_kw) + + def request_encode_body( + self, + method, + url, + fields=None, + headers=None, + encode_multipart=True, + multipart_boundary=None, + **urlopen_kw + ): + """ + Make a request using :meth:`urlopen` with the ``fields`` encoded in + the body. This is useful for request methods like POST, PUT, PATCH, etc. + + When ``encode_multipart=True`` (default), then + :func:`urllib3.encode_multipart_formdata` is used to encode + the payload with the appropriate content type. Otherwise + :func:`urllib.parse.urlencode` is used with the + 'application/x-www-form-urlencoded' content type. + + Multipart encoding must be used when posting files, and it's reasonably + safe to use it in other times too. However, it may break request + signing, such as with OAuth. + + Supports an optional ``fields`` parameter of key/value strings AND + key/filetuple. A filetuple is a (filename, data, MIME type) tuple where + the MIME type is optional. 
For example:: + + fields = { + 'foo': 'bar', + 'fakefile': ('foofile.txt', 'contents of foofile'), + 'realfile': ('barfile.txt', open('realfile').read()), + 'typedfile': ('bazfile.bin', open('bazfile').read(), + 'image/jpeg'), + 'nonamefile': 'contents of nonamefile field', + } + + When uploading a file, providing a filename (the first parameter of the + tuple) is optional but recommended to best mimic behavior of browsers. + + Note that if ``headers`` are supplied, the 'Content-Type' header will + be overwritten because it depends on the dynamic random boundary string + which is used to compose the body of the request. The random boundary + string can be explicitly set with the ``multipart_boundary`` parameter. + """ + if headers is None: + headers = self.headers + + extra_kw = {"headers": {}} + + if fields: + if "body" in urlopen_kw: + raise TypeError( + "request got values for both 'fields' and 'body', can only specify one." + ) + + if encode_multipart: + body, content_type = encode_multipart_formdata( + fields, boundary=multipart_boundary + ) + else: + body, content_type = ( + urlencode(fields), + "application/x-www-form-urlencoded", + ) + + extra_kw["body"] = body + extra_kw["headers"] = {"Content-Type": content_type} + + extra_kw["headers"].update(headers) + extra_kw.update(urlopen_kw) + + return self.urlopen(method, url, **extra_kw) diff --git a/src/snowflake/connector/vendored/urllib3/response.py b/src/snowflake/connector/vendored/urllib3/response.py new file mode 100644 index 000000000..38693f4fc --- /dev/null +++ b/src/snowflake/connector/vendored/urllib3/response.py @@ -0,0 +1,821 @@ +from __future__ import absolute_import + +import io +import logging +import zlib +from contextlib import contextmanager +from socket import error as SocketError +from socket import timeout as SocketTimeout + +try: + import brotli +except ImportError: + brotli = None + +from ._collections import HTTPHeaderDict +from .connection import BaseSSLError, HTTPException +from .exceptions import ( + BodyNotHttplibCompatible, + DecodeError, + HTTPError, + IncompleteRead, + InvalidChunkLength, + InvalidHeader, + ProtocolError, + ReadTimeoutError, + ResponseNotChunked, + SSLError, +) +from .packages import six +from .util.response import is_fp_closed, is_response_to_head + +log = logging.getLogger(__name__) + + +class DeflateDecoder(object): + def __init__(self): + self._first_try = True + self._data = b"" + self._obj = zlib.decompressobj() + + def __getattr__(self, name): + return getattr(self._obj, name) + + def decompress(self, data): + if not data: + return data + + if not self._first_try: + return self._obj.decompress(data) + + self._data += data + try: + decompressed = self._obj.decompress(data) + if decompressed: + self._first_try = False + self._data = None + return decompressed + except zlib.error: + self._first_try = False + self._obj = zlib.decompressobj(-zlib.MAX_WBITS) + try: + return self.decompress(self._data) + finally: + self._data = None + + +class GzipDecoderState(object): + + FIRST_MEMBER = 0 + OTHER_MEMBERS = 1 + SWALLOW_DATA = 2 + + +class GzipDecoder(object): + def __init__(self): + self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS) + self._state = GzipDecoderState.FIRST_MEMBER + + def __getattr__(self, name): + return getattr(self._obj, name) + + def decompress(self, data): + ret = bytearray() + if self._state == GzipDecoderState.SWALLOW_DATA or not data: + return bytes(ret) + while True: + try: + ret += self._obj.decompress(data) + except zlib.error: + previous_state = self._state + # 
Ignore data after the first error + self._state = GzipDecoderState.SWALLOW_DATA + if previous_state == GzipDecoderState.OTHER_MEMBERS: + # Allow trailing garbage acceptable in other gzip clients + return bytes(ret) + raise + data = self._obj.unused_data + if not data: + return bytes(ret) + self._state = GzipDecoderState.OTHER_MEMBERS + self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS) + + +if brotli is not None: + + class BrotliDecoder(object): + # Supports both 'brotlipy' and 'Brotli' packages + # since they share an import name. The top branches + # are for 'brotlipy' and bottom branches for 'Brotli' + def __init__(self): + self._obj = brotli.Decompressor() + if hasattr(self._obj, "decompress"): + self.decompress = self._obj.decompress + else: + self.decompress = self._obj.process + + def flush(self): + if hasattr(self._obj, "flush"): + return self._obj.flush() + return b"" + + +class MultiDecoder(object): + """ + From RFC7231: + If one or more encodings have been applied to a representation, the + sender that applied the encodings MUST generate a Content-Encoding + header field that lists the content codings in the order in which + they were applied. + """ + + def __init__(self, modes): + self._decoders = [_get_decoder(m.strip()) for m in modes.split(",")] + + def flush(self): + return self._decoders[0].flush() + + def decompress(self, data): + for d in reversed(self._decoders): + data = d.decompress(data) + return data + + +def _get_decoder(mode): + if "," in mode: + return MultiDecoder(mode) + + if mode == "gzip": + return GzipDecoder() + + if brotli is not None and mode == "br": + return BrotliDecoder() + + return DeflateDecoder() + + +class HTTPResponse(io.IOBase): + """ + HTTP Response container. + + Backwards-compatible with :class:`http.client.HTTPResponse` but the response ``body`` is + loaded and decoded on-demand when the ``data`` property is accessed. This + class is also compatible with the Python standard library's :mod:`io` + module, and can hence be treated as a readable object in the context of that + framework. + + Extra parameters for behaviour not present in :class:`http.client.HTTPResponse`: + + :param preload_content: + If True, the response's body will be preloaded during construction. + + :param decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. + + :param original_response: + When this HTTPResponse wrapper is generated from an :class:`http.client.HTTPResponse` + object, it's convenient to include the original for debug purposes. It's + otherwise unused. + + :param retries: + The retries contains the last :class:`~urllib3.util.retry.Retry` that + was used during the request. + + :param enforce_content_length: + Enforce content length checking. Body returned by server must match + value of Content-Length header, if present. Otherwise, raise error. 
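+
+    A minimal usage sketch (the URL is a placeholder; ``preload_content=False``
+    defers reading the body until ``read()`` or ``stream()`` is called)::
+
+        >>> import urllib3
+        >>> http = urllib3.PoolManager()
+        >>> r = http.request('GET', 'http://example.com/', preload_content=False)
+        >>> data = r.read(1024)
+        >>> r.release_conn()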
+ """ + + CONTENT_DECODERS = ["gzip", "deflate"] + if brotli is not None: + CONTENT_DECODERS += ["br"] + REDIRECT_STATUSES = [301, 302, 303, 307, 308] + + def __init__( + self, + body="", + headers=None, + status=0, + version=0, + reason=None, + strict=0, + preload_content=True, + decode_content=True, + original_response=None, + pool=None, + connection=None, + msg=None, + retries=None, + enforce_content_length=False, + request_method=None, + request_url=None, + auto_close=True, + ): + + if isinstance(headers, HTTPHeaderDict): + self.headers = headers + else: + self.headers = HTTPHeaderDict(headers) + self.status = status + self.version = version + self.reason = reason + self.strict = strict + self.decode_content = decode_content + self.retries = retries + self.enforce_content_length = enforce_content_length + self.auto_close = auto_close + + self._decoder = None + self._body = None + self._fp = None + self._original_response = original_response + self._fp_bytes_read = 0 + self.msg = msg + self._request_url = request_url + + if body and isinstance(body, (six.string_types, bytes)): + self._body = body + + self._pool = pool + self._connection = connection + + if hasattr(body, "read"): + self._fp = body + + # Are we using the chunked-style of transfer encoding? + self.chunked = False + self.chunk_left = None + tr_enc = self.headers.get("transfer-encoding", "").lower() + # Don't incur the penalty of creating a list and then discarding it + encodings = (enc.strip() for enc in tr_enc.split(",")) + if "chunked" in encodings: + self.chunked = True + + # Determine length of response + self.length_remaining = self._init_length(request_method) + + # If requested, preload the body. + if preload_content and not self._body: + self._body = self.read(decode_content=decode_content) + + def get_redirect_location(self): + """ + Should we redirect and where to? + + :returns: Truthy redirect location string if we got a redirect status + code and valid location. ``None`` if redirect status and no + location. ``False`` if not a redirect status code. + """ + if self.status in self.REDIRECT_STATUSES: + return self.headers.get("location") + + return False + + def release_conn(self): + if not self._pool or not self._connection: + return + + self._pool._put_conn(self._connection) + self._connection = None + + def drain_conn(self): + """ + Read and discard any remaining HTTP response data in the response connection. + + Unread data in the HTTPResponse connection blocks the connection from being released back to the pool. + """ + try: + self.read() + except (HTTPError, SocketError, BaseSSLError, HTTPException): + pass + + @property + def data(self): + # For backwards-compat with earlier urllib3 0.4 and earlier. + if self._body: + return self._body + + if self._fp: + return self.read(cache_content=True) + + @property + def connection(self): + return self._connection + + def isclosed(self): + return is_fp_closed(self._fp) + + def tell(self): + """ + Obtain the number of bytes pulled over the wire so far. May differ from + the amount of content returned by :meth:``urllib3.response.HTTPResponse.read`` + if bytes are encoded on the wire (e.g, compressed). + """ + return self._fp_bytes_read + + def _init_length(self, request_method): + """ + Set initial length value for Response content if available. + """ + length = self.headers.get("content-length") + + if length is not None: + if self.chunked: + # This Response will fail with an IncompleteRead if it can't be + # received as chunked. 
This method falls back to attempt reading + # the response before raising an exception. + log.warning( + "Received response with both Content-Length and " + "Transfer-Encoding set. This is expressly forbidden " + "by RFC 7230 sec 3.3.2. Ignoring Content-Length and " + "attempting to process response as Transfer-Encoding: " + "chunked." + ) + return None + + try: + # RFC 7230 section 3.3.2 specifies multiple content lengths can + # be sent in a single Content-Length header + # (e.g. Content-Length: 42, 42). This line ensures the values + # are all valid ints and that as long as the `set` length is 1, + # all values are the same. Otherwise, the header is invalid. + lengths = set([int(val) for val in length.split(",")]) + if len(lengths) > 1: + raise InvalidHeader( + "Content-Length contained multiple " + "unmatching values (%s)" % length + ) + length = lengths.pop() + except ValueError: + length = None + else: + if length < 0: + length = None + + # Convert status to int for comparison + # In some cases, httplib returns a status of "_UNKNOWN" + try: + status = int(self.status) + except ValueError: + status = 0 + + # Check for responses that shouldn't include a body + if status in (204, 304) or 100 <= status < 200 or request_method == "HEAD": + length = 0 + + return length + + def _init_decoder(self): + """ + Set-up the _decoder attribute if necessary. + """ + # Note: content-encoding value should be case-insensitive, per RFC 7230 + # Section 3.2 + content_encoding = self.headers.get("content-encoding", "").lower() + if self._decoder is None: + if content_encoding in self.CONTENT_DECODERS: + self._decoder = _get_decoder(content_encoding) + elif "," in content_encoding: + encodings = [ + e.strip() + for e in content_encoding.split(",") + if e.strip() in self.CONTENT_DECODERS + ] + if len(encodings): + self._decoder = _get_decoder(content_encoding) + + DECODER_ERROR_CLASSES = (IOError, zlib.error) + if brotli is not None: + DECODER_ERROR_CLASSES += (brotli.error,) + + def _decode(self, data, decode_content, flush_decoder): + """ + Decode the data passed in and potentially flush the decoder. + """ + if not decode_content: + return data + + try: + if self._decoder: + data = self._decoder.decompress(data) + except self.DECODER_ERROR_CLASSES as e: + content_encoding = self.headers.get("content-encoding", "").lower() + raise DecodeError( + "Received response with content-encoding: %s, but " + "failed to decode it." % content_encoding, + e, + ) + if flush_decoder: + data += self._flush_decoder() + + return data + + def _flush_decoder(self): + """ + Flushes the decoder. Should only be called if the decoder is actually + being used. + """ + if self._decoder: + buf = self._decoder.decompress(b"") + return buf + self._decoder.flush() + + return b"" + + @contextmanager + def _error_catcher(self): + """ + Catch low-level python exceptions, instead re-raising urllib3 + variants, so that low-level exceptions are not leaked in the + high-level api. + + On exit, release the connection back to the pool. + """ + clean_exit = False + + try: + try: + yield + + except SocketTimeout: + # FIXME: Ideally we'd like to include the url in the ReadTimeoutError but + # there is yet no clean way to get at it from this context. + raise ReadTimeoutError(self._pool, None, "Read timed out.") + + except BaseSSLError as e: + # FIXME: Is there a better way to differentiate between SSLErrors? 
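+                # The exception's message text is the only signal we can
+                # inspect here, so fall back to matching on it.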
+ if "read operation timed out" not in str(e): + # SSL errors related to framing/MAC get wrapped and reraised here + raise SSLError(e) + + raise ReadTimeoutError(self._pool, None, "Read timed out.") + + except (HTTPException, SocketError) as e: + # This includes IncompleteRead. + raise ProtocolError("Connection broken: %r" % e, e) + + # If no exception is thrown, we should avoid cleaning up + # unnecessarily. + clean_exit = True + finally: + # If we didn't terminate cleanly, we need to throw away our + # connection. + if not clean_exit: + # The response may not be closed but we're not going to use it + # anymore so close it now to ensure that the connection is + # released back to the pool. + if self._original_response: + self._original_response.close() + + # Closing the response may not actually be sufficient to close + # everything, so if we have a hold of the connection close that + # too. + if self._connection: + self._connection.close() + + # If we hold the original response but it's closed now, we should + # return the connection back to the pool. + if self._original_response and self._original_response.isclosed(): + self.release_conn() + + def read(self, amt=None, decode_content=None, cache_content=False): + """ + Similar to :meth:`http.client.HTTPResponse.read`, but with two additional + parameters: ``decode_content`` and ``cache_content``. + + :param amt: + How much of the content to read. If specified, caching is skipped + because it doesn't make sense to cache partial content as the full + response. + + :param decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. + + :param cache_content: + If True, will save the returned data such that the same result is + returned despite of the state of the underlying file object. This + is useful if you want the ``.data`` property to continue working + after having ``.read()`` the file object. (Overridden if ``amt`` is + set.) + """ + self._init_decoder() + if decode_content is None: + decode_content = self.decode_content + + if self._fp is None: + return + + flush_decoder = False + fp_closed = getattr(self._fp, "closed", False) + + with self._error_catcher(): + if amt is None: + # cStringIO doesn't like amt=None + data = self._fp.read() if not fp_closed else b"" + flush_decoder = True + else: + cache_content = False + data = self._fp.read(amt) if not fp_closed else b"" + if ( + amt != 0 and not data + ): # Platform-specific: Buggy versions of Python. + # Close the connection when no data is returned + # + # This is redundant to what httplib/http.client _should_ + # already do. However, versions of python released before + # December 15, 2012 (http://bugs.python.org/issue16298) do + # not properly close the connection in all cases. There is + # no harm in redundantly calling close. + self._fp.close() + flush_decoder = True + if self.enforce_content_length and self.length_remaining not in ( + 0, + None, + ): + # This is an edge case that httplib failed to cover due + # to concerns of backward compatibility. We're + # addressing it here to make sure IncompleteRead is + # raised during streaming, so all calls with incorrect + # Content-Length are caught. 
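+                    # (length_remaining was initialized from the
+                    # Content-Length header and decremented as data arrived;
+                    # a nonzero value here means the peer closed early.)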
+ raise IncompleteRead(self._fp_bytes_read, self.length_remaining) + + if data: + self._fp_bytes_read += len(data) + if self.length_remaining is not None: + self.length_remaining -= len(data) + + data = self._decode(data, decode_content, flush_decoder) + + if cache_content: + self._body = data + + return data + + def stream(self, amt=2 ** 16, decode_content=None): + """ + A generator wrapper for the read() method. A call will block until + ``amt`` bytes have been read from the connection or until the + connection is closed. + + :param amt: + How much of the content to read. The generator will return up to + much data per iteration, but may return less. This is particularly + likely when using compressed data. However, the empty string will + never be returned. + + :param decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. + """ + if self.chunked and self.supports_chunked_reads(): + for line in self.read_chunked(amt, decode_content=decode_content): + yield line + else: + while not is_fp_closed(self._fp): + data = self.read(amt=amt, decode_content=decode_content) + + if data: + yield data + + @classmethod + def from_httplib(ResponseCls, r, **response_kw): + """ + Given an :class:`http.client.HTTPResponse` instance ``r``, return a + corresponding :class:`urllib3.response.HTTPResponse` object. + + Remaining parameters are passed to the HTTPResponse constructor, along + with ``original_response=r``. + """ + headers = r.msg + + if not isinstance(headers, HTTPHeaderDict): + if six.PY2: + # Python 2.7 + headers = HTTPHeaderDict.from_httplib(headers) + else: + headers = HTTPHeaderDict(headers.items()) + + # HTTPResponse objects in Python 3 don't have a .strict attribute + strict = getattr(r, "strict", 0) + resp = ResponseCls( + body=r, + headers=headers, + status=r.status, + version=r.version, + reason=r.reason, + strict=strict, + original_response=r, + **response_kw + ) + return resp + + # Backwards-compatibility methods for http.client.HTTPResponse + def getheaders(self): + return self.headers + + def getheader(self, name, default=None): + return self.headers.get(name, default) + + # Backwards compatibility for http.cookiejar + def info(self): + return self.headers + + # Overrides from io.IOBase + def close(self): + if not self.closed: + self._fp.close() + + if self._connection: + self._connection.close() + + if not self.auto_close: + io.IOBase.close(self) + + @property + def closed(self): + if not self.auto_close: + return io.IOBase.closed.__get__(self) + elif self._fp is None: + return True + elif hasattr(self._fp, "isclosed"): + return self._fp.isclosed() + elif hasattr(self._fp, "closed"): + return self._fp.closed + else: + return True + + def fileno(self): + if self._fp is None: + raise IOError("HTTPResponse has no file to get a fileno from") + elif hasattr(self._fp, "fileno"): + return self._fp.fileno() + else: + raise IOError( + "The file-like object this HTTPResponse is wrapped " + "around has no file descriptor" + ) + + def flush(self): + if ( + self._fp is not None + and hasattr(self._fp, "flush") + and not getattr(self._fp, "closed", False) + ): + return self._fp.flush() + + def readable(self): + # This method is required for `io` module compatibility. + return True + + def readinto(self, b): + # This method is required for `io` module compatibility. 
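+        # Read up to len(b) bytes into the caller-supplied buffer and
+        # return the number of bytes copied; 0 signals end of stream.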
+ temp = self.read(len(b)) + if len(temp) == 0: + return 0 + else: + b[: len(temp)] = temp + return len(temp) + + def supports_chunked_reads(self): + """ + Checks if the underlying file-like object looks like a + :class:`http.client.HTTPResponse` object. We do this by testing for + the fp attribute. If it is present we assume it returns raw chunks as + processed by read_chunked(). + """ + return hasattr(self._fp, "fp") + + def _update_chunk_length(self): + # First, we'll figure out length of a chunk and then + # we'll try to read it from socket. + if self.chunk_left is not None: + return + line = self._fp.fp.readline() + line = line.split(b";", 1)[0] + try: + self.chunk_left = int(line, 16) + except ValueError: + # Invalid chunked protocol response, abort. + self.close() + raise InvalidChunkLength(self, line) + + def _handle_chunk(self, amt): + returned_chunk = None + if amt is None: + chunk = self._fp._safe_read(self.chunk_left) + returned_chunk = chunk + self._fp._safe_read(2) # Toss the CRLF at the end of the chunk. + self.chunk_left = None + elif amt < self.chunk_left: + value = self._fp._safe_read(amt) + self.chunk_left = self.chunk_left - amt + returned_chunk = value + elif amt == self.chunk_left: + value = self._fp._safe_read(amt) + self._fp._safe_read(2) # Toss the CRLF at the end of the chunk. + self.chunk_left = None + returned_chunk = value + else: # amt > self.chunk_left + returned_chunk = self._fp._safe_read(self.chunk_left) + self._fp._safe_read(2) # Toss the CRLF at the end of the chunk. + self.chunk_left = None + return returned_chunk + + def read_chunked(self, amt=None, decode_content=None): + """ + Similar to :meth:`HTTPResponse.read`, but with an additional + parameter: ``decode_content``. + + :param amt: + How much of the content to read. If specified, caching is skipped + because it doesn't make sense to cache partial content as the full + response. + + :param decode_content: + If True, will attempt to decode the body based on the + 'content-encoding' header. + """ + self._init_decoder() + # FIXME: Rewrite this method and make it a class with a better structured logic. + if not self.chunked: + raise ResponseNotChunked( + "Response is not chunked. " + "Header 'transfer-encoding: chunked' is missing." + ) + if not self.supports_chunked_reads(): + raise BodyNotHttplibCompatible( + "Body should be http.client.HTTPResponse like. " + "It should have have an fp attribute which returns raw chunks." + ) + + with self._error_catcher(): + # Don't bother reading the body of a HEAD request. + if self._original_response and is_response_to_head(self._original_response): + self._original_response.close() + return + + # If a response is already read and closed + # then return immediately. + if self._fp.fp is None: + return + + while True: + self._update_chunk_length() + if self.chunk_left == 0: + break + chunk = self._handle_chunk(amt) + decoded = self._decode( + chunk, decode_content=decode_content, flush_decoder=False + ) + if decoded: + yield decoded + + if decode_content: + # On CPython and PyPy, we should never need to flush the + # decoder. However, on Jython we *might* need to, so + # lets defensively do it anyway. + decoded = self._flush_decoder() + if decoded: # Platform-specific: Jython. + yield decoded + + # Chunk content ends with \r\n: discard it. + while True: + line = self._fp.fp.readline() + if not line: + # Some sites may not end with '\r\n'. + break + if line == b"\r\n": + break + + # We read everything; close the "file". 
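+            # (Releasing the connection back to the pool is handled by
+            # _error_catcher when this with-block exits.)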
+ if self._original_response: + self._original_response.close() + + def geturl(self): + """ + Returns the URL that was the source of this response. + If the request that generated this response redirected, this method + will return the final redirect location. + """ + if self.retries is not None and len(self.retries.history): + return self.retries.history[-1].redirect_location + else: + return self._request_url + + def __iter__(self): + buffer = [] + for chunk in self.stream(decode_content=True): + if b"\n" in chunk: + chunk = chunk.split(b"\n") + yield b"".join(buffer) + chunk[0] + b"\n" + for x in chunk[1:-1]: + yield x + b"\n" + if chunk[-1]: + buffer = [chunk[-1]] + else: + buffer = [] + else: + buffer.append(chunk) + if buffer: + yield b"".join(buffer) diff --git a/src/snowflake/connector/vendored/urllib3/util/__init__.py b/src/snowflake/connector/vendored/urllib3/util/__init__.py new file mode 100644 index 000000000..4547fc522 --- /dev/null +++ b/src/snowflake/connector/vendored/urllib3/util/__init__.py @@ -0,0 +1,49 @@ +from __future__ import absolute_import + +# For backwards compatibility, provide imports that used to be here. +from .connection import is_connection_dropped +from .request import SKIP_HEADER, SKIPPABLE_HEADERS, make_headers +from .response import is_fp_closed +from .retry import Retry +from .ssl_ import ( + ALPN_PROTOCOLS, + HAS_SNI, + IS_PYOPENSSL, + IS_SECURETRANSPORT, + PROTOCOL_TLS, + SSLContext, + assert_fingerprint, + resolve_cert_reqs, + resolve_ssl_version, + ssl_wrap_socket, +) +from .timeout import Timeout, current_time +from .url import Url, get_host, parse_url, split_first +from .wait import wait_for_read, wait_for_write + +__all__ = ( + "HAS_SNI", + "IS_PYOPENSSL", + "IS_SECURETRANSPORT", + "SSLContext", + "PROTOCOL_TLS", + "ALPN_PROTOCOLS", + "Retry", + "Timeout", + "Url", + "assert_fingerprint", + "current_time", + "is_connection_dropped", + "is_fp_closed", + "get_host", + "parse_url", + "make_headers", + "resolve_cert_reqs", + "resolve_ssl_version", + "split_first", + "ssl_wrap_socket", + "wait_for_read", + "wait_for_write", + "SKIP_HEADER", + "SKIPPABLE_HEADERS", +) diff --git a/src/snowflake/connector/vendored/urllib3/util/connection.py b/src/snowflake/connector/vendored/urllib3/util/connection.py new file mode 100644 index 000000000..bdc240c50 --- /dev/null +++ b/src/snowflake/connector/vendored/urllib3/util/connection.py @@ -0,0 +1,150 @@ +from __future__ import absolute_import + +import socket + +from urllib3.exceptions import LocationParseError + +from ..contrib import _appengine_environ +from ..packages import six +from .wait import NoWayToWaitForSocketError, wait_for_read + + +def is_connection_dropped(conn): # Platform-specific + """ + Returns True if the connection is dropped and should be closed. + + :param conn: + :class:`http.client.HTTPConnection` object. + + Note: For platforms like AppEngine, this will always return ``False`` to + let the platform handle connection recycling transparently for us. + """ + sock = getattr(conn, "sock", False) + if sock is False: # Platform-specific: AppEngine + return False + if sock is None: # Connection already closed (such as by httplib). + return True + try: + # Returns True if readable, which here means it's been dropped + return wait_for_read(sock, timeout=0.0) + except NoWayToWaitForSocketError: # Platform-specific: AppEngine + return False + + +# This function is copied from socket.py in the Python 2.7 standard +# library test suite. Added to its signature is only `socket_options`. 
+# One additional modification is that we avoid binding to IPv6 servers
+# discovered in DNS if the system doesn't have IPv6 functionality.
+def create_connection(
+    address,
+    timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
+    source_address=None,
+    socket_options=None,
+):
+    """Connect to *address* and return the socket object.
+
+    Convenience function. Connect to *address* (a 2-tuple ``(host,
+    port)``) and return the socket object. Passing the optional
+    *timeout* parameter will set the timeout on the socket instance
+    before attempting to connect. If no *timeout* is supplied, the
+    global default timeout setting returned by :func:`socket.getdefaulttimeout`
+    is used. If *source_address* is set it must be a tuple of (host, port)
+    for the socket to bind as a source address before making the connection.
+    A host of '' or port 0 tells the OS to use the default.
+    """
+
+    host, port = address
+    if host.startswith("["):
+        host = host.strip("[]")
+    err = None
+
+    # Using the value from allowed_gai_family() in the context of getaddrinfo lets
+    # us select whether to work with IPv4 DNS records, IPv6 records, or both.
+    # The original create_connection function always returns all records.
+    family = allowed_gai_family()
+
+    try:
+        host.encode("idna")
+    except UnicodeError:
+        return six.raise_from(
+            LocationParseError(u"'%s', label empty or too long" % host), None
+        )
+
+    for res in socket.getaddrinfo(host, port, family, socket.SOCK_STREAM):
+        af, socktype, proto, canonname, sa = res
+        sock = None
+        try:
+            sock = socket.socket(af, socktype, proto)
+
+            # If provided, set socket level options before connecting.
+            _set_socket_options(sock, socket_options)
+
+            if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
+                sock.settimeout(timeout)
+            if source_address:
+                sock.bind(source_address)
+            sock.connect(sa)
+            return sock
+
+        except socket.error as e:
+            err = e
+            if sock is not None:
+                sock.close()
+                sock = None
+
+    if err is not None:
+        raise err
+
+    raise socket.error("getaddrinfo returns an empty list")
+
+
+def _set_socket_options(sock, options):
+    if options is None:
+        return
+
+    for opt in options:
+        sock.setsockopt(*opt)
+
+
+def allowed_gai_family():
+    """This function is designed to work in the context of
+    getaddrinfo, where family=socket.AF_UNSPEC is the default and
+    will perform a DNS search for both IPv6 and IPv4 records."""
+
+    family = socket.AF_INET
+    if HAS_IPV6:
+        family = socket.AF_UNSPEC
+    return family
+
+
+def _has_ipv6(host):
+    """Returns True if the system can bind an IPv6 address."""
+    sock = None
+    has_ipv6 = False
+
+    # App Engine doesn't support IPV6 sockets and actually has a quota on the
+    # number of sockets that can be used, so just early out here instead of
+    # creating a socket needlessly.
+    # See https://github.com/urllib3/urllib3/issues/1446
+    if _appengine_environ.is_appengine_sandbox():
+        return False
+
+    if socket.has_ipv6:
+        # has_ipv6 returns true if cPython was compiled with IPv6 support.
+        # It does not tell us if the system has IPv6 support enabled. To
+        # determine that we must bind to an IPv6 address.
+ # https://github.com/urllib3/urllib3/pull/611 + # https://bugs.python.org/issue658327 + try: + sock = socket.socket(socket.AF_INET6) + sock.bind((host, 0)) + has_ipv6 = True + except Exception: + pass + + if sock: + sock.close() + return has_ipv6 + + +HAS_IPV6 = _has_ipv6("::1") diff --git a/src/snowflake/connector/vendored/urllib3/util/proxy.py b/src/snowflake/connector/vendored/urllib3/util/proxy.py new file mode 100644 index 000000000..34f884d5b --- /dev/null +++ b/src/snowflake/connector/vendored/urllib3/util/proxy.py @@ -0,0 +1,56 @@ +from .ssl_ import create_urllib3_context, resolve_cert_reqs, resolve_ssl_version + + +def connection_requires_http_tunnel( + proxy_url=None, proxy_config=None, destination_scheme=None +): + """ + Returns True if the connection requires an HTTP CONNECT through the proxy. + + :param URL proxy_url: + URL of the proxy. + :param ProxyConfig proxy_config: + Proxy configuration from poolmanager.py + :param str destination_scheme: + The scheme of the destination. (i.e https, http, etc) + """ + # If we're not using a proxy, no way to use a tunnel. + if proxy_url is None: + return False + + # HTTP destinations never require tunneling, we always forward. + if destination_scheme == "http": + return False + + # Support for forwarding with HTTPS proxies and HTTPS destinations. + if ( + proxy_url.scheme == "https" + and proxy_config + and proxy_config.use_forwarding_for_https + ): + return False + + # Otherwise always use a tunnel. + return True + + +def create_proxy_ssl_context( + ssl_version, cert_reqs, ca_certs=None, ca_cert_dir=None, ca_cert_data=None +): + """ + Generates a default proxy ssl context if one hasn't been provided by the + user. + """ + ssl_context = create_urllib3_context( + ssl_version=resolve_ssl_version(ssl_version), + cert_reqs=resolve_cert_reqs(cert_reqs), + ) + if ( + not ca_certs + and not ca_cert_dir + and not ca_cert_data + and hasattr(ssl_context, "load_default_certs") + ): + ssl_context.load_default_certs() + + return ssl_context diff --git a/src/snowflake/connector/vendored/urllib3/util/queue.py b/src/snowflake/connector/vendored/urllib3/util/queue.py new file mode 100644 index 000000000..41784104e --- /dev/null +++ b/src/snowflake/connector/vendored/urllib3/util/queue.py @@ -0,0 +1,22 @@ +import collections + +from ..packages import six +from ..packages.six.moves import queue + +if six.PY2: + # Queue is imported for side effects on MS Windows. See issue #229. + import Queue as _unused_module_Queue # noqa: F401 + + +class LifoQueue(queue.Queue): + def _init(self, _): + self.queue = collections.deque() + + def _qsize(self, len=len): + return len(self.queue) + + def _put(self, item): + self.queue.append(item) + + def _get(self): + return self.queue.pop() diff --git a/src/snowflake/connector/vendored/urllib3/util/request.py b/src/snowflake/connector/vendored/urllib3/util/request.py new file mode 100644 index 000000000..25103383e --- /dev/null +++ b/src/snowflake/connector/vendored/urllib3/util/request.py @@ -0,0 +1,143 @@ +from __future__ import absolute_import + +from base64 import b64encode + +from ..exceptions import UnrewindableBodyError +from ..packages.six import b, integer_types + +# Pass as a value within ``headers`` to skip +# emitting some HTTP headers that are added automatically. +# The only headers that are supported are ``Accept-Encoding``, +# ``Host``, and ``User-Agent``. 
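+#
+# For example, to suppress the default ``User-Agent`` on a single request
+# (``http`` here is a hypothetical ``PoolManager`` instance):
+#
+#     http.request("GET", "https://example.com/", headers={"User-Agent": SKIP_HEADER})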
+SKIP_HEADER = "@@@SKIP_HEADER@@@" +SKIPPABLE_HEADERS = frozenset(["accept-encoding", "host", "user-agent"]) + +ACCEPT_ENCODING = "gzip,deflate" +try: + import brotli as _unused_module_brotli # noqa: F401 +except ImportError: + pass +else: + ACCEPT_ENCODING += ",br" + +_FAILEDTELL = object() + + +def make_headers( + keep_alive=None, + accept_encoding=None, + user_agent=None, + basic_auth=None, + proxy_basic_auth=None, + disable_cache=None, +): + """ + Shortcuts for generating request headers. + + :param keep_alive: + If ``True``, adds 'connection: keep-alive' header. + + :param accept_encoding: + Can be a boolean, list, or string. + ``True`` translates to 'gzip,deflate'. + List will get joined by comma. + String will be used as provided. + + :param user_agent: + String representing the user-agent you want, such as + "python-urllib3/0.6" + + :param basic_auth: + Colon-separated username:password string for 'authorization: basic ...' + auth header. + + :param proxy_basic_auth: + Colon-separated username:password string for 'proxy-authorization: basic ...' + auth header. + + :param disable_cache: + If ``True``, adds 'cache-control: no-cache' header. + + Example:: + + >>> make_headers(keep_alive=True, user_agent="Batman/1.0") + {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'} + >>> make_headers(accept_encoding=True) + {'accept-encoding': 'gzip,deflate'} + """ + headers = {} + if accept_encoding: + if isinstance(accept_encoding, str): + pass + elif isinstance(accept_encoding, list): + accept_encoding = ",".join(accept_encoding) + else: + accept_encoding = ACCEPT_ENCODING + headers["accept-encoding"] = accept_encoding + + if user_agent: + headers["user-agent"] = user_agent + + if keep_alive: + headers["connection"] = "keep-alive" + + if basic_auth: + headers["authorization"] = "Basic " + b64encode(b(basic_auth)).decode("utf-8") + + if proxy_basic_auth: + headers["proxy-authorization"] = "Basic " + b64encode( + b(proxy_basic_auth) + ).decode("utf-8") + + if disable_cache: + headers["cache-control"] = "no-cache" + + return headers + + +def set_file_position(body, pos): + """ + If a position is provided, move file to that point. + Otherwise, we'll attempt to record a position for future use. + """ + if pos is not None: + rewind_body(body, pos) + elif getattr(body, "tell", None) is not None: + try: + pos = body.tell() + except (IOError, OSError): + # This differentiates from None, allowing us to catch + # a failed `tell()` later when trying to rewind the body. + pos = _FAILEDTELL + + return pos + + +def rewind_body(body, body_pos): + """ + Attempt to rewind body to a certain position. + Primarily used for request redirects and retries. + + :param body: + File-like object that supports seek. + + :param int pos: + Position to seek to in file. + """ + body_seek = getattr(body, "seek", None) + if body_seek is not None and isinstance(body_pos, integer_types): + try: + body_seek(body_pos) + except (IOError, OSError): + raise UnrewindableBodyError( + "An error occurred when rewinding request body for redirect/retry." + ) + elif body_pos is _FAILEDTELL: + raise UnrewindableBodyError( + "Unable to record file position for rewinding " + "request body during a redirect/retry." + ) + else: + raise ValueError( + "body_pos must be of type integer, instead it was %s." 
% type(body_pos) + ) diff --git a/src/snowflake/connector/vendored/urllib3/util/response.py b/src/snowflake/connector/vendored/urllib3/util/response.py new file mode 100644 index 000000000..5ea609cce --- /dev/null +++ b/src/snowflake/connector/vendored/urllib3/util/response.py @@ -0,0 +1,107 @@ +from __future__ import absolute_import + +from email.errors import MultipartInvariantViolationDefect, StartBoundaryNotFoundDefect + +from ..exceptions import HeaderParsingError +from ..packages.six.moves import http_client as httplib + + +def is_fp_closed(obj): + """ + Checks whether a given file-like object is closed. + + :param obj: + The file-like object to check. + """ + + try: + # Check `isclosed()` first, in case Python3 doesn't set `closed`. + # GH Issue #928 + return obj.isclosed() + except AttributeError: + pass + + try: + # Check via the official file-like-object way. + return obj.closed + except AttributeError: + pass + + try: + # Check if the object is a container for another file-like object that + # gets released on exhaustion (e.g. HTTPResponse). + return obj.fp is None + except AttributeError: + pass + + raise ValueError("Unable to determine whether fp is closed.") + + +def assert_header_parsing(headers): + """ + Asserts whether all headers have been successfully parsed. + Extracts encountered errors from the result of parsing headers. + + Only works on Python 3. + + :param http.client.HTTPMessage headers: Headers to verify. + + :raises urllib3.exceptions.HeaderParsingError: + If parsing errors are found. + """ + + # This will fail silently if we pass in the wrong kind of parameter. + # To make debugging easier add an explicit check. + if not isinstance(headers, httplib.HTTPMessage): + raise TypeError("expected httplib.Message, got {0}.".format(type(headers))) + + defects = getattr(headers, "defects", None) + get_payload = getattr(headers, "get_payload", None) + + unparsed_data = None + if get_payload: + # get_payload is actually email.message.Message.get_payload; + # we're only interested in the result if it's not a multipart message + if not headers.is_multipart(): + payload = get_payload() + + if isinstance(payload, (bytes, str)): + unparsed_data = payload + if defects: + # httplib is assuming a response body is available + # when parsing headers even when httplib only sends + # header data to parse_headers() This results in + # defects on multipart responses in particular. + # See: https://github.com/urllib3/urllib3/issues/800 + + # So we ignore the following defects: + # - StartBoundaryNotFoundDefect: + # The claimed start boundary was never found. + # - MultipartInvariantViolationDefect: + # A message claimed to be a multipart but no subparts were found. + defects = [ + defect + for defect in defects + if not isinstance( + defect, (StartBoundaryNotFoundDefect, MultipartInvariantViolationDefect) + ) + ] + + if defects or unparsed_data: + raise HeaderParsingError(defects=defects, unparsed_data=unparsed_data) + + +def is_response_to_head(response): + """ + Checks whether the request of a response has been a HEAD-request. + Handles the quirks of AppEngine. + + :param http.client.HTTPResponse response: + Response to check if the originating request + used 'HEAD' as a method. + """ + # FIXME: Can we do this somehow without accessing private httplib _method? 
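+    # On App Engine the method can be stored as a urlfetch integer constant;
+    # 3 appears to correspond to HEAD (hence the comparison below).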
+ method = response._method + if isinstance(method, int): # Platform-specific: Appengine + return method == 3 + return method.upper() == "HEAD" diff --git a/src/snowflake/connector/vendored/urllib3/util/retry.py b/src/snowflake/connector/vendored/urllib3/util/retry.py new file mode 100644 index 000000000..c7dc42f1d --- /dev/null +++ b/src/snowflake/connector/vendored/urllib3/util/retry.py @@ -0,0 +1,602 @@ +from __future__ import absolute_import + +import email +import logging +import re +import time +import warnings +from collections import namedtuple +from itertools import takewhile + +from ..exceptions import ( + ConnectTimeoutError, + InvalidHeader, + MaxRetryError, + ProtocolError, + ProxyError, + ReadTimeoutError, + ResponseError, +) +from ..packages import six + +log = logging.getLogger(__name__) + + +# Data structure for representing the metadata of requests that result in a retry. +RequestHistory = namedtuple( + "RequestHistory", ["method", "url", "error", "status", "redirect_location"] +) + + +# TODO: In v2 we can remove this sentinel and metaclass with deprecated options. +_Default = object() + + +class _RetryMeta(type): + @property + def DEFAULT_METHOD_WHITELIST(cls): + warnings.warn( + "Using 'Retry.DEFAULT_METHOD_WHITELIST' is deprecated and " + "will be removed in v2.0. Use 'Retry.DEFAULT_ALLOWED_METHODS' instead", + DeprecationWarning, + ) + return cls.DEFAULT_ALLOWED_METHODS + + @DEFAULT_METHOD_WHITELIST.setter + def DEFAULT_METHOD_WHITELIST(cls, value): + warnings.warn( + "Using 'Retry.DEFAULT_METHOD_WHITELIST' is deprecated and " + "will be removed in v2.0. Use 'Retry.DEFAULT_ALLOWED_METHODS' instead", + DeprecationWarning, + ) + cls.DEFAULT_ALLOWED_METHODS = value + + @property + def DEFAULT_REDIRECT_HEADERS_BLACKLIST(cls): + warnings.warn( + "Using 'Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST' is deprecated and " + "will be removed in v2.0. Use 'Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT' instead", + DeprecationWarning, + ) + return cls.DEFAULT_REMOVE_HEADERS_ON_REDIRECT + + @DEFAULT_REDIRECT_HEADERS_BLACKLIST.setter + def DEFAULT_REDIRECT_HEADERS_BLACKLIST(cls, value): + warnings.warn( + "Using 'Retry.DEFAULT_REDIRECT_HEADERS_BLACKLIST' is deprecated and " + "will be removed in v2.0. Use 'Retry.DEFAULT_REMOVE_HEADERS_ON_REDIRECT' instead", + DeprecationWarning, + ) + cls.DEFAULT_REMOVE_HEADERS_ON_REDIRECT = value + + +@six.add_metaclass(_RetryMeta) +class Retry(object): + """Retry configuration. + + Each retry attempt will create a new Retry object with updated values, so + they can be safely reused. + + Retries can be defined as a default for a pool:: + + retries = Retry(connect=5, read=2, redirect=5) + http = PoolManager(retries=retries) + response = http.request('GET', 'http://example.com/') + + Or per-request (which overrides the default for the pool):: + + response = http.request('GET', 'http://example.com/', retries=Retry(10)) + + Retries can be disabled by passing ``False``:: + + response = http.request('GET', 'http://example.com/', retries=False) + + Errors will be wrapped in :class:`~urllib3.exceptions.MaxRetryError` unless + retries are disabled, in which case the causing exception will be raised. + + :param int total: + Total number of retries to allow. Takes precedence over other counts. + + Set to ``None`` to remove this constraint and fall back on other + counts. + + Set to ``0`` to fail on the first retry. + + Set to ``False`` to disable and imply ``raise_on_redirect=False``. + + :param int connect: + How many connection-related errors to retry on. 
+ + These are errors raised before the request is sent to the remote server, + which we assume has not triggered the server to process the request. + + Set to ``0`` to fail on the first retry of this type. + + :param int read: + How many times to retry on read errors. + + These errors are raised after the request was sent to the server, so the + request may have side-effects. + + Set to ``0`` to fail on the first retry of this type. + + :param int redirect: + How many redirects to perform. Limit this to avoid infinite redirect + loops. + + A redirect is a HTTP response with a status code 301, 302, 303, 307 or + 308. + + Set to ``0`` to fail on the first retry of this type. + + Set to ``False`` to disable and imply ``raise_on_redirect=False``. + + :param int status: + How many times to retry on bad status codes. + + These are retries made on responses, where status code matches + ``status_forcelist``. + + Set to ``0`` to fail on the first retry of this type. + + :param int other: + How many times to retry on other errors. + + Other errors are errors that are not connect, read, redirect or status errors. + These errors might be raised after the request was sent to the server, so the + request might have side-effects. + + Set to ``0`` to fail on the first retry of this type. + + If ``total`` is not set, it's a good idea to set this to 0 to account + for unexpected edge cases and avoid infinite retry loops. + + :param iterable allowed_methods: + Set of uppercased HTTP method verbs that we should retry on. + + By default, we only retry on methods which are considered to be + idempotent (multiple requests with the same parameters end with the + same state). See :attr:`Retry.DEFAULT_ALLOWED_METHODS`. + + Set to a ``False`` value to retry on any verb. + + .. warning:: + + Previously this parameter was named ``method_whitelist``, that + usage is deprecated in v1.26.0 and will be removed in v2.0. + + :param iterable status_forcelist: + A set of integer HTTP status codes that we should force a retry on. + A retry is initiated if the request method is in ``allowed_methods`` + and the response status code is in ``status_forcelist``. + + By default, this is disabled with ``None``. + + :param float backoff_factor: + A backoff factor to apply between attempts after the second try + (most errors are resolved immediately by a second try without a + delay). urllib3 will sleep for:: + + {backoff factor} * (2 ** ({number of total retries} - 1)) + + seconds. If the backoff_factor is 0.1, then :func:`.sleep` will sleep + for [0.0s, 0.2s, 0.4s, ...] between retries. It will never be longer + than :attr:`Retry.BACKOFF_MAX`. + + By default, backoff is disabled (set to 0). + + :param bool raise_on_redirect: Whether, if the number of redirects is + exhausted, to raise a MaxRetryError, or to return a response with a + response code in the 3xx range. + + :param bool raise_on_status: Similar meaning to ``raise_on_redirect``: + whether we should raise an exception, or return a response, + if status falls in ``status_forcelist`` range and retries have + been exhausted. + + :param tuple history: The history of the request encountered during + each call to :meth:`~Retry.increment`. The list is in the order + the requests occurred. Each list item is of class :class:`RequestHistory`. + + :param bool respect_retry_after_header: + Whether to respect Retry-After header on status codes defined as + :attr:`Retry.RETRY_AFTER_STATUS_CODES` or not. 
+ + :param iterable remove_headers_on_redirect: + Sequence of headers to remove from the request when a response + indicating a redirect is returned before firing off the redirected + request. + """ + + #: Default methods to be used for ``allowed_methods`` + DEFAULT_ALLOWED_METHODS = frozenset( + ["HEAD", "GET", "PUT", "DELETE", "OPTIONS", "TRACE"] + ) + + #: Default status codes to be used for ``status_forcelist`` + RETRY_AFTER_STATUS_CODES = frozenset([413, 429, 503]) + + #: Default headers to be used for ``remove_headers_on_redirect`` + DEFAULT_REMOVE_HEADERS_ON_REDIRECT = frozenset(["Authorization"]) + + #: Maximum backoff time. + BACKOFF_MAX = 120 + + def __init__( + self, + total=10, + connect=None, + read=None, + redirect=None, + status=None, + other=None, + allowed_methods=_Default, + status_forcelist=None, + backoff_factor=0, + raise_on_redirect=True, + raise_on_status=True, + history=None, + respect_retry_after_header=True, + remove_headers_on_redirect=_Default, + # TODO: Deprecated, remove in v2.0 + method_whitelist=_Default, + ): + + if method_whitelist is not _Default: + if allowed_methods is not _Default: + raise ValueError( + "Using both 'allowed_methods' and " + "'method_whitelist' together is not allowed. " + "Instead only use 'allowed_methods'" + ) + warnings.warn( + "Using 'method_whitelist' with Retry is deprecated and " + "will be removed in v2.0. Use 'allowed_methods' instead", + DeprecationWarning, + stacklevel=2, + ) + allowed_methods = method_whitelist + if allowed_methods is _Default: + allowed_methods = self.DEFAULT_ALLOWED_METHODS + if remove_headers_on_redirect is _Default: + remove_headers_on_redirect = self.DEFAULT_REMOVE_HEADERS_ON_REDIRECT + + self.total = total + self.connect = connect + self.read = read + self.status = status + self.other = other + + if redirect is False or total is False: + redirect = 0 + raise_on_redirect = False + + self.redirect = redirect + self.status_forcelist = status_forcelist or set() + self.allowed_methods = allowed_methods + self.backoff_factor = backoff_factor + self.raise_on_redirect = raise_on_redirect + self.raise_on_status = raise_on_status + self.history = history or tuple() + self.respect_retry_after_header = respect_retry_after_header + self.remove_headers_on_redirect = frozenset( + [h.lower() for h in remove_headers_on_redirect] + ) + + def new(self, **kw): + params = dict( + total=self.total, + connect=self.connect, + read=self.read, + redirect=self.redirect, + status=self.status, + other=self.other, + status_forcelist=self.status_forcelist, + backoff_factor=self.backoff_factor, + raise_on_redirect=self.raise_on_redirect, + raise_on_status=self.raise_on_status, + history=self.history, + remove_headers_on_redirect=self.remove_headers_on_redirect, + respect_retry_after_header=self.respect_retry_after_header, + ) + + # TODO: If already given in **kw we use what's given to us + # If not given we need to figure out what to pass. We decide + # based on whether our class has the 'method_whitelist' property + # and if so we pass the deprecated 'method_whitelist' otherwise + # we use 'allowed_methods'. Remove in v2.0 + if "method_whitelist" not in kw and "allowed_methods" not in kw: + if "method_whitelist" in self.__dict__: + warnings.warn( + "Using 'method_whitelist' with Retry is deprecated and " + "will be removed in v2.0. 
Use 'allowed_methods' instead", + DeprecationWarning, + ) + params["method_whitelist"] = self.allowed_methods + else: + params["allowed_methods"] = self.allowed_methods + + params.update(kw) + return type(self)(**params) + + @classmethod + def from_int(cls, retries, redirect=True, default=None): + """Backwards-compatibility for the old retries format.""" + if retries is None: + retries = default if default is not None else cls.DEFAULT + + if isinstance(retries, Retry): + return retries + + redirect = bool(redirect) and None + new_retries = cls(retries, redirect=redirect) + log.debug("Converted retries value: %r -> %r", retries, new_retries) + return new_retries + + def get_backoff_time(self): + """Formula for computing the current backoff + + :rtype: float + """ + # We want to consider only the last consecutive errors sequence (Ignore redirects). + consecutive_errors_len = len( + list( + takewhile(lambda x: x.redirect_location is None, reversed(self.history)) + ) + ) + if consecutive_errors_len <= 1: + return 0 + + backoff_value = self.backoff_factor * (2 ** (consecutive_errors_len - 1)) + return min(self.BACKOFF_MAX, backoff_value) + + def parse_retry_after(self, retry_after): + # Whitespace: https://tools.ietf.org/html/rfc7230#section-3.2.4 + if re.match(r"^\s*[0-9]+\s*$", retry_after): + seconds = int(retry_after) + else: + retry_date_tuple = email.utils.parsedate_tz(retry_after) + if retry_date_tuple is None: + raise InvalidHeader("Invalid Retry-After header: %s" % retry_after) + if retry_date_tuple[9] is None: # Python 2 + # Assume UTC if no timezone was specified + # On Python2.7, parsedate_tz returns None for a timezone offset + # instead of 0 if no timezone is given, where mktime_tz treats + # a None timezone offset as local time. + retry_date_tuple = retry_date_tuple[:9] + (0,) + retry_date_tuple[10:] + + retry_date = email.utils.mktime_tz(retry_date_tuple) + seconds = retry_date - time.time() + + if seconds < 0: + seconds = 0 + + return seconds + + def get_retry_after(self, response): + """Get the value of Retry-After in seconds.""" + + retry_after = response.getheader("Retry-After") + + if retry_after is None: + return None + + return self.parse_retry_after(retry_after) + + def sleep_for_retry(self, response=None): + retry_after = self.get_retry_after(response) + if retry_after: + time.sleep(retry_after) + return True + + return False + + def _sleep_backoff(self): + backoff = self.get_backoff_time() + if backoff <= 0: + return + time.sleep(backoff) + + def sleep(self, response=None): + """Sleep between retry attempts. + + This method will respect a server's ``Retry-After`` response header + and sleep the duration of the time requested. If that is not present, it + will use an exponential backoff. By default, the backoff factor is 0 and + this method will return immediately. + """ + + if self.respect_retry_after_header and response: + slept = self.sleep_for_retry(response) + if slept: + return + + self._sleep_backoff() + + def _is_connection_error(self, err): + """Errors when we're fairly sure that the server did not receive the + request, so it should be safe to retry. + """ + if isinstance(err, ProxyError): + err = err.original_error + return isinstance(err, ConnectTimeoutError) + + def _is_read_error(self, err): + """Errors that occur after the request has been started, so we should + assume that the server began processing it. 
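+        Retrying such a request may repeat side effects, which is why read
+        retries are additionally gated on :meth:`_is_method_retryable`.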
+ """ + return isinstance(err, (ReadTimeoutError, ProtocolError)) + + def _is_method_retryable(self, method): + """Checks if a given HTTP method should be retried upon, depending if + it is included in the allowed_methods + """ + # TODO: For now favor if the Retry implementation sets its own method_whitelist + # property outside of our constructor to avoid breaking custom implementations. + if "method_whitelist" in self.__dict__: + warnings.warn( + "Using 'method_whitelist' with Retry is deprecated and " + "will be removed in v2.0. Use 'allowed_methods' instead", + DeprecationWarning, + ) + allowed_methods = self.method_whitelist + else: + allowed_methods = self.allowed_methods + + if allowed_methods and method.upper() not in allowed_methods: + return False + return True + + def is_retry(self, method, status_code, has_retry_after=False): + """Is this method/status code retryable? (Based on allowlists and control + variables such as the number of total retries to allow, whether to + respect the Retry-After header, whether this header is present, and + whether the returned status code is on the list of status codes to + be retried upon on the presence of the aforementioned header) + """ + if not self._is_method_retryable(method): + return False + + if self.status_forcelist and status_code in self.status_forcelist: + return True + + return ( + self.total + and self.respect_retry_after_header + and has_retry_after + and (status_code in self.RETRY_AFTER_STATUS_CODES) + ) + + def is_exhausted(self): + """Are we out of retries?""" + retry_counts = ( + self.total, + self.connect, + self.read, + self.redirect, + self.status, + self.other, + ) + retry_counts = list(filter(None, retry_counts)) + if not retry_counts: + return False + + return min(retry_counts) < 0 + + def increment( + self, + method=None, + url=None, + response=None, + error=None, + _pool=None, + _stacktrace=None, + ): + """Return a new Retry object with incremented retry counters. + + :param response: A response object, or None, if the server did not + return a response. + :type response: :class:`~urllib3.response.HTTPResponse` + :param Exception error: An error encountered during the request, or + None if the response was received successfully. + + :return: A new ``Retry`` object. + """ + if self.total is False and error: + # Disabled, indicate to re-raise the error. + raise six.reraise(type(error), error, _stacktrace) + + total = self.total + if total is not None: + total -= 1 + + connect = self.connect + read = self.read + redirect = self.redirect + status_count = self.status + other = self.other + cause = "unknown" + status = None + redirect_location = None + + if error and self._is_connection_error(error): + # Connect retry? + if connect is False: + raise six.reraise(type(error), error, _stacktrace) + elif connect is not None: + connect -= 1 + + elif error and self._is_read_error(error): + # Read retry? + if read is False or not self._is_method_retryable(method): + raise six.reraise(type(error), error, _stacktrace) + elif read is not None: + read -= 1 + + elif error: + # Other retry? + if other is not None: + other -= 1 + + elif response and response.get_redirect_location(): + # Redirect retry? 
+ if redirect is not None: + redirect -= 1 + cause = "too many redirects" + redirect_location = response.get_redirect_location() + status = response.status + + else: + # Incrementing because of a server error like a 500 in + # status_forcelist and the given method is in the allowed_methods + cause = ResponseError.GENERIC_ERROR + if response and response.status: + if status_count is not None: + status_count -= 1 + cause = ResponseError.SPECIFIC_ERROR.format(status_code=response.status) + status = response.status + + history = self.history + ( + RequestHistory(method, url, error, status, redirect_location), + ) + + new_retry = self.new( + total=total, + connect=connect, + read=read, + redirect=redirect, + status=status_count, + other=other, + history=history, + ) + + if new_retry.is_exhausted(): + raise MaxRetryError(_pool, url, error or ResponseError(cause)) + + log.debug("Incremented Retry for (url='%s'): %r", url, new_retry) + + return new_retry + + def __repr__(self): + return ( + "{cls.__name__}(total={self.total}, connect={self.connect}, " + "read={self.read}, redirect={self.redirect}, status={self.status})" + ).format(cls=type(self), self=self) + + def __getattr__(self, item): + if item == "method_whitelist": + # TODO: Remove this deprecated alias in v2.0 + warnings.warn( + "Using 'method_whitelist' with Retry is deprecated and " + "will be removed in v2.0. Use 'allowed_methods' instead", + DeprecationWarning, + ) + return self.allowed_methods + try: + return getattr(super(Retry, self), item) + except AttributeError: + return getattr(Retry, item) + + +# For backwards compatibility (equivalent to pre-v1.9): +Retry.DEFAULT = Retry(3) diff --git a/src/snowflake/connector/vendored/urllib3/util/ssl_.py b/src/snowflake/connector/vendored/urllib3/util/ssl_.py new file mode 100644 index 000000000..8f867812a --- /dev/null +++ b/src/snowflake/connector/vendored/urllib3/util/ssl_.py @@ -0,0 +1,495 @@ +from __future__ import absolute_import + +import hmac +import os +import sys +import warnings +from binascii import hexlify, unhexlify +from hashlib import md5, sha1, sha256 + +from ..exceptions import ( + InsecurePlatformWarning, + ProxySchemeUnsupported, + SNIMissingWarning, + SSLError, +) +from ..packages import six +from .url import BRACELESS_IPV6_ADDRZ_RE, IPV4_RE + +SSLContext = None +SSLTransport = None +HAS_SNI = False +IS_PYOPENSSL = False +IS_SECURETRANSPORT = False +ALPN_PROTOCOLS = ["http/1.1"] + +# Maps the length of a digest to a possible hash function producing this digest +HASHFUNC_MAP = {32: md5, 40: sha1, 64: sha256} + + +def _const_compare_digest_backport(a, b): + """ + Compare two digests of equal length in constant time. + + The digests must be of type str/bytes. + Returns True if the digests match, and False otherwise. + """ + result = abs(len(a) - len(b)) + for left, right in zip(bytearray(a), bytearray(b)): + result |= left ^ right + return result == 0 + + +_const_compare_digest = getattr(hmac, "compare_digest", _const_compare_digest_backport) + +try: # Test for SSL features + import ssl + from ssl import CERT_REQUIRED, wrap_socket +except ImportError: + pass + +try: + from ssl import HAS_SNI # Has SNI? 
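+    # Overrides the module-level ``HAS_SNI = False`` placeholder when the
+    # ssl module advertises SNI support.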
+except ImportError: + pass + +try: + from .ssltransport import SSLTransport +except ImportError: + pass + + +try: # Platform-specific: Python 3.6 + from ssl import PROTOCOL_TLS + + PROTOCOL_SSLv23 = PROTOCOL_TLS +except ImportError: + try: + from ssl import PROTOCOL_SSLv23 as PROTOCOL_TLS + + PROTOCOL_SSLv23 = PROTOCOL_TLS + except ImportError: + PROTOCOL_SSLv23 = PROTOCOL_TLS = 2 + +try: + from ssl import PROTOCOL_TLS_CLIENT +except ImportError: + PROTOCOL_TLS_CLIENT = PROTOCOL_TLS + + +try: + from ssl import OP_NO_COMPRESSION, OP_NO_SSLv2, OP_NO_SSLv3 +except ImportError: + OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000 + OP_NO_COMPRESSION = 0x20000 + + +try: # OP_NO_TICKET was added in Python 3.6 + from ssl import OP_NO_TICKET +except ImportError: + OP_NO_TICKET = 0x4000 + + +# A secure default. +# Sources for more information on TLS ciphers: +# +# - https://wiki.mozilla.org/Security/Server_Side_TLS +# - https://www.ssllabs.com/projects/best-practices/index.html +# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/ +# +# The general intent is: +# - prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE), +# - prefer ECDHE over DHE for better performance, +# - prefer any AES-GCM and ChaCha20 over any AES-CBC for better performance and +# security, +# - prefer AES-GCM over ChaCha20 because hardware-accelerated AES is common, +# - disable NULL authentication, MD5 MACs, DSS, and other +# insecure ciphers for security reasons. +# - NOTE: TLS 1.3 cipher suites are managed through a different interface +# not exposed by CPython (yet!) and are enabled by default if they're available. +DEFAULT_CIPHERS = ":".join( + [ + "ECDHE+AESGCM", + "ECDHE+CHACHA20", + "DHE+AESGCM", + "DHE+CHACHA20", + "ECDH+AESGCM", + "DH+AESGCM", + "ECDH+AES", + "DH+AES", + "RSA+AESGCM", + "RSA+AES", + "!aNULL", + "!eNULL", + "!MD5", + "!DSS", + ] +) + +try: + from ssl import SSLContext # Modern SSL? +except ImportError: + + class SSLContext(object): # Platform-specific: Python 2 + def __init__(self, protocol_version): + self.protocol = protocol_version + # Use default values from a real SSLContext + self.check_hostname = False + self.verify_mode = ssl.CERT_NONE + self.ca_certs = None + self.options = 0 + self.certfile = None + self.keyfile = None + self.ciphers = None + + def load_cert_chain(self, certfile, keyfile): + self.certfile = certfile + self.keyfile = keyfile + + def load_verify_locations(self, cafile=None, capath=None, cadata=None): + self.ca_certs = cafile + + if capath is not None: + raise SSLError("CA directories not supported in older Pythons") + + if cadata is not None: + raise SSLError("CA data not supported in older Pythons") + + def set_ciphers(self, cipher_suite): + self.ciphers = cipher_suite + + def wrap_socket(self, socket, server_hostname=None, server_side=False): + warnings.warn( + "A true SSLContext object is not available. This prevents " + "urllib3 from configuring SSL appropriately and may cause " + "certain SSL connections to fail. You can upgrade to a newer " + "version of Python to solve this. 
For more information, see " + "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html" + "#ssl-warnings", + InsecurePlatformWarning, + ) + kwargs = { + "keyfile": self.keyfile, + "certfile": self.certfile, + "ca_certs": self.ca_certs, + "cert_reqs": self.verify_mode, + "ssl_version": self.protocol, + "server_side": server_side, + } + return wrap_socket(socket, ciphers=self.ciphers, **kwargs) + + +def assert_fingerprint(cert, fingerprint): + """ + Checks if given fingerprint matches the supplied certificate. + + :param cert: + Certificate as bytes object. + :param fingerprint: + Fingerprint as string of hexdigits, can be interspersed by colons. + """ + + fingerprint = fingerprint.replace(":", "").lower() + digest_length = len(fingerprint) + hashfunc = HASHFUNC_MAP.get(digest_length) + if not hashfunc: + raise SSLError("Fingerprint of invalid length: {0}".format(fingerprint)) + + # We need encode() here for py32; works on py2 and p33. + fingerprint_bytes = unhexlify(fingerprint.encode()) + + cert_digest = hashfunc(cert).digest() + + if not _const_compare_digest(cert_digest, fingerprint_bytes): + raise SSLError( + 'Fingerprints did not match. Expected "{0}", got "{1}".'.format( + fingerprint, hexlify(cert_digest) + ) + ) + + +def resolve_cert_reqs(candidate): + """ + Resolves the argument to a numeric constant, which can be passed to + the wrap_socket function/method from the ssl module. + Defaults to :data:`ssl.CERT_REQUIRED`. + If given a string it is assumed to be the name of the constant in the + :mod:`ssl` module or its abbreviation. + (So you can specify `REQUIRED` instead of `CERT_REQUIRED`. + If it's neither `None` nor a string we assume it is already the numeric + constant which can directly be passed to wrap_socket. + """ + if candidate is None: + return CERT_REQUIRED + + if isinstance(candidate, str): + res = getattr(ssl, candidate, None) + if res is None: + res = getattr(ssl, "CERT_" + candidate) + return res + + return candidate + + +def resolve_ssl_version(candidate): + """ + like resolve_cert_reqs + """ + if candidate is None: + return PROTOCOL_TLS + + if isinstance(candidate, str): + res = getattr(ssl, candidate, None) + if res is None: + res = getattr(ssl, "PROTOCOL_" + candidate) + return res + + return candidate + + +def create_urllib3_context( + ssl_version=None, cert_reqs=None, options=None, ciphers=None +): + """All arguments have the same meaning as ``ssl_wrap_socket``. + + By default, this function does a lot of the same work that + ``ssl.create_default_context`` does on Python 3.4+. It: + + - Disables SSLv2, SSLv3, and compression + - Sets a restricted set of server ciphers + + If you wish to enable SSLv3, you can do:: + + from urllib3.util import ssl_ + context = ssl_.create_urllib3_context() + context.options &= ~ssl_.OP_NO_SSLv3 + + You can do the same to enable compression (substituting ``COMPRESSION`` + for ``SSLv3`` in the last line above). + + :param ssl_version: + The desired protocol version to use. This will default to + PROTOCOL_SSLv23 which will negotiate the highest protocol that both + the server and your installation of OpenSSL support. + :param cert_reqs: + Whether to require the certificate verification. This defaults to + ``ssl.CERT_REQUIRED``. + :param options: + Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``, + ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``, and ``ssl.OP_NO_TICKET``. + :param ciphers: + Which cipher suites to allow the server to select. 
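+        Defaults to ``DEFAULT_CIPHERS`` when not given.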
+ :returns: + Constructed SSLContext object with specified options + :rtype: SSLContext + """ + # PROTOCOL_TLS is deprecated in Python 3.10 + if not ssl_version or ssl_version == PROTOCOL_TLS: + ssl_version = PROTOCOL_TLS_CLIENT + + context = SSLContext(ssl_version) + + context.set_ciphers(ciphers or DEFAULT_CIPHERS) + + # Setting the default here, as we may have no ssl module on import + cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs + + if options is None: + options = 0 + # SSLv2 is easily broken and is considered harmful and dangerous + options |= OP_NO_SSLv2 + # SSLv3 has several problems and is now dangerous + options |= OP_NO_SSLv3 + # Disable compression to prevent CRIME attacks for OpenSSL 1.0+ + # (issue #309) + options |= OP_NO_COMPRESSION + # TLSv1.2 only. Unless set explicitly, do not request tickets. + # This may save some bandwidth on wire, and although the ticket is encrypted, + # there is a risk associated with it being on wire, + # if the server is not rotating its ticketing keys properly. + options |= OP_NO_TICKET + + context.options |= options + + # Enable post-handshake authentication for TLS 1.3, see GH #1634. PHA is + # necessary for conditional client cert authentication with TLS 1.3. + # The attribute is None for OpenSSL <= 1.1.0 or does not exist in older + # versions of Python. We only enable on Python 3.7.4+ or if certificate + # verification is enabled to work around Python issue #37428 + # See: https://bugs.python.org/issue37428 + if (cert_reqs == ssl.CERT_REQUIRED or sys.version_info >= (3, 7, 4)) and getattr( + context, "post_handshake_auth", None + ) is not None: + context.post_handshake_auth = True + + def disable_check_hostname(): + if ( + getattr(context, "check_hostname", None) is not None + ): # Platform-specific: Python 3.2 + # We do our own verification, including fingerprints and alternative + # hostnames. So disable it here + context.check_hostname = False + + # The order of the below lines setting verify_mode and check_hostname + # matter due to safe-guards SSLContext has to prevent an SSLContext with + # check_hostname=True, verify_mode=NONE/OPTIONAL. This is made even more + # complex because we don't know whether PROTOCOL_TLS_CLIENT will be used + # or not so we don't know the initial state of the freshly created SSLContext. + if cert_reqs == ssl.CERT_REQUIRED: + context.verify_mode = cert_reqs + disable_check_hostname() + else: + disable_check_hostname() + context.verify_mode = cert_reqs + + # Enable logging of TLS session keys via defacto standard environment variable + # 'SSLKEYLOGFILE', if the feature is available (Python 3.8+). Skip empty values. + if hasattr(context, "keylog_filename"): + sslkeylogfile = os.environ.get("SSLKEYLOGFILE") + if sslkeylogfile: + context.keylog_filename = sslkeylogfile + + return context + + +def ssl_wrap_socket( + sock, + keyfile=None, + certfile=None, + cert_reqs=None, + ca_certs=None, + server_hostname=None, + ssl_version=None, + ciphers=None, + ssl_context=None, + ca_cert_dir=None, + key_password=None, + ca_cert_data=None, + tls_in_tls=False, +): + """ + All arguments except for server_hostname, ssl_context, and ca_cert_dir have + the same meaning as they do when using :func:`ssl.wrap_socket`. + + :param server_hostname: + When SNI is supported, the expected hostname of the certificate + :param ssl_context: + A pre-made :class:`SSLContext` object. If none is provided, one will + be created using :func:`create_urllib3_context`. 
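+        When a context is supplied, the ``ssl_version``, ``cert_reqs`` and
+        ``ciphers`` arguments are ignored.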
+ :param ciphers: + A string of ciphers we wish the client to support. + :param ca_cert_dir: + A directory containing CA certificates in multiple separate files, as + supported by OpenSSL's -CApath flag or the capath argument to + SSLContext.load_verify_locations(). + :param key_password: + Optional password if the keyfile is encrypted. + :param ca_cert_data: + Optional string containing CA certificates in PEM format suitable for + passing as the cadata parameter to SSLContext.load_verify_locations() + :param tls_in_tls: + Use SSLTransport to wrap the existing socket. + """ + context = ssl_context + if context is None: + # Note: This branch of code and all the variables in it are no longer + # used by urllib3 itself. We should consider deprecating and removing + # this code. + context = create_urllib3_context(ssl_version, cert_reqs, ciphers=ciphers) + + if ca_certs or ca_cert_dir or ca_cert_data: + try: + context.load_verify_locations(ca_certs, ca_cert_dir, ca_cert_data) + except (IOError, OSError) as e: + raise SSLError(e) + + elif ssl_context is None and hasattr(context, "load_default_certs"): + # try to load OS default certs; works well on Windows (require Python3.4+) + context.load_default_certs() + + # Attempt to detect if we get the goofy behavior of the + # keyfile being encrypted and OpenSSL asking for the + # passphrase via the terminal and instead error out. + if keyfile and key_password is None and _is_key_file_encrypted(keyfile): + raise SSLError("Client private key is encrypted, password is required") + + if certfile: + if key_password is None: + context.load_cert_chain(certfile, keyfile) + else: + context.load_cert_chain(certfile, keyfile, key_password) + + try: + if hasattr(context, "set_alpn_protocols"): + context.set_alpn_protocols(ALPN_PROTOCOLS) + except NotImplementedError: # Defensive: in CI, we always have set_alpn_protocols + pass + + # If we detect server_hostname is an IP address then the SNI + # extension should not be used according to RFC3546 Section 3.1 + use_sni_hostname = server_hostname and not is_ipaddress(server_hostname) + # SecureTransport uses server_hostname in certificate verification. + send_sni = (use_sni_hostname and HAS_SNI) or ( + IS_SECURETRANSPORT and server_hostname + ) + # Do not warn the user if server_hostname is an invalid SNI hostname. + if not HAS_SNI and use_sni_hostname: + warnings.warn( + "An HTTPS request has been made, but the SNI (Server Name " + "Indication) extension to TLS is not available on this platform. " + "This may cause the server to present an incorrect TLS " + "certificate, which can cause validation failures. You can upgrade to " + "a newer version of Python to solve this. For more information, see " + "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html" + "#ssl-warnings", + SNIMissingWarning, + ) + + if send_sni: + ssl_sock = _ssl_wrap_socket_impl( + sock, context, tls_in_tls, server_hostname=server_hostname + ) + else: + ssl_sock = _ssl_wrap_socket_impl(sock, context, tls_in_tls) + return ssl_sock + + +def is_ipaddress(hostname): + """Detects whether the hostname given is an IPv4 or IPv6 address. + Also detects IPv6 addresses with Zone IDs. + + :param str hostname: Hostname to examine. + :return: True if the hostname is an IP address, False otherwise. + """ + if not six.PY2 and isinstance(hostname, bytes): + # IDN A-label bytes are ASCII compatible. 
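+        # Decoding as 'ascii' is therefore safe for A-label input.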
+ hostname = hostname.decode("ascii") + return bool(IPV4_RE.match(hostname) or BRACELESS_IPV6_ADDRZ_RE.match(hostname)) + + +def _is_key_file_encrypted(key_file): + """Detects if a key file is encrypted or not.""" + with open(key_file, "r") as f: + for line in f: + # Look for Proc-Type: 4,ENCRYPTED + if "ENCRYPTED" in line: + return True + + return False + + +def _ssl_wrap_socket_impl(sock, ssl_context, tls_in_tls, server_hostname=None): + if tls_in_tls: + if not SSLTransport: + # Import error, ssl is not available. + raise ProxySchemeUnsupported( + "TLS in TLS requires support for the 'ssl' module" + ) + + SSLTransport._validate_ssl_context_for_tls_in_tls(ssl_context) + return SSLTransport(sock, ssl_context, server_hostname) + + if server_hostname: + return ssl_context.wrap_socket(sock, server_hostname=server_hostname) + else: + return ssl_context.wrap_socket(sock) diff --git a/src/snowflake/connector/vendored/urllib3/util/ssltransport.py b/src/snowflake/connector/vendored/urllib3/util/ssltransport.py new file mode 100644 index 000000000..c2186bced --- /dev/null +++ b/src/snowflake/connector/vendored/urllib3/util/ssltransport.py @@ -0,0 +1,221 @@ +import io +import socket +import ssl + +from urllib3.exceptions import ProxySchemeUnsupported +from urllib3.packages import six + +SSL_BLOCKSIZE = 16384 + + +class SSLTransport: + """ + The SSLTransport wraps an existing socket and establishes an SSL connection. + + Contrary to Python's implementation of SSLSocket, it allows you to chain + multiple TLS connections together. It's particularly useful if you need to + implement TLS within TLS. + + The class supports most of the socket API operations. + """ + + @staticmethod + def _validate_ssl_context_for_tls_in_tls(ssl_context): + """ + Raises a ProxySchemeUnsupported if the provided ssl_context can't be used + for TLS in TLS. + + The only requirement is that the ssl_context provides the 'wrap_bio' + methods. + """ + + if not hasattr(ssl_context, "wrap_bio"): + if six.PY2: + raise ProxySchemeUnsupported( + "TLS in TLS requires SSLContext.wrap_bio() which isn't " + "supported on Python 2" + ) + else: + raise ProxySchemeUnsupported( + "TLS in TLS requires SSLContext.wrap_bio() which isn't " + "available on non-native SSLContext" + ) + + def __init__( + self, socket, ssl_context, server_hostname=None, suppress_ragged_eofs=True + ): + """ + Create an SSLTransport around socket using the provided ssl_context. + """ + self.incoming = ssl.MemoryBIO() + self.outgoing = ssl.MemoryBIO() + + self.suppress_ragged_eofs = suppress_ragged_eofs + self.socket = socket + + self.sslobj = ssl_context.wrap_bio( + self.incoming, self.outgoing, server_hostname=server_hostname + ) + + # Perform initial handshake. 
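+        # wrap_bio() never touches the socket itself, so _ssl_io_loop
+        # shuttles the handshake bytes between the memory BIOs and the
+        # wrapped socket.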
+ self._ssl_io_loop(self.sslobj.do_handshake) + + def __enter__(self): + return self + + def __exit__(self, *_): + self.close() + + def fileno(self): + return self.socket.fileno() + + def read(self, len=1024, buffer=None): + return self._wrap_ssl_read(len, buffer) + + def recv(self, len=1024, flags=0): + if flags != 0: + raise ValueError("non-zero flags not allowed in calls to recv") + return self._wrap_ssl_read(len) + + def recv_into(self, buffer, nbytes=None, flags=0): + if flags != 0: + raise ValueError("non-zero flags not allowed in calls to recv_into") + if buffer and (nbytes is None): + nbytes = len(buffer) + elif nbytes is None: + nbytes = 1024 + return self.read(nbytes, buffer) + + def sendall(self, data, flags=0): + if flags != 0: + raise ValueError("non-zero flags not allowed in calls to sendall") + count = 0 + with memoryview(data) as view, view.cast("B") as byte_view: + amount = len(byte_view) + while count < amount: + v = self.send(byte_view[count:]) + count += v + + def send(self, data, flags=0): + if flags != 0: + raise ValueError("non-zero flags not allowed in calls to send") + response = self._ssl_io_loop(self.sslobj.write, data) + return response + + def makefile( + self, mode="r", buffering=None, encoding=None, errors=None, newline=None + ): + """ + Python's httpclient uses makefile and buffered io when reading HTTP + messages and we need to support it. + + This is unfortunately a copy and paste of socket.py makefile with small + changes to point to the socket directly. + """ + if not set(mode) <= {"r", "w", "b"}: + raise ValueError("invalid mode %r (only r, w, b allowed)" % (mode,)) + + writing = "w" in mode + reading = "r" in mode or not writing + assert reading or writing + binary = "b" in mode + rawmode = "" + if reading: + rawmode += "r" + if writing: + rawmode += "w" + raw = socket.SocketIO(self, rawmode) + self.socket._io_refs += 1 + if buffering is None: + buffering = -1 + if buffering < 0: + buffering = io.DEFAULT_BUFFER_SIZE + if buffering == 0: + if not binary: + raise ValueError("unbuffered streams must be binary") + return raw + if reading and writing: + buffer = io.BufferedRWPair(raw, raw, buffering) + elif reading: + buffer = io.BufferedReader(raw, buffering) + else: + assert writing + buffer = io.BufferedWriter(raw, buffering) + if binary: + return buffer + text = io.TextIOWrapper(buffer, encoding, errors, newline) + text.mode = mode + return text + + def unwrap(self): + self._ssl_io_loop(self.sslobj.unwrap) + + def close(self): + self.socket.close() + + def getpeercert(self, binary_form=False): + return self.sslobj.getpeercert(binary_form) + + def version(self): + return self.sslobj.version() + + def cipher(self): + return self.sslobj.cipher() + + def selected_alpn_protocol(self): + return self.sslobj.selected_alpn_protocol() + + def selected_npn_protocol(self): + return self.sslobj.selected_npn_protocol() + + def shared_ciphers(self): + return self.sslobj.shared_ciphers() + + def compression(self): + return self.sslobj.compression() + + def settimeout(self, value): + self.socket.settimeout(value) + + def gettimeout(self): + return self.socket.gettimeout() + + def _decref_socketios(self): + self.socket._decref_socketios() + + def _wrap_ssl_read(self, len, buffer=None): + try: + return self._ssl_io_loop(self.sslobj.read, len, buffer) + except ssl.SSLError as e: + if e.errno == ssl.SSL_ERROR_EOF and self.suppress_ragged_eofs: + return 0 # eof, return 0. 
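+            # Any other SSLError (or a ragged EOF when suppression is
+            # disabled) is re-raised below.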
+ else: + raise + + def _ssl_io_loop(self, func, *args): + """Performs an I/O loop between incoming/outgoing and the socket.""" + should_loop = True + ret = None + + while should_loop: + errno = None + try: + ret = func(*args) + except ssl.SSLError as e: + if e.errno not in (ssl.SSL_ERROR_WANT_READ, ssl.SSL_ERROR_WANT_WRITE): + # WANT_READ, and WANT_WRITE are expected, others are not. + raise e + errno = e.errno + + buf = self.outgoing.read() + self.socket.sendall(buf) + + if errno is None: + should_loop = False + elif errno == ssl.SSL_ERROR_WANT_READ: + buf = self.socket.recv(SSL_BLOCKSIZE) + if buf: + self.incoming.write(buf) + else: + self.incoming.write_eof() + return ret diff --git a/src/snowflake/connector/vendored/urllib3/util/timeout.py b/src/snowflake/connector/vendored/urllib3/util/timeout.py new file mode 100644 index 000000000..ff69593b0 --- /dev/null +++ b/src/snowflake/connector/vendored/urllib3/util/timeout.py @@ -0,0 +1,268 @@ +from __future__ import absolute_import + +import time + +# The default socket timeout, used by httplib to indicate that no timeout was +# specified by the user +from socket import _GLOBAL_DEFAULT_TIMEOUT + +from ..exceptions import TimeoutStateError + +# A sentinel value to indicate that no timeout was specified by the user in +# urllib3 +_Default = object() + + +# Use time.monotonic if available. +current_time = getattr(time, "monotonic", time.time) + + +class Timeout(object): + """Timeout configuration. + + Timeouts can be defined as a default for a pool: + + .. code-block:: python + + timeout = Timeout(connect=2.0, read=7.0) + http = PoolManager(timeout=timeout) + response = http.request('GET', 'http://example.com/') + + Or per-request (which overrides the default for the pool): + + .. code-block:: python + + response = http.request('GET', 'http://example.com/', timeout=Timeout(10)) + + Timeouts can be disabled by setting all the parameters to ``None``: + + .. code-block:: python + + no_timeout = Timeout(connect=None, read=None) + response = http.request('GET', 'http://example.com/, timeout=no_timeout) + + + :param total: + This combines the connect and read timeouts into one; the read timeout + will be set to the time leftover from the connect attempt. In the + event that both a connect timeout and a total are specified, or a read + timeout and a total are specified, the shorter timeout will be applied. + + Defaults to None. + + :type total: int, float, or None + + :param connect: + The maximum amount of time (in seconds) to wait for a connection + attempt to a server to succeed. Omitting the parameter will default the + connect timeout to the system default, probably `the global default + timeout in socket.py + `_. + None will set an infinite timeout for connection attempts. + + :type connect: int, float, or None + + :param read: + The maximum amount of time (in seconds) to wait between consecutive + read operations for a response from the server. Omitting the parameter + will default the read timeout to the system default, probably `the + global default timeout in socket.py + `_. + None will set an infinite timeout. + + :type read: int, float, or None + + .. note:: + + Many factors can affect the total amount of time for urllib3 to return + an HTTP response. + + For example, Python's DNS resolver does not obey the timeout specified + on the socket. Other factors that can affect total request time include + high CPU load, high swap, the program running at a low priority level, + or other behaviors. 
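+
+        As an illustrative sketch (arbitrary values):
+
+        .. code-block:: python
+
+            # connect is capped at 5s; read bounds each gap between
+            # socket reads, as the next paragraph explains.
+            timeout = Timeout(connect=5.0, read=10.0)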
+ + In addition, the read and total timeouts only measure the time between + read operations on the socket connecting the client and the server, + not the total amount of time for the request to return a complete + response. For most requests, the timeout is raised because the server + has not sent the first byte in the specified time. This is not always + the case; if a server streams one byte every fifteen seconds, a timeout + of 20 seconds will not trigger, even though the request will take + several minutes to complete. + + If your goal is to cut off any request after a set amount of wall clock + time, consider having a second "watcher" thread to cut off a slow + request. + """ + + #: A sentinel object representing the default timeout value + DEFAULT_TIMEOUT = _GLOBAL_DEFAULT_TIMEOUT + + def __init__(self, total=None, connect=_Default, read=_Default): + self._connect = self._validate_timeout(connect, "connect") + self._read = self._validate_timeout(read, "read") + self.total = self._validate_timeout(total, "total") + self._start_connect = None + + def __repr__(self): + return "%s(connect=%r, read=%r, total=%r)" % ( + type(self).__name__, + self._connect, + self._read, + self.total, + ) + + # __str__ provided for backwards compatibility + __str__ = __repr__ + + @classmethod + def _validate_timeout(cls, value, name): + """Check that a timeout attribute is valid. + + :param value: The timeout value to validate + :param name: The name of the timeout attribute to validate. This is + used to specify in error messages. + :return: The validated and casted version of the given value. + :raises ValueError: If it is a numeric value less than or equal to + zero, or the type is not an integer, float, or None. + """ + if value is _Default: + return cls.DEFAULT_TIMEOUT + + if value is None or value is cls.DEFAULT_TIMEOUT: + return value + + if isinstance(value, bool): + raise ValueError( + "Timeout cannot be a boolean value. It must " + "be an int, float or None." + ) + try: + float(value) + except (TypeError, ValueError): + raise ValueError( + "Timeout value %s was %s, but it must be an " + "int, float or None." % (name, value) + ) + + try: + if value <= 0: + raise ValueError( + "Attempted to set %s timeout to %s, but the " + "timeout cannot be set to a value less " + "than or equal to 0." % (name, value) + ) + except TypeError: + # Python 3 + raise ValueError( + "Timeout value %s was %s, but it must be an " + "int, float or None." % (name, value) + ) + + return value + + @classmethod + def from_float(cls, timeout): + """Create a new Timeout from a legacy timeout value. + + The timeout value used by httplib.py sets the same timeout on the + connect(), and recv() socket requests. This creates a :class:`Timeout` + object that sets the individual timeouts to the ``timeout`` value + passed to this function. + + :param timeout: The legacy timeout value. + :type timeout: integer, float, sentinel default object, or None + :return: Timeout object + :rtype: :class:`Timeout` + """ + return Timeout(read=timeout, connect=timeout) + + def clone(self): + """Create a copy of the timeout object + + Timeout properties are stored per-pool but each request needs a fresh + Timeout object to ensure each one has its own start/stop configured. + + :return: a copy of the timeout object + :rtype: :class:`Timeout` + """ + # We can't use copy.deepcopy because that will also create a new object + # for _GLOBAL_DEFAULT_TIMEOUT, which socket.py uses as a sentinel to + # detect the user default. 
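+        # Reconstructing from the three stored values is enough, since they
+        # are immutable numbers or module-level sentinels.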
+ return Timeout(connect=self._connect, read=self._read, total=self.total) + + def start_connect(self): + """Start the timeout clock, used during a connect() attempt + + :raises urllib3.exceptions.TimeoutStateError: if you attempt + to start a timer that has been started already. + """ + if self._start_connect is not None: + raise TimeoutStateError("Timeout timer has already been started.") + self._start_connect = current_time() + return self._start_connect + + def get_connect_duration(self): + """Gets the time elapsed since the call to :meth:`start_connect`. + + :return: Elapsed time in seconds. + :rtype: float + :raises urllib3.exceptions.TimeoutStateError: if you attempt + to get duration for a timer that hasn't been started. + """ + if self._start_connect is None: + raise TimeoutStateError( + "Can't get connect duration for timer that has not started." + ) + return current_time() - self._start_connect + + @property + def connect_timeout(self): + """Get the value to use when setting a connection timeout. + + This will be a positive float or integer, the value None + (never timeout), or the default system timeout. + + :return: Connect timeout. + :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None + """ + if self.total is None: + return self._connect + + if self._connect is None or self._connect is self.DEFAULT_TIMEOUT: + return self.total + + return min(self._connect, self.total) + + @property + def read_timeout(self): + """Get the value for the read timeout. + + This assumes some time has elapsed in the connection timeout and + computes the read timeout appropriately. + + If self.total is set, the read timeout is dependent on the amount of + time taken by the connect timeout. If the connection time has not been + established, a :exc:`~urllib3.exceptions.TimeoutStateError` will be + raised. + + :return: Value to use for the read timeout. + :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None + :raises urllib3.exceptions.TimeoutStateError: If :meth:`start_connect` + has not yet been called on this object. + """ + if ( + self.total is not None + and self.total is not self.DEFAULT_TIMEOUT + and self._read is not None + and self._read is not self.DEFAULT_TIMEOUT + ): + # In case the connect timeout has not yet been established. + if self._start_connect is None: + return self._read + return max(0, min(self.total - self.get_connect_duration(), self._read)) + elif self.total is not None and self.total is not self.DEFAULT_TIMEOUT: + return max(0, self.total - self.get_connect_duration()) + else: + return self._read diff --git a/src/snowflake/connector/vendored/urllib3/util/url.py b/src/snowflake/connector/vendored/urllib3/util/url.py new file mode 100644 index 000000000..81a03da9e --- /dev/null +++ b/src/snowflake/connector/vendored/urllib3/util/url.py @@ -0,0 +1,432 @@ +from __future__ import absolute_import + +import re +from collections import namedtuple + +from ..exceptions import LocationParseError +from ..packages import six + +url_attrs = ["scheme", "auth", "host", "port", "path", "query", "fragment"] + +# We only want to normalize urls with an HTTP(S) scheme. +# urllib3 infers URLs without a scheme (None) to be http. +NORMALIZABLE_SCHEMES = ("http", "https", None) + +# Almost all of these patterns were derived from the +# 'rfc3986' module: https://github.com/python-hyper/rfc3986 +PERCENT_RE = re.compile(r"%[a-fA-F0-9]{2}") +SCHEME_RE = re.compile(r"^(?:[a-zA-Z][a-zA-Z0-9+-]*:|/)") +URI_RE = re.compile( + r"^(?:([a-zA-Z][a-zA-Z0-9+.-]*):)?" + r"(?://([^\\/?#]*))?" 
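+    # Above: optional scheme (group 1) and authority (group 2).
+    # Below: path (group 3), query (group 4), fragment (group 5).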
+ r"([^?#]*)" + r"(?:\?([^#]*))?" + r"(?:#(.*))?$", + re.UNICODE | re.DOTALL, +) + +IPV4_PAT = r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}" +HEX_PAT = "[0-9A-Fa-f]{1,4}" +LS32_PAT = "(?:{hex}:{hex}|{ipv4})".format(hex=HEX_PAT, ipv4=IPV4_PAT) +_subs = {"hex": HEX_PAT, "ls32": LS32_PAT} +_variations = [ + # 6( h16 ":" ) ls32 + "(?:%(hex)s:){6}%(ls32)s", + # "::" 5( h16 ":" ) ls32 + "::(?:%(hex)s:){5}%(ls32)s", + # [ h16 ] "::" 4( h16 ":" ) ls32 + "(?:%(hex)s)?::(?:%(hex)s:){4}%(ls32)s", + # [ *1( h16 ":" ) h16 ] "::" 3( h16 ":" ) ls32 + "(?:(?:%(hex)s:)?%(hex)s)?::(?:%(hex)s:){3}%(ls32)s", + # [ *2( h16 ":" ) h16 ] "::" 2( h16 ":" ) ls32 + "(?:(?:%(hex)s:){0,2}%(hex)s)?::(?:%(hex)s:){2}%(ls32)s", + # [ *3( h16 ":" ) h16 ] "::" h16 ":" ls32 + "(?:(?:%(hex)s:){0,3}%(hex)s)?::%(hex)s:%(ls32)s", + # [ *4( h16 ":" ) h16 ] "::" ls32 + "(?:(?:%(hex)s:){0,4}%(hex)s)?::%(ls32)s", + # [ *5( h16 ":" ) h16 ] "::" h16 + "(?:(?:%(hex)s:){0,5}%(hex)s)?::%(hex)s", + # [ *6( h16 ":" ) h16 ] "::" + "(?:(?:%(hex)s:){0,6}%(hex)s)?::", +] + +UNRESERVED_PAT = r"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._!\-~" +IPV6_PAT = "(?:" + "|".join([x % _subs for x in _variations]) + ")" +ZONE_ID_PAT = "(?:%25|%)(?:[" + UNRESERVED_PAT + "]|%[a-fA-F0-9]{2})+" +IPV6_ADDRZ_PAT = r"\[" + IPV6_PAT + r"(?:" + ZONE_ID_PAT + r")?\]" +REG_NAME_PAT = r"(?:[^\[\]%:/?#]|%[a-fA-F0-9]{2})*" +TARGET_RE = re.compile(r"^(/[^?#]*)(?:\?([^#]*))?(?:#.*)?$") + +IPV4_RE = re.compile("^" + IPV4_PAT + "$") +IPV6_RE = re.compile("^" + IPV6_PAT + "$") +IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT + "$") +BRACELESS_IPV6_ADDRZ_RE = re.compile("^" + IPV6_ADDRZ_PAT[2:-2] + "$") +ZONE_ID_RE = re.compile("(" + ZONE_ID_PAT + r")\]$") + +_HOST_PORT_PAT = ("^(%s|%s|%s)(?::([0-9]{0,5}))?$") % ( + REG_NAME_PAT, + IPV4_PAT, + IPV6_ADDRZ_PAT, +) +_HOST_PORT_RE = re.compile(_HOST_PORT_PAT, re.UNICODE | re.DOTALL) + +UNRESERVED_CHARS = set( + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789._-~" +) +SUB_DELIM_CHARS = set("!$&'()*+,;=") +USERINFO_CHARS = UNRESERVED_CHARS | SUB_DELIM_CHARS | {":"} +PATH_CHARS = USERINFO_CHARS | {"@", "/"} +QUERY_CHARS = FRAGMENT_CHARS = PATH_CHARS | {"?"} + + +class Url(namedtuple("Url", url_attrs)): + """ + Data structure for representing an HTTP URL. Used as a return value for + :func:`parse_url`. Both the scheme and host are normalized as they are + both case-insensitive according to RFC 3986. + """ + + __slots__ = () + + def __new__( + cls, + scheme=None, + auth=None, + host=None, + port=None, + path=None, + query=None, + fragment=None, + ): + if path and not path.startswith("/"): + path = "/" + path + if scheme is not None: + scheme = scheme.lower() + return super(Url, cls).__new__( + cls, scheme, auth, host, port, path, query, fragment + ) + + @property + def hostname(self): + """For backwards-compatibility with urlparse. We're nice like that.""" + return self.host + + @property + def request_uri(self): + """Absolute path including the query string.""" + uri = self.path or "/" + + if self.query is not None: + uri += "?" + self.query + + return uri + + @property + def netloc(self): + """Network location including host and port""" + if self.port: + return "%s:%d" % (self.host, self.port) + return self.host + + @property + def url(self): + """ + Convert self into a url + + This function should more or less round-trip with :func:`.parse_url`. 
The + returned url may not be exactly the same as the url inputted to + :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls + with a blank port will have : removed). + + Example: :: + + >>> U = parse_url('http://google.com/mail/') + >>> U.url + 'http://google.com/mail/' + >>> Url('http', 'username:password', 'host.com', 80, + ... '/path', 'query', 'fragment').url + 'http://username:password@host.com:80/path?query#fragment' + """ + scheme, auth, host, port, path, query, fragment = self + url = u"" + + # We use "is not None" we want things to happen with empty strings (or 0 port) + if scheme is not None: + url += scheme + u"://" + if auth is not None: + url += auth + u"@" + if host is not None: + url += host + if port is not None: + url += u":" + str(port) + if path is not None: + url += path + if query is not None: + url += u"?" + query + if fragment is not None: + url += u"#" + fragment + + return url + + def __str__(self): + return self.url + + +def split_first(s, delims): + """ + .. deprecated:: 1.25 + + Given a string and an iterable of delimiters, split on the first found + delimiter. Return two split parts and the matched delimiter. + + If not found, then the first part is the full input string. + + Example:: + + >>> split_first('foo/bar?baz', '?/=') + ('foo', 'bar?baz', '/') + >>> split_first('foo/bar?baz', '123') + ('foo/bar?baz', '', None) + + Scales linearly with number of delims. Not ideal for large number of delims. + """ + min_idx = None + min_delim = None + for d in delims: + idx = s.find(d) + if idx < 0: + continue + + if min_idx is None or idx < min_idx: + min_idx = idx + min_delim = d + + if min_idx is None or min_idx < 0: + return s, "", None + + return s[:min_idx], s[min_idx + 1 :], min_delim + + +def _encode_invalid_chars(component, allowed_chars, encoding="utf-8"): + """Percent-encodes a URI component without reapplying + onto an already percent-encoded component. + """ + if component is None: + return component + + component = six.ensure_text(component) + + # Normalize existing percent-encoded bytes. + # Try to see if the component we're encoding is already percent-encoded + # so we can skip all '%' characters but still encode all others. + component, percent_encodings = PERCENT_RE.subn( + lambda match: match.group(0).upper(), component + ) + + uri_bytes = component.encode("utf-8", "surrogatepass") + is_percent_encoded = percent_encodings == uri_bytes.count(b"%") + encoded_component = bytearray() + + for i in range(0, len(uri_bytes)): + # Will return a single character bytestring on both Python 2 & 3 + byte = uri_bytes[i : i + 1] + byte_ord = ord(byte) + if (is_percent_encoded and byte == b"%") or ( + byte_ord < 128 and byte.decode() in allowed_chars + ): + encoded_component += byte + continue + encoded_component.extend(b"%" + (hex(byte_ord)[2:].encode().zfill(2).upper())) + + return encoded_component.decode(encoding) + + +def _remove_path_dot_segments(path): + # See http://tools.ietf.org/html/rfc3986#section-5.2.4 for pseudo-code + segments = path.split("/") # Turn the path into a list of segments + output = [] # Initialize the variable to use to store output + + for segment in segments: + # '.' 
is the current directory, so ignore it, it is superfluous + if segment == ".": + continue + # Anything other than '..', should be appended to the output + elif segment != "..": + output.append(segment) + # In this case segment == '..', if we can, we should pop the last + # element + elif output: + output.pop() + + # If the path starts with '/' and the output is empty or the first string + # is non-empty + if path.startswith("/") and (not output or output[0]): + output.insert(0, "") + + # If the path starts with '/.' or '/..' ensure we add one more empty + # string to add a trailing '/' + if path.endswith(("/.", "/..")): + output.append("") + + return "/".join(output) + + +def _normalize_host(host, scheme): + if host: + if isinstance(host, six.binary_type): + host = six.ensure_str(host) + + if scheme in NORMALIZABLE_SCHEMES: + is_ipv6 = IPV6_ADDRZ_RE.match(host) + if is_ipv6: + match = ZONE_ID_RE.search(host) + if match: + start, end = match.span(1) + zone_id = host[start:end] + + if zone_id.startswith("%25") and zone_id != "%25": + zone_id = zone_id[3:] + else: + zone_id = zone_id[1:] + zone_id = "%" + _encode_invalid_chars(zone_id, UNRESERVED_CHARS) + return host[:start].lower() + zone_id + host[end:] + else: + return host.lower() + elif not IPV4_RE.match(host): + return six.ensure_str( + b".".join([_idna_encode(label) for label in host.split(".")]) + ) + return host + + +def _idna_encode(name): + if name and any([ord(x) > 128 for x in name]): + try: + import idna + except ImportError: + six.raise_from( + LocationParseError("Unable to parse URL without the 'idna' module"), + None, + ) + try: + return idna.encode(name.lower(), strict=True, std3_rules=True) + except idna.IDNAError: + six.raise_from( + LocationParseError(u"Name '%s' is not a valid IDNA label" % name), None + ) + return name.lower().encode("ascii") + + +def _encode_target(target): + """Percent-encodes a request target so that there are no invalid characters""" + path, query = TARGET_RE.match(target).groups() + target = _encode_invalid_chars(path, PATH_CHARS) + query = _encode_invalid_chars(query, QUERY_CHARS) + if query is not None: + target += "?" + query + return target + + +def parse_url(url): + """ + Given a url, return a parsed :class:`.Url` namedtuple. Best-effort is + performed to parse incomplete urls. Fields not provided will be None. + This parser is RFC 3986 compliant. + + The parser logic and helper functions are based heavily on + work done in the ``rfc3986`` module. + + :param str url: URL to parse into a :class:`.Url` namedtuple. + + Partly backwards-compatible with :mod:`urlparse`. + + Example:: + + >>> parse_url('http://google.com/mail/') + Url(scheme='http', host='google.com', port=None, path='/mail/', ...) + >>> parse_url('google.com:80') + Url(scheme=None, host='google.com', port=80, path=None, ...) + >>> parse_url('/foo?bar') + Url(scheme=None, host=None, port=None, path='/foo', query='bar', ...) 
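+
+    Ports outside the 0-65535 range, or hosts that cannot be parsed,
+    raise :class:`~urllib3.exceptions.LocationParseError`.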
+ """ + if not url: + # Empty + return Url() + + source_url = url + if not SCHEME_RE.search(url): + url = "//" + url + + try: + scheme, authority, path, query, fragment = URI_RE.match(url).groups() + normalize_uri = scheme is None or scheme.lower() in NORMALIZABLE_SCHEMES + + if scheme: + scheme = scheme.lower() + + if authority: + auth, _, host_port = authority.rpartition("@") + auth = auth or None + host, port = _HOST_PORT_RE.match(host_port).groups() + if auth and normalize_uri: + auth = _encode_invalid_chars(auth, USERINFO_CHARS) + if port == "": + port = None + else: + auth, host, port = None, None, None + + if port is not None: + port = int(port) + if not (0 <= port <= 65535): + raise LocationParseError(url) + + host = _normalize_host(host, scheme) + + if normalize_uri and path: + path = _remove_path_dot_segments(path) + path = _encode_invalid_chars(path, PATH_CHARS) + if normalize_uri and query: + query = _encode_invalid_chars(query, QUERY_CHARS) + if normalize_uri and fragment: + fragment = _encode_invalid_chars(fragment, FRAGMENT_CHARS) + + except (ValueError, AttributeError): + return six.raise_from(LocationParseError(source_url), None) + + # For the sake of backwards compatibility we put empty + # string values for path if there are any defined values + # beyond the path in the URL. + # TODO: Remove this when we break backwards compatibility. + if not path: + if query is not None or fragment is not None: + path = "" + else: + path = None + + # Ensure that each part of the URL is a `str` for + # backwards compatibility. + if isinstance(url, six.text_type): + ensure_func = six.ensure_text + else: + ensure_func = six.ensure_str + + def ensure_type(x): + return x if x is None else ensure_func(x) + + return Url( + scheme=ensure_type(scheme), + auth=ensure_type(auth), + host=ensure_type(host), + port=port, + path=ensure_type(path), + query=ensure_type(query), + fragment=ensure_type(fragment), + ) + + +def get_host(url): + """ + Deprecated. Use :func:`parse_url` instead. + """ + p = parse_url(url) + return p.scheme or "http", p.hostname, p.port diff --git a/src/snowflake/connector/vendored/urllib3/util/wait.py b/src/snowflake/connector/vendored/urllib3/util/wait.py new file mode 100644 index 000000000..c280646c7 --- /dev/null +++ b/src/snowflake/connector/vendored/urllib3/util/wait.py @@ -0,0 +1,153 @@ +import errno +import select +import sys +from functools import partial + +try: + from time import monotonic +except ImportError: + from time import time as monotonic + +__all__ = ["NoWayToWaitForSocketError", "wait_for_read", "wait_for_write"] + + +class NoWayToWaitForSocketError(Exception): + pass + + +# How should we wait on sockets? +# +# There are two types of APIs you can use for waiting on sockets: the fancy +# modern stateful APIs like epoll/kqueue, and the older stateless APIs like +# select/poll. The stateful APIs are more efficient when you have a lots of +# sockets to keep track of, because you can set them up once and then use them +# lots of times. But we only ever want to wait on a single socket at a time +# and don't want to keep track of state, so the stateless APIs are actually +# more efficient. So we want to use select() or poll(). +# +# Now, how do we choose between select() and poll()? On traditional Unixes, +# select() has a strange calling convention that makes it slow, or fail +# altogether, for high-numbered file descriptors. The point of poll() is to fix +# that, so on Unixes, we prefer poll(). 
+# +# On Windows, there is no poll() (or at least Python doesn't provide a wrapper +# for it), but that's OK, because on Windows, select() doesn't have this +# strange calling convention; plain select() works fine. +# +# So: on Windows we use select(), and everywhere else we use poll(). We also +# fall back to select() in case poll() is somehow broken or missing. + +if sys.version_info >= (3, 5): + # Modern Python, that retries syscalls by default + def _retry_on_intr(fn, timeout): + return fn(timeout) + + +else: + # Old and broken Pythons. + def _retry_on_intr(fn, timeout): + if timeout is None: + deadline = float("inf") + else: + deadline = monotonic() + timeout + + while True: + try: + return fn(timeout) + # OSError for 3 <= pyver < 3.5, select.error for pyver <= 2.7 + except (OSError, select.error) as e: + # 'e.args[0]' incantation works for both OSError and select.error + if e.args[0] != errno.EINTR: + raise + else: + timeout = deadline - monotonic() + if timeout < 0: + timeout = 0 + if timeout == float("inf"): + timeout = None + continue + + +def select_wait_for_socket(sock, read=False, write=False, timeout=None): + if not read and not write: + raise RuntimeError("must specify at least one of read=True, write=True") + rcheck = [] + wcheck = [] + if read: + rcheck.append(sock) + if write: + wcheck.append(sock) + # When doing a non-blocking connect, most systems signal success by + # marking the socket writable. Windows, though, signals success by marked + # it as "exceptional". We paper over the difference by checking the write + # sockets for both conditions. (The stdlib selectors module does the same + # thing.) + fn = partial(select.select, rcheck, wcheck, wcheck) + rready, wready, xready = _retry_on_intr(fn, timeout) + return bool(rready or wready or xready) + + +def poll_wait_for_socket(sock, read=False, write=False, timeout=None): + if not read and not write: + raise RuntimeError("must specify at least one of read=True, write=True") + mask = 0 + if read: + mask |= select.POLLIN + if write: + mask |= select.POLLOUT + poll_obj = select.poll() + poll_obj.register(sock, mask) + + # For some reason, poll() takes timeout in milliseconds + def do_poll(t): + if t is not None: + t *= 1000 + return poll_obj.poll(t) + + return bool(_retry_on_intr(do_poll, timeout)) + + +def null_wait_for_socket(*args, **kwargs): + raise NoWayToWaitForSocketError("no select-equivalent available") + + +def _have_working_poll(): + # Apparently some systems have a select.poll that fails as soon as you try + # to use it, either due to strange configuration or broken monkeypatching + # from libraries like eventlet/greenlet. + try: + poll_obj = select.poll() + _retry_on_intr(poll_obj.poll, 0) + except (AttributeError, OSError): + return False + else: + return True + + +def wait_for_socket(*args, **kwargs): + # We delay choosing which implementation to use until the first time we're + # called. We could do it at import time, but then we might make the wrong + # decision if someone goes wild with monkeypatching select.poll after + # we're imported. + global wait_for_socket + if _have_working_poll(): + wait_for_socket = poll_wait_for_socket + elif hasattr(select, "select"): + wait_for_socket = select_wait_for_socket + else: # Platform-specific: Appengine. + wait_for_socket = null_wait_for_socket + return wait_for_socket(*args, **kwargs) + + +def wait_for_read(sock, timeout=None): + """Waits for reading to be available on a given socket. + Returns True if the socket is readable, or False if the timeout expired. 
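+    A ``timeout`` of None means wait indefinitely.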
+ """ + return wait_for_socket(sock, read=True, timeout=timeout) + + +def wait_for_write(sock, timeout=None): + """Waits for writing to be available on a given socket. + Returns True if the socket is readable, or False if the timeout expired. + """ + return wait_for_socket(sock, write=True, timeout=timeout) diff --git a/version.py b/src/snowflake/connector/version.py similarity index 75% rename from version.py rename to src/snowflake/connector/version.py index b20443bcc..e84a5a385 100644 --- a/version.py +++ b/src/snowflake/connector/version.py @@ -1,3 +1,3 @@ # Update this for the versions # Don't change the forth version number from None -VERSION = (1, 9, 0, None) +VERSION = (2, 7, 7, None) diff --git a/ssl_wrap_socket.py b/ssl_wrap_socket.py deleted file mode 100644 index 7b8213a97..000000000 --- a/ssl_wrap_socket.py +++ /dev/null @@ -1,441 +0,0 @@ -# -*- coding: utf-8 -*- -# -# SSL wrap socket for PyOpenSSL. -# Mostly copied from -# -# https://github.com/shazow/urllib3/blob/master/urllib3/contrib/pyopenssl.py -# -# and added OCSP validator on the top. -# - -""" -OCSP Mode: FAIL_OPEN, FAIL_CLOSED or INSECURE -""" -from .constants import OCSPMode - -FEATURE_OCSP_MODE = OCSPMode.FAIL_OPEN - -""" -OCSP Response cache file name -""" -FEATURE_OCSP_RESPONSE_CACHE_FILE_NAME = None - -import logging -import ssl -import sys -import time -from socket import error as SocketError -from socket import (socket, timeout) - -import OpenSSL.SSL -from botocore.vendored.requests.packages.urllib3 import connection \ - as urllib3_connection -from botocore.vendored.requests.packages.urllib3 import util \ - as urllib3_util -from cryptography import x509 -from cryptography.hazmat.backends.openssl import backend as openssl_backend -from cryptography.hazmat.backends.openssl.x509 import _Certificate - -from .compat import PY2 -from .errorcode import (ER_SERVER_CERTIFICATE_REVOKED) -from .errors import (OperationalError) -from .ssl_wrap_util import wait_for_read, wait_for_write - -try: # Platform-specific: Python 2 - from socket import _fileobject -except ImportError: # Platform-specific: Python 3 - _fileobject = None - from .backport_makefile import backport_makefile - -# Map from urllib3 to PyOpenSSL compatible parameter-values. -_openssl_versions = { - ssl.PROTOCOL_SSLv23: OpenSSL.SSL.SSLv23_METHOD, - ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD, -} -if hasattr(ssl, 'PROTOCOL_TLSv1_1') and hasattr(OpenSSL.SSL, 'TLSv1_1_METHOD'): - _openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD - -if hasattr(ssl, 'PROTOCOL_TLSv1_2') and hasattr(OpenSSL.SSL, 'TLSv1_2_METHOD'): - _openssl_versions[ssl.PROTOCOL_TLSv1_2] = OpenSSL.SSL.TLSv1_2_METHOD - -try: - _openssl_versions.update({ssl.PROTOCOL_SSLv3: OpenSSL.SSL.SSLv3_METHOD}) -except AttributeError: - pass - -_stdlib_to_openssl_verify = { - ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE, - ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER, - ssl.CERT_REQUIRED: - OpenSSL.SSL.VERIFY_PEER + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT, -} -_openssl_to_stdlib_verify = dict( - (v, k) for k, v in _stdlib_to_openssl_verify.items() -) - -# OpenSSL will only write 16K at a time -SSL_WRITE_BLOCKSIZE = 16384 - -log = logging.getLogger(__name__) - - -def inject_into_urllib3(): - """ - Monkey-patch urllib3 with PyOpenSSL-backed SSL-support and OCSP. 
- """ - log.debug(u'Injecting ssl_wrap_socket_with_ocsp') - urllib3_connection.ssl_wrap_socket = ssl_wrap_socket_with_ocsp - - -def _dnsname_to_stdlib(name): - """ - Converts a dNSName SubjectAlternativeName field to the form used by the - standard library on the given Python version. - - Cryptography produces a dNSName as a unicode string that was idna-decoded - from ASCII bytes. We need to idna-encode that string to get it back, and - then on Python 3 we also need to convert to unicode via UTF-8 (the stdlib - uses PyUnicode_FromStringAndSize on it, which decodes via UTF-8). - """ - - def idna_encode(name): - """ - Borrowed wholesale from the Python Cryptography Project. It turns out - that we can't just safely call `idna.encode`: it can explode for - wildcard names. This avoids that problem. - """ - import idna - - for prefix in [u'*.', u'.']: - if name.startswith(prefix): - name = name[len(prefix):] - return prefix.encode('ascii') + idna.encode(name) - return idna.encode(name) - - name = idna_encode(name) - if sys.version_info >= (3, 0): - name = name.decode('utf-8') - return name - - -def get_subj_alt_name(peer_cert): - """ - Given an PyOpenSSL certificate, provides all the subject alternative names. - """ - # Pass the cert to cryptography, which has much better APIs for this. - if hasattr(peer_cert, "to_cryptography"): - cert = peer_cert.to_cryptography() - else: - # This is technically using private APIs, but should work across all - # relevant versions before PyOpenSSL got a proper API for this. - cert = _Certificate(openssl_backend, peer_cert._x509) - - # We want to find the SAN extension. Ask Cryptography to locate it (it's - # faster than looping in Python) - try: - ext = cert.extensions.get_extension_for_class( - x509.SubjectAlternativeName - ).value - except x509.ExtensionNotFound: - # No such extension, return the empty list. - return [] - except (x509.DuplicateExtension, - x509.UnsupportedGeneralNameType, UnicodeError) as e: - # A problem has been found with the quality of the certificate. Assume - # no SAN field is present. - log.warning( - "A problem was encountered with the certificate that prevented " - "urllib3 from finding the SubjectAlternativeName field. This can " - "affect certificate validation. The error was %s", - e, - ) - return [] - - # We want to return dNSName and iPAddress fields. We need to cast the IPs - # back to strings because the match_hostname function wants them as - # strings. - # Sadly the DNS names need to be idna encoded and then, on Python 3, UTF-8 - # decoded. This is pretty frustrating, but that's what the standard library - # does with certificates, and so we need to attempt to do the same. - names = [ - ('DNS', _dnsname_to_stdlib(name)) - for name in ext.get_values_for_type(x509.DNSName) - ] - names.extend( - ('IP Address', str(name)) - for name in ext.get_values_for_type(x509.IPAddress) - ) - - return names - - -class WrappedSocket(object): - '''API-compatibility wrapper for Python OpenSSL's Connection-class. - - Note: _makefile_refs, _drop() and _reuse() are needed for the garbage - collector of pypy. 
- ''' - - def __init__(self, connection, socket, suppress_ragged_eofs=True): - self.connection = connection - self.socket = socket - self.suppress_ragged_eofs = suppress_ragged_eofs - self._makefile_refs = 0 - self._closed = False - - def fileno(self): - return self.socket.fileno() - - # Copy-pasted from Python 3.5 source code - def _decref_socketios(self): - if self._makefile_refs > 0: - self._makefile_refs -= 1 - if self._closed: - self.close() - - def recv(self, *args, **kwargs): - try: - data = self.connection.recv(*args, **kwargs) - except OpenSSL.SSL.SysCallError as e: - if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'): - return b'' - else: - raise SocketError(str(e)) - except OpenSSL.SSL.ZeroReturnError: - if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN: - return b'' - else: - raise - except OpenSSL.SSL.WantReadError: - rd = wait_for_read(self.socket, self.socket.gettimeout()) - if not rd: - raise timeout('The read operation timed out') - else: - return self.recv(*args, **kwargs) - else: - return data - - def recv_into(self, *args, **kwargs): - try: - return self.connection.recv_into(*args, **kwargs) - except OpenSSL.SSL.SysCallError as e: - if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'): - return 0 - else: - raise SocketError(str(e)) - except OpenSSL.SSL.ZeroReturnError: - if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN: - return 0 - else: - raise - except OpenSSL.SSL.WantReadError: - rd = wait_for_read(self.socket, self.socket.gettimeout()) - if not rd: - raise timeout('The read operation timed out') - else: - return self.recv_into(*args, **kwargs) - - def settimeout(self, timeout): - return self.socket.settimeout(timeout) - - def _send_until_done(self, data): - while True: - try: - if PY2 and isinstance(data, unicode): - data = data.encode('utf-8') - return self.connection.send(data) - except OpenSSL.SSL.WantWriteError: - wr = wait_for_write(self.socket, self.socket.gettimeout()) - if not wr: - raise timeout() - continue - except OpenSSL.SSL.SysCallError as e: - raise SocketError(str(e)) - - def sendall(self, data): - total_sent = 0 - while total_sent < len(data): - sent = self._send_until_done( - data[total_sent:total_sent + SSL_WRITE_BLOCKSIZE]) - total_sent += sent - - def shutdown(self): - # FIXME rethrow compatible exceptions should we ever use this - self.connection.shutdown() - - def close(self): - if self._makefile_refs < 1: - try: - self._closed = True - return self.connection.close() - except OpenSSL.SSL.Error: - return - else: - self._makefile_refs -= 1 - - def getpeercert(self, binary_form=False): - x509 = self.connection.get_peer_certificate() - - if not x509: - return x509 - - if binary_form: - return OpenSSL.crypto.dump_certificate( - OpenSSL.crypto.FILETYPE_ASN1, - x509) - - return { - 'subject': ( - (('commonName', x509.get_subject().CN),), - ), - 'subjectAltName': get_subj_alt_name(x509) - } - - def _reuse(self): - self._makefile_refs += 1 - - def _drop(self): - if self._makefile_refs < 1: - self.close() - else: - self._makefile_refs -= 1 - - -if _fileobject: # Platform-specific: Python 2 - def makefile(self, mode, bufsize=-1): - self._makefile_refs += 1 - return _fileobject(self, mode, bufsize, close=True) -else: # Platform-specific: Python 3 - makefile = backport_makefile - -WrappedSocket.makefile = makefile - -DEFAULT_SSL_CIPHER_LIST = urllib3_util.ssl_.DEFAULT_CIPHERS -if isinstance(DEFAULT_SSL_CIPHER_LIST, str): - DEFAULT_SSL_CIPHER_LIST = DEFAULT_SSL_CIPHER_LIST.encode('utf-8') - - 
-def ssl_wrap_socket( - sock, keyfile=None, certfile=None, cert_reqs=None, - ca_certs=None, server_hostname=None, ssl_version=None): - ctx = OpenSSL.SSL.Context(_openssl_versions[ssl_version]) - if certfile: - # Match behaviour of the normal python ssl library - keyfile = keyfile or certfile - ctx.use_certificate_file(certfile) - if keyfile: - ctx.use_privatekey_file(keyfile) - if cert_reqs != ssl.CERT_NONE: - ctx.set_verify(_stdlib_to_openssl_verify[cert_reqs], _verify_callback) - if ca_certs: - try: - ctx.load_verify_locations(ca_certs, None) - except OpenSSL.SSL.Error as e: - raise ssl.SSLError('bad ca_certs: %r' % ca_certs, e) - else: - ctx.set_default_verify_paths() - - # Disable TLS compression to mitigate CRIME attack (issue #309) - OP_NO_COMPRESSION = 0x20000 - ctx.set_options(OP_NO_COMPRESSION) - - # Set list of supported ciphersuites. - ctx.set_cipher_list(DEFAULT_SSL_CIPHER_LIST) - - cnx = OpenSSL.SSL.Connection(ctx, sock) - cnx.set_tlsext_host_name(server_hostname.encode(u'utf-8')) - cnx.set_connect_state() - - while True: - try: - cnx.do_handshake() - except OpenSSL.SSL.WantReadError: - rd = wait_for_read(sock, sock.gettimeout()) - if not rd: - raise timeout('select timed out') - continue - except OpenSSL.SSL.Error as e: - raise ssl.SSLError('bad handshake: %r' % e) - break - - return WrappedSocket(cnx, sock) - - -def _verify_callback(cnx, x509, err_no, err_depth, return_code): - # NOTE: this cannot be used to verify certificate revocation status. - # because get_cert_peer_chain returns None for some reason. - return err_no == 0 - - -def ssl_wrap_socket_with_ocsp( - sock, keyfile=None, certfile=None, cert_reqs=None, - ca_certs=None, server_hostname=None, ssl_version=None): - ret = ssl_wrap_socket( - sock, keyfile=keyfile, certfile=certfile, cert_reqs=cert_reqs, - ca_certs=ca_certs, server_hostname=server_hostname, - ssl_version=ssl_version) - global FEATURE_OCSP_MODE - global FEATURE_OCSP_RESPONSE_CACHE_FILE_NAME - - if PY2: - # Python 2 uses pyasn1 for workaround. For some reason, asn1crypto - # fails to parse OCSP response in Python 2. - from .ocsp_pyasn1 import SnowflakeOCSPPyasn1 as SFOCSP - else: - from .ocsp_asn1crypto import SnowflakeOCSPAsn1Crypto as SFOCSP - - log.debug(u'OCSP Mode: %s, ' - u'OCSP response cache file name: %s', - FEATURE_OCSP_MODE.name, - FEATURE_OCSP_RESPONSE_CACHE_FILE_NAME) - if FEATURE_OCSP_MODE != OCSPMode.INSECURE: - v = SFOCSP( - ocsp_response_cache_uri=FEATURE_OCSP_RESPONSE_CACHE_FILE_NAME, - use_fail_open=FEATURE_OCSP_MODE == OCSPMode.FAIL_OPEN - ).validate(server_hostname, ret.connection) - if not v: - raise OperationalError( - msg=( - u'The certificate is revoked or ' - u'could not be validated: hostname={0}'.format( - server_hostname)), - errno=ER_SERVER_CERTIFICATE_REVOKED) - else: - log.info(u'THIS CONNECTION IS IN INSECURE ' - u'MODE. IT MEANS THE CERTIFICATE WILL BE ' - u'VALIDATED BUT THE CERTIFICATE REVOCATION ' - u'STATUS WILL NOT BE CHECKED.') - - return ret - - -def _openssl_connect(hostname, port=443, max_retry=20): - """ - OpenSSL connection without validating certificates. This is used to diagnose - SSL issues. 
-    """
-    err = None
-    sleeping_time = 1
-    for retry in range(max_retry):
-        try:
-            client = socket()
-            # client.settimeout(5)
-            client.connect((hostname, port))
-            client_ssl = OpenSSL.SSL.Connection(
-                OpenSSL.SSL.Context(OpenSSL.SSL.SSLv23_METHOD), client)
-            client_ssl.set_connect_state()
-            client_ssl.set_tlsext_host_name(hostname.encode('utf-8'))
-            client_ssl.do_handshake()
-            return client_ssl
-        except Exception as ex:
-            if isinstance(ex, OpenSSL.SSL.SysCallError) or \
-                    ex.__class__.__name__ == 'TimeoutError':
-                err = ex
-                sleeping_time *= 2
-                if sleeping_time > 16:
-                    sleeping_time = 16
-                time.sleep(sleeping_time)
-            else:
-                raise ex
-    if err:
-        raise err
diff --git a/ssl_wrap_util.py b/ssl_wrap_util.py
deleted file mode 100644
index 13ad01a47..000000000
--- a/ssl_wrap_util.py
+++ /dev/null
@@ -1,626 +0,0 @@
-#
-# SSL wrap util for PyOpenSSL.
-
-# Copied from:
-#
-# https://github.com/shazow/urllib3/tree/master/urllib3/util
-#
-# to support ssl_wrap_socket.
-#
-
-# Backport of selectors.py from Python 3.5+ to support Python < 3.4
-# Also has the behavior specified in PEP 475 which is to retry syscalls
-# in the case of an EINTR error. This module is required because selectors34
-# does not follow this behavior and instead returns that no file descriptor
-# events have occurred rather than retry the syscall. The decision to drop
-# support for select.devpoll is made to maintain 100% test coverage.
-
-import errno
-import math
-import select
-import socket
-import sys
-import time
-from collections import namedtuple
-from .compat import MAPPING
-
-try:
-    monotonic = time.monotonic
-except (AttributeError, ImportError):  # Python 3.3<
-    monotonic = time.time
-
-EVENT_READ = (1 << 0)
-EVENT_WRITE = (1 << 1)
-
-HAS_SELECT = True  # Variable that shows whether the platform has a selector.
-_SYSCALL_SENTINEL = object()  # Sentinel in case a system call returns None.
-_DEFAULT_SELECTOR = None
-
-
-class SelectorError(Exception):
-    def __init__(self, errcode):
-        super(SelectorError, self).__init__()
-        self.errno = errcode
-
-    def __repr__(self):
-        return "<SelectorError errno={0}>".format(self.errno)
-
-    def __str__(self):
-        return self.__repr__()
-
-
-def _fileobj_to_fd(fileobj):
-    """ Return a file descriptor from a file object. If
-    given an integer will simply return that integer back. """
-    if isinstance(fileobj, int):
-        fd = fileobj
-    else:
-        try:
-            fd = int(fileobj.fileno())
-        except (AttributeError, TypeError, ValueError):
-            raise ValueError("Invalid file object: {0!r}".format(fileobj))
-    if fd < 0:
-        raise ValueError("Invalid file descriptor: {0}".format(fd))
-    return fd
-
-
-# Determine which function to use to wrap system calls because Python 3.5+
-# already handles the case when system calls are interrupted.
-if sys.version_info >= (3, 5):
-    def _syscall_wrapper(func, _, *args, **kwargs):
-        """ This is the short-circuit version of the below logic
-        because in Python 3.5+ all system calls automatically restart
-        and recalculate their timeouts. """
-        try:
-            return func(*args, **kwargs)
-        except (OSError, IOError, select.error) as e:
-            errcode = None
-            if hasattr(e, "errno"):
-                errcode = e.errno
-            raise SelectorError(errcode)
-else:
-    def _syscall_wrapper(func, recalc_timeout, *args, **kwargs):
-        """ Wrapper function for syscalls that could fail due to EINTR.
-        All functions should be retried if there is time left in the timeout
-        in accordance with PEP 475.
""" - timeout = kwargs.get("timeout", None) - if timeout is None: - expires = None - recalc_timeout = False - else: - timeout = float(timeout) - if timeout < 0.0: # Timeout less than 0 treated as no timeout. - expires = None - else: - expires = monotonic() + timeout - - args = list(args) - if recalc_timeout and "timeout" not in kwargs: - raise ValueError( - "Timeout must be in args or kwargs to be recalculated") - - result = _SYSCALL_SENTINEL - while result is _SYSCALL_SENTINEL: - try: - result = func(*args, **kwargs) - # OSError is thrown by select.select - # IOError is thrown by select.epoll.poll - # select.error is thrown by select.poll.poll - # Aren't we thankful for Python 3.x rework for exceptions? - except (OSError, IOError, select.error) as e: - # select.error wasn't a subclass of OSError in the past. - errcode = None - if hasattr(e, "errno"): - errcode = e.errno - elif hasattr(e, "args"): - errcode = e.args[0] - - # Also test for the Windows equivalent of EINTR. - is_interrupt = (errcode == errno.EINTR or (hasattr(errno, "WSAEINTR") and - errcode == errno.WSAEINTR)) - - if is_interrupt: - if expires is not None: - current_time = monotonic() - if current_time > expires: - raise OSError(errno=errno.ETIMEDOUT) - if recalc_timeout: - if "timeout" in kwargs: - kwargs["timeout"] = expires - current_time - continue - if errcode: - raise SelectorError(errcode) - else: - raise - return result - - -SelectorKey = namedtuple('SelectorKey', ['fileobj', 'fd', 'events', 'data']) - - -class _SelectorMapping(MAPPING): - """ Mapping of file objects to selector keys """ - - def __init__(self, selector): - self._selector = selector - - def __len__(self): - return len(self._selector._fd_to_key) - - def __getitem__(self, fileobj): - try: - fd = self._selector._fileobj_lookup(fileobj) - return self._selector._fd_to_key[fd] - except KeyError: - raise KeyError("{0!r} is not registered.".format(fileobj)) - - def __iter__(self): - return iter(self._selector._fd_to_key) - - -class BaseSelector(object): - """ Abstract Selector class - - A selector supports registering file objects to be monitored - for specific I/O events. - - A file object is a file descriptor or any object with a - `fileno()` method. An arbitrary object can be attached to the - file object which can be used for example to store context info, - a callback, etc. - - A selector can use various implementations (select(), poll(), epoll(), - and kqueue()) depending on the platform. The 'DefaultSelector' class uses - the most efficient implementation for the current platform. - """ - def __init__(self): - # Maps file descriptors to keys. - self._fd_to_key = {} - - # Read-only mapping returned by get_map() - self._map = _SelectorMapping(self) - - def _fileobj_lookup(self, fileobj): - """ Return a file descriptor from a file object. - This wraps _fileobj_to_fd() to do an exhaustive - search in case the object is invalid but we still - have it in our map. Used by unregister() so we can - unregister an object that was previously registered - even if it is closed. It is also used by _SelectorMapping - """ - try: - return _fileobj_to_fd(fileobj) - except ValueError: - - # Search through all our mapped keys. - for key in self._fd_to_key.values(): - if key.fileobj is fileobj: - return key.fd - - # Raise ValueError after all. - raise - - def register(self, fileobj, events, data=None): - """ Register a file object for a set of events to monitor. 
""" - if (not events) or (events & ~(EVENT_READ | EVENT_WRITE)): - raise ValueError("Invalid events: {0!r}".format(events)) - - key = SelectorKey(fileobj, self._fileobj_lookup(fileobj), events, data) - - if key.fd in self._fd_to_key: - raise KeyError("{0!r} (FD {1}) is already registered" - .format(fileobj, key.fd)) - - self._fd_to_key[key.fd] = key - return key - - def unregister(self, fileobj): - """ Unregister a file object from being monitored. """ - try: - key = self._fd_to_key.pop(self._fileobj_lookup(fileobj)) - except KeyError: - raise KeyError("{0!r} is not registered".format(fileobj)) - - # Getting the fileno of a closed socket on Windows errors with EBADF. - except socket.error as e: # Platform-specific: Windows. - if e.errno != errno.EBADF: - raise - else: - for key in self._fd_to_key.values(): - if key.fileobj is fileobj: - self._fd_to_key.pop(key.fd) - break - else: - raise KeyError("{0!r} is not registered".format(fileobj)) - return key - - def modify(self, fileobj, events, data=None): - """ Change a registered file object monitored events and data. """ - # NOTE: Some subclasses optimize this operation even further. - try: - key = self._fd_to_key[self._fileobj_lookup(fileobj)] - except KeyError: - raise KeyError("{0!r} is not registered".format(fileobj)) - - if events != key.events: - self.unregister(fileobj) - key = self.register(fileobj, events, data) - - elif data != key.data: - # Use a shortcut to update the data. - key = key._replace(data=data) - self._fd_to_key[key.fd] = key - - return key - - def select(self, timeout=None): - """ Perform the actual selection until some monitored file objects - are ready or the timeout expires. """ - raise NotImplementedError() - - def close(self): - """ Close the selector. This must be called to ensure that all - underlying resources are freed. """ - self._fd_to_key.clear() - self._map = None - - def get_key(self, fileobj): - """ Return the key associated with a registered file object. """ - mapping = self.get_map() - if mapping is None: - raise RuntimeError("Selector is closed") - try: - return mapping[fileobj] - except KeyError: - raise KeyError("{0!r} is not registered".format(fileobj)) - - def get_map(self): - """ Return a mapping of file objects to selector keys """ - return self._map - - def _key_from_fd(self, fd): - """ Return the key associated to a given file descriptor - Return None if it is not found. """ - try: - return self._fd_to_key[fd] - except KeyError: - return None - - def __enter__(self): - return self - - def __exit__(self, *args): - self.close() - - -# Almost all platforms have select.select() -if hasattr(select, "select"): - class SelectSelector(BaseSelector): - """ Select-based selector. """ - def __init__(self): - super(SelectSelector, self).__init__() - self._readers = set() - self._writers = set() - - def register(self, fileobj, events, data=None): - key = super(SelectSelector, self).register(fileobj, events, data) - if events & EVENT_READ: - self._readers.add(key.fd) - if events & EVENT_WRITE: - self._writers.add(key.fd) - return key - - def unregister(self, fileobj): - key = super(SelectSelector, self).unregister(fileobj) - self._readers.discard(key.fd) - self._writers.discard(key.fd) - return key - - def _select(self, r, w, timeout=None): - """ Wrapper for select.select because timeout is a positional arg """ - return select.select(r, w, [], timeout) - - def select(self, timeout=None): - # Selecting on empty lists on Windows errors out. 
- if not len(self._readers) and not len(self._writers): - return [] - - timeout = None if timeout is None else max(timeout, 0.0) - ready = [] - r, w, _ = _syscall_wrapper(self._select, True, self._readers, - self._writers, timeout) - r = set(r) - w = set(w) - for fd in r | w: - events = 0 - if fd in r: - events |= EVENT_READ - if fd in w: - events |= EVENT_WRITE - - key = self._key_from_fd(fd) - if key: - ready.append((key, events & key.events)) - return ready - - -if hasattr(select, "poll"): - class PollSelector(BaseSelector): - """ Poll-based selector """ - def __init__(self): - super(PollSelector, self).__init__() - self._poll = select.poll() - - def register(self, fileobj, events, data=None): - key = super(PollSelector, self).register(fileobj, events, data) - event_mask = 0 - if events & EVENT_READ: - event_mask |= select.POLLIN - if events & EVENT_WRITE: - event_mask |= select.POLLOUT - self._poll.register(key.fd, event_mask) - return key - - def unregister(self, fileobj): - key = super(PollSelector, self).unregister(fileobj) - self._poll.unregister(key.fd) - return key - - def _wrap_poll(self, timeout=None): - """ Wrapper function for select.poll.poll() so that - _syscall_wrapper can work with only seconds. """ - if timeout is not None: - if timeout <= 0: - timeout = 0 - else: - # select.poll.poll() has a resolution of 1 millisecond, - # round away from zero to wait *at least* timeout seconds. - timeout = math.ceil(timeout * 1e3) - - result = self._poll.poll(timeout) - return result - - def select(self, timeout=None): - ready = [] - fd_events = _syscall_wrapper(self._wrap_poll, True, timeout=timeout) - for fd, event_mask in fd_events: - events = 0 - if event_mask & ~select.POLLIN: - events |= EVENT_WRITE - if event_mask & ~select.POLLOUT: - events |= EVENT_READ - - key = self._key_from_fd(fd) - if key: - ready.append((key, events & key.events)) - - return ready - - -if hasattr(select, "epoll"): - class EpollSelector(BaseSelector): - """ Epoll-based selector """ - def __init__(self): - super(EpollSelector, self).__init__() - self._epoll = select.epoll() - - def fileno(self): - return self._epoll.fileno() - - def register(self, fileobj, events, data=None): - key = super(EpollSelector, self).register(fileobj, events, data) - events_mask = 0 - if events & EVENT_READ: - events_mask |= select.EPOLLIN - if events & EVENT_WRITE: - events_mask |= select.EPOLLOUT - _syscall_wrapper(self._epoll.register, False, key.fd, events_mask) - return key - - def unregister(self, fileobj): - key = super(EpollSelector, self).unregister(fileobj) - try: - _syscall_wrapper(self._epoll.unregister, False, key.fd) - except SelectorError: - # This can occur when the fd was closed since registry. - pass - return key - - def select(self, timeout=None): - if timeout is not None: - if timeout <= 0: - timeout = 0.0 - else: - # select.epoll.poll() has a resolution of 1 millisecond - # but luckily takes seconds so we don't need a wrapper - # like PollSelector. Just for better rounding. - timeout = math.ceil(timeout * 1e3) * 1e-3 - timeout = float(timeout) - else: - timeout = -1.0 # epoll.poll() must have a float. - - # We always want at least 1 to ensure that select can be called - # with no file descriptors registered. Otherwise will fail. 
- max_events = max(len(self._fd_to_key), 1) - - ready = [] - fd_events = _syscall_wrapper(self._epoll.poll, True, - timeout=timeout, - maxevents=max_events) - for fd, event_mask in fd_events: - events = 0 - if event_mask & ~select.EPOLLIN: - events |= EVENT_WRITE - if event_mask & ~select.EPOLLOUT: - events |= EVENT_READ - - key = self._key_from_fd(fd) - if key: - ready.append((key, events & key.events)) - return ready - - def close(self): - self._epoll.close() - super(EpollSelector, self).close() - - -if hasattr(select, "kqueue"): - class KqueueSelector(BaseSelector): - """ Kqueue / Kevent-based selector """ - def __init__(self): - super(KqueueSelector, self).__init__() - self._kqueue = select.kqueue() - - def fileno(self): - return self._kqueue.fileno() - - def register(self, fileobj, events, data=None): - key = super(KqueueSelector, self).register(fileobj, events, data) - if events & EVENT_READ: - kevent = select.kevent(key.fd, - select.KQ_FILTER_READ, - select.KQ_EV_ADD) - - _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0) - - if events & EVENT_WRITE: - kevent = select.kevent(key.fd, - select.KQ_FILTER_WRITE, - select.KQ_EV_ADD) - - _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0) - - return key - - def unregister(self, fileobj): - key = super(KqueueSelector, self).unregister(fileobj) - if key.events & EVENT_READ: - kevent = select.kevent(key.fd, - select.KQ_FILTER_READ, - select.KQ_EV_DELETE) - try: - _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0) - except SelectorError: - pass - if key.events & EVENT_WRITE: - kevent = select.kevent(key.fd, - select.KQ_FILTER_WRITE, - select.KQ_EV_DELETE) - try: - _syscall_wrapper(self._kqueue.control, False, [kevent], 0, 0) - except SelectorError: - pass - - return key - - def select(self, timeout=None): - if timeout is not None: - timeout = max(timeout, 0) - - max_events = len(self._fd_to_key) * 2 - ready_fds = {} - - kevent_list = _syscall_wrapper(self._kqueue.control, True, - None, max_events, timeout) - - for kevent in kevent_list: - fd = kevent.ident - event_mask = kevent.filter - events = 0 - if event_mask == select.KQ_FILTER_READ: - events |= EVENT_READ - if event_mask == select.KQ_FILTER_WRITE: - events |= EVENT_WRITE - - key = self._key_from_fd(fd) - if key: - if key.fd not in ready_fds: - ready_fds[key.fd] = (key, events & key.events) - else: - old_events = ready_fds[key.fd][1] - ready_fds[key.fd] = (key, (events | old_events) & key.events) - - return list(ready_fds.values()) - - def close(self): - self._kqueue.close() - super(KqueueSelector, self).close() - - -if not hasattr(select, 'select'): # Platform-specific: AppEngine - HAS_SELECT = False - - -def _can_allocate(struct): - """ Checks that select structs can be allocated by the underlying - operating system, not just advertised by the select module. We don't - check select() because we'll be hopeful that most platforms that - don't have it available will not advertise it. (ie: GAE) """ - try: - # select.poll() objects won't fail until used. - if struct == 'poll': - p = select.poll() - p.poll(0) - - # All others will fail on allocation. - else: - getattr(select, struct)().close() - return True - except (OSError, AttributeError): - return False - - -# Choose the best implementation, roughly: -# kqueue == epoll > poll > select. Devpoll not supported. 
(See above) -# select() also can't accept a FD > FD_SETSIZE (usually around 1024) -def DefaultSelector(): - """ This function serves as a first call for DefaultSelector to - detect if the select module is being monkey-patched incorrectly - by eventlet, greenlet, and preserve proper behavior. """ - global _DEFAULT_SELECTOR - if _DEFAULT_SELECTOR is None: - if _can_allocate('kqueue'): - _DEFAULT_SELECTOR = KqueueSelector - elif _can_allocate('epoll'): - _DEFAULT_SELECTOR = EpollSelector - elif _can_allocate('poll'): - _DEFAULT_SELECTOR = PollSelector - elif hasattr(select, 'select'): - _DEFAULT_SELECTOR = SelectSelector - else: # Platform-specific: AppEngine - raise ValueError('Platform does not have a selector') - return _DEFAULT_SELECTOR() - - -def _wait_for_io_events(socks, events, timeout=None): - """ Waits for IO events to be available from a list of sockets - or optionally a single socket if passed in. Returns a list of - sockets that can be interacted with immediately. """ - if not HAS_SELECT: - raise ValueError('Platform does not have a selector') - if not isinstance(socks, list): - # Probably just a single socket. - if hasattr(socks, "fileno"): - socks = [socks] - # Otherwise it might be a non-list iterable. - else: - socks = list(socks) - with DefaultSelector() as selector: - for sock in socks: - selector.register(sock, events) - return [key[0].fileobj for key in - selector.select(timeout) if key[1] & events] - - -def wait_for_read(socks, timeout=None): - """ Waits for reading to be available from a list of sockets - or optionally a single socket if passed in. Returns a list of - sockets that can be read from immediately. """ - return _wait_for_io_events(socks, EVENT_READ, timeout) - - -def wait_for_write(socks, timeout=None): - """ Waits for writing to be available from a list of sockets - or optionally a single socket if passed in. Returns a list of - sockets that can be written to immediately. """ - return _wait_for_io_events(socks, EVENT_WRITE, timeout) diff --git a/telemetry.py b/telemetry.py deleted file mode 100644 index 79b828e67..000000000 --- a/telemetry.py +++ /dev/null @@ -1,124 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2018-2019 Snowflake Computing Inc. All right reserved. 
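An aside on the selector backport deleted above: it mirrors the standard library's `selectors` module, which is why it could be dropped once legacy Python support ended. A rough modern equivalent of the removed `wait_for_read()` helper, sketched for a single socket rather than a list:

```python
import selectors
import socket


def wait_for_read(sock: socket.socket, timeout=None) -> bool:
    """Return True once `sock` is readable, or False if the timeout expires."""
    with selectors.DefaultSelector() as selector:
        selector.register(sock, selectors.EVENT_READ)
        # select() returns the ready (key, events) pairs; empty on timeout.
        return bool(selector.select(timeout))
```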
-# -import logging -from threading import Lock - -logger = logging.getLogger(__name__) - - -class TelemetryField(object): - """ - Fields which can be logged to telemetry - """ - TIME_CONSUME_FIRST_RESULT = "client_time_consume_first_result" - TIME_CONSUME_LAST_RESULT = "client_time_consume_last_result" - TIME_DOWNLOADING_CHUNKS = "client_time_downloading_chunks" - TIME_PARSING_CHUNKS = "client_time_parsing_chunks" - - -class TelemetryData(object): - """ - An instance of telemetry data which can be sent to the server - """ - def __init__(self, message, timestamp): - self.message = message - self.timestamp = timestamp - - def to_dict(self): - return { - 'message': self.message, - 'timestamp': str(self.timestamp) - } - - def __repr__(self): - return str(self.to_dict()) - - -class TelemetryClient(object): - """ - Client to enqueue and send metrics to the telemetry endpoint in batch - """ - SF_PATH_TELEMETRY = "/telemetry/send" - DEFAULT_FORCE_FLUSH_SIZE = 100 - - def __init__(self, rest, flush_size=None): - self._rest = rest - self._log_batch = [] - self._is_closed = False - self._flush_size = \ - flush_size or TelemetryClient.DEFAULT_FORCE_FLUSH_SIZE - self._lock = Lock() - self._enabled = True - - def add_log_to_batch(self, telemetry_data): - if self._is_closed: - raise Exception( - "Attempted to add log when TelemetryClient is closed") - elif not self._enabled: - logger.debug("TelemetryClient disabled. Ignoring log.") - return - - with self._lock: - self._log_batch.append(telemetry_data) - - if len(self._log_batch) >= self._flush_size: - self.send_batch() - - def try_add_log_to_batch(self, telemetry_data): - try: - self.add_log_to_batch(telemetry_data) - except Exception: - logger.warn("Failed to add log to telemetry.", exc_info=True) - - def send_batch(self): - if self._is_closed: - raise Exception( - "Attempted to send batch when TelemetryClient is closed") - elif not self._enabled: - logger.debug("TelemetryClient disabled. Not sending logs.") - return - - with self._lock: - to_send = self._log_batch - self._log_batch = [] - - if not to_send: - logger.debug("Nothing to send to telemetry.") - return - - body = {'logs': [x.to_dict() for x in to_send]} - logger.debug("Sending %d logs to telemetry.", len(body)) - try: - ret = self._rest.request(TelemetryClient.SF_PATH_TELEMETRY, body=body, - method='post', client=None, timeout=5) - if not ret[u'success']: - logger.info( - "Non-success response from telemetry server: %s. " - "Disabling telemetry.", str(ret)) - self._enabled = False - else: - logger.debug("Successfully uploading metrics to telemetry.") - except Exception: - self._enabled = False - logger.debug("Failed to upload metrics to telemetry.", exc_info=True) - - def is_closed(self): - return self._is_closed - - def close(self): - if not self._is_closed: - logger.debug("Closing telemetry client.") - self.send_batch() - self._is_closed = True - - def disable(self): - self._enabled = False - - def is_enabled(self): - return self._enabled - - def buffer_size(self): - return len(self._log_batch) diff --git a/telemetry_oob.py b/telemetry_oob.py deleted file mode 100644 index 0facd1635..000000000 --- a/telemetry_oob.py +++ /dev/null @@ -1,503 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2018-2019 Snowflake Computing Inc. All right reserved. 
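For context on the `telemetry.py` deletion above: the in-band client was moved under `src/snowflake/connector/` rather than removed. A small sketch of the batching contract shown in the deleted copy (assumptions: `rest` is the `SnowflakeRestful` object of an open connection, e.g. `connection.rest`, and `flush_size=2` only keeps the demo short):

```python
import time

from snowflake.connector.telemetry import TelemetryClient, TelemetryData


def record_two_logs(rest):
    """Queue two records; the second reaches flush_size and triggers a send."""
    client = TelemetryClient(rest, flush_size=2)
    ts = int(time.time() * 1000)
    client.add_log_to_batch(TelemetryData({"type": "demo", "n": 1}, ts))
    assert client.buffer_size() == 1  # still buffered
    client.add_log_to_batch(TelemetryData({"type": "demo", "n": 2}, ts))
    assert client.buffer_size() == 0  # batch was posted to /telemetry/send
    client.close()  # close() flushes anything left over
```

Records buffer until `flush_size` is reached, at which point `add_log_to_batch()` calls `send_batch()` and the whole batch goes out in a single request.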
-# -import datetime -import json -import logging -import uuid -from collections import namedtuple -from queue import Queue - -import requests - -from .compat import OK -from .description import CLIENT_NAME, SNOWFLAKE_CONNECTOR_VERSION -from .secret_detector import SecretDetector - -logger = logging.getLogger(__name__) - -DEFAULT_BATCH_SIZE = 10 -DEFAULT_NUM_OF_RETRY_TO_TRIGGER_TELEMETRY = 10 -REQUEST_TIMEOUT = 3 - -TelemetryAPI = namedtuple('TelemetryAPI', ['url', 'api_key']) -TelemetryServer = namedtuple('TelemetryServer', ['name', 'url', 'api_key']) -TelemetryEventBase = namedtuple('TelemetryEventBase', ['name', 'tags', 'urgent', 'value']) - - -class TelemetryAPIEndpoint(object): - SFCTEST = TelemetryAPI( - url="https://sfctest.client-telemetry.snowflakecomputing.com/enqueue", - api_key="rRNY3EPNsB4U89XYuqsZKa7TSxb9QVX93yNM4tS6" - ) - SFCDEV = TelemetryAPI( - url="https://sfcdev.client-telemetry.snowflakecomputing.com/enqueue", - api_key="kyTKLWpEZSaJnrzTZ63I96QXZHKsgfqbaGmAaIWf" - ) - PROD = TelemetryAPI( - url="https://client-telemetry.snowflakecomputing.com/enqueue", - api_key="wLpEKqnLOW9tGNwTjab5N611YQApOb3t9xOnE1rX" - ) - - -class TelemetryServerDeployments(object): - DEV = TelemetryServer("dev", TelemetryAPIEndpoint.SFCTEST.url, TelemetryAPIEndpoint.SFCTEST.api_key) - REG = TelemetryServer("reg", TelemetryAPIEndpoint.SFCTEST.url, TelemetryAPIEndpoint.SFCTEST.api_key) - QA1 = TelemetryServer("qa1", TelemetryAPIEndpoint.SFCDEV.url, TelemetryAPIEndpoint.SFCDEV.api_key) - PREPROD2 = TelemetryServer("preprod2", TelemetryAPIEndpoint.SFCDEV.url, TelemetryAPIEndpoint.SFCDEV.api_key) - PROD = TelemetryServer("prod", TelemetryAPIEndpoint.PROD.url, TelemetryAPIEndpoint.PROD.api_key) - - -ENABLED_DEPLOYMENTS = ( - TelemetryServerDeployments.DEV.name, - TelemetryServerDeployments.REG.name, - TelemetryServerDeployments.QA1.name, - TelemetryServerDeployments.PREPROD2.name, - TelemetryServerDeployments.PROD.name -) - - -class TelemetryEvent(TelemetryEventBase): - """ - Base class for log and metric telemetry events. This class has - all of the logic except for the 'type' of the telemetry event. - That must be defined by the child class - """ - - def get_type(self): - """ - Get the telemetry event type. - - :return: Event type - """ - raise NotImplementedError - - def to_dict(self): - """ - Transform this event into a dictionary - - :return: This event in dictionary form - """ - event = dict() - event['Name'] = self.name - event['Urgent'] = self.urgent - event['Value'] = self.value - event['Tags'] = self.generate_tags() - event.update({ - 'UUID': str(uuid.uuid4()), - 'Created_On': datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S'), - 'Type': self.get_type(), - 'SchemaVersion': 1 - }) - return event - - def get_deployment(self): - """ - Get the deployment field specified in tags if it exists - - :return: The deployment name - """ - tags = self.tags - if tags: - for tag in tags: - if tag.get('Name', None) == "deployment": - return tag.get('Value') - - return "Unknown" - - def generate_tags(self): - """ - Generates the tags to send as part of the telemetry event. - Part of the tags are user defined. 
- - :return: The tags for this event - """ - tags = dict() - # Add in tags that were added to the event - if self.tags and len(self.tags) > 0: - for k, v in self.tags.items(): - if v is not None: - tags[str(k).lower()] = str(v) - - telemetry = TelemetryService.get_instance() - # Add telemetry service generated tags - tags['driver'] = CLIENT_NAME - tags['version'] = str(SNOWFLAKE_CONNECTOR_VERSION) - tags['telemetryServerDeployment'] = telemetry.deployment.name - tags['connectionString'] = telemetry.get_connection_string() - if telemetry.context and len(telemetry.context) > 0: - for k, v in telemetry.context.items(): - if v is not None: - tags['ctx_' + str(k).lower()] = str(v) - - return tags - - -class TelemetryLogEvent(TelemetryEvent): - - def get_type(self): - return 'Log' - - -class TelemetryMetricEvent(TelemetryEvent): - - def get_type(self): - return 'Metric' - - -class TelemetryService(object): - __instance = None - - @staticmethod - def get_instance(): - """ Static access method. """ - if TelemetryService.__instance is None: - TelemetryService() - return TelemetryService.__instance - - def __init__(self): - """ Virtually private constructor. """ - if TelemetryService.__instance is not None: - raise Exception("This class is a singleton!") - else: - TelemetryService.__instance = self - self._enabled = True - self._queue = Queue() - self.batch_size = DEFAULT_BATCH_SIZE - self.num_of_retry_to_trigger_telemetry = DEFAULT_NUM_OF_RETRY_TO_TRIGGER_TELEMETRY - self.context = dict() - self.connection_params = dict() - self.deployment = None - - def __del__(self): - """ - Try to flush all events left in the queue. Ignore all exceptions - """ - try: - self.close() - except: - pass - - @property - def enabled(self): - """ - Whether the Telemetry service is enabled or not - """ - return self._enabled - - def enable(self): - """ - Enabled Telemetry Service - """ - self._enabled = True - - def disable(self): - """ - Disable Telemetry Service - """ - self._enabled = False - - @property - def queue(self): - """ - Get the queue that holds all of the telemetry events - """ - return self._queue - - @property - def context(self): - """ - Returns the context of the current connection - """ - return self._context - - @context.setter - def context(self, value): - """ - Sets the context of the current connection - """ - self._context = value - - @property - def connection_params(self): - """ - Returns the connection parameters from the current connection - """ - return self._connection_params - - @connection_params.setter - def connection_params(self, value): - """ - Sets the connection parameters from the current connection - """ - self._connection_params = value - - @property - def batch_size(self): - """ - Returns the batch size for uploading results - """ - return self._batch_size - - @batch_size.setter - def batch_size(self, value): - """ - Sets the batch size for uploading results - """ - self._batch_size = value - - @property - def num_of_retry_to_trigger_telemetry(self): - """ - Returns the number of HTTP retries before we submit a telemetry event - """ - return self._num_of_retry_to_trigger_telemetry - - @num_of_retry_to_trigger_telemetry.setter - def num_of_retry_to_trigger_telemetry(self, value): - """ - Sets the number of HTTP retries before we submit a telemetry event - """ - self._num_of_retry_to_trigger_telemetry = value - - @property - def deployment(self): - """ - Returns the deployment that we are sending the telemetry information to - """ - return self._deployment - - @deployment.setter - 
def deployment(self, value): - """ - Sets the deployment that we are sending the telemetry information to - """ - self._deployment = value - - def is_deployment_enabled(self): - """ - Returns whether or not this deployment is enabled - """ - return (self.deployment.name in ENABLED_DEPLOYMENTS) - - def get_connection_string(self): - """ - Returns the URL used to connect to Snowflake - """ - return self.connection_params.get('protocol', '') + '://' + \ - self.connection_params.get('host', '') + ':' + \ - str(self.connection_params.get('port', '')) - - def add(self, event): - """ - Add a telemetry event to the queue. If the event is urgent, upload - all telemetry events right away - """ - if not self.enabled: - return - - self.queue.put(event) - if self.queue.qsize() > self.batch_size or event.urgent: - payload = self.export_queue_to_string() - if payload is None: - return - self._upload_payload(payload) - - def flush(self): - """ - Flush all telemetry events in the queue and submit them to the backend - """ - if not self.enabled: - return - - if not self.queue.empty(): - payload = self.export_queue_to_string() - if payload is None: - return - self._upload_payload(payload) - - def update_context(self, connection_params): - """ - Update the telemetry service context. Remove any passwords or credentials - """ - self.configure_deployment(connection_params) - self.context = dict() - - for key, value in connection_params.items(): - if "password" not in key and \ - "passcode" not in key and \ - "privateKey" not in key: - self.context[key] = value - - def configure_deployment(self, connection_params): - """ - Determines which deployment we are sending Telemetry OOB messages to - """ - self.connection_params = connection_params - account = self.connection_params.get('account') if self.connection_params.get('account') else '' - host = self.connection_params.get('host') if self.connection_params.get('host') else '' - port = self.connection_params.get('port', None) - - # Set as PROD by default - deployment = TelemetryServerDeployments.PROD - if 'reg' in host or 'local' in host: - deployment = TelemetryServerDeployments.REG - if port == 8080: - deployment = TelemetryServerDeployments.DEV - elif 'qa1' in host or 'qa1' in account: - deployment = TelemetryServerDeployments.QA1 - elif 'preprod2' in host: - deployment = TelemetryServerDeployments.PREPROD2 - - self.deployment = deployment - - def log_ocsp_exception(self, event_type, telemetry_data, exception=None, stack_trace=None, tags=dict(), urgent=False): - """ - Logs an OCSP Exception and adds it to the queue to be uploaded - """ - try: - if self.enabled: - event_name = 'OCSPException' - if exception is not None: - telemetry_data['exceptionMessage'] = str(exception) - if stack_trace is not None: - telemetry_data['exceptionStackTrace'] = stack_trace - - if tags is None: - tags = dict() - - tags['eventType'] = event_type - - log_event = TelemetryLogEvent( - name=event_name, - tags=tags, - urgent=urgent, - value=telemetry_data - ) - - self.add(log_event) - except Exception: - # Do nothing on exception, just log - logger.debug("Failed to log OCSP exception", exc_info=True) - - def log_http_request(self, - event_name, - url, - method, - sqlstate, - errno, - response=None, - retry_timeout=None, - retry_count=None, - exception=None, - stack_trace=None, - tags=dict(), - urgent=False): - """ - Logs an HTTP Request error and adds it to the queue to be uploaded - """ - try: - if self.enabled: - telemetry_data = dict() - response_status_code = -1 - # This mimics the 
output of HttpRequestBase.toString() from JBDC - telemetry_data['request'] = "{0} {1}".format(method, url) - telemetry_data['sqlState'] = sqlstate - telemetry_data['errorCode'] = errno - if response: - telemetry_data['response'] = response.json() - telemetry_data['responseStatusLine'] = str(response.reason) - if response.status_code: - response_status_code = str(response.status_code) - telemetry_data['responseStatusCode'] = response_status_code - if retry_timeout: - telemetry_data['retryTimeout'] = str(retry_timeout) - if retry_count: - telemetry_data['retryCount'] = str(retry_count) - if exception: - telemetry_data['exceptionMessage'] = str(exception) - if stack_trace: - telemetry_data['exceptionStackTrace'] = stack_trace - - if tags is None: - tags = dict() - - tags['responseStatusCode'] = response_status_code - tags['sqlState'] = str(sqlstate) - tags['errorCode'] = errno - - log_event = TelemetryLogEvent( - name=event_name, - tags=tags, - value=telemetry_data, - urgent=urgent - ) - - self.add(log_event) - except Exception: - # Do nothing on exception, just log - logger.debug("Failed to log HTTP request error", exc_info=True) - - def _upload_payload(self, payload): - """ - Upload the JSON-formatted string payload to the telemetry backend. Ignore - any exceptions that may arise - """ - success = True - response = None - try: - if not self.is_deployment_enabled(): - logger.debug("Skip the disabled deployment: %s", self.deployment.name) - return - logger.debug("Sending OOB telemetry data") - with requests.Session() as session: - headers = { - 'Content-type': 'application/json', - 'x-api-key': self.deployment.api_key - } - response = session.post( - self.deployment.url, - data=payload, - headers=headers, - timeout=REQUEST_TIMEOUT - ) - if response.status_code == OK and json.loads(response.text).get('statusCode', 0) == OK: - logger.debug("telemetry server request success: %d", response.status_code) - else: - logger.debug("telemetry server request error: %d", response.status_code) - success = False - except Exception as e: - logger.debug("Telemetry request failed, Exception response: %s, exception: %s", response, str(e)) - success = False - finally: - logger.debug("Telemetry request success=%s", success) - - def export_queue_to_string(self): - """ - Export all events in the queue into a JSON formatted string with secrets masked - """ - logs = list() - while not self._queue.empty(): - logs.append(self._queue.get().to_dict()) - # We may get an exception trying to serialize a python object to JSON - try: - payload = json.dumps(logs) - except Exception: - logger.debug("Failed to generate a JSON dump from the passed in telemetry OOB events. 
String representation of logs: %s" % str(logs), exc_info=True)
-            payload = None
-        return SecretDetector.mask_secrets(payload)
-
-    def close(self):
-        """
-        Close the telemetry service
-        """
-        self.flush()
-        self.disable()
-
-    def size(self):
-        """
-        Return the size of the queue
-        """
-        return self.queue.qsize()
diff --git a/test/.gitignore b/test/.gitignore
deleted file mode 100644
index eeb13b7b2..000000000
--- a/test/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-parameters.py
diff --git a/test/README.md b/test/README.md
new file mode 100644
index 000000000..4bdd721d1
--- /dev/null
+++ b/test/README.md
@@ -0,0 +1,57 @@
+# Building and Testing Snowflake Connector for Python
+
+## Running tests
+
+Place the `parameters.py` file in the `test` directory, with the connection information in a Python dictionary:
+
+```python
+CONNECTION_PARAMETERS = {
+    'account': 'testaccount',
+    'user': 'user',
+    'password': 'testpasswd',
+    'schema': 'testschema',
+    'database': 'testdb',
+}
+```
+
+### Running a single test
+
+Assuming that all dependencies are installed, running a single test is as simple as:
+`python -m pytest test/integ/test_connection.py::test_basic`.
+
+### Running a suite of tests
+
+We use `tox` to run test suites and other utilities.
+
+To run the most important tests, execute:
+
+```shell
+tox -e "fix_lint,py37{,-pandas,-sso}"
+```
+
+## Test types
+These test types can be mixed with test categories, or with each other.
+Note: providing both to tox runs both the integration and the unit tests of the current category;
+providing neither does the same as providing both.
+
+* **integ**: Integration tests that need to connect to a Snowflake environment.
+* **unit**: Unit tests that can run locally, but they might still require an internet connection.
+
+## Test categories
+Chaining these categories is possible, but isn't encouraged.
+Note: running multiple categories in one `tox` run should be done like:
+`tox -e "fix_lint,py37-{,-sso},coverage"`
+
+* **pandas**: Tests specifically testing our optional dependency group "pandas".
+* **sso**: Tests specifically testing our optional dependency group "sso".
+* **extras**: Tests that exercise special cases under separate processes.
+
+Special categories:
+* **skipolddriver**: We run the newest tests on the oldest still-supported Python connector to verify that they
+still work. However, some behaviors change over time and new features get added. For this reason, tests tagged with
+this marker will not run with old driver versions. Any tests that verify new behavior, or old tests that are changed
+to use new features, should have this marker on them.
+
+## Other test tags
+* **internal**: Tests that should only be run on our internal CI.
+* **external**: Tests that should only be run on our external CI.
diff --git a/test/README.rst b/test/README.rst
deleted file mode 100644
index dc5809a48..000000000
--- a/test/README.rst
+++ /dev/null
@@ -1,50 +0,0 @@
-Building and Testing Snowflake Connector for Python
-********************************************************************************
-
-Building
-================================================================================
-
-Install Python 2.7.9 or higher, or 3.5.0 or higher. Clone the Snowflake Connector for Python repository, then run the following command to create a wheel package:
-
-  .. 
code-block:: bash - - git clone git@github.com:snowflakedb/snowflake-connector-python.git - cd snowflake-connector-python - pyvenv /tmp/test_snowflake_connector_python - source /tmp/test_snowflake_connector_python/bin/activate - pip install -U pip setuptools wheel - python setup.py bdist_wheel - -Find the ``snowflake_connector_python*.whl`` package in the ``./dist`` directory. - - -Testing -================================================================================ - -Create a virtualenv, with ``parameters.py`` in a test directory. - - .. code-block:: bash - - pyvenv /tmp/test_snowflake_connector_python - source /tmp/test_snowflake_connector_python/bin/activate - pip install Cython pytest numpy pandas mock - pip install dist/snowflake_connector_python*.whl - vim test/parameters.py - -In the ``parameters.py`` file, include the connection information in a Python dictionary. - - .. code-block:: python - - CONNECTION_PARAMETERS = { - 'account': 'testaccount', - 'user': 'user1', - 'password': 'testpasswd', - 'schema': 'testschema', - 'database': 'testdb', - } - -Run the test: - - .. code-block:: bash - - py.test test diff --git a/test/__init__.py b/test/__init__.py new file mode 100644 index 000000000..59f261275 --- /dev/null +++ b/test/__init__.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +# This file houses functions and constants shared by both integration and unit tests +import os + +CLOUD_PROVIDERS = {"aws", "azure", "gcp"} +EXTERNAL_SKIP_TAGS = {"internal"} +INTERNAL_SKIP_TAGS = {"external"} +RUNNING_ON_GH = os.getenv("GITHUB_ACTIONS") == "true" + + +def running_on_public_ci() -> bool: + """Whether or not tests are currently running on one of our public CIs.""" + return RUNNING_ON_GH diff --git a/test/conftest.py b/test/conftest.py index 50f8ec76f..a834595ab 100644 --- a/test/conftest.py +++ b/test/conftest.py @@ -1,348 +1,144 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
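Both the new `test/README.md` and the deleted `test/README.rst` above describe the same flow: `test/parameters.py` defines `CONNECTION_PARAMETERS`, and the tests open connections from it. A standalone smoke test built on that flow (a sketch, not part of the diff; it assumes `parameters.py` is on the import path and its placeholder credentials have been filled in):

```python
import snowflake.connector

from parameters import CONNECTION_PARAMETERS


def test_smoke_connect():
    """Open a connection from parameters.py and run a trivial query."""
    with snowflake.connector.connect(**CONNECTION_PARAMETERS) as con:
        cur = con.cursor()
        cur.execute("SELECT current_version()")
        assert cur.fetchone()[0]
```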
# +from __future__ import annotations + import os -import random -import subprocess -import sys -import time -import uuid from contextlib import contextmanager -from io import open from logging import getLogger +from pathlib import Path +from typing import Generator import pytest -from parameters import CONNECTION_PARAMETERS - -try: - from parameters import CONNECTION_PARAMETERS_S3 -except: - CONNECTION_PARAMETERS_S3 = {} - -try: - from parameters import CONNECTION_PARAMETERS_AZURE -except: - CONNECTION_PARAMETERS_AZURE = {} - -try: - from parameters import CONNECTION_PARAMETERS_ADMIN -except: - CONNECTION_PARAMETERS_ADMIN = {} - -import snowflake.connector -from snowflake.connector.connection import DefaultConverterClass -from snowflake.connector.compat import (UTF8, TO_UNICODE, IS_WINDOWS) - -logger = getLogger(__name__) - -if os.getenv('TRAVIS') == 'true': - TEST_SCHEMA = 'TRAVIS_JOB_{0}'.format(os.getenv('TRAVIS_JOB_ID')) -elif os.getenv('APPVEYOR') == 'True': - TEST_SCHEMA = 'APPVEYOR_JOB_{0}'.format(os.getenv('APPVEYOR_BUILD_ID')) -else: - TEST_SCHEMA = 'python_connector_tests_' + TO_UNICODE(uuid.uuid4()).replace( - '-', '_') - -DEFAULT_PARAMETERS = { - 'account': '', - 'user': '', - 'password': '', - 'database': '', - 'schema': '', - 'protocol': 'https', - 'host': '', - 'port': '443', -} - -IS_PUBLIC_CI = os.getenv('TRAVIS') == 'true' or os.getenv('APPVEYOR') == 'True' - - -def help(): - print("""Connection parameter must be specified in parameters.py, - for example: -CONNECTION_PARAMETERS = { - 'account': 'testaccount', - 'user': 'user1', - 'password': 'test', - 'database': 'testdb', - 'schema': 'public', -} -""") - - -@pytest.fixture(scope='session') -def db_parameters(): - return get_db_parameters() - - -def get_db_parameters(): - """ - Sets the db connection parameters - """ - ret = {} - os.environ['TZ'] = 'UTC' - if not IS_WINDOWS: - time.tzset() - - # testaccount connection info - for k, v in CONNECTION_PARAMETERS.items(): - ret[k] = v - - for k, v in DEFAULT_PARAMETERS.items(): - if k not in ret: - ret[k] = v - - # s3 testaccount connection info. Not available in TravisCI - if CONNECTION_PARAMETERS_S3: - for k, v in CONNECTION_PARAMETERS_S3.items(): - ret['s3_' + k] = v - else: - for k, v in CONNECTION_PARAMETERS.items(): - ret['s3_' + k] = v - - # azure testaccount connection info. Not available in TravisCI - if CONNECTION_PARAMETERS_AZURE: - for k, v in CONNECTION_PARAMETERS_AZURE.items(): - ret['azure_' + k] = v - else: - for k, v in CONNECTION_PARAMETERS.items(): - ret['azure_' + k] = v - - # snowflake admin account. Not available in TravisCI - for k, v in CONNECTION_PARAMETERS_ADMIN.items(): - ret['sf_' + k] = v - - if 'host' in ret and ret['host'] == DEFAULT_PARAMETERS['host']: - ret['host'] = ret['account'] + '.snowflakecomputing.com' - - if 'account' in ret and ret['account'] == DEFAULT_PARAMETERS['account']: - help() - sys.exit(2) - - # a unique table name - ret['name'] = 'python_tests_' + TO_UNICODE(uuid.uuid4()).replace('-', '_') - ret['name_wh'] = ret['name'] + 'wh' - - ret['schema'] = TEST_SCHEMA - - # This reduces a chance to exposing password in test output. 
- ret['a00'] = 'dummy parameter' - ret['a01'] = 'dummy parameter' - ret['a02'] = 'dummy parameter' - ret['a03'] = 'dummy parameter' - ret['a04'] = 'dummy parameter' - ret['a05'] = 'dummy parameter' - ret['a06'] = 'dummy parameter' - ret['a07'] = 'dummy parameter' - ret['a08'] = 'dummy parameter' - ret['a09'] = 'dummy parameter' - ret['a10'] = 'dummy parameter' - ret['a11'] = 'dummy parameter' - ret['a12'] = 'dummy parameter' - ret['a13'] = 'dummy parameter' - ret['a14'] = 'dummy parameter' - ret['a15'] = 'dummy parameter' - ret['a16'] = 'dummy parameter' - return ret - - -@pytest.fixture(scope='session', autouse=True) -def init_test_schema(request, db_parameters): - """ - Initializes and Deinitializes the test schema - This is automatically called per test session. - """ - ret = db_parameters - with snowflake.connector.connect( - user=ret['user'], - password=ret['password'], - host=ret['host'], - port=ret['port'], - database=ret['database'], - account=ret['account'], - protocol=ret['protocol'] - ) as con: - con.cursor().execute( - "CREATE SCHEMA IF NOT EXISTS {0}".format(TEST_SCHEMA)) - - if CONNECTION_PARAMETERS_S3: - with snowflake.connector.connect( - user=ret['s3_user'], - password=ret['s3_password'], - host=ret['s3_host'], - port=ret['s3_port'], - database=ret['s3_database'], - account=ret['s3_account'], - protocol=ret['s3_protocol'] - ) as con: - con.cursor().execute( - "CREATE SCHEMA IF NOT EXISTS {0}".format(TEST_SCHEMA)) - - if CONNECTION_PARAMETERS_AZURE: - with snowflake.connector.connect( - user=ret['azure_user'], - password=ret['azure_password'], - host=ret['azure_host'], - port=ret['azure_port'], - database=ret['azure_database'], - account=ret['azure_account'], - protocol=ret['azure_protocol'] - ) as con: - con.cursor().execute( - "CREATE SCHEMA IF NOT EXISTS {0}".format(TEST_SCHEMA)) - def fin(): - ret1 = db_parameters - with snowflake.connector.connect( - user=ret1['user'], - password=ret1['password'], - host=ret1['host'], - port=ret1['port'], - database=ret1['database'], - account=ret1['account'], - protocol=ret1['protocol'] - ) as con1: - con1.cursor().execute( - "DROP SCHEMA IF EXISTS {0}".format(TEST_SCHEMA)) - if CONNECTION_PARAMETERS_S3: - with snowflake.connector.connect( - user=ret1['s3_user'], - password=ret1['s3_password'], - host=ret1['s3_host'], - port=ret1['s3_port'], - database=ret1['s3_database'], - account=ret1['s3_account'], - protocol=ret1['s3_protocol'] - ) as con1: - con1.cursor().execute( - "DROP SCHEMA IF EXISTS {0}".format(TEST_SCHEMA)) - - request.addfinalizer(fin) - - -def create_connection(**kwargs): - """ - Creates a connection using the parameters defined in JDBC connect string +from snowflake.connector import SnowflakeConnection +from snowflake.connector.telemetry import TelemetryClient, TelemetryData + +from . 
import ( + CLOUD_PROVIDERS, + EXTERNAL_SKIP_TAGS, + INTERNAL_SKIP_TAGS, + running_on_public_ci, +) + + +class TelemetryCaptureHandler(TelemetryClient): + def __init__( + self, + real_telemetry: TelemetryClient, + propagate: bool = True, + ): + super().__init__(real_telemetry._rest) + self.records: list[TelemetryData] = [] + self._real_telemetry = real_telemetry + self._propagate = propagate + + def add_log_to_batch(self, telemetry_data): + self.records.append(telemetry_data) + if self._propagate: + super().add_log_to_batch(telemetry_data) + + def send_batch(self): + self.records = [] + if self._propagate: + super().send_batch() + + +class TelemetryCaptureFixture: + """Provides a way to capture Snowflake telemetry messages.""" + + @contextmanager + def patch_connection( + self, + con: SnowflakeConnection, + propagate: bool = True, + ) -> Generator[TelemetryCaptureHandler, None, None]: + original_telemetry = con._telemetry + new_telemetry = TelemetryCaptureHandler( + original_telemetry, + propagate, + ) + con._telemetry = new_telemetry + try: + yield new_telemetry + finally: + con._telemetry = original_telemetry + + +@pytest.fixture(scope="session") +def capture_sf_telemetry() -> TelemetryCaptureFixture: + return TelemetryCaptureFixture() + + +def pytest_collection_modifyitems(items) -> None: + """Applies tags to tests based on folders that they are in.""" + top_test_dir = Path(__file__).parent + for item in items: + item_path = Path(str(item.fspath)).parent + relative_path = item_path.relative_to(top_test_dir) + for part in relative_path.parts: + item.add_marker(part) + if part in ("unit", "pandas"): + item.add_marker("skipolddriver") + + +@pytest.fixture(scope="session", autouse=True) +def filter_log() -> None: + """Sets up our SecretDetector as a logging formatter. 
+ + A workaround to use our custom Formatter in pytest based on the discussion at + https://github.com/pytest-dev/pytest/issues/2987 """ - ret = get_db_parameters() - ret.update(kwargs) - connection = snowflake.connector.connect(**ret) - return connection - - -def generate_k_lines_of_n_files(tmpdir, k, n, compress=False): - """ - Generates K lines of N files - """ - tmp_dir = str(tmpdir.mkdir('data')) - for i in range(n): - with open(os.path.join(tmp_dir, 'file{0}'.format(i)), 'w', - encoding=UTF8) as f: - for j in range(k): - num = int(random.random() * 10000.0) - tm = time.gmtime( - int(random.random() * 30000.0) - 15000) - dt = time.strftime('%Y-%m-%d', tm) - tm = time.gmtime( - int(random.random() * 30000.0) - 15000) - ts = time.strftime('%Y-%m-%d %H:%M:%S', tm) - tm = time.gmtime( - int(random.random() * 30000.0) - 15000) - tsltz = time.strftime('%Y-%m-%d %H:%M:%S', tm) - tm = time.gmtime( - int(random.random() * 30000.0) - 15000) - tsntz = time.strftime('%Y-%m-%d %H:%M:%S', tm) - tm = time.gmtime( - int(random.random() * 30000.0) - 15000) - tstz = time.strftime('%Y-%m-%dT%H:%M:%S', tm) + \ - ('-' if random.random() < 0.5 else '+') + \ - "{0:02d}:{1:02d}".format( - int(random.random() * 12.0), - int(random.random() * 60.0)) - pct = random.random() * 1000.0 - ratio = u"{0:5.2f}".format(random.random() * 1000.0) - rec = u"{0:d},{1:s},{2:s},{3:s},{4:s},{5:s},{6:f},{7:s}".format( - num, dt, ts, tsltz, tsntz, tstz, - pct, - ratio) - f.write(rec + "\n") - if compress: - if not IS_WINDOWS: - subprocess.Popen( - ['gzip', os.path.join(tmp_dir, 'file{0}'.format(i))], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE).communicate() - else: - import gzip - import shutil - fname = os.path.join(tmp_dir, 'file{0}'.format(i)) - with open(fname, 'rb') as f_in, \ - gzip.open(fname + '.gz', 'wb') as f_out: - shutil.copyfileobj(f_in, f_out) - os.unlink(fname) - return tmp_dir - - -@contextmanager -def db(**kwargs): - if not kwargs.get(u'timezone'): - kwargs[u'timezone'] = u'UTC' - if not kwargs.get(u'converter_class'): - kwargs[u'converter_class'] = DefaultConverterClass() - cnx = create_connection(**kwargs) - try: - yield cnx - finally: - cnx.close() - - -@contextmanager -def negative_db(**kwargs): - if not kwargs.get(u'timezone'): - kwargs[u'timezone'] = u'UTC' - if not kwargs.get(u'converter_class'): - kwargs[u'converter_class'] = DefaultConverterClass() - cnx = create_connection(**kwargs) - if not IS_PUBLIC_CI: - cnx.cursor().execute("alter session set SUPPRESS_INCIDENT_DUMPS=true") - try: - yield cnx - finally: - cnx.close() - - -@pytest.fixture() -def conn_testaccount(request): - connection = create_connection() - - def fin(): - connection.close() # close when done - - request.addfinalizer(fin) - return connection - - -@pytest.fixture() -def conn_cnx(): - return db - - -@pytest.fixture() -def negative_conn_cnx(): - """ - Use this if an incident is expected and we don't want GS to create a - dump file about the incident""" - return negative_db - - -@pytest.fixture() -def test_files(): - return generate_k_lines_of_n_files - - -def pytest_runtest_setup(item): - for _ in item.iter_markers(name="internal"): - if IS_PUBLIC_CI: - pytest.skip("cannot run on public CI") + import logging + import pathlib + + from snowflake.connector.secret_detector import SecretDetector + + if not isinstance(SecretDetector, logging.Formatter): + # Override it if SecretDetector is not an instance of logging.Formatter + class SecretDetector(logging.Formatter): + def format(self, record: logging.LogRecord) -> str: + return 
+
+
+def pytest_runtest_setup(item) -> None:
+    """Run before each test to decide whether it should be skipped."""
+    test_tags = [mark.name for mark in item.iter_markers()]
+
+    # Get what cloud providers the test is marked for, if any
+    test_supported_providers = CLOUD_PROVIDERS.intersection(test_tags)
+    # The default value means that we are probably running on a developer's machine; allow everything in this case
+    current_provider = os.getenv("cloud_provider", "dev")
+    if test_supported_providers:
+        # If a test is tagged for specific cloud providers, add the default cloud_provider as supported too
+        test_supported_providers.add("dev")
+        if current_provider not in test_supported_providers:
+            pytest.skip(
+                "cannot run this test against cloud provider {}".format(
+                    current_provider
+                )
+            )
+    if EXTERNAL_SKIP_TAGS.intersection(test_tags) and running_on_public_ci():
+        pytest.skip("cannot run this test on external CI")
+    elif INTERNAL_SKIP_TAGS.intersection(test_tags) and not running_on_public_ci():
+        pytest.skip("cannot run this test on internal CI")
diff --git a/test/data/ExecPlatform/Database/data/orders_100.csv b/test/data/ExecPlatform/Database/data/orders_100.csv
new file mode 100644
index 000000000..a27fd9eba
--- /dev/null
+++ b/test/data/ExecPlatform/Database/data/orders_100.csv
@@ -0,0 +1,28 @@
+1|36901|O|173665.47|1996-01-02|5-LOW|Clerk#000000951|0|nstructions sleep furiously among |
+2|78002|O|46929.18|1996-12-01|1-URGENT|Clerk#000000880|0| foxes. pending accounts at the pending, silent asymptot|
+3|123314|F|193846.25|1993-10-14|5-LOW|Clerk#000000955|0|sly final accounts boost. carefully regular ideas cajole carefully. depos|
+4|136777|O|32151.78|1995-10-11|5-LOW|Clerk#000000124|0|sits. slyly regular warthogs cajole. regular, regular theodolites acro|
+5|44485|F|144659.20|1994-07-30|5-LOW|Clerk#000000925|0|quickly. bold deposits sleep slyly. packages use slyly|
+6|55624|F|58749.59|1992-02-21|4-NOT SPECIFIED|Clerk#000000058|0|ggle. special, final requests are against the furiously specia|
+7|39136|O|252004.18|1996-01-10|2-HIGH|Clerk#000000470|0|ly special requests |
+32|130057|O|208660.75|1995-07-16|2-HIGH|Clerk#000000616|0|ise blithely bold, regular requests. quickly unusual dep|
+33|66958|F|163243.98|1993-10-27|3-MEDIUM|Clerk#000000409|0|uriously. furiously final request|
+34|61001|O|58949.67|1998-07-21|3-MEDIUM|Clerk#000000223|0|ly final packages. fluffily final deposits wake blithely ideas. spe|
+35|127588|O|253724.56|1995-10-23|4-NOT SPECIFIED|Clerk#000000259|0|zzle. carefully enticing deposits nag furio|
+36|115252|O|68289.96|1995-11-03|1-URGENT|Clerk#000000358|0| quick packages are blithely. slyly silent accounts wake qu|
+37|86116|F|206680.66|1992-06-03|3-MEDIUM|Clerk#000000456|0|kly regular pinto beans. 
carefully unusual waters cajole never| +38|124828|O|82500.05|1996-08-21|4-NOT SPECIFIED|Clerk#000000604|0|haggle blithely. furiously express ideas haggle blithely furiously regular re| +39|81763|O|341734.47|1996-09-20|3-MEDIUM|Clerk#000000659|0|ole express, ironic requests: ir| +64|32113|F|39414.99|1994-07-16|3-MEDIUM|Clerk#000000661|0|wake fluffily. sometimes ironic pinto beans about the dolphin| +65|16252|P|110643.60|1995-03-18|1-URGENT|Clerk#000000632|0|ular requests are blithely pending orbits-- even requests against the deposit| +66|129200|F|103740.67|1994-01-20|5-LOW|Clerk#000000743|0|y pending requests integrate| +67|56614|O|169405.01|1996-12-19|4-NOT SPECIFIED|Clerk#000000547|0|symptotes haggle slyly around the furiously iron| +68|28547|O|330793.52|1998-04-18|3-MEDIUM|Clerk#000000440|0| pinto beans sleep carefully. blithely ironic deposits haggle furiously acro| +69|84487|F|197689.49|1994-06-04|4-NOT SPECIFIED|Clerk#000000330|0| depths atop the slyly thin deposits detect among the furiously silent accou| +70|64340|F|113534.42|1993-12-18|5-LOW|Clerk#000000322|0| carefully ironic request| +71|3373|O|276992.74|1998-01-24|4-NOT SPECIFIED|Clerk#000000271|0| express deposits along the blithely regul| +96|107779|F|68989.90|1994-04-17|2-HIGH|Clerk#000000395|0|oost furiously. pinto| +97|21061|F|110512.84|1993-01-29|3-MEDIUM|Clerk#000000547|0|hang blithely along the regular accounts. furiously even ideas after the| +98|104480|F|69168.33|1994-09-25|1-URGENT|Clerk#000000448|0|c asymptotes. quickly regular packages should have to nag re| +99|88910|F|112126.95|1994-03-13|4-NOT SPECIFIED|Clerk#000000973|0|e carefully ironic packages. pending| +100|147004|O|187782.63|1998-02-28|4-NOT SPECIFIED|Clerk#000000577|0|heodolites detect slyly alongside of the ent| diff --git a/test/data/ExecPlatform/Database/data/orders_101.csv b/test/data/ExecPlatform/Database/data/orders_101.csv new file mode 100644 index 000000000..07c5e4fb1 --- /dev/null +++ b/test/data/ExecPlatform/Database/data/orders_101.csv @@ -0,0 +1,45 @@ +353|1777|F|249710.43|1993-12-31|5-LOW|Clerk#000000449|0| quiet ideas sleep. even instructions cajole slyly. silently spe| +354|138268|O|217160.72|1996-03-14|2-HIGH|Clerk#000000511|0|ly regular ideas wake across the slyly silent ideas. final deposits eat b| +355|70007|F|99516.75|1994-06-14|5-LOW|Clerk#000000532|0|s. sometimes regular requests cajole. regular, pending accounts a| +356|146809|F|209439.04|1994-06-30|4-NOT SPECIFIED|Clerk#000000944|0|as wake along the bold accounts. even, | +357|60395|O|157411.61|1996-10-09|2-HIGH|Clerk#000000301|0|e blithely about the express, final accounts. quickl| +358|2290|F|354132.39|1993-09-20|2-HIGH|Clerk#000000392|0|l, silent instructions are slyly. silently even de| +359|77600|F|239998.53|1994-12-19|3-MEDIUM|Clerk#000000934|0|n dolphins. special courts above the carefully ironic requests use| +384|113009|F|166753.71|1992-03-03|5-LOW|Clerk#000000206|0|, even accounts use furiously packages. slyly ironic pla| +385|32947|O|54948.26|1996-03-22|5-LOW|Clerk#000000600|0|hless accounts unwind bold pain| +386|60110|F|110216.57|1995-01-25|2-HIGH|Clerk#000000648|0| haggle quickly. stealthily bold asymptotes haggle among the furiously even re| +387|3296|O|204546.39|1997-01-26|4-NOT SPECIFIED|Clerk#000000768|0| are carefully among the quickly even deposits. 
furiously silent req| +388|44668|F|198800.71|1992-12-16|4-NOT SPECIFIED|Clerk#000000356|0|ar foxes above the furiously ironic deposits nag slyly final reque| +389|126973|F|2519.40|1994-02-17|2-HIGH|Clerk#000000062|0|ing to the regular asymptotes. final, pending foxes about the blithely sil| +390|102563|O|269761.09|1998-04-07|5-LOW|Clerk#000000404|0|xpress asymptotes use among the regular, final pinto b| +391|110278|F|20890.17|1994-11-17|2-HIGH|Clerk#000000256|0|orges thrash fluffil| +416|40130|F|105675.20|1993-09-27|5-LOW|Clerk#000000294|0| the accounts. fluffily bold depo| +417|54583|F|125155.22|1994-02-06|3-MEDIUM|Clerk#000000468|0|ironic, even packages. thinly unusual accounts sleep along the slyly unusual | +418|94834|P|53328.48|1995-04-13|4-NOT SPECIFIED|Clerk#000000643|0|. furiously ironic instruc| +419|116261|O|165454.42|1996-10-01|3-MEDIUM|Clerk#000000376|0|osits. blithely pending theodolites boost carefully| +420|90145|O|343254.06|1995-10-31|4-NOT SPECIFIED|Clerk#000000756|0|leep carefully final excuses. fluffily pending requests unwind carefully above| +421|39149|F|1156.67|1992-02-22|5-LOW|Clerk#000000405|0|egular, even packages according to the final, un| +422|73075|O|188124.81|1997-05-31|4-NOT SPECIFIED|Clerk#000000049|0|aggle carefully across the accounts. regular accounts eat fluffi| +423|103396|O|50240.88|1996-06-01|1-URGENT|Clerk#000000674|0|quests. deposits cajole quickly. furiously bold accounts haggle q| +448|149641|O|165954.35|1995-08-21|3-MEDIUM|Clerk#000000597|0| regular, express foxes use blithely. quic| +449|95767|O|71120.82|1995-07-20|2-HIGH|Clerk#000000841|0|. furiously regular theodolites affix blithely | +450|47380|P|228518.02|1995-03-05|4-NOT SPECIFIED|Clerk#000000293|0|d theodolites. boldly bold foxes since the pack| +451|98758|O|141490.92|1998-05-25|5-LOW|Clerk#000000048|0|nic pinto beans. theodolites poach carefully; | +452|59560|O|3270.20|1997-10-14|1-URGENT|Clerk#000000498|0|t, unusual instructions above the blithely bold pint| +453|44030|O|329149.33|1997-05-26|5-LOW|Clerk#000000504|0|ss foxes. furiously regular ideas sleep according to t| +454|48776|O|36743.83|1995-12-27|5-LOW|Clerk#000000890|0|dolites sleep carefully blithely regular deposits. quickly regul| +455|12098|O|183606.42|1996-12-04|1-URGENT|Clerk#000000796|0| about the final platelets. dependen| +480|71383|F|23699.64|1993-05-08|5-LOW|Clerk#000000004|0|ealthy pinto beans. fluffily regular requests along the special sheaves wake | +481|30352|F|201254.08|1992-10-08|2-HIGH|Clerk#000000230|0|ly final ideas. packages haggle fluffily| +482|125059|O|182312.78|1996-03-26|1-URGENT|Clerk#000000295|0|ts. deposits wake: final acco| +483|34820|O|70146.28|1995-07-11|2-HIGH|Clerk#000000025|0|cross the carefully final e| +484|54244|O|327889.57|1997-01-03|3-MEDIUM|Clerk#000000545|0|grouches use. furiously bold accounts maintain. bold, regular deposits| +485|100561|O|192867.30|1997-03-26|2-HIGH|Clerk#000000105|0| regular ideas nag thinly furiously s| +486|50861|O|284644.07|1996-03-11|4-NOT SPECIFIED|Clerk#000000803|0|riously dolphins. fluffily ironic requ| +487|107825|F|90657.45|1992-08-18|1-URGENT|Clerk#000000086|0|ithely unusual courts eat accordi| +512|63022|P|194834.40|1995-05-20|5-LOW|Clerk#000000814|0|ding requests. carefully express theodolites was quickly. furious| +513|60569|O|105559.70|1995-05-01|2-HIGH|Clerk#000000522|0|regular packages. pinto beans cajole carefully against the even| +514|74872|O|154735.68|1996-04-04|2-HIGH|Clerk#000000094|0| cajole furiously. slyly final excuses cajole. 
slyly special instructions | +515|141829|F|244660.33|1993-08-29|4-NOT SPECIFIED|Clerk#000000700|0|eposits are furiously furiously silent pinto beans. pending pack| +516|43903|O|21920.56|1998-04-21|2-HIGH|Clerk#000000305|0|lar, unusual platelets are carefully. even courts sleep bold, final pinto bea| +517|9220|O|121396.01|1997-04-07|5-LOW|Clerk#000000359|0|slyly pending deposits cajole quickly packages. furiou| diff --git a/test/data/cert_tests/incomplete-chain.pem b/test/data/cert_tests/incomplete-chain.pem index 40b675521..76388f845 100644 --- a/test/data/cert_tests/incomplete-chain.pem +++ b/test/data/cert_tests/incomplete-chain.pem @@ -38,4 +38,3 @@ T+qCyI9JgYCnWRbPRfhZrlKxqQpwoP++aFV0HOBR9nj/Rzisq8ZGn7f6HKVxlqHS lBdhbmcHA/nHgbpwU2bmonivndvnpQHI8Fxd4BzbcRYM+ZIkATWA5/aOvH/EEIb6 kwipaXsqHLfaJq1SY5G097HgWHWCkCUD/pxX6psTTavqftLenSd7piK3+fw= -----END CERTIFICATE----- - diff --git a/test/data/cert_tests/revoked_certs.pem b/test/data/cert_tests/revoked_certs.pem index ed52a726e..3f01fcfcd 100644 --- a/test/data/cert_tests/revoked_certs.pem +++ b/test/data/cert_tests/revoked_certs.pem @@ -64,4 +64,3 @@ CwUAA4IBAQAjPt9L0jFCpbZ+QlwaRMxp0Wi0XUvgBCFsS+JtzLHgl4+mUwnNqipl c+LJMto4JQtV05od8GiG7S5BNO98pVAdvzr508EIDObtHopYJeS4d60tbvVS3bR0 j6tJLp07kzQoH3jOlOrHvdPJbRzeXDLz -----END CERTIFICATE----- - diff --git a/test/data/example.json b/test/data/example.json new file mode 100644 index 000000000..fd552fddb --- /dev/null +++ b/test/data/example.json @@ -0,0 +1 @@ +{"time": "2020010301471578034058", "row-0": "head, 3839, -42936, 37777, 32995, -37475, -33518, -86452, -53916, 215, -67004, 4580, 91588, 31713, 74989, 88533, 48578, 35305, -52583, -7578, 13627, -84466, -47317, -28841, -35393, 47502, -93104, 82942, -35342, -56880, 37671, 51835, -41979, -50579, -30485, -61022, -29646, 467, 86118, -83002, -82796, 32911, -36228, -68217, -45488, 14684, 37440, -89480, 90191, 26916, 52647, -59221, -49072, 90853, -17177, 83698, 97582, -40843, 77459, -17662, -56142, 11206, -22637, 65277, -49272, tail", "row-1": "head, 1676, 86070, -86130, -13109, 61450, 47288, -11205, -17601, -22914, -81501, 25531, 37147, 1252, 12853, -95752, -3563, -69884, -21999, 56557, 75406, 60533, 49303, 13535, -94563, -52184, 3776, 49190, 49614, 55678, -94875, -74867, 27134, -19991, 86012, 42793, -55539, 80635, -35798, -3187, -23486, 95229, 93119, 88967, 64449, 72100, 10104, 36888, 32563, 63952, -53892, 40271, 84237, -60615, -48296, -61849, -78754, -19410, -38498, -82813, -61004, 57366, -7733, -52027, -92928, tail", "row-2": "head, 45578, -54229, -56110, 55526, -82551, 94237, -95498, 20307, 46342, -75650, 27632, -13156, -21367, 74043, -27099, 39769, 64330, 26535, 2954, 70255, 57119, -52578, 1445, -18397, -4390, -8992, -46086, 78460, 70508, -49488, -65076, 25329, -13585, 82093, -69367, -77458, -58462, 54485, 606, -32226, -83845, -30395, 45846, 46977, -2813, 66243, 45915, -61049, 48570, 33291, -29697, 18549, -88398, -25251, -77435, 18715, 35551, -4838, -12069, -10461, 85639, -18543, -25083, -7466, tail", "row-3": "head, 19012, 76020, 55064, 6233, -12719, 84658, -93107, 48763, -39013, 8274, -21284, 23432, 54581, -75560, -70614, -75303, -29955, -76187, 1191, -8169, -26629, -62574, 77849, 48903, -39902, 60306, -40784, -11983, -42935, -76587, -64346, 51517, -28524, 54736, -16745, -75743, -79411, 81233, 27355, 75500, 57914, 19616, 15778, 50656, -64764, -6430, 30063, -6166, 30834, -59929, -80702, 61574, 91130, -15693, -48993, 63818, -18734, -67312, -54556, 88308, 64285, 97467, 79068, 13226, tail", "row-4": "head, -60063, -28844, -46165, -14700, 29661, 
86703, 36100, -15194, 96852, -52191, 54746, 25871, 99356, 68806, -68555, -68458, 79194, 29284, 5653, 80491, -45397, 88442, 61808, 2254, 68161, -36473, 90257, -23263, 76252, -8878, -9679, -6831, -48060, -70440, -64940, 39757, 17651, -62297, 92263, 82241, -51794, 25965, -89722, -8081, 92659, 17058, -49580, 62872, 87889, -83775, 53943, 16364, -51953, -24969, 84221, -31268, 72946, 76424, -31566, 20678, 86491, 97507, 86440, -33543, tail", "row-5": "head, -14702, -97200, 2370, -50789, 14638, -70164, -63040, 31136, 59063, -47134, 97510, 3346, -33820, -1259, -97336, -50473, 7403, -84719, 8014, 68707, -2770, -65472, 38848, 70000, 96194, 17979, -40089, 74948, 96241, 25611, -10822, -28657, 1633, 89341, -16474, 94374, -51730, 36590, 41965, 27556, -70649, 66235, -80883, 86343, -68412, 44228, -63991, 5316, -39264, -84631, 97455, 30417, 18734, 92007, -66260, -63356, -28052, -65311, 24593, -75231, -35664, -34844, 32146, -37577, tail", "row-6": "head, 55758, -51508, -38304, 87630, -87440, -14024, 83691, -27677, 39683, 12241, 93801, -99721, -15976, -47116, -13215, -85546, -32267, -26032, 88759, -53627, -33812, 13304, -34983, 31797, -10019, 38407, 17047, -32200, 15431, -45427, 60439, 4794, 405, -34171, 48211, -75685, 12876, -80375, -93791, 13004, 28462, 79527, -31851, 94113, -97588, -58029, 33417, -32309, -2549, -90508, 52462, 40619, 22021, -48748, -91041, 21480, -70928, 44474, -93399, -52161, -90134, 70967, -43414, 64844, tail", "row-7": "head, -50026, 98049, -25611, 60918, 32296, 89598, -1099, 98594, 66149, 75750, -41640, -56423, 70081, 53240, 63717, -24224, 85571, 35708, -35628, 69918, -49748, 71262, -58187, -21147, -72173, -60709, -5844, -45904, 4146, 16661, 63686, -77776, -14273, -33799, -76960, 22168, 15142, -75166, 28602, -59899, 22303, -98865, 95309, 44429, -36934, 6724, 93214, 49318, -18298, -7687, 57383, -60451, 69045, -86340, -53494, -79912, -70569, -81407, -54883, -18968, 79641, -89214, 33995, 80571, tail", "row-8": "head, -15081, -79226, 57667, 74471, -5759, 70650, -86814, 59211, -24165, 25736, 46244, -5242, -93249, 26135, 97955, -30360, -72202, -61728, 2664, 41524, -62021, 44515, 56305, 46924, 88683, -61851, 96214, 26074, -72608, -16890, 75240, -9231, -10768, -58039, -19252, 5440, -90112, -55807, 82281, -39554, -41473, -66268, 47956, 13797, 80478, -73772, -72108, -48324, -94382, -2347, 36258, -1592, -68090, 17475, 2289, -94571, -37289, 57611, -99394, 17180, -99256, 7361, -95095, 46726, tail", "row-9": "head, -53634, -30187, -42449, 15067, 86260, -34860, -98487, -88549, 92800, 7860, 80168, 98654, 90314, 26259, 84588, 45255, 76177, -12901, -21570, -59073, 89166, -66170, -10942, -63533, 47392, -82729, 70162, -34450, 60956, 37402, 12383, -78500, 41010, 54766, 17764, -21561, -71331, 22605, -57824, 10836, -26737, -11171, -5768, -16928, 92213, 29305, 57901, -25027, 46605, -65200, -91905, -69315, -65864, 23824, 3851, 29487, 85016, 42413, 48510, -40893, -97100, -35435, -55854, -23067, tail", "row-10": "head, 12781, -15445, -21353, -74408, -74597, 3417, 23512, -76738, -94915, -89108, -266, 18400, 38900, -12438, 5642, 95426, 83258, 53720, -49940, 36133, -6894, 51957, 56581, -69204, -65899, -76005, -60668, 27098, 15295, -69332, -20048, 12456, 72763, 16949, -91574, 69694, -99499, 43157, -584, -8415, -66014, -23442, -17099, 5831, 56979, -6072, 37626, -97073, -81887, 10602, -11043, 9208, 23877, 54333, -33991, -8183, -23473, -5897, 29744, 68585, -68980, -77337, 67259, -17615, tail", "row-11": "head, -90719, 62400, -13292, 81414, -20910, 29048, -69034, -84977, -15801, -34402, -53213, 67840, -49799, 63163, 
-88688, -82417, -55196, 30117, -58506, 73702, -40821, 18334, 44593, 54086, 75508, -42428, 58438, -98185, 83143, -64941, -65311, -64691, -8043, -243, 36215, 53917, -21262, 82545, 32593, -8540, -92185, 9233, -47845, 61353, 65478, 3037, 75151, 73742, 50969, -43984, -11821, 85619, -4530, 73066, -60701, 68967, -50966, 26565, 50416, -14484, -55943, 35225, -37592, -83536, tail", "row-12": "head, -82441, 95813, 11407, -44261, 48541, 81258, 36571, -84789, -44553, 83125, 70139, 76477, 79765, 91524, 19439, 37249, -56793, 43496, 61932, 77848, 38766, -28020, -10749, -98413, -19237, -51202, 46273, -59438, 13356, 81286, -72095, -26840, -92958, 13429, -23987, -41850, 25745, -84085, 53033, -47165, 581, 74820, 98020, 90235, -89542, 50469, 80917, 31641, 4177, -8389, 21856, -53199, 34051, -89287, 14238, 4541, -13585, 39573, -70751, 16768, 98801, -8347, -28674, -69058, tail", "row-13": "head, -81160, 90571, 13018, -99786, -93619, -43319, 59635, -56813, -52159, -44360, -5002, -97855, 63318, 37856, -85232, 23848, -50496, -68344, 26888, 32629, -53344, -77973, -91164, 50036, 64456, -54935, 65429, 31144, 36560, 6647, 94923, 74105, 12396, -78060, -55931, 5769, 35290, 78686, 59096, 47238, 45842, 82026, -77385, 89692, -90523, 38655, -16561, 26167, 7691, 13267, -72637, -92716, -92430, 88556, 16439, 58294, 85165, -148, 69807, -80392, -4436, 83608, -43026, 60867, tail", "row-14": "head, 34572, -44043, -45027, 42741, -88617, 39254, -68048, 39768, 87959, 49944, 15644, -19058, -42767, -25525, 53668, -32687, 6203, 58122, -59225, -92598, 17653, 89944, -33213, -2486, -97638, 96514, 65111, -16953, -52474, 29491, -43708, 52210, -31340, -9344, -224, 52443, 8888, 87029, 45668, 92145, -70092, 68210, -71131, -10749, 73842, -13551, -1321, -67322, 40735, 33482, 55058, 82326, -68181, -54715, 77631, 59802, -91249, 72958, 49599, -20666, -85148, 1655, -88506, -63894, tail", "row-15": "head, -26216, 68937, -7819, -81243, -5328, 14186, -13626, 2816, -79612, -4334, -89393, -51667, -18283, -89365, 50816, 71859, 36163, 16712, 88383, 82344, -70959, 26769, 32738, -25560, -64744, -61184, 42396, -88980, 77154, -40870, 4750, 62470, 27739, 27994, 31822, -53341, 30881, 87925, 84687, 36277, -8261, 24399, 98928, 98414, 79462, -27670, -16380, -62470, -35857, -38634, -20471, -85079, -44750, -77464, -80314, 55098, 6484, 96060, -9722, -97839, 81792, -7142, -93397, 19353, tail", "row-16": "head, -56121, 52181, -46402, -77434, 53698, -72616, -92048, 63645, -91574, 28673, 44683, -26182, -98421, 32087, -2300, -84744, 38963, -24271, 52951, 72063, -42000, -78073, -40217, 91406, -26607, -15525, 63041, -3996, -89412, -86181, -94173, -77959, -62417, -45209, -28562, -4483, -7950, -24678, 76268, 50823, -52967, -50445, -1613, 86195, -23516, 27494, 73321, 58480, 20318, -52045, -80810, 51092, 72038, 59700, -79049, -64831, 69444, -4023, 57022, 59586, -67511, -6978, 40333, -58874, tail", "row-17": "head, -42628, 7008, 82930, 12952, 48955, -27133, 52169, 85296, -3004, -58984, 14339, -94475, 1162, 72961, 89200, -55939, 96090, 38459, 74111, 19220, -23051, 38036, -61697, -91435, 37471, 94318, 6663, -58896, 25661, 6428, -32002, -99220, -99394, -67069, -85781, -43131, 95907, -9991, 53370, -50103, 41281, -41049, 72835, -27028, -8946, 1588, 51305, 3076, 14832, -61222, -4428, 43260, -55070, 43507, -29152, 55017, 45174, -52156, 44099, -97052, 39149, 31856, -19255, -49774, tail", "row-18": "head, -84397, 70782, 56195, 20088, -45638, -26602, -48798, 18966, -38966, -25194, 11968, 24589, 54420, -33227, 51390, 96626, -78527, 6162, 78054, 34434, -73023, -28873, -46441, -65936, -9731, 
-93827, 34438, -95852, -33479, 39838, 28289, 86463, 7740, -66117, 28912, -51440, 16309, 4415, 92494, 36398, 81931, 13191, -48869, -51716, 41133, -10846, -83140, -32260, -6124, 74577, -44114, 21067, 71514, 60264, -23862, 25621, 22785, 78904, -70801, -29009, 18222, -57895, -20654, -61339, tail", "row-19": "head, 45697, -45837, -86301, -54130, 41586, -85676, 60597, 28165, -73011, -21112, 19920, -15745, 1689, -71208, -49350, -318, -3342, 16832, 37000, -12622, -86686, 64929, 32526, 82989, -9948, -5768, 24597, -455, 87438, 49574, 38071, 67780, 21144, -23159, 64478, -49545, 23928, 95567, -69106, 28859, -95067, 95763, -89731, 70767, 55275, -19058, -69036, -88083, -45653, -13711, -25012, -37227, -33981, -25677, 67591, -99495, 89991, 3983, 51501, -76614, -70551, -51348, -93139, 63648, tail", "row-20": "head, 79611, 73862, -98429, -10061, -76843, -18709, -49154, -59174, 78196, -73863, 40099, 93087, 51373, 13284, 14373, -49592, 19547, -97743, 66814, -59132, 7971, 50427, 16334, 10500, -32585, -97402, -21931, 29943, 39872, -58552, -30918, 93947, 9212, 24231, -12410, -4016, 48142, -76026, -52155, 61130, -47371, -89883, 81310, -97122, 40088, 35011, -98699, 9727, 57011, 25900, -79537, 70350, -29494, -67296, -48024, -33257, -18118, -66856, -40645, -17372, -63425, -58643, 56371, -11879, tail", "row-21": "head, 66813, 81327, -81197, 18778, -35775, -6899, 19442, -88849, 51050, 27, 24233, 63163, 79218, -34153, -25911, -31825, 49152, -87980, 23968, -23348, -94072, 89158, 15595, 79419, -51068, -11358, 57730, -36003, -64159, 21938, 62901, 41920, 66321, -16960, -68535, 55887, -46786, 90611, -61027, 28762, 20965, -63115, 71498, 70657, 90592, -83688, -33694, 2062, 45237, 90366, 38774, -16160, -18357, 84164, 28765, -46295, -82287, -11812, -18675, -19797, -3941, -75557, -60270, -66703, tail", "row-22": "head, 46298, -9271, -26192, -39477, 58771, 75516, -31927, -2061, 63663, -27514, 14782, 58730, -98524, -55989, -7235, -35995, 12191, 79428, 40653, 3091, 24172, 11273, -43989, -57976, -72094, 96406, -81994, 97154, 88777, 47452, -48830, -68655, -92459, 37554, -19539, 45221, -73577, 68582, 93546, -9344, 25113, 20485, 68525, 99375, 13794, -67128, 95564, -33850, -99231, 93771, 44005, 22768, -13543, 83670, -18204, -99725, 21201, 24228, -57408, -8983, -32861, -78981, 95535, 85176, tail", "row-23": "head, 20417, -55492, -69106, -423, -32546, 88827, 66864, -94702, 92231, -56426, -14837, 47596, -82622, -87183, -38027, -4619, -88659, 17104, -43274, -19280, -14555, 9138, 32276, -21748, -45622, 84724, 98611, -43811, -50863, -71368, -41940, 67245, 87288, -73878, -89164, -32489, -42531, 81460, -56358, 15880, -50086, 56762, -3373, -16697, -99939, -4018, -53473, 76069, 94252, -4591, 96116, -16752, -81927, 85929, 39100, 13889, 24174, -43976, 18943, 68822, -70476, 20866, -76487, -64051, tail", "row-24": "head, -4973, 10337, -86569, -23561, -98516, -24213, 61938, -69136, -39383, 76495, 4061, 20887, 4739, -38089, -32969, -73189, 86884, 74843, 73984, 89657, 1153, -77887, -7072, -25794, -50244, 20966, -4517, 62372, -53231, 57537, 78938, -1087, -22953, -58747, 74888, -85726, -3034, -928, -32089, -44009, 11666, 1983, -12129, -24435, 5495, 51656, 81604, 25124, 83629, 29119, 23452, 33472, 93715, -20732, -87785, 15136, 45576, 17574, 23898, 89446, 55090, 74276, -3148, -56545, tail", "row-25": "head, 78622, 10156, 45085, 20000, 20772, -2597, -86717, 89782, -92267, 90464, -24695, 58824, 38981, -87122, 53902, -26404, -87712, 94694, -76358, -15294, 9924, 31681, -11010, 69238, -37113, 75951, -74951, 89726, 75033, -86414, -37647, 33645, 90497, 2766, 
19872, 48474, 16534, -11790, -89782, 76274, -77606, -57435, 5952, 95535, 32678, 23543, 75815, -45714, 15899, -94900, 15637, 40932, 92656, -80248, 97834, -75039, 80476, 30126, 13277, -37459, 20182, -49823, 8308, 71308, tail", "row-26": "head, 16111, 19222, 90725, 74276, -31874, -56079, 66889, -60149, -91835, 90662, 95218, 75055, 81417, 36093, 39026, 93744, -44118, -92106, 83360, 77796, 7231, -96580, 32048, -55158, 21936, -76583, 27430, -3396, 90605, -59933, -25535, 14212, -58230, 38364, -3858, 42539, -76835, -28491, -37315, 13164, 36379, -23534, 169, -41031, 43698, -53907, 72774, -38241, 27358, 85935, -11926, -39171, -76416, 2128, -63552, -60835, -73302, 43971, 96449, -76883, -24971, -32195, 19290, -49589, tail", "row-27": "head, -82962, -8350, -9728, 1686, 96907, 98210, 50953, -47871, -77118, 10747, -56788, -24366, -8879, -35733, 49537, -44969, -84760, 39411, -82784, 48593, 70370, -58201, -20456, -53788, 38594, 89784, 6807, 6018, 66280, 48747, -2197, -20324, 14674, 50257, 65534, 68546, -32517, 21110, -82656, -21480, -92192, 79708, -945, -55131, -11908, -62263, -2951, 95837, -5007, -1298, -61870, 35497, -75997, 53937, -50038, -78409, 86375, 25386, -8309, 4091, -49321, -67051, -99878, 48844, tail", "row-28": "head, -77603, 81654, -94753, 34666, 35720, 86926, 31754, -92790, 17351, 40773, 36469, -52500, -12397, -9883, -64358, 55750, 57565, 36193, 37689, -56162, -7294, 68726, -76633, 17116, 51532, 68205, 93500, -34812, 79189, -90651, -67519, 29121, -20564, 26751, -64562, -57922, -83604, 99540, -49936, 67705, -33267, 34852, -44984, -79342, -54492, 86828, -2514, -15244, -78074, 19953, 57163, 15206, -68883, 72269, -78131, -22772, -95645, 97705, -42910, 49176, 65501, 83480, 3146, 10642, tail", "row-29": "head, -28817, -31988, -19499, 35257, 70322, 62464, -95595, -77385, -89813, 70128, -82970, 20328, 90184, 93899, 66739, -81515, -21572, 52414, 43208, -11358, 85615, -42483, -23581, 34505, 1723, 88754, -15979, 32628, -91070, -63979, -19689, 70559, 59988, -20167, -31054, -70507, -42222, -36369, 42142, 61329, -51220, -78429, -91403, 92778, -43, 72500, -78062, -50100, 15293, 17288, 67562, 78727, 86808, -79687, -56906, -85112, 18216, 77784, 32672, -31745, 20262, 43010, -16217, -67864, tail", "row-30": "head, 14147, -7528, -52985, -21383, -91493, 72963, -54795, -33810, 69991, -69249, 33862, 66658, 76423, 87610, 64113, 78553, -46854, 1111, -53895, 82187, -61074, -2712, 4197, 38023, -58011, 20460, -65233, -27141, -59388, -10234, -85436, 9906, 70866, -39920, 43064, -84533, -22651, -35545, 9727, -64750, -49499, -51341, -30637, -26880, -24352, 69175, 14353, 52531, -12431, -81492, 35132, -16112, 25186, 5714, 50411, 87003, -14111, 66324, -6941, 58825, -22032, -32077, 2389, -60224, tail", "row-31": "head, -39095, -55712, 23992, 47818, 99789, -87204, -26939, -10633, -58062, -15864, 13796, -18599, 56975, -71115, 46515, -39866, -10981, -70229, 28700, -1034, 91419, -9613, 64205, 65186, -92427, -62364, -45, -20975, 27406, -36240, -52709, -10961, 22613, -65096, 21360, -16863, -35688, 78315, 95857, -62467, -25196, 84939, 48678, 61918, -3386, 81806, -29659, -58431, 34565, -48534, 18297, 39914, -73153, -25433, 46673, -58689, -46310, 49111, 26546, 14996, -46952, -24240, 25440, -46140, tail", "row-32": "head, 87955, -69695, 79706, -27484, 38718, 20493, 17714, -67276, -12553, -92478, 13501, -9847, 2536, -77967, -46202, -4148, 10469, -73741, 79790, 4151, -7073, -22536, -85428, -39331, -25465, 44436, -37343, 25228, 79425, 38095, -25438, 60067, 14760, 57529, -3748, 82193, -86894, -58046, -593, -70045, -43670, 14967, 55078, 
-28104, 26712, -76482, -5034, 29411, -902, 1307, 94511, 91341, -22062, -63094, -31883, 60488, 60068, -55862, -67913, 30448, 87492, 47376, 81658, -79498, tail", "row-33": "head, 42041, 62084, -22498, -64291, 85198, 4392, 24794, 27181, 92197, 65008, -10225, 66078, -49860, 482, -50623, 4909, -80276, -58500, -82150, 43909, -64576, 12313, 45090, 89607, 9618, 21069, 67309, -37484, -58408, 34334, -62401, -37259, 12296, -79563, -63086, -83966, -90972, -1887, 31912, 91263, -24194, 22222, 72160, 98040, 30451, 56081, -15444, -44842, 75252, 43677, 51971, 73724, -98974, -48199, -31784, 5950, -31812, -76844, 18490, -25938, 23987, 46136, -64849, -26881, tail", "row-34": "head, 63399, 23663, -64324, 92937, -84124, -69769, -79817, 86689, -2197, 99372, -33706, 65711, 348, 43464, 90167, 58151, 30280, -53493, -67726, -60253, -52292, -90732, 37993, 1776, 74578, -75428, -18206, 47727, 51835, -90345, -16604, -74108, 70441, 68313, 70028, -77372, 95089, 68494, -45570, 75990, 17175, -46819, 90870, 11872, 34933, -82708, 91358, -82222, 23780, 11366, -80653, 6895, 15734, 68992, 55361, -81531, 91472, 48588, -91138, -4283, -6820, -51873, -38760, 4412, tail", "row-35": "head, -77632, -15114, -37263, 94329, 60789, -46905, -11276, 86900, 79291, 19856, 46500, 28549, 56701, -90731, 84180, -55849, 4126, -23520, -72584, 12453, 64804, -16220, 74763, -29825, -47917, -70493, 91376, 10887, 80143, -89075, 95130, -94451, -29479, 3006, 87627, -41941, 96160, 62123, 912, -38583, -51620, -37301, 21694, 19103, 69923, -28449, 30104, -8821, -45400, 67822, 80976, 66269, -84938, -96076, 23229, -88987, -16626, 59469, 24709, -93223, 44981, 3200, -34861, -75388, tail", "row-36": "head, -86518, 96564, -33230, 83096, 4708, 25339, -26325, 23811, -25317, 93956, -28400, 28504, 91033, 55281, -28061, 97253, 92827, -98779, 39460, 57126, -49180, 56699, -68375, 15587, 21407, 26459, -50771, 36648, 99205, -5287, 89639, 37068, 2091, -48262, -89113, 90531, -6874, 35783, -32474, 4122, 42819, -92539, -56994, -70694, 3440, 89427, -24078, 16250, -55833, 84692, 98738, -78384, 45554, -60178, -7156, 69472, -98107, 12051, -69109, 71604, -40342, -47272, -13974, 84230, tail", "row-37": "head, -62711, 19034, -10211, -63215, 31145, 65035, -88996, 22614, 25587, -82750, 91914, 94666, -80414, -39730, 24836, 81792, 35518, 65489, -36343, 49273, -71006, 17148, 98290, 74951, -2301, -92269, -61033, -29944, -94021, -97050, -56407, -33681, -31873, -42740, -78208, 76852, 7233, -4962, 66918, 33372, -6571, -80170, 37563, 11619, -92927, -33482, 44479, 9117, -77823, -88078, 24073, -31297, -80248, 12608, 58813, 53463, -69045, 10996, 32897, -47614, 55177, 87929, 45610, 84844, tail", "row-38": "head, -78623, 44309, 86918, -50802, 64796, -89475, -92787, -91331, 66928, 83473, 12605, 48039, 42958, -7148, 47755, -23207, -6479, -37835, 80992, 19526, -85259, -14638, 35161, 10107, -55424, -26320, -10423, 62688, 64796, -30741, -89719, 58785, 78897, 19797, -21554, 45746, 79277, -3963, 50113, -43943, 46930, 8574, -49715, 40536, 67383, 29214, -52230, -22339, -95069, 5880, -5679, -76725, -35668, 42197, 5210, 89028, 79587, 86575, -34396, -6206, -14201, -18860, -62536, 14851, tail", "row-39": "head, 94773, 41918, -76600, 73133, -31472, 54259, -65833, 124, -61199, 59455, -75342, 21697, 56514, -9642, 12407, -348, 10096, 53738, -64089, 72035, -87148, 29723, 17215, 68464, -25692, -55331, 74069, 31894, -73182, 81650, 57323, -25738, -38315, -54652, 12752, 82962, -67148, 68977, -56664, -34173, 22729, -82914, -20282, -92577, -54090, 32114, -18033, 57924, 67138, 94200, -85613, 81923, -91906, -54685, -71345, 
-66346, 41430, -63202, 94937, 299, 48027, 4380, 81787, -69018, tail", "row-40": "head, 82400, -21407, 258, 20939, -55371, -51317, -22884, 21505, 7634, -5292, -82317, 3840, 96082, -99794, -60579, -47620, -50989, -76839, -79548, -65764, 12847, -81126, -59911, 94440, 61255, -35294, -89389, -90067, 67795, -6197, -49230, -12666, 57597, 93258, 27370, 40642, -2765, 11860, -15207, -25943, -62052, -59890, 33871, -62236, 17996, -82967, -34425, 39514, 44281, 74067, 52121, -33872, -34531, -6903, 64480, -20168, -67674, 11994, 51771, 10463, 17447, -98488, 38302, 22571, tail", "row-41": "head, -9342, 29810, -74776, 96321, -31878, -7348, -25477, 37847, 77610, -11090, -89275, 70685, -81125, -97167, -68312, -33389, -64680, -44603, -13718, -89774, -16037, 87837, 66602, 66991, -20819, 45261, -14998, 99132, -20364, 71762, 71578, 47468, -35512, -79183, 70112, -85279, -90835, -7722, -38732, 91345, -74074, 32464, 75875, -66331, -91893, 40174, 3577, 30204, -96767, -64244, 16838, -59478, -87844, -70159, 89727, -30879, 6128, 93474, -14430, -72280, 69327, 59720, -41535, -2356, tail", "row-42": "head, -76461, 16019, 36482, -35305, 19998, 43669, -33461, -50477, 40941, -700, -33871, 69604, 23546, -14230, 63222, 28664, 87065, 43103, 99779, 13987, 38472, 34912, -96486, 60400, -41292, 27981, -14498, 93767, 59494, -22427, -95350, 65424, 86578, -15559, 10267, 83861, -16942, -51250, -19112, 81087, -69332, -69992, -8422, 76820, -34178, -28057, -7894, 80295, -78706, 40475, 32592, 5921, -33221, -12180, -25720, -12534, 21771, -58765, -64975, 39818, -17580, 46790, -88784, -45545, tail", "row-43": "head, 52391, -38595, -55736, -67496, -44703, -66057, -14576, -53519, 37405, -96257, 33145, -92383, -59806, -69446, 96324, -87218, -27246, 31757, -27455, -71385, -54828, 64128, 14473, 79151, 12696, 10451, -74529, -42688, -50478, -5476, 28082, -93357, -74956, 90497, 95293, 65225, 8844, -68518, -98613, -13138, 25450, -65451, 74861, -45445, -39325, 39941, 26565, -64472, -9648, 25426, -63340, 26518, 24168, 43303, -74678, -63773, 30352, 90851, -33155, -37818, -67473, 64714, 27175, 46894, tail", "row-44": "head, -98474, -23875, 84017, 84957, -12030, 55322, 23478, -39395, -8725, 99133, -61787, 4635, -39719, 61978, 83325, -380, -30704, -22833, -46897, 50032, 47627, 15467, 16255, 55316, -59812, -96722, -61032, -55439, -50767, 23603, -77198, 15910, -84008, 11912, 10186, -57414, 46940, 84546, -29099, 83987, 39318, 38810, -35861, -50566, -2065, 44363, 93005, 94516, 50312, -1396, 97022, -23892, -94709, -47283, -87962, -6881, 14611, 23282, 2897, 54339, -65177, -53825, 54793, 36102, tail", "row-45": "head, 40747, 45493, -1270, 96632, -27649, 40278, 31378, 47935, 74194, -14474, -23627, -97826, 65536, -84271, -6660, -29455, -34573, -69180, 45104, -70658, 11282, 32939, 33532, 23746, -71407, 33195, -89075, -35827, -25907, -3908, 40026, 7464, -97070, -94747, 7007, -99905, -54017, 18295, 43069, -20730, 27184, -97133, -41437, -14436, -45514, 32303, 31102, 82015, 16742, -56991, 94485, -53954, 91335, -5237, -79755, 60905, -59008, -22559, -1752, 39259, -69784, -73325, 39693, 57215, tail", "row-46": "head, 83260, 1738, -86437, -47957, -51841, -2346, 14914, -79153, 4460, 46146, 90425, -49565, 73276, 34650, 30192, -63245, 45885, -99566, 61225, -9046, -24191, 96528, 43372, -99927, -56803, 81966, 69411, -16834, 16287, 32386, 43453, -20393, -59020, -67943, -82255, -14076, 4051, 99051, 64086, 24973, 25390, -60503, -86464, 71814, -32458, -12246, -42549, -23725, 31770, 81415, 46135, -59836, -77633, -9592, -43007, 45750, 55201, -28629, 41323, 54664, -89224, 34967, 70910, 
42674, tail", "row-47": "head, -24238, 74017, -81441, 30818, 13867, -47239, 25650, -8149, 71341, 39777, 76716, -99302, -81816, -56531, 68716, 27234, 23956, 3768, 26103, -82855, 50484, 25604, 67962, -74017, -58801, 77167, -80578, -60149, 94923, 91605, 11988, -65623, -6356, 27585, -34075, -44606, 10305, 35865, -40288, 3476, 23621, -34519, -23561, -17907, -42963, -88017, -22029, -60723, -2882, -56642, -932, 42292, 38266, -95136, 37929, -86617, 65835, -89963, -44174, 73405, 52799, -84145, -3700, -34929, tail", "row-48": "head, -62889, 15073, 69491, -28583, -41791, 82869, 20572, -78639, -9059, -63477, -51637, 41689, 46593, -90657, -81379, 11328, 53421, 48112, 30576, -58197, 69323, 20500, -88853, 152, -9484, 23854, -65248, 1539, 17909, 7560, -93914, -21499, 72342, 84616, -97113, -55267, 88018, -77012, 7057, -43376, 53865, 71633, 17420, 36532, -8718, 7072, 33211, 83753, -9305, 35010, -17358, -25539, 12538, -90523, 622, 99862, 33042, -22614, -77092, 63050, -23581, 66580, -38815, -72088, tail", "row-49": "head, 69420, 68069, -48831, 49093, 62541, -64415, 20360, 5235, 48153, -40123, 4693, -24594, -38702, 25537, 25443, 13855, 65059, 32000, 69217, -97259, -64698, 13125, -31712, 9119, -35922, -77508, 52652, -54785, -9629, -25111, -12045, 44267, -93706, 77359, 94004, -85191, 34240, -53002, -56336, -36743, 83652, -74297, 26519, 30553, 49250, -42250, 646, 76534, 77776, -85092, 81037, 95756, -83097, -85812, -74052, 7392, -50678, 48963, 26180, 8948, 68205, -25845, -13478, 58978, tail", "row-50": "head, -26206, 84506, -9929, 96347, 831, -93433, -94737, -61935, -27522, 41128, 65401, -15961, 79829, -39585, -86266, 21318, 28601, -797, 92337, 17209, -98550, -67027, -70272, 79707, 75505, 22823, 15728, -98209, 79523, -53020, -47873, -28118, 10923, -50567, 34815, 49232, -19094, 41505, -62732, 83449, 50089, 74921, -80811, -43471, 29058, 39993, -99639, -57811, -84482, -48603, 92871, -97334, -97494, 74435, -43228, -71369, -63379, 31395, -22779, 79294, 13272, 32890, 9294, 14863, tail", "row-51": "head, 9361, -87868, 21066, 35848, 13380, -21716, 9309, -81517, -81490, -86953, 96417, 19410, 42967, 63537, -308, -42253, 33354, 91247, -71665, 168, -72133, -29387, -23966, 97819, 92876, -73703, -91680, -37625, 56855, -27254, 51793, 18532, 8367, -71888, -1164, -30556, -27895, -2471, 8600, -8410, 85131, -78182, 538, 66975, 14315, 17389, 19056, 67443, -56261, -73446, 14290, -77502, -63537, 76913, 38208, 49049, -8337, 47899, 40740, 42735, -58400, -3102, 90814, 27364, tail", "row-52": "head, -67125, -21315, -74764, -42792, 10544, 63226, 93709, 59150, -65393, 49205, -65893, -49099, 24210, 71465, -6147, -86994, -15301, 9580, 16147, -15846, 74148, 97249, -29244, -91382, 1628, 42506, -32356, -55308, -45405, -35422, -49275, -41164, 48155, 96480, -73969, 37868, 55006, -29346, -27943, 10678, -90684, -36649, -42615, -61224, 17447, 10187, -6329, -95072, -96818, 74475, 76519, -69374, -40693, -16647, 74210, -10218, 43723, 91385, 85417, 42996, 57381, 83186, 69380, -26984, tail", "row-53": "head, 38686, 74806, 49116, -20798, -44564, -29746, 1962, 87334, 77397, 68537, 14670, 92474, 94182, -92823, -24882, 80082, 2164, 74250, -25996, -31513, -97947, 76097, 180, 73669, 51133, -77206, 40536, -2413, 13948, 6854, 1246, 8784, -748, 50502, -51761, 77066, 75691, -34989, 61643, -18976, 87616, -18699, -7057, 40472, 4981, -41562, 35643, 44720, -94712, -91553, -98847, -42618, -62451, -18875, -65336, -33087, -34955, -61815, -90837, -61331, -29531, 36880, -46443, 76372, tail", "row-54": "head, 28473, 24902, -38020, -46661, 65802, -7754, -94922, 14966, 27149, 
-53660, -19579, -83823, -57642, -96188, 18982, -24078, -18022, 2443, -23741, 23881, -83092, 93143, 6143, 43810, -36013, -8846, -39348, 82491, 91880, 75858, 54031, 58200, -63734, 85224, 99219, 55734, 86370, -99557, 85393, 40241, 16096, 93370, 35695, -86208, 54362, 83889, 18858, 39293, -28092, -11025, -51757, 62958, -59404, -69434, -47264, 36069, 43687, 31721, 84614, -77815, 52795, -2097, 34101, -1195, tail", "row-55": "head, -9706, 69349, -6434, -27880, 29389, 58456, 43658, 34910, -23543, -55433, 69143, 36070, 17842, 63873, 22201, -29878, -82436, -23106, -7612, 83904, 55320, -13581, -50200, -62944, 6825, -1711, -33613, 16302, -75538, 39568, -31411, -9650, 7328, -86520, -10549, -25759, -89408, -46539, 46998, -37007, 4982, 61104, -53207, -27151, 78481, 40004, 20729, -75488, -43640, -64830, 35865, 68587, -26709, -12747, 31743, 31733, 7343, 84424, 57995, 77690, -13616, -37833, -9631, 57420, tail", "row-56": "head, -48855, -66966, 77223, 51543, -39041, -62079, -76376, 83616, -241, 52970, -68606, -88947, 10596, -18160, 79974, 74550, -73754, 50290, 2884, -47339, 22480, -29725, 8394, 17628, -78065, -50527, 95750, 48621, -84358, 96172, -15577, 11482, 74184, -94578, -66463, 54908, -36009, -87081, -17502, -52450, -95734, -98653, 16978, -24119, 88931, -81754, 32439, 73514, -38656, -60945, -7532, 53131, 53955, 64900, 50616, -33213, 98411, 18370, 88435, 43863, 32676, -89553, -19677, 62723, tail", "row-57": "head, 26440, -36381, 25578, 43720, 88539, 58655, 96993, -13757, 7372, -29923, 8470, 44811, -92635, -63707, -5656, 72932, -70370, -17828, -11442, -9508, -37364, 6924, 79701, -81413, -22208, 4508, -81301, 83663, -92535, 98297, -85237, -54487, 5605, -15048, -33752, -88118, -11362, 33965, -70435, -13214, -42863, 92824, -94102, 5970, -34791, -80928, 16803, -67955, 60734, 48975, -1054, 22185, 34230, 68368, 57026, 889, -24184, 6220, -47152, 88114, 69275, 52965, 36951, 68145, tail", "row-58": "head, -53688, -54068, -47002, 49655, -54435, 26386, -50450, 72396, 89065, -31182, 3616, -9205, -40651, 34397, -59748, -11023, 54916, -53388, 78666, -55344, 39088, 6206, -92847, 79710, -34176, 1382, 99887, 49154, -25262, -58340, 52236, -66632, 54162, -59310, 45434, 11880, -40828, -41879, -50397, -5200, -15858, -95566, 86144, 5805, -3862, -18979, -72929, 29871, -28442, -19182, 89381, -5939, -8298, 6480, 62083, -47532, -31069, -38156, 66615, -36252, -95738, -9083, 38558, -13075, tail", "row-59": "head, 43161, -54643, -53325, -6870, 47746, 2167, -4850, 3447, 67321, -17154, -52872, -35129, 4416, -51346, 66512, -93604, 24982, 15205, 92415, -17417, 1578, -87468, -48891, 27937, 66576, 85977, -46823, -86174, -13328, 54523, -11774, 68191, -66289, 36212, 53369, -4515, 31946, -45770, -48827, -73681, 21863, -15938, -30088, -73832, 81911, -76064, -10095, 78597, -86203, -17798, 46583, 94314, -46962, -63228, 77819, -7444, -57328, -9349, 26799, 81398, 62274, 46162, -49549, -66177, tail", "row-60": "head, -50481, -49366, 80500, 7292, -79807, 25715, -71093, 50680, -74628, -69168, -44588, 97356, 5052, 98186, -21321, -70845, 49536, 50174, 82315, -31402, 59980, -44816, 6611, -89725, 5602, 91459, 19943, 94769, 87931, -34564, 52779, -54764, 77497, 70195, -62955, 68315, 50861, -26326, -56605, 69731, -6865, 61844, 49516, -83192, 58523, 37846, 32524, -49242, -3115, -59197, 19434, -10266, 32824, 29771, -12263, 42486, -91908, 85005, 88810, -85595, 97534, -50369, -51819, 78654, tail", "row-61": "head, 98729, 83629, 16383, 75826, -33702, 54016, 53813, -20500, -37169, 22483, 14666, -76518, -13383, 37452, -49916, -32288, -51849, -59109, -61880, 85688, 
-51504, 43873, -31507, 47526, 80098, -67835, -43817, -79475, -15150, 85231, 24586, -2066, -34907, -73090, 77674, 44770, 45857, 3948, -23295, -74994, -98753, 63896, 4202, 6658, -33034, 75636, 83089, 30292, -82804, -28134, 43947, -53485, 15680, 93497, -23051, -84610, -19390, 51393, 33228, 15286, 86494, 75721, -71836, -66877, tail", "row-62": "head, -30444, -89226, -64834, -22548, 68657, 30957, -84958, -87312, -35432, -65502, -63071, 45363, -97174, 37135, 72669, 489, -62493, -793, -26172, 36820, 41527, 42499, 18777, -51090, 29446, -7009, -54491, 71212, 53839, -73366, 14033, -32437, 67598, 70362, -39414, -46380, 78464, -22039, -59284, -33640, 18241, 88972, -76320, 38137, 57989, -5006, 76122, 63076, -85209, 35554, -34310, -34636, 74830, 40671, 99478, -39776, -90415, -7162, 20585, -93525, -64434, -48217, 80615, -32697, tail", "row-63": "head, 39861, -60631, -29485, 92568, 82669, 75724, -7191, 35439, 52803, 17568, -33102, -77276, 66193, 87748, 41322, 83562, -19770, 48952, 76092, 49858, -37774, -89128, 4849, -28498, -80087, -40587, 75453, -50952, 53926, -75769, -93407, -10256, -27934, 54600, -72097, -22993, -65734, 17022, 696, 44089, -55830, 35533, -60081, -552, 2179, -69563, -21549, -57182, -5687, -51028, 25433, -30007, -50119, -81095, 68562, -80926, 7021, -82385, -4007, 18956, -69559, 41671, -59805, 54483, tail", "row-64": "head, 25144, 30504, -18282, 71031, -27322, 76794, 98037, 91280, -51210, -70975, -32655, 67369, 79391, -84829, -59578, 39054, 31870, 33702, 71315, -14996, -60873, 52358, 80638, 78225, -28272, 43744, -5176, -31702, -17026, -58590, -15348, -7019, 55102, -73760, -36129, 6228, -38578, 39045, -95563, 1889, 35231, 55236, 87300, -44607, 78247, -3982, -23616, -4639, 80671, -36170, 38249, -45765, -62043, 70562, -16201, 83633, 64539, -27096, 79744, 16688, 92172, 20229, -27670, 48820, tail", "row-65": "head, 72673, -93182, 16413, 93812, -2789, -40030, 22461, -87606, -85158, 3938, 21650, 42393, 1332, 65150, -72930, -43718, 47999, 48917, 10865, -29316, 62397, 81811, 96588, 93321, 92784, -96455, -26888, 18356, 59404, 92182, -37718, -5042, -80715, -48619, -75814, 55578, 5661, -81473, -1097, -81159, 55591, 22712, 57699, -41811, -62585, -24616, 12189, 25613, 29426, 90868, -26676, 81332, 85645, 68959, -18256, 24197, -98908, -84591, -69987, 51381, -47000, 66535, 69254, 92618, tail", "row-66": "head, 64718, 16589, -84065, -24146, -81355, 80961, -24888, -36107, 41065, -23790, -45250, 69107, -48654, 57900, -80938, -30294, 50753, -1532, 52205, 34943, 93698, 6503, 43830, -89885, 30487, -24190, -64062, 70406, -32062, 86537, 23247, 60580, -18264, -46487, -55670, -99320, -83100, -41562, -10568, 61183, 48839, 44402, -84094, 16282, 90820, -71657, 35881, 86324, -42515, -91062, -91425, -49822, -12645, 26130, -37589, -38617, 50253, 53128, -42476, -64184, 15133, 61912, -5443, 76965, tail", "row-67": "head, -42053, -76911, 7478, 46227, 37978, -23522, 44110, 86933, 40434, 491, -31502, 33178, 7196, 7335, -97516, 64758, 11600, 91932, 49295, 73311, -86403, -1414, 95893, -62473, -63472, -90040, -6128, -26907, 38269, -55019, -51678, -32975, -71527, 23821, -60923, -92968, -34034, -40549, 99581, 64284, 2820, 8145, 31182, -16965, 29874, 34041, -19498, -24181, -90241, 33429, -86768, -41909, 12194, -77778, 16338, 19695, 37033, 79754, -69204, -286, 56523, -64846, 64057, 43909, tail", "row-68": "head, 25867, 7520, 54269, 26280, -21149, 6829, 17199, 12924, 70112, 12494, 13674, 65646, -3568, 66034, 68940, -28939, -40784, -43976, 25975, -86600, -9423, -17704, 5614, 87328, -1910, -66414, -78007, 97693, -20590, 97371, 21931, 
-3301, -99844, 52219, 26980, -57881, 74188, -41223, -15277, 35592, -14021, 4980, -58058, 1416, -90487, -74310, 9418, -359, -30220, -82177, 65914, -31460, 55554, 99166, 32010, -71207, 57346, 63948, 69940, 51864, -9967, -95014, -32958, -50053, tail", "row-69": "head, -32081, -53587, 52264, -66017, 47751, 78970, -33730, 40440, -95842, -78988, 51724, 90510, -72009, 57555, -15348, 59075, 28903, 87359, 94532, -85959, -62764, -4399, 97237, 73258, 99399, 36894, 4940, -84167, -96876, -34800, -3527, -65603, -38569, -79349, 85621, -65889, 11373, -52135, -71036, 62667, 89945, 53429, 58799, -1923, 27704, -95494, -27392, -37956, -3764, -95738, -3023, -50498, -93685, 16269, -14572, -20295, -12757, 66115, -61750, 4467, 55249, 15683, 30763, -77782, tail", "row-70": "head, -33341, 27758, -40224, 67364, 42982, 35186, -55013, -95663, -77124, -63347, 80700, 50608, -56560, -21521, -17997, 88657, 3854, -7868, 73179, -7874, -44345, 36035, -13961, 29034, 67188, -45319, 55734, -68353, 85788, -96941, 5738, 62387, 91017, 51421, -49519, 34511, 97369, -18747, -62535, -32328, 21563, 63201, 23595, -69414, 22551, 45558, -40348, 38157, 70874, 28881, -3577, 34889, -13152, 14942, 41866, -85502, 95389, -333, 69434, -30212, 89255, 76668, -83306, -45776, tail", "row-71": "head, -61799, 90314, -40435, -24886, 24984, -43624, -6527, 24482, -51713, -78940, 66684, 64452, -46345, 89478, -42008, -91452, -29642, -59004, 5144, 12571, -71463, -45711, 37597, 38913, 43422, -28569, -54994, 67034, 42686, 3468, 77838, -94825, 90904, 41253, 34340, -84545, 33259, 52782, -9652, 96975, 15424, -10460, -63146, -44191, 4643, 58470, -93791, -40759, -3347, -80671, 96836, 81237, -92889, -54600, 26046, 52902, -22014, 16700, 54598, 50657, 48691, 10437, 62879, -88237, tail", "row-72": "head, -22925, -88970, 3172, -38126, 31284, 96799, 45531, 45039, 91593, -88190, -92732, 95774, 97559, -34376, 44116, -22439, -7176, -10987, 91806, 64636, 12228, 2368, -42116, -25246, -38991, -64853, -94665, -87542, 4336, -34815, -8373, -5381, 77725, 3503, -46029, 34558, 26654, 1003, 82906, 23640, -61199, -30007, -51586, -85428, -35723, 71985, -61695, 61967, -88322, 21847, -99486, 13918, 88942, 50872, 73471, 76747, -61278, -27120, -12410, 45050, -76103, 94857, 39333, 13923, tail", "row-73": "head, -67755, 43022, 3192, 97992, 2124, -36884, -49748, 32779, 77841, 85101, 91076, -50858, 89348, -939, -52668, 69605, 15955, -67434, -13431, -20950, -10187, -99859, 41479, 12898, 99990, -67715, -70882, -47459, 45096, 44823, -91437, 3014, -60821, -41919, 76032, -88156, -58094, 7053, -91423, -79828, -55481, 31715, 53757, -29334, 96593, -34328, 49910, -59993, 914, -19387, 62118, 70205, 75801, -16436, 30131, -1244, -56000, 3987, -3117, -91173, 98462, -76440, -9711, -23082, tail", "row-74": "head, 16669, -98837, 49667, -347, -33271, 31372, -61529, -27959, -90815, 30751, -78312, 96603, 47061, 44242, -20709, -4544, -12808, 74110, -98444, -44736, -34175, -34392, 91757, -94779, 3780, 43992, -75727, 50453, 92470, 65679, -93199, -54364, -41374, -32585, -99440, 82362, 11915, 4500, 40692, 7436, -16404, 11902, -76648, -18320, 42213, 77081, 10351, 35844, -5726, -65325, -40286, 67725, 96190, 66223, 36508, -26317, 3187, 11554, -99282, 51650, -13526, -86762, -47141, 89373, tail", "row-75": "head, -68334, -78026, -95022, -58490, -80269, -77785, 34165, -49040, -10927, 20129, 87921, 28367, 71113, -97119, -76989, -12309, 61912, 4172, 85924, 9360, 43766, -16287, -15641, -48516, -71304, -99533, -81791, -12046, -48850, -35860, -80701, 72489, -38318, 56706, 27195, 33954, -27874, 14346, -31230, -7837, -91552, 
-11814, -11924, 8359, 77721, 1862, 87110, 1308, 88578, -24584, -99967, 48643, -65847, 87318, 37325, 16413, -49151, -3035, -27083, 64798, 61645, -62570, 28598, -50958, tail", "row-76": "head, 44223, -74038, -62737, -74508, -70289, -56377, -88157, -71504, 81023, 16768, 28669, 80154, 23973, -71387, 32981, 51129, 92338, 13291, -23119, 6240, -15405, 51421, 66951, 90379, 50324, -37718, 18142, -62361, -94331, 52266, 66098, 70323, 65799, 96341, 44944, 13067, -56046, -44492, -76551, 52661, -19593, 79783, -59256, -54532, 5514, -4364, 46871, 78607, -53304, 75568, 48400, -62167, -22783, -96730, -77850, 38241, 76544, -24707, 59503, -43493, 26752, -97296, 68971, -61710, tail", "row-77": "head, -41271, -56747, 8232, -45928, 83478, 59724, 94154, 87855, 58282, -94893, -72848, -63987, -27837, -91338, 6213, -42249, 7533, 94364, -43829, 85579, -45156, 72518, -41161, -98512, 80979, -84656, 53640, -26374, 36582, 74208, 52835, 65971, 89804, -64376, -4977, -56906, -96959, -89141, -3232, -93793, 7559, -70450, -77025, 44276, -4451, 17161, 98784, 73293, 74247, 40119, -7843, -45660, 6959, -9467, -73311, 8814, -62836, -19034, 69518, 47803, 41122, -89186, -70614, -54946, tail", "row-78": "head, 68817, -46793, -46582, 67611, -32961, -51932, -51041, 38394, 45592, -727, 26494, -73794, -2147, -5094, 82424, 73068, -76236, 12544, -64003, 3560, 33670, -44919, -79116, -68699, -19072, -76780, -83357, 7704, -29556, 85329, 37727, -15738, 18324, -94572, -56807, 34294, -48028, -86376, 28270, 33833, 13976, 25254, 51051, -60538, -54148, -84785, -97930, -67300, 8706, 70899, 4163, 75532, -11874, -90193, -23689, 73969, -56732, -29557, -37690, 29785, -17372, 97392, -53879, 94160, tail", "row-79": "head, 74570, -97744, 68784, -92313, -16102, 81242, -16020, 10987, -17433, 43701, -3589, -6216, -20434, -16416, 3179, 20989, -1636, -67178, -12407, -59965, -59099, 25484, -29538, -32584, 77270, -97886, -99, 23026, -69755, 23037, -48771, 20173, -52347, -93996, -96247, -71140, -11162, 81589, -77640, -72749, -71797, -65332, 64723, -30050, -66563, 40815, -9072, -47786, 12407, 43372, 75648, -69024, -47776, -36999, -33084, 78450, 865, 23748, -2754, 31167, -28403, 35091, -18225, 77001, tail", "row-80": "head, 68874, 39091, 64658, 39784, 20900, -34762, -44211, 52397, 32021, 78237, 18398, -66388, -27685, -16589, 62126, 35052, 11094, 3162, -58855, 87449, 11110, 64341, 46429, 96240, 63378, 90473, 56507, -88674, 95710, 35304, 50765, 50970, -70744, 17564, 59772, -71305, -67923, -45655, 94417, 9763, 57100, 21485, -20140, 43393, -6481, 66327, 87647, -98212, -49171, 20044, 26047, 9956, 82260, 22933, 44951, 6555, -76934, -35384, -53880, 25110, 12183, 33954, 68140, -55262, tail", "row-81": "head, -53337, -36292, 33044, 34306, 73789, -18039, 29945, -54085, 60291, -64141, 21731, 89411, -2761, 45368, 69648, 73235, -49453, 81846, -53223, -83617, -96860, -16033, 62701, 582, 90084, 67142, 54700, -21849, 77714, 27286, -55784, -53301, -425, 93316, 3169, 38931, 64005, 43774, 86695, -86733, -79287, -86532, 17009, -70004, -66690, -19344, -57328, -42849, 61187, 8555, -77200, -90100, -54652, 60900, -26231, 51958, -62784, -43355, -51450, -87480, 4739, -98685, -23287, 11141, tail", "row-82": "head, -99838, -5468, -43002, -90851, -85877, 67631, 83597, 36855, 22905, -65100, 9133, -97948, 10299, 50907, -86577, -78811, -20851, -37498, 60373, -80540, -55537, 10155, -91232, -28519, -62762, 86082, 73077, 63726, -51130, -96209, 64703, -33557, 52749, -38551, -15210, -76019, 71535, 88994, 91085, 33026, -83351, 802, 39631, -95350, -68155, -91406, -28060, 90770, 23605, 49254, 99963, 10849, 
86438, 57151, -71260, -57227, -68663, 90720, 39416, -33388, -88520, 29927, 85169, 51154, tail", "row-83": "head, 61283, 75115, 90850, -92339, -26923, -79884, -16281, -45781, 21895, 17454, 34826, -33823, 23417, -46299, -46731, 53137, -621, -840, -22688, -39893, 88496, -52910, 27258, -21666, -67582, -49496, 25933, 81328, 55016, -45766, -71090, 77448, 12576, 19922, 50083, -14524, -7545, 88528, -90984, 10277, 98713, -34783, 58310, -13459, -93856, 56662, -9283, -12298, 63502, -57737, 13266, 45696, -72287, 49823, 63376, 26311, -23093, -37847, 25319, 65908, 98952, -19942, -81403, -76468, tail", "row-84": "head, -64694, 87672, -73188, -2595, 63670, -97553, -58137, 91170, -29568, -3432, -84335, 33688, 31325, 69987, -65828, 57967, -73393, 11197, 14824, -37585, 26531, 88771, 84950, -72026, 12295, -85706, -12298, 31628, -24383, -13617, 9827, -6727, -54550, 9951, 29374, -72509, 60092, 66671, 72966, 97824, 57280, -73390, -73501, -76691, -51604, -3947, -90867, -17285, -45750, 1374, -45968, -43502, -79452, -79523, 4947, 25945, 90346, -58072, 58746, 84523, -49454, 34172, 50261, 96766, tail", "row-85": "head, 7059, -11366, 59842, -60999, -86205, 76112, -83895, -91147, 5513, -96139, -23933, -37718, 16302, -5182, 74775, 93867, 23333, -63273, -29888, -22479, 11104, -37103, 11550, -41560, -17057, -92524, 65125, -58573, -4215, 74173, 46802, 14740, 71991, -66191, 85199, -37639, -52423, -61016, 29542, 77448, 43021, 88025, 84061, 28758, 86208, 47258, -71462, -77876, -44217, -33219, -45548, 7796, -95733, -63617, 52431, -50202, -79751, 81410, 167, -68960, -14148, -24098, 86676, 61284, tail", "row-86": "head, 31712, -83298, -51138, 94460, 13305, 88578, -72898, 47870, 52456, -8146, 14076, -12907, -40941, 73505, -64561, 36270, -38731, 63100, 79962, -16375, -71818, -33950, 4878, 38012, -2030, -40692, -22597, 24182, 8596, 15623, 20589, -99436, 73523, -21898, -7435, -86483, 29168, -84616, 30754, 59542, -67794, 47656, -82062, -30993, -53228, 66157, -88878, -48552, -99844, -39281, 99798, 72332, -64478, -14486, -92861, 24022, -59804, 84287, 45916, 58189, 40685, 2902, 99663, 76885, tail", "row-87": "head, 67347, -58892, 49880, 21177, -44574, 86957, -21373, -67348, -855, -86099, -33050, 13471, 74871, 1798, -51280, -79327, -68025, -27367, -67139, -56253, -58630, 70265, 53001, -67298, -18320, -33481, 79252, 88650, 23028, -1830, -86408, -3358, 11041, -25758, 94278, -10764, 3192, 54431, 67390, 54673, -68414, -38330, 98795, 72694, -98794, -87039, -17595, 53331, -99547, 15593, -4293, -15771, -33191, -72748, 98636, -79855, -12792, 55957, 96955, 97615, -49371, 233, -7446, -66343, tail", "row-88": "head, 25537, -22074, -44309, -80164, -48877, -45721, -60831, -19399, 82164, 12032, 96591, -25541, 23344, -55871, 57493, -31572, 40272, -94557, -25366, 79897, -9845, -44803, 42187, 6675, 8150, -29429, 89641, -60732, 39523, -80300, 89998, -10427, 47788, 29701, 70265, -333, -68929, -10211, 83483, -12651, 53551, -34722, -86084, -67132, -14015, -24121, -38217, -39242, -39298, -26191, 504, -71591, -93045, -23591, -47379, -10947, 81095, 58092, 37428, 64568, -39371, 41969, 91360, -1167, tail", "row-89": "head, -76059, -74997, -10934, 27365, 14604, -61571, 39735, -84586, -64075, -99402, -62759, 72031, 58658, -66986, -2979, 94218, -77322, -93588, 984, 71312, 24077, 41827, 18213, 65400, 31286, 445, 70269, 59466, 74707, -42958, 69564, 43748, -78401, 78130, -69220, 24044, 38248, -73952, 58875, -12297, -76997, 96670, 95421, 63655, 31070, -14362, -93028, -45841, -95313, 22831, 95484, 55152, 65660, -89791, 27775, 73443, -66975, 51720, -38854, 93646, -36793, 
42483, 97547, -60332, tail", "row-90": "head, -29094, -94668, -91139, 26239, -75109, -74266, -89974, -31702, -22789, 85363, -49351, 74284, 64863, -42462, -36059, 90530, 16046, -81156, 58726, -78198, -66942, 91625, -79435, 94535, -93299, 10566, 48623, 70029, -26038, -56822, 37181, -88976, 35321, 8931, 6940, 99066, 50250, 22356, 77020, -96806, -19093, -85286, -88526, -42856, 53863, 76738, 67831, -71360, -68997, 86206, 16198, -39552, 94966, 10289, 49404, 5460, -68838, -14988, -80808, -76454, -67736, -94274, 9906, -53166, tail", "row-91": "head, 89290, -71314, -3070, -66223, -53883, -46115, 32400, 65117, 10570, -11893, 24937, -22535, 281, 17725, 46062, -52181, -21708, 67647, -81200, 88685, -56386, -72848, 69966, -51641, -38507, -81539, 81484, 11377, -93824, -57890, -28638, 65514, 24744, -99636, 43287, -31735, 72829, 29278, 70932, -57263, -16260, -79729, 28429, -63867, -39990, -26805, 94341, 59352, 82403, 52544, 94541, -43799, -68208, -42816, 51352, 36779, 57849, 22646, -28699, 92975, 43908, 59661, 88189, 91342, tail", "row-92": "head, 67918, -93666, -52361, 40851, 75390, 74775, 74953, -60334, -85079, -77101, -4353, -24349, 25266, -64747, -30129, -64135, -49023, -62912, 77719, -95624, 41306, 48026, 12823, 51485, 84410, 43872, -36162, 41865, -85041, 36896, -65252, 52575, 47017, 87870, 75820, 37449, 55259, 30693, -25987, 61046, -97620, -35604, 44921, 23001, -85832, 58722, -99839, 69767, -96889, -85123, -87076, -64497, -47477, -58244, 89508, -16855, -95894, 52147, -4164, -20669, -86528, -17475, 94233, 84881, tail", "row-93": "head, -36168, -27686, -21465, 60379, -18285, -81238, 22564, 66042, 5760, 57913, -52746, -66897, 50953, 64213, 65176, -11868, 46690, 69189, 29764, -41102, 2414, -33479, 70633, 53600, 77699, 63302, -55597, 54010, 639, -54130, -83788, 22189, -9210, 35709, 32970, -53202, -71788, 97779, -27396, 18110, 65873, -15994, 11378, -2026, 70583, -79525, 26026, 26086, 59094, 80538, 92374, 80461, 31664, 67221, 36233, 18515, -35916, -96256, 26336, -85848, 69968, 31890, -54568, -35221, tail", "row-94": "head, 26757, 46613, -45741, -37556, -77332, 2917, -4811, 72964, -74306, 88298, 94152, 12561, -72353, -3704, 86602, 57101, 62303, 53269, 78010, -43465, -65726, 70889, 29706, 74743, 12576, -64050, 76103, 37883, 75561, -75602, 65838, -56494, 61775, -54766, -70536, -74523, -69837, 55380, -59835, -11336, -38067, -61698, 68857, -63499, -61590, -19225, -26715, 94806, 70242, 70257, -73771, -73170, 59806, 4870, 36303, -47105, -47066, -31643, -30867, 76852, -2958, 30699, 13790, 8270, tail", "row-95": "head, -48730, 13154, 85136, -61577, 79954, 46827, -37595, -23760, 94016, 92962, -8568, -78353, 33517, 28890, 92627, 80484, 42661, 65301, -72232, 68275, 55308, -96612, 4394, -17595, 41545, -49413, -35619, 83181, 48195, 61389, -83448, 34865, 925, 48452, 92537, -6850, -90118, -86744, 77708, -82037, -974, 51362, -80117, 73810, -33693, -48119, 23432, 45446, -85326, 70254, -81008, -20950, -64687, 573, 95227, 46865, -21993, 59795, -61143, 58755, -43492, 88140, -41478, -70198, tail", "row-96": "head, -91942, 38805, 67905, -79840, -91072, 87056, -27288, -68248, 42492, 17952, 7224, 53316, 75833, -44659, 23123, 39973, -8500, -73765, 15847, -47910, 31594, 14392, 63283, -94072, 74250, 46524, -75266, 95180, -741, 64625, -73142, 35932, 86172, 46203, -28299, 68156, -36800, 18150, 8006, 87182, 71563, -65460, 88183, -16365, -27805, -75502, -89205, -54745, 42227, 82626, -91549, 13244, 41927, 55574, -24154, -76497, 11621, 82750, 61745, -64947, -95974, -96618, -87123, -97562, tail", "row-97": "head, 28435, 43929, -12435, -1860, 
-76576, 2541, -19984, 47489, 17738, 33781, -88751, -83072, 65569, 92794, 73978, -92714, 67242, -9660, -38512, -22085, 60252, 29621, -65458, -18473, 50622, -82679, -93988, -56135, 13835, -81985, -72232, 37999, 45732, 34976, -92824, -24750, -98964, -99159, -85940, 40094, 22582, -2523, -42630, 21598, 230, -92639, -57068, -47218, 5434, -15202, 3439, 93460, -88693, -62419, 37605, 66673, 31499, -77634, -34287, -73578, 94360, 43375, -41158, -69270, tail", "row-98": "head, -23514, -59801, -72379, -43346, -21763, 47873, 24498, -44957, 34111, -86674, 63135, 88837, 23505, -3179, -9469, -64693, -13871, 82821, -70404, 32035, -21399, -79687, 31742, -9008, -1721, -24228, -12258, 14990, 62047, -36903, -90078, -52357, -6820, 25027, 23940, -94863, 3533, -17139, 69517, 14378, 8483, 17404, 4112, -94365, -234, 24959, -73528, 48693, -59062, 63779, -8537, 65136, -30510, 48422, -25734, -93260, 58635, 85916, 29362, 96604, -33148, -2795, -42668, -79010, tail", "row-99": "head, -97084, 30061, -34182, -18553, -65816, 56839, 58369, -12732, 70097, -93332, 52853, 65560, -93853, -19497, 20924, 77550, -51243, 68772, 78603, 43058, -54701, -23513, 54563, 43062, 31247, -62948, -73150, -44539, -86236, 52638, 25766, -93401, -26534, -83803, -58587, -24807, 13890, 65192, -73402, 45702, -74508, -79871, 5544, -48610, -29441, -82581, -52922, -32810, -13668, -92987, 90671, -48925, -39807, -50651, 73108, -5270, 46167, 81422, -33252, 64640, -36661, -44390, -94359, 32750, tail", "row-100": "head, -63999, -33712, 2656, -44362, -39015, 41478, -88012, -85014, 10612, -22990, 38397, -80585, -59479, 95574, -27718, -76411, 76437, 5374, 96403, 48295, 78519, -29125, 60383, 19636, 87188, 167, 31300, -82237, 5259, 59495, 615, 34296, 3627, -36487, -26205, 71169, -4588, -67860, 83244, 60671, 62217, -1422, 34286, 84917, -10776, -42038, -66427, 46146, -8630, 59912, -69318, 37638, -71004, 69451, 33629, 60601, -81975, 12811, -31013, 59558, -2487, -22921, 93908, -3618, tail", "row-101": "head, 93904, -21003, -65929, -45100, 84202, 34174, 22647, -89495, -51770, 22436, 50734, -4973, 99389, -95110, 19685, -68111, 1757, 85632, -35896, -56731, 85240, -77474, -20648, 11349, 56279, -15302, 56190, -55078, -5498, 3109, -26716, -75885, -635, 92134, -66709, -44807, 37955, -13991, 54363, -4422, 74602, -20958, 77443, 8282, 64134, -89688, 60999, -58591, 77188, -56448, -27472, 16069, 64981, -46139, -58774, -66873, 64281, 39234, 70779, -21402, 39122, -21509, -77206, -17929, tail", "row-102": "head, -50569, 81918, 82495, 94980, 4636, -97808, 97138, 95785, -5825, 19465, 55753, -46672, 31442, -6312, -27887, -83607, -71574, -43767, -65613, 32957, -40756, 63559, 1324, 93717, -76330, 76556, -41198, 75970, 95352, -73744, 79490, -88033, -19273, 11361, 82073, 57417, -69997, 92280, -46396, -22375, 37196, -55119, 89440, 85307, -2608, -96014, -16088, -16942, 38898, 54647, 75958, 49679, -38110, -92106, -58108, 18305, -4857, -89525, -29162, -85368, -58116, -93796, 86079, 56452, tail", "row-103": "head, 35945, 9050, 58549, 59510, 28616, -51655, -99539, -48378, 61220, -11907, 22617, 51439, 55020, 2447, -97927, -7005, -72958, 43742, -93750, 10939, -42803, 257, 44814, 61152, -53090, 22300, -33276, 63865, 26220, 20855, -23313, -73101, -5419, -35820, 87601, 37372, -47707, -83149, -78810, 81007, 98555, 44997, -10040, 4430, -58744, 13304, -52735, 30730, 1977, 33282, 12409, -33068, 87970, -18427, 1041, -41726, 98724, -98520, -13303, -87473, -24977, -3178, -82298, -7019, tail", "row-104": "head, -38400, 9951, 36607, 86964, 55484, 17739, -42125, 75815, 81281, 50819, 85739, -69833, -64481, 
-99700, -36820, -36066, 50022, -8560, -32267, -9059, 80790, 72015, 34009, -42054, -87680, 83945, -88902, 42428, 61316, 7258, -82487, -17357, 60011, -78008, -49377, 30871, -4936, -50007, -70495, 53121, 15692, 19337, 79352, -99541, -83678, -97787, 53840, -51763, 71375, -82687, 54744, 26233, 35180, -80821, -86307, -91886, 20457, 18339, -35527, 67512, -74034, 18919, -44595, -63646, tail", "row-105": "head, -99591, 85701, 39064, 56235, -82699, -7665, -79092, 22010, 33379, 58610, -30564, 25970, -86038, 32463, -57747, 30547, 51473, -39607, -6474, -80017, 4202, 89380, -26544, -53526, -87798, 46496, -90543, -73003, 56427, 97485, 73922, -14486, 30965, 2924, -31153, 73146, -91308, -14466, 69149, 59472, 24261, 78516, 70416, 23350, -8410, 33768, -88644, -98453, -1080, -96897, -1706, 92643, 87284, 48294, -80981, 8635, -41955, 78778, 37572, -55153, -23563, -25962, -80615, -44005, tail", "row-106": "head, -14646, -60653, 57449, -4298, 46361, -59810, -47664, 66599, 58850, 52298, -21887, -18418, -29332, 94342, 98343, 24784, -47308, 83732, -29045, 76839, 5066, -38443, 11481, 21161, -53069, 66021, -94291, 99949, 553, 70513, -30430, -11656, 77771, 81637, 2595, -51633, -32426, -612, 80232, -50859, -77926, -57197, 96903, 30209, -80716, 55188, 82676, 66870, -55352, -42062, -29294, 9590, 15194, 93189, -62342, 63282, -32818, 95687, -97078, 48398, -70262, -65893, -95164, -35977, tail", "row-107": "head, 81759, 17995, -90638, -17380, -27466, 25362, 37738, -63484, 7310, 80635, 36814, 60512, 98916, -18009, -85920, 37528, -99563, -55904, -78867, 42633, 1363, -55818, -71028, -7877, -81296, 22010, 12407, 25701, -21158, -47998, -26477, -65341, 8332, -64350, 64255, -10656, -78852, -6013, -8262, 46208, 52200, -45621, 89712, 69300, 7389, 40322, 43265, 27896, -27198, -71210, 27928, -49561, -80856, 33730, -68451, 72889, -22990, -55741, 20142, 98119, 81752, 61347, 32588, -72948, tail", "row-108": "head, -91987, 37475, 90823, -18370, -28184, 81584, -61391, 1078, -50967, 69213, -52240, -71325, -23526, 28304, 86920, -59280, 17445, 51863, 33561, 85544, 69418, 74080, -82328, -75706, 9525, -59047, -17358, 78649, 99482, 27978, -22600, -9668, 45065, 53092, -60105, 87152, -55832, 23254, -41902, 62663, 78206, 74120, -73269, -52796, 35297, -88315, -48222, -98618, -40044, -34998, 2402, -78910, 2065, 81825, -88440, 43912, 49898, 47672, 59582, -74273, 97597, -90670, -30949, 6590, tail", "row-109": "head, -95614, 48437, -12006, -67448, -58328, 43457, 39640, 35360, -43373, -16275, -1370, -9413, -76101, 78191, -50150, -20175, -97959, 68136, -23340, 37123, -41020, -65295, 11034, 90111, 68299, 97856, 39942, 65005, 91841, 62568, -10882, -2003, -81002, -46423, 96799, 51115, 13159, 84879, -48164, -99595, 38961, -49631, 61355, -54140, 35343, -31110, -91011, 62469, -6376, 88076, 20687, -94181, -97765, -50227, -10102, -85084, -99742, -66985, -50692, -16314, 76621, -86526, 57805, -36632, tail", "row-110": "head, -90135, 39327, -66074, 49525, 86781, 64730, 19845, 73985, 22745, -87490, 28143, -1837, 34667, -47483, -41725, -5598, 46258, 44610, -95050, -3332, -43186, 89481, -44398, 52517, -78800, -97272, -21101, -14106, 26785, 45410, -25556, 61033, 70584, 64071, 65381, -10079, 61839, 10258, 43986, 87751, -73851, -79786, -59730, -34620, 62225, -14848, 85206, 89438, 39164, 69132, 67860, 39180, -55398, -19753, -52050, 58662, 56157, -56927, -94235, -21621, -55860, -34350, 68504, -1749, tail", "row-111": "head, -14501, 83104, 87244, -57811, 81861, 26068, -96470, -7807, -49868, -81153, 62916, -14427, 30306, 23810, -84527, -1884, -47369, -65043, 85816, -67723, 
-54204, -25627, 165, 67600, 83883, 80136, 6302, 44704, -47311, -19878, -6513, 74381, 8812, -74245, -20691, -99335, -36355, -94218, -15833, 22910, 12125, -19982, 51315, 57228, -79978, -76822, 98280, 32444, 24533, -90877, 55819, 36134, -64955, -36243, -19760, -43187, -40275, 58554, 20053, -34159, -85013, -69972, 88830, 22941, tail", "row-112": "head, -35003, -16195, -74045, 73414, -86252, -47660, -94591, -50729, -12700, -77115, 64554, -61452, -22372, -40335, -5081, -62938, -77606, -59762, 7092, -76728, 31036, 60089, 23326, -18714, -6139, -53557, 50086, 2463, 64922, -98873, -1761, -51583, 32902, 6734, 56734, -2576, 18634, 55640, -25176, -27276, -46310, -29363, -71308, -37522, 90030, -23743, -59868, 32630, 24131, 23088, 19351, -34455, -70349, 61575, 75612, -97610, 80313, 33836, 74291, 9388, 97572, 85267, -95938, 94142, tail", "row-113": "head, -42773, -11441, 10484, -2879, 34893, -52569, -14552, 15496, -6791, -68424, 52200, 84810, 25068, -25564, 44350, 63656, 20744, -27709, 47094, -84816, -86268, -86320, -48006, 8258, 58640, 14265, -54761, 35927, -11823, 34386, -13309, -38888, 73100, -1918, -33680, -63363, 1372, 69463, -59405, 96496, 68299, -70232, 63075, 26038, -71177, -87372, -57028, 71418, -84098, -4118, -31831, -19699, -30219, -2140, -22247, -12578, 91773, 36348, -64941, 22090, -57585, -43195, -85999, -34052, tail", "row-114": "head, -72463, 34220, 3775, -18417, -23586, -78188, -63008, -90593, -53418, -82513, -88453, -99960, 96723, 55515, 87788, 43606, 90360, 10891, -96262, 66989, 81981, 64673, 18911, 57056, 12712, -65053, 6436, -94578, -30537, 30328, -43572, -22019, -44724, 25237, -96253, 85719, 31591, 74180, -69387, -16661, -47013, 530, 60681, -61172, 67158, -61511, -60210, -20113, -38353, 16940, 16247, -49407, 24637, 68753, -85492, -24825, -9578, 7498, -28910, 10844, -99299, 85494, 23575, -40724, tail", "row-115": "head, -42470, 36482, -45206, 21365, -81114, -31924, 30242, 68433, -33542, 49338, -17083, -96036, -72132, -35365, -99856, -14666, -63212, -88807, -94351, -96703, -76609, -19081, -84699, 91847, 80005, -55476, -22014, 57516, 41340, -81918, -76524, 33811, 78450, -14779, -51693, -8289, 16204, 50174, -96907, 40458, 92464, 8947, -17850, -84308, -19021, 96723, -18106, -16575, 30022, -42021, -73317, 74849, 16104, -5575, -74876, 11455, 26141, 68452, -81837, 62754, -69081, -15519, -17461, -66288, tail", "row-116": "head, -38652, -39823, -35238, 43124, -23291, -35152, 5460, 53521, -77152, -16088, -20729, -84748, -47989, 984, 36170, -53637, 95498, 4723, 44686, 50390, -33757, -96630, -38810, -87757, 37939, -54395, -16091, -25808, 85397, 59467, 34728, 86818, -54575, 88787, -62552, -85762, 37098, 13978, 67786, 26829, -40900, 63428, -15668, -4764, 18682, -61341, 3942, 59039, -75113, 58651, 23765, 66389, -35977, 98281, 92692, 49019, 45596, 92223, 48743, 96921, 93767, -59751, 76871, 82757, tail", "row-117": "head, -53022, 43794, 9515, 50859, -51005, 47498, 26488, 73810, -52356, -6243, 22663, -74585, -19065, -33062, -36953, -19189, 49517, -55443, 46774, -64835, -14217, 19623, 14104, 35830, 86502, -59046, 87110, -67956, 42946, 26652, -75902, -31610, 42799, 90726, 73253, -19219, 3259, -39036, -67350, 78019, -34752, 91510, -21754, 35537, -38722, -80760, -87437, 7617, -9140, -14598, 28356, 12788, -62094, 90351, -55785, -17319, 66780, 64117, -54679, -39575, -74972, 8737, 89534, -85760, tail", "row-118": "head, 42387, -53279, -26466, 9348, -28753, -21275, -87775, -13794, -66289, -87218, -52493, -87233, 23947, -9818, 70152, 24210, -32110, 94588, 5594, -87070, -10, -80062, -79226, 9120, 97651, -84647, 
30274, 24475, -11264, -68504, 8245, 14663, -11589, -7651, -70185, -94978, -20521, -79255, 90983, -4789, -91573, -24390, 62959, 35571, -22749, -71324, -90640, 90087, -51930, -84135, -24522, -65070, -74273, -23703, -43976, 94557, -15500, -38320, 60143, 55973, -47252, 66092, 17839, -8687, tail", "row-119": "head, 2981, 6792, -67113, 24296, 17328, -28228, -3479, 99304, -82123, -52298, -82157, 99608, -55460, -9810, 71879, -15827, -73060, 90473, 47185, -62142, -3964, 33787, -28043, 56062, -71635, 4392, -1423, -3339, -66454, -11774, 29257, 17573, -56070, -999, -2856, 21987, 45428, -57324, 90576, 55109, -12829, 78419, 17487, -60935, -74356, 99809, -12064, 65468, 99982, 17443, -21555, 92013, -75808, -16957, -36401, 87359, -67915, -31668, -87259, 92725, 47151, -5436, -82244, 20722, tail", "row-120": "head, -51595, 93727, 89189, 87148, -83180, 40030, 54830, -80397, -8310, 89185, 92244, 63824, -80613, -85780, 94801, 6399, 16047, 55424, 12601, 58586, 64166, 40661, -29950, -41507, 41335, -27882, -28118, -56045, 30376, -70684, -23009, 69070, -1004, -17898, -84203, -88845, -28229, 73784, -20302, -80125, 54890, 39655, -53093, -86955, -27303, 56367, 29393, -83355, 35968, -59464, -78569, -92741, -15708, -71244, 17751, -75824, 1788, 5963, -48893, -64226, 63403, -67214, 53125, 81140, tail", "row-121": "head, 39752, -37519, 88553, 37306, 95835, 61192, 38667, -4110, 25962, 6864, -27407, -58629, -68799, -6692, -26808, -45513, -18100, 67470, 6519, -4022, -99736, -53367, 42908, 44496, 52129, 26567, -87480, -64105, -35832, -72804, 76276, -12835, 27891, 81715, -74657, -69356, -11626, -25461, 51000, -78512, 61277, 81364, -72892, -17588, 90648, -7686, -81782, 21133, 67980, -74199, -41201, 41553, -45179, -60284, 18432, -47590, 72665, 48458, 50590, 90068, 96782, 5741, 47928, 15510, tail", "row-122": "head, 13012, 13498, 40795, -6277, 31912, -13870, 52106, -84874, -18004, -53314, 94233, -88436, -88047, -47132, -19375, 63122, 48608, -20725, 96743, 54909, -74105, 8532, 44547, -30877, 51217, -19363, -13314, -45111, 63353, 89152, -10717, 31772, 38298, -19623, 75535, -44662, -71189, -7260, -7437, -7889, -23062, -3513, -15207, 74023, 72141, -24376, -60635, -35549, -93542, -86778, -11554, -15532, -85449, 92653, 21607, 33168, 77157, -16582, 998, -27592, -92708, -70476, 79555, -43266, tail", "row-123": "head, -52735, -77224, -78818, -81710, 19907, -10023, 89876, 84757, -70331, 50847, 70651, 10451, 83000, -90708, 38612, 44572, 84922, 32176, -10930, -33436, -92525, -3522, 10658, 71347, -1084, 42615, -4792, -84085, -97544, -24815, 20694, 64227, -19336, -51472, 14193, 44668, 97261, 41151, 15086, 55413, -78896, 46136, -21055, -68723, 6410, 53636, -39271, 46457, 65477, 6540, -87432, 86584, 82345, -14835, -85115, -12357, 84670, -50086, -13083, -93135, -62142, -37396, 98170, -70592, tail", "row-124": "head, -82090, -37766, 88738, -52151, -22486, 14211, 15354, 86850, 34821, -41308, -79122, -67873, -21614, 52971, -11354, 88193, -18859, 30776, -7410, -64542, 37038, 72348, 27119, 73539, 32864, 1194, -81899, -53216, -99644, 62512, -13764, -47143, 19530, 67853, 84309, -61680, -54961, -39022, -43569, 72629, 91230, 68544, 47324, -14700, -78017, 69030, 48212, 44690, 26639, -70246, -33005, 83704, 2502, -82266, 7319, -6210, -25565, 15094, -26608, -88280, 70251, -28279, 47650, -23345, tail", "row-125": "head, 84752, 92689, 51270, -88127, -11512, 56194, 63677, 73502, -84096, 97456, 42976, -78459, -30085, -29085, 23779, -23997, 21693, -81689, 40934, 55089, 10661, 77179, 49350, -47917, 65556, -41517, 25257, 88642, 28618, -10400, 15428, -3351, -81421, 
-41643, -92953, 63174, 86230, -92904, 81727, -21175, -79846, 4843, 3302, 85507, 9064, 10244, -67723, 43666, 11511, 16218, -67257, 48425, -32238, -12088, -66598, 88437, -29626, -18308, -97124, 32731, -99661, -79080, 55883, 24423, tail", "row-126": "head, 71541, -74942, -70676, -84913, -50605, -29866, -94176, -62308, -33597, -78103, 97053, -51805, 10195, 35791, 70439, 16294, -31240, 70927, 67777, -65812, -51058, -4730, 3664, 62915, 15907, -32359, -75610, 48375, 62435, 16921, 41656, 7162, -63043, -65558, 61284, -50355, 13928, 20812, -66221, 22859, 87946, 75562, 33426, -83348, 71629, 70492, -92599, -90482, -66953, 10183, 24496, -29618, 38287, 6588, 68147, 67197, 43612, 6873, 22275, -35184, -71964, 76628, -10001, -58997, tail", "row-127": "head, -11617, 87140, -17898, 84273, 31878, 83700, 64109, 70717, 95732, 71870, 42797, 19364, -66965, 41737, 88608, -396, -8963, -31583, 9422, 90239, -76377, 43791, -89310, -73141, -49959, 99352, -35829, -17299, 93523, -49244, 61855, 19789, -82424, 37803, 75860, -17234, 10914, -81939, -20706, 24674, 90819, -81837, 10533, 65733, 8579, -4786, 18957, -24762, -34680, -33239, -81186, -1911, 46710, 80275, 32496, -98920, -1267, -59008, 71446, -66555, 38961, 35111, 70674, 18167, tail", "row-128": "head, 16267, -88322, 41344, 17938, 35097, 64873, -30804, 10831, 88129, 86969, -91325, 71882, 50788, -62436, -37195, 96781, -36036, 70659, 86722, 59656, 71322, 86386, -14951, -3032, 71804, 23469, -55220, 59807, 44362, 40465, 41790, -97387, -43442, -96331, -12741, 54770, 25355, 97868, 74716, 24138, 53613, -73252, -75364, 82597, 60191, 39263, -83208, 39826, 61279, -86681, 10372, 24899, 24468, 7667, 61014, -47789, 73274, -84322, -38050, -47395, 8600, 65106, -78181, 5396, tail", "row-129": "head, -4691, -63992, -78901, -70710, -87438, 35906, -13068, -92545, 96346, 32001, -20456, -26879, -24589, -71921, -86686, 8019, -42562, -2749, 31768, -14915, -59533, 83001, 69974, -46547, -28280, 83129, -33075, 13972, -66346, 43755, -92833, -37853, -31783, 22201, 81835, 19810, -88466, 90550, -60034, -85076, -71431, -5896, 56072, 30449, 71751, 79888, -72850, -98945, 18770, 32992, 96842, 54152, -47089, -12769, 49064, -45135, 8649, -81859, -27094, -73545, -40149, 71310, -38113, 64984, tail", "row-130": "head, -74708, 48412, -45824, -22595, 91467, 9788, 23040, 88155, 88976, -58294, -43684, 80132, -87438, 82547, -91261, 61574, -65784, 13066, -88040, -53066, -57522, 44541, -77474, -42209, 64265, 86931, -34041, 72504, -22732, -7093, -73496, -86762, -89877, -59008, -96457, -86167, 26694, 28153, -83633, 20379, 96820, 13010, -5510, -80894, -23921, 47554, -63625, 84452, 49723, -4786, -3787, 87212, 63924, -86731, 94387, 78556, -56735, -35224, -19980, 79278, 48608, -6411, -16916, 36595, tail", "row-131": "head, -97208, 94904, 97043, -25871, -41777, 69427, 35917, -32249, -97862, 66482, -15683, 50798, 38887, -45713, 7229, 21731, -52517, 74890, -75065, -8334, -48776, 9771, 79297, -51301, -73502, 14094, -12431, -31480, -3990, 35062, -54424, 86237, -87411, -3579, -32344, -77493, -32438, 47912, -8297, -61045, 16051, 61948, -42723, 40172, 83770, 1339, -86749, -75599, -69588, -55879, 26361, 83210, -31123, 52587, 47397, 78185, -45119, 52169, -9746, 16869, -38648, -9150, -93343, 76589, tail", "row-132": "head, -33650, 73105, 42013, -83029, -11898, 45978, -55793, 62480, 33746, -78095, 29686, 68145, -8865, -43948, 43884, 81559, -61037, 86304, -84432, -85821, 19503, -71422, 45951, -84743, -76752, -84037, -68231, 83551, -76479, 26919, -78990, -56056, -30471, -14664, -33327, 64912, -67840, -8943, -92657, -87606, -35751, 
15601, -1530, -60179, 51288, 27683, -45703, 87577, -80699, 5476, 47701, 11443, -85692, -70642, 94522, 83747, -30267, 25888, 24384, -87752, 56902, 56912, -79239, 10231, tail", "row-133": "head, 21018, -65613, -71240, -77984, 47574, 94247, -6802, 15911, -10619, -93119, -67825, -97342, -96743, 54509, 20820, -61899, -46218, 54051, -79555, 38262, -61273, 60511, -40650, 19649, -54836, -5684, -2080, 57109, -65379, 83302, 52703, -25676, -74428, -792, 7760, 68938, -42368, 24942, -87619, -89689, 74310, -850, 38257, 12610, -89535, -81919, 69110, 39981, -70185, 12389, 67931, 6290, -95632, -58390, -43004, 13111, -14829, -63893, 37316, -53603, 96636, -23950, 42769, -82600, tail", "row-134": "head, -82124, -74730, -38215, -13093, -94824, -98208, 16253, -12698, 5535, -55889, 4245, -59604, -64921, 58069, 65708, -86788, -63993, -69466, -42789, -2422, -84360, 76915, 89687, -66230, -70359, 24783, -30696, 11129, -91843, 18630, 87983, -27771, 25012, -46256, 62531, -93512, 29333, 22981, 41589, -25903, 20823, -706, 65725, -42204, 59593, -84004, -46045, 79489, -90444, -60149, 44925, -29314, -12234, 26557, 91434, -23080, -2029, -83683, -35786, -17590, -40665, -88282, 66969, -95649, tail", "row-135": "head, -53352, -97622, -53049, 58928, 5117, 85721, 93122, -12218, -23994, 63962, -20167, -31156, -90882, 55403, 74573, -92361, 88902, -24780, 81836, 65164, -63269, -80094, -62991, 17688, 7916, -79465, -69696, 89445, -63603, -58676, 5379, 56973, 92374, -13761, 21928, 83881, 58176, 16233, 95149, -78277, 2736, -1391, 71320, -90664, 8201, 68665, -95041, 73511, -58554, 39523, -2234, -90786, 1757, -20583, 10421, 71765, 52933, 30486, -68842, -7165, 87033, -22786, 45751, 27761, tail", "row-136": "head, 27419, -26494, 29885, -53299, 48739, 43362, -88472, -1649, 94854, -93041, 3515, -63448, 96856, -7721, -83171, -12337, -78054, -80483, -83010, 97216, -26042, -42756, -65552, -17393, 98925, 60004, 30737, -73573, 45034, 19627, -72287, -92173, 79110, 58185, -71263, -14757, -97703, -4362, -96826, 94788, -1803, -14824, -47659, 97911, -32673, -51900, -12676, -59818, -51517, -95933, -72843, -68525, 76867, 37584, -86157, 47933, -77676, -61865, 86360, -82069, -36792, 65247, 53906, 24107, tail", "row-137": "head, 94264, 22045, 79578, -40859, 52800, 72913, -95178, 63638, 43317, 14102, 58293, -51355, 92771, -56100, 48621, -50595, -67857, 60730, -42853, 65179, -16593, -47503, -21542, 16754, 87113, 59256, 22415, -13644, -11501, -75089, -62153, -37396, 46748, 5493, 58286, 9093, 70726, -89662, 79375, -19578, 58791, 75148, 82671, 88195, -15713, -14343, -71045, -88979, -32785, 73338, 91296, -57636, -67668, 30674, -81420, 56172, 39542, 37369, 30336, -86579, -75002, 180, -21969, 56818, tail", "row-138": "head, -23377, -91094, -51609, 71740, 94817, 3260, 34758, 13897, 23172, 6590, -94720, -24529, -66822, -98673, -90117, 68184, 90270, 34923, 77950, 61032, -33145, -3345, -22795, 72595, -55831, 43203, -13398, -28119, -25758, 59142, -701, -63567, -42822, -67036, -27394, 62531, 84816, 86219, 32269, -37797, 15156, 83469, 40378, 26571, -85105, -85481, 76096, 31721, -288, 21663, -78408, -28869, -11993, -95480, -27827, -54801, -14284, -1510, 61454, 48364, -86888, 73857, 95861, -89616, tail", "row-139": "head, -83178, 90703, -49617, 5959, 67801, -94912, -23599, -29482, -17079, 79992, 73513, 8357, -21387, 75534, -63452, 52838, -35257, 64326, -59673, 49506, 74512, -33735, -968, 89501, 35022, -44261, 32151, 18800, 57190, 75797, 581, -19199, -47865, -39591, 23830, 40950, -94638, 15362, 73575, -97781, 85864, 55287, -8940, 80064, -38547, 39098, 1862, 65508, -45919, 
92013, -87175, -44335, -71570, 34305, -89576, 76495, 24495, 96001, -22580, -18427, 17991, 23508, -31298, 68730, tail", "row-140": "head, 91300, 46536, -68429, -10435, 61170, -14113, -83902, -92863, 12253, -75282, -80418, 5030, 85617, 91360, 49906, -37855, 86342, 41877, -15041, -37887, -21399, -4162, -31202, -85288, -83719, -3458, -55058, 69983, -26106, 13609, -21316, 30678, -90496, -31874, -44052, -72101, -56651, 60766, -68053, 10284, -71068, -75640, -333, -50516, 99014, -56023, -98720, -48015, 61968, 1760, -39938, -62108, 85139, -91484, -83115, -32602, 14711, 38033, -51396, 32361, -55513, -41403, -23655, -14158, tail", "row-141": "head, 43493, -9159, 49903, 80384, 85912, -55862, -86426, 31974, -60992, 37615, 41344, 3333, -17135, -35605, -72274, -27147, -65157, -67302, -87323, 97995, -50555, 94843, 99277, -97569, -59649, -65671, 14666, -56300, -93235, 67127, -69249, 84213, -91833, -84786, -11981, -61906, -67534, 61407, -4265, -72072, -88294, -94084, 73049, -67937, -60720, 2862, -15835, -62313, 72301, 96159, 35581, 84139, 38626, 28193, 89438, 1317, -96517, 58405, 35893, -5419, 87255, 7009, -15970, 69432, tail", "row-142": "head, 28883, -65077, -90235, -24356, -94005, 89527, -29686, -12739, -43223, -49264, 85751, 85706, 87075, -80683, -8676, -28969, 51067, 90072, -53844, -93387, -96315, 61853, -24895, 25817, -60507, 45363, -4844, -41709, 97469, -7791, -89156, 70051, -46952, -19167, 97945, -6983, -77085, 14367, 20265, 92269, 43435, -52271, 2571, -68169, 91464, 11716, -48465, 23921, -32172, 53784, 85869, 31140, -12256, 30125, 52248, -75567, 91658, 12688, -43233, 22239, 41625, -89195, 48693, -23374, tail", "row-143": "head, 37821, -69293, 10936, -8293, -91027, -55834, 1000, 23092, 79631, 66883, -28078, 24763, -40042, 5411, -3910, 2230, 21225, -55736, -72478, 14088, 33404, 89437, 76684, -52475, -56697, 2068, -56253, -64065, -91019, 1117, -92031, -20168, -42347, -30080, -49611, -35915, -53602, -302, 47165, 88915, 66897, 77325, -82778, -44622, -43469, -88410, -95252, -18982, 95975, 16404, -76072, -50552, 87453, -93567, 40997, 11553, 49236, 12671, 21850, 55587, -62227, -75536, -95669, 40083, tail", "row-144": "head, 11456, -62678, -41837, 5821, 74653, -2984, 45545, 61027, 90999, -49206, 72296, 19971, 62365, 58167, -56533, -27806, -75509, 90445, -7216, 55097, 74026, 70552, -9181, 35271, -89908, -55374, -21065, -73819, 54194, 63906, 49578, -72111, 52498, 40488, -73692, 63093, -31741, 91275, -93548, 96100, 45535, 50982, 65340, 81542, -46906, 57353, 23044, -8193, 99913, 655, -80741, 55825, -54637, -70910, -736, 59737, -13968, 32097, -5501, 36649, -92431, 41719, 74188, 13165, tail", "row-145": "head, -15328, 37345, 62200, 45902, 35018, -96771, 40391, 10854, 105, -46283, 19264, 44311, 71397, -69377, -1183, 839, 47522, -14328, -84742, -57290, 42293, 68075, -61203, 38241, -12602, 4076, 40288, -20928, 52876, -18117, 94218, -21699, -93773, 94297, 68648, -87484, -71687, 86154, 79745, 28268, -43588, -58192, -60923, 16335, 34817, -32553, 96709, 67398, 89871, -61653, -33406, -51495, 68735, -88265, -34193, 53845, 24812, -12883, 48815, -80218, 51325, -84568, 6767, -67463, tail", "row-146": "head, 522, -30829, -39271, -24342, 1691, 157, 99233, 51709, 6352, 18540, 48765, -35741, -59839, -84405, -49393, -10543, -25797, 95986, -94035, -17315, -87728, 51117, -1907, 70399, 9063, 98865, -21494, 8285, -59845, 83236, -71370, 36437, -79900, 44150, 73062, 80349, 73916, 8882, 14925, 84329, -22285, -72851, 69472, 73903, -83423, -80494, -62985, 14957, -94586, -22910, -68168, -46044, 63278, -44307, -52312, 16564, 76830, -95278, 
87103, -44096, 90795, 53095, -63567, -24359, tail", "row-147": "head, -80844, -46760, -18560, -44741, 92033, -87346, -18072, -12171, -25675, 50894, 98273, 69475, 67169, -4658, -49791, -18479, -44600, -88750, 73561, -33585, -52234, -76969, -41083, -99353, 76283, -58653, -83422, -36930, 67809, 47081, -75965, 87596, 96996, -12174, 13064, 2707, -67946, 18401, 50460, 46159, -38805, 85087, -27194, 98924, -66396, -25221, -12928, 59916, 49011, -29540, -57052, 11650, -97089, 86895, 3400, -28154, -79266, 62705, -98306, 66487, -71611, 25903, -1068, -19134, tail", "row-148": "head, -42372, -96104, 55647, -97415, -13647, -93795, 75023, -14836, 40294, 52295, 93670, -14730, 97250, 36938, -5082, -46443, 72603, 59587, 62106, 74006, -74089, 79915, -78379, 15289, 51815, -57399, 97694, -33712, 35382, -39104, -15353, -73088, -42454, -72673, -96791, -29960, -75276, -54321, -12859, -70589, 44853, -4677, -9308, 90827, -27490, 20331, -50310, -39845, -64246, 86192, -58726, -85964, 28374, 55254, 41006, -22700, 27238, 51103, -79234, -46248, -99844, -49005, -51371, 57320, tail", "row-149": "head, -25486, -46515, -12147, -62908, -79208, 62473, -37404, -92782, -88374, 83732, -51197, 45616, -58736, 20444, -31670, 72353, 45048, 79510, -72039, -28111, 67993, -16357, -96209, -17179, -46304, -55854, 75317, 53796, 2039, 68944, -89777, -16479, 41580, -32332, 22881, 33695, -91151, 22007, 51400, -53117, -74169, -77276, -7949, -93953, 87170, 9636, -51082, -53904, -76803, -764, 15651, 68443, 87679, 83967, -21898, 22966, -53425, -33583, -50436, 29979, -55885, 29740, 73620, -32194, tail", "row-150": "head, 45112, -80752, -82344, 55640, 87340, -10509, -10279, 10972, 74667, -82850, -56372, -60204, -74537, 64093, 68896, 22222, -71941, -66820, -39856, 82852, -93014, 238, -69897, -33119, 16147, -22680, -55646, 41726, -14973, 56451, -41887, -67161, -65833, -39493, 19957, 16149, 2854, -46972, 8669, -46466, -83286, 61833, 36836, 13513, -60119, -86962, -1134, -49846, -54911, -75940, 8412, -28988, -73858, -89267, 54563, -51006, 68898, -99446, -87434, 3529, -53728, -70413, 1188, 89682, tail", "row-151": "head, 5897, 72581, -3883, -2516, -56873, 19274, -73730, -8572, 43550, 56167, 57612, 87483, -90325, -28305, -60894, -20435, 37480, 34528, 26216, -60991, -92089, 19625, 22130, -46452, -29341, -5064, 80248, 89489, 26113, 54888, 58821, -13231, -2834, -63737, -39613, 18562, 84306, 12238, -39000, -1078, -11847, 57648, 84302, 86924, 56632, -62727, -15417, 93227, -30149, 11993, 99316, 32315, -30531, -33794, 43889, 85425, 22126, 14638, -76949, 24110, 30195, 17435, 55099, 44238, tail", "row-152": "head, 69911, -22556, -74800, 45275, -77011, 34767, 40674, 31094, -18810, -97531, 84418, -64066, -10524, -3872, -10875, -36757, -47682, -2898, 20442, 87328, 64848, -61439, 10215, -56831, -19809, -86797, 93532, 9865, 46006, -98256, -89291, 890, 18891, 34704, -50298, 69765, 16081, -90592, 86197, -60407, 96276, 17477, -33872, 88821, 52240, 24304, -1546, -30910, 55639, 57490, -74829, 6846, 55123, 68222, -28721, 86906, 99225, 34443, -89182, 99851, -89762, 51580, -33864, -79611, tail", "row-153": "head, -80155, 76801, -7052, 39887, -30840, -78569, -47648, 89012, 65290, -50197, 25756, 84575, -63268, -61171, -40644, -68133, 395, -26566, 83891, -31274, 99898, 40671, 95515, 70912, 11091, -37769, -62747, 63506, 5561, 53386, 3839, -90467, -60460, -66018, -33522, 49978, 77408, 18468, -28638, 4984, 68970, 92741, 52250, 4805, 80420, -21615, 94258, -50164, 36814, -71125, -44838, 95239, 18031, -1438, -75846, 6196, -3215, -39253, 44950, 63756, -34705, 42961, 7630, 6920, tail", 
"row-154": "head, -45133, -8626, 71168, 36455, -48957, -14190, -75104, 68063, 92611, -60520, 53015, 11413, -47972, -44258, -63667, 2334, 61246, 46033, -91373, -50946, 12545, -19409, 50517, 85343, 76432, 82095, 4114, 29927, 30278, -27639, 43032, -1926, -75110, 27206, -37369, 59263, -40649, -86727, -98453, -41114, -3957, -99612, -95199, 29499, -19831, 1473, -51237, -75183, 86245, 5176, -57792, -41292, -90286, 4137, -5262, 19933, -15327, 49880, -83290, -62807, 40578, -25118, 474, 10705, tail", "row-155": "head, -12944, -55397, -98116, 87910, -16806, -63300, -45645, -46077, 78090, 11540, -85551, -6989, -14996, 55054, 75285, 94540, 40333, 83564, -69153, 36116, -38042, 98478, 24954, 9010, 80851, -15393, 56579, -89868, -3231, 51735, 84283, 22758, -51215, -32684, -91100, 32641, 83945, 53045, -73045, -84439, 1614, 22691, -8397, -6815, 66042, -50181, 30537, 29147, -19354, 77424, 6947, 87193, -74316, 37617, -65007, 95665, -44868, -49900, 93443, 31464, 27265, 40855, -5372, -42831, tail", "row-156": "head, 77590, 85707, -39475, 83026, -3653, -89903, -46797, 1312, 73478, -53536, 43084, 37129, 60311, -69664, 64643, -62724, -34239, 36448, 27664, 54366, 15962, 84831, -25017, -78925, -50758, -77076, -74211, -57663, 4198, 55724, 38600, -86109, -8130, -59950, 16734, 94272, 19664, 38885, -24352, -70778, 74674, -75050, -31845, 52723, 17250, 89706, -41153, 40372, -82598, 98407, 2996, -39967, 82045, -41304, -68693, 4168, -78057, -27366, -88529, -49177, 18883, 28512, -75604, -62276, tail", "row-157": "head, 41951, 23981, -24732, 48287, -50658, -73533, 33861, -62079, -59967, -65278, 60299, -34759, 60429, -48842, 96195, -34000, -47042, -30050, -13109, -36121, 82269, -53212, -34981, -65451, -21388, -40282, 97040, 17138, 26982, 31743, -61394, -13600, -86496, 75638, 24287, -14550, -19876, -47189, -51375, 93188, 11394, 61086, 67644, 56195, -55298, -7753, -65666, 92283, -36625, -81144, -37414, -76432, 68884, -43035, 19289, -49731, -60819, -75740, 60403, -1184, -89087, 42840, -63895, 41054, tail", "row-158": "head, 79101, 17432, 41919, 80992, 70149, -37529, 72225, -14280, -59742, -8505, -29946, 5547, 46824, -54117, 89894, -6876, 81654, 36568, 87376, 45837, -96741, 30009, 76049, -87594, -52696, 36537, 21961, -43567, 56430, 95315, 33366, -14497, -17450, 72166, -28567, -15938, 96390, -25663, 56581, 90907, 82752, -89824, 99511, 69279, 90553, 15205, -64646, 54860, 90358, -32412, -26381, 56866, 22954, 14242, 20638, 40222, -92746, -34485, 30802, 88918, 31286, -25760, -5456, 24775, tail", "row-159": "head, 70691, 4897, -93796, 18449, -89947, -79075, 76184, 53501, 83585, -65742, -45811, -65974, 44783, 96858, 1652, 906, 84845, 14699, -18014, 51855, -53112, -53735, -74738, 85919, 30905, 63951, -7512, 96025, 28674, 31274, -14645, 6467, -29060, -35725, 28271, 29957, -55908, 12088, 77507, 45239, 84827, 34628, -80067, -25333, -36675, 45899, -11394, 97741, 92009, 58053, -19217, 33162, 18657, 30476, -4147, -40588, 56240, -87548, -94906, 33827, 70058, -86748, 67052, 97007, tail", "row-160": "head, 59295, 77576, -20623, -68254, 66929, 85909, 13706, -68216, -97801, -84925, -19285, -48479, 30439, 89370, 74493, 81376, -79370, -10669, -46300, 66909, -13778, 96151, -73845, 60983, 51221, 93251, -88146, -18165, -78722, 39695, -59018, -1387, 13804, -17415, 35991, 89141, -73100, -51559, -11247, -60614, 73990, -38826, 28027, -20514, -59167, 58692, -95947, -91957, 33817, -26355, 80315, 92348, -8408, -89638, -1121, -93316, -52042, -90565, 89405, 69908, -1976, -23012, -37357, -8860, tail", "row-161": "head, -85280, 24844, 92811, -31422, -42494, 83670, 
-65373, -90560, -39435, -98681, 19531, -66097, 58870, -23826, 65784, 10947, -80316, 4624, -29371, -69969, -5225, 49088, 6412, -21137, 94075, 5504, -15173, -54631, -99639, -73684, 10160, 5294, -1157, 55631, 17061, -29107, -44611, 27538, -27919, -80388, -36643, 73035, 99461, -36146, -88056, 50343, -70969, -68272, -2753, 49166, -44282, -78706, 54439, 79542, -24043, -35556, -39353, 64468, -5296, -93718, -57762, -5511, -78844, -23083, tail", "row-162": "head, 16633, -95726, 81402, 82194, 92266, 53908, -78787, 226, -235, 76976, 39629, 66300, 70200, -4543, 67839, -14512, -52303, 76987, -20344, -37538, 99363, 77909, -85554, 3694, -61269, -36498, -14421, 13012, 14394, 64443, 97548, -66681, -99649, -86004, -53599, 40013, -72188, -88605, -38327, -67191, -77081, 14516, -53674, 24972, -79523, 8654, 95362, -80067, -70808, -33470, 79459, -40121, -67825, 95618, -24953, -40404, 76104, -67688, 77240, -87188, 16061, -62421, -42877, 68218, tail", "row-163": "head, 74551, -28821, -71389, -18661, -50173, 41600, 70317, -80090, -73359, -29623, -76597, -18617, 50868, 66234, 14238, -1055, 26380, 3211, 13372, 99003, -30762, 85201, 16899, -65636, -98454, 37236, 54761, -68706, 98993, 86797, 60617, 71982, 68556, -31364, -83791, 17812, -58211, 55096, 21305, 67369, 51789, -81245, 86008, 98977, 76390, -9907, -13845, -34509, 76078, -46144, 33206, -39513, 65368, 30523, 97544, -89453, 2020, 16703, -89092, -93985, 60248, -76102, 12354, 80561, tail", "row-164": "head, -27825, 32247, 3522, 50182, 32479, -66795, 63160, 78959, -72181, -31213, 27948, -54731, 18143, 65399, -57266, -93364, -60792, 16343, 64365, -78985, 33564, 59062, -62406, -1509, 6611, 73752, -42286, 97390, -29649, -76477, -82313, 63735, -91477, -33778, 73029, -19049, 47274, -65352, -15505, 26434, 58821, 25286, 9428, 59434, 47533, -15637, 23501, -50267, -71104, -62059, -93949, -85246, 29385, -97722, 20135, 22189, -81500, -34459, 75145, 36752, -35256, 13627, -33647, 51171, tail", "row-165": "head, -98267, -29683, 65894, 49433, 45552, -82638, -52839, -66315, -25532, 82934, -99612, 57733, -77906, 40638, 22239, 73530, 28885, -82848, 83989, 8115, -5775, -82427, -50527, -27354, 25733, -47464, 97457, -45023, -77857, -19392, -58422, -257, 38560, 13876, 53041, -53344, 99472, -23523, 39503, 36105, -78584, -57089, 75162, -11750, -62907, -10166, -72615, -93462, 57358, -67853, 3097, 79026, -14224, 21563, 24812, 43951, 94464, -58024, -15520, 34907, -16518, -99475, 73750, -92732, tail", "row-166": "head, -24668, -35182, -22349, -51774, 54868, -55712, 48618, -78704, 87755, -83450, 40524, 61487, 55871, -75806, -32848, 98189, 39416, -69579, -60762, -27551, -99072, 54589, 12375, 22362, 73028, -54612, -36125, -48890, -82630, 90235, -60854, -52016, 72666, 74141, 92626, 58980, -53622, 82321, -75351, 99234, 41001, -75474, -20706, -96754, -15321, 65540, 6477, 3725, -66584, -769, 62486, -78964, 58211, -54671, -69620, 62084, 21875, -34007, 86899, -38188, -27696, 62451, -95718, -9961, tail", "row-167": "head, -44264, -94563, 22297, 27321, -41744, 2547, -15846, -4381, -66633, -39969, -94362, 35447, 99188, 15519, -86565, -15741, -23539, 14925, -85595, -48881, -74209, 45666, 75835, 33799, 66408, -16594, 24803, -84101, -14262, -54599, -61486, 16581, -18928, -63950, -97913, -39571, 81999, -16570, 39321, 31857, 61727, 80404, 324, -49404, -92374, 99735, -92429, 97517, -10632, -32042, 49254, -870, 79508, -68628, 35262, -45944, 6967, -48661, -62834, 37905, -768, -65609, 17228, -78963, tail", "row-168": "head, 24422, -15686, -49132, -55873, 91215, 56605, -26541, 95495, 95454, 55418, 83918, -60920, 
-26750, -71827, -27450, 58827, 62971, -80767, 98927, -14974, -77651, -9805, -75394, 73016, 27990, -55173, -94596, 87271, 85967, 83356, 42208, 18386, 52272, -26683, 75977, -23458, 33186, 44831, 40119, -30486, 24676, 34509, -75767, 85453, 42052, 1442, 25511, 22222, -44433, 15277, -5843, -11138, 76145, 48176, -40069, 38434, -36158, 6690, 25223, 96247, -50586, 86764, -29868, 81130, tail", "row-169": "head, -39127, -40861, -81931, -33504, 55854, -26828, 74446, -60247, 48957, 15590, -37608, 16791, 95962, 28081, -61308, -37610, 13933, -26896, 11290, 87102, 76212, -77686, 58011, 24507, 14483, -15171, -55137, 42431, 66260, -10535, 10668, 86815, 51987, -39342, -98911, 50463, -94211, 56742, 17472, -38206, -6780, 49356, -70497, -50376, -49758, -95646, -83208, -96882, -51078, 97978, -38046, 75997, 15441, -93633, -29280, 62414, -11404, -47326, 10059, 69755, -90080, -65035, 8228, -61909, tail", "row-170": "head, -97120, 66367, 18134, -26160, 46976, -41756, 35401, -88763, 15578, -50952, -23709, -6522, 61255, 23141, 42143, -66846, -88587, 52328, -16924, 69000, -45654, 11310, -34334, -53805, -77690, 29441, 35596, 61273, 88044, -39027, 56227, -32864, 59283, 64653, -8212, 90786, 43716, 48532, -701, 59768, -21061, 18171, -15863, 69188, -99444, -15398, -40960, -49469, -39839, -24128, -40042, 85019, 74879, 82449, 33798, 97474, -17670, -53998, 73083, -8943, 52902, -52374, 60054, 33451, tail", "row-171": "head, -84726, -99358, -68540, -34804, 89672, -19176, 96085, 50910, -97279, 87426, -48224, 62140, -52215, 52583, 63062, -42842, 87204, 71504, 37500, 84415, -58896, -29319, 44095, -70283, -23685, -6810, 19527, -43181, 27241, -75879, -93936, 56239, -91960, -10402, -4117, 17572, -95317, -36031, -78460, -76017, 17237, -84200, -90969, -48999, -8524, -9293, 20574, -88620, 81735, 80499, -68864, 72300, -66216, 53329, -53585, 8668, -8492, 27263, -748, 30664, 78920, -51909, -7717, 47991, tail", "row-172": "head, 74456, 25240, 20391, -10172, -37923, 80907, 71343, -39924, -68777, 72048, -20740, -57790, -52935, -87239, 56378, -85761, 44456, -45111, 57723, 31262, 16976, -82664, 36636, -36747, 18731, -75325, 84636, -71911, 57880, -51108, 22902, -49761, 65794, 7198, -70776, 19878, 45560, -90679, 2214, -87008, -20573, -23987, 24111, 13830, 28581, -80028, 14256, 80608, 93783, 6773, 5277, -46169, 53031, 38029, -79468, -69015, -34, 1224, 71164, 23692, 41059, -89799, -67747, 99311, tail", "row-173": "head, -34337, 98096, -36133, 65844, 9412, 12602, -23304, -17269, 23660, -2859, 24933, 57730, -37861, 70631, 19173, 63337, 67303, 90332, 93780, 92427, -37898, 94778, -81827, 57545, 23299, -79451, -9109, 86591, 68342, -83826, 2062, 27970, 59048, -23285, -52132, -71557, -11608, -14831, -98638, -94078, -64298, -62933, -27366, 33077, 20872, 26297, -47496, -47019, 62800, 58711, 56943, 8774, 25708, 74758, 36394, -8814, -35497, -99161, -22803, 34929, -79868, -52858, -16837, -84919, tail", "row-174": "head, 45962, -86263, 81291, -21672, -46625, -93860, 16630, 73179, -12585, -97344, 3386, 80107, -21160, -34616, 10474, 73792, 56846, -38484, -83541, 47086, 33131, 22994, -19625, 68411, 53842, 25533, 35136, 72447, 92634, 33036, 91195, -31397, 16912, -41322, 21544, -68511, 47812, 25485, 50562, 60668, -28905, 97968, 81668, -71952, -37214, 87882, 34250, 3762, 75889, -37747, -24860, -84149, -961, -1979, 65377, -85143, 71931, -73723, 35061, 6553, 58088, 16135, 34868, 18463, tail", "row-175": "head, -54855, 91278, -76781, -32685, -33817, 84442, -91545, -55193, -60944, 36144, 54007, 72436, 94577, 19032, -72632, -34719, -86876, 51175, -46533, 38556, 10560, 
47155, 28487, 45649, 89376, 90693, 63384, 43762, 12602, -71381, -19863, -13434, 72357, -28954, 31669, -69751, 16250, -69020, 76637, 30775, 20094, -70121, 33443, -38324, -17242, 10991, 81664, 38495, -61592, -76, -94878, 19652, 8553, -9427, 56726, 48627, 67908, 82650, 4505, 43666, 5463, 6508, 36430, 92613, tail", "row-176": "head, 87174, 6086, 21311, -76220, 79016, 19841, -88076, -51348, -34724, -12988, 71714, 88434, -45438, 45289, 70512, -78361, -86715, 17737, 90671, -14795, -8318, 74135, 29100, -89612, 27091, -86947, 22975, -50547, -27912, 89653, 45365, 95987, 39667, -32980, 61829, 31082, 2033, 54742, 65884, -25084, 55198, 8063, -32752, -96846, 27888, 57671, -15628, 45584, 40906, 8578, -17294, 59271, -35685, -93662, 80418, 78595, 51849, 31039, 20666, 83536, 30211, 83919, -12436, 38385, tail", "row-177": "head, 60847, 54359, -25055, 62483, 18002, 851, 17010, 81559, 75641, -5125, 42381, 40855, 92940, -29137, 9025, -56604, 25134, 55015, 10831, 65791, 80979, -55655, -67552, 20640, 15999, 21026, -12504, -4540, -15510, -84101, -9219, 63962, 63119, 76214, 82549, 49083, 86427, -27793, 20029, -71316, -36651, 72312, 83849, 260, -25033, 27103, 46655, -80765, -22716, -78470, -2317, 47904, -80828, -85838, 75965, -87926, 51645, 19589, -64618, 24670, -9573, 93575, -53361, -31540, tail", "row-178": "head, 66419, 7102, 11425, -86263, 2351, -18905, 55555, -72956, -86045, -93535, -19632, 11517, 43525, 34023, -75760, -83448, -84762, 68581, -38679, -87181, -19074, -78130, -64267, 30741, 30295, -46988, -7407, 75783, -63031, 92280, -45183, -92871, -32407, -9508, 60481, -69352, 76381, 84048, -17338, -60855, 85067, 82679, 44579, -78488, 57866, -93761, 77790, -19344, 42300, -1824, -5777, -11344, -38999, 40604, 65284, -14454, 46891, -83510, 57714, 97322, -32760, 46902, 84355, 8612, tail", "row-179": "head, -76939, 66413, -26094, 94491, -49424, 12279, 59308, 98334, 36747, 11205, -39165, -66500, -87575, 33169, 64581, 27360, -34114, -20311, -9384, 3504, -86267, 31620, 52827, 2512, -87837, -84835, 60001, -22661, -29872, 35841, 65254, 97180, -44246, -55871, 25211, 96866, -16444, 24116, 54172, 9071, 23343, 62600, -75863, 22706, 52894, 58784, -4979, 51681, -7881, 93756, -38498, 15320, -73865, 26951, 42433, -17918, -58033, 2789, 75972, 91965, -84063, -19281, -93544, 93237, tail", "row-180": "head, 78389, 66866, -32847, -17610, 42877, -57564, 567, -86653, 78020, -83220, 72998, 40979, 29581, 19464, 64748, -50116, 74957, 80076, 28633, 24295, 54275, -22096, -30475, 41275, 95502, 43391, -60220, -37681, -98434, -77442, 65492, -99609, -5532, 45738, 862, 12088, -61776, 79644, -12849, -36360, 29535, 86476, 79489, -76317, -88110, -28941, 5534, 55996, 70986, -89787, 71799, 6505, -54904, -78289, -10479, 64871, -99659, -82853, 90984, 33311, -53358, 18928, 82822, 39300, tail", "row-181": "head, 39937, -75681, 67327, -5644, -93559, 10859, 83223, 96992, 51507, 53198, 43130, -84210, -81593, -26659, 53068, 21998, -8288, 3050, 96390, 66757, -89004, 88107, -93059, -96339, -75269, 52960, -83196, -18345, 84567, -21043, -96028, 89991, -60361, -71760, -87104, 41769, -30162, -7655, 83842, 90219, -84604, -94382, 44970, -75577, 49494, -7853, -39983, -89481, -16151, 29318, -26422, 82805, 74921, 50873, -12158, -46371, -950, 98999, -27514, 3388, 87499, 66678, -35633, -30571, tail", "row-182": "head, 4901, -37992, 68648, -84095, -64120, 7051, -23160, -63647, -26998, 3855, -52093, -80815, 80444, 95170, 50140, -30281, -63874, -34825, 13067, -40536, -63730, 67588, 13178, -68378, -31967, -50771, 51759, -87082, 91773, -34354, -69594, -7543, -86423, 59275, 
30733, 18691, -20780, 65904, -65078, 85110, -8484, -87738, -85377, 78210, 14784, 97613, -29021, -43228, 5815, -62352, 57681, 25162, 31022, 15083, -70785, -59798, -97133, 43071, -75655, 63587, -42792, -81707, 28324, 12754, tail", "row-183": "head, 14247, 83849, 24154, 25856, 17002, 58923, -19492, 48693, 25534, -3352, 31234, -68321, -26241, 19085, 30350, -86626, -33600, -51177, 33791, 18949, 51188, 36489, -965, 81233, -97327, 93085, -13530, 31851, -90512, 14697, -46786, -61197, 68243, 88372, -22164, -28664, 59759, 66376, -54685, 34156, 67172, -33875, 2908, -23545, 6765, 33209, 97929, -34667, -30032, 67029, -85761, 19724, 56373, -49879, -73277, -52469, 72398, -29578, 32766, -46165, -52234, -83416, 51097, 35976, tail", "row-184": "head, -61180, -2750, 13451, -66832, 96000, 26805, 64755, -29342, 44450, 92751, 82540, 86219, 36339, 86028, 81322, 90701, 44092, 21934, 87223, 87432, 68108, -80774, 98675, 35409, 97664, -25979, -2067, -5550, 52896, 8343, 35652, 39339, 53838, 97199, 92040, 3065, -34364, 93735, 27608, -41006, -77422, 65882, -398, 44891, -55688, -67288, 92531, 86120, 84593, 93610, -70741, 28597, 59440, 48599, 44, 49990, 18480, -92645, 1077, -60339, -64267, -1941, -43853, -14167, tail", "row-185": "head, -92250, 56987, 1148, 22986, -45799, -41103, -19705, 72604, 80719, 2454, -88333, -88228, -64330, -65741, 40707, -19278, -85814, -60506, -83596, -62572, -85126, -95741, -86273, 16396, -87092, 24439, 82405, 33379, 60104, 97841, -13873, -68553, -5194, 88853, 96856, -58400, -66475, 13491, 87689, -28519, 5456, 19146, -759, 67820, -8533, -99249, -51988, 2345, 98786, 39491, -83346, -49500, 22589, 77043, -8791, 3917, 69841, -27726, 6623, 31146, 59093, -76276, -29436, -84945, tail", "row-186": "head, -23294, -79837, -27531, 5862, -61318, -52739, 38562, 80455, -25309, 55762, 45109, 94138, -10064, 91139, -56990, 88720, -34370, 13923, -95117, 14017, -35225, -22184, 52952, -91794, 85638, -99150, -78846, -37461, -54125, 19973, -75813, -48559, 19879, 44676, 79981, -24075, 52420, 20810, -85311, 60090, -2554, 10806, 48630, -68825, -89443, 59711, 31401, 44364, -72605, -28806, -36530, 45918, 95149, 86231, 60687, 85942, -25858, -43947, 73492, 97094, 22227, -97992, -81022, 96400, tail", "row-187": "head, 42303, 92122, 65719, 85917, -87205, -15567, 25429, 23809, 90230, 79485, -64894, -1781, -60201, 60719, -63476, -96465, -63430, 54642, 24760, 30908, -87602, 61379, 46903, -56307, 83109, 10232, -76090, 43994, -69752, 99499, 41244, -66461, -33114, 53917, 80333, 8044, -7612, -17466, -69116, -77978, -21256, -11935, -14496, 52785, 44881, 89230, 68394, 87001, 54346, 27080, 69269, -3265, 63868, -84859, -11382, -35188, 41793, 41488, 23610, -28940, -99938, -25002, 60164, -49349, tail", "row-188": "head, -17351, 45243, 49296, 22461, 39056, 56311, 87363, 33367, 46534, -30769, 13689, 20286, 37434, -60727, -35607, 34125, -59700, 69252, -9575, -48141, 98256, 14849, -90420, 5486, 70742, -76394, -99739, -64917, 33179, -32271, -51621, 51277, 52684, -34232, 67034, 94945, 27205, -28270, -71690, -62144, 71752, -5566, -58931, -4213, -36452, -18201, -23560, -53862, 18767, 33228, -52576, -27355, -45569, 97816, 98050, 5663, -22455, 64748, 43045, 44101, -64243, 49153, 92907, 21731, tail", "row-189": "head, -53139, -78154, -81334, -59641, -46708, 86542, -2478, 90465, -44545, -44811, -40699, -64999, 88193, -39098, 40650, -97739, -71021, 69537, -26405, -52326, -99860, 32068, 46625, -23615, 74560, 42283, -6832, -19334, 62707, -44710, 53687, -98259, 24842, -34088, 98423, 56159, 76163, -93238, -84330, 63677, 86999, 12144, -99156, 21736, 
69860, 4282, 82385, 69634, -71203, 32966, 74716, -31137, -51625, 15892, -15606, 81004, 44529, -35103, 96644, 20559, 86178, -42042, 28397, 7555, tail", "row-190": "head, 69837, -63380, -52374, 60886, -38912, -68490, 17732, 53973, -77052, -93957, -80653, 10108, -43028, 36503, -69259, 71910, 38589, -68561, 79161, -23243, -54422, 25258, -19295, -13150, 45641, 20113, 94582, -48462, 24300, -5363, -40715, 31806, 85588, -38628, 29200, -7451, 38771, 87528, 84399, 43263, -51326, 82984, -40224, 15844, -81896, -75348, -98412, 53940, -29612, -559, -60622, 54534, -21580, 44387, 38531, 5606, -82294, 17454, -26176, 7358, 32450, 13413, -39594, -27942, tail", "row-191": "head, 92552, 57193, -81737, -34219, 470, 74561, 50027, -21563, -3525, -53113, -42003, -38776, 2703, 9435, 6147, -62715, 28118, 499, 55577, 44735, 43408, 76430, -47542, 4906, -28304, -62542, 40790, -36769, -52479, -26043, 78260, 75110, -5843, 62463, 19328, 81327, -52515, -6425, -50634, -98931, -53790, -68161, -66707, -49977, -23426, -98436, -22481, 69475, -57188, 16452, -1333, -91994, 35138, -52150, 89047, 84082, -11128, 47109, -64544, -76370, -52913, 49630, 73830, 39000, tail", "row-192": "head, -19462, -12189, -68059, -21200, 20642, 50055, 81491, -24179, 7559, -60978, -33208, -78918, -39422, 76863, -67079, -63429, -71295, -52897, -48347, 1238, -54191, 84029, 52206, 80643, -66068, -58359, 61458, -17699, 47001, 82460, -6899, -18124, -27879, 6959, 94134, 19373, -74413, -30583, 18502, -86411, -21152, 12914, 13751, 3592, 384, -47525, -2349, -47792, 98101, 64116, 21733, 48913, -46837, -1348, 89025, -64666, 21127, -4259, 11252, 15858, -32316, -31134, 28974, -59408, tail", "row-193": "head, -55586, -76182, 86202, 68401, 64980, -50875, 59119, -84073, 39540, -38336, -6843, -90056, 65364, -62330, 49674, 33397, 49104, -53266, 41432, -94888, 14577, 64095, 3051, -73817, 35918, -2826, 12249, 85398, 35116, -63064, -67836, -99438, -2497, 4892, 65992, -31002, 27716, -92756, -42234, -42481, 21028, -89976, -69678, 2043, -91602, 7569, 31942, 18742, 82432, 45784, -36357, -41721, -16186, -60094, 73700, 10588, 13927, 52944, -87777, 70226, -68139, -57008, -92891, -69047, tail", "row-194": "head, 21514, 93908, -46292, -38787, 41466, -94366, -99341, 22931, -70454, -61092, -58869, 52808, -36112, 8909, -34312, -45322, -4080, 79340, 15634, 32212, -80529, 86025, 94993, 69831, -52888, 26377, 82908, -33645, -63152, 10508, 18397, 36771, 48748, 16722, 59602, -5532, 18681, 59444, -63524, -57556, 55427, 95132, 6435, 20066, -53192, -41020, 73724, -49086, -11715, 84518, -68295, 42158, -56392, -89, -79476, 13014, -74116, -72488, 49361, -83268, 37505, 13433, -24199, 57301, tail", "row-195": "head, -72643, 75312, 84133, -30485, -55758, 5139, 86077, -86142, 93787, -90923, 16463, 84339, 49109, 14523, -51200, 70806, -34485, 46395, -4198, 60320, 60919, 49549, 5837, 72907, 81622, 32634, 78851, -68267, 92456, -22077, 26848, 25984, -31436, 694, 26615, -52254, -10913, 29470, -99358, -18415, -81138, -72739, 42152, 23902, -16123, 53346, 37872, 57280, -10133, -90748, 7019, -65050, 84189, -40577, 4917, 33858, 62944, 19649, 14789, -58209, 42446, 94243, 33376, -32292, tail", "row-196": "head, -30302, 80234, 33456, -14656, -54731, -27017, -92907, -85751, -18306, -43292, -21413, 41120, 66058, 74763, -35979, 22748, 93754, -17582, -47668, -55632, -15211, -70177, -41334, 56683, 44753, 85990, -84323, 6308, -94772, 4157, 84872, 27543, 48971, -30977, 8584, -63931, 83653, -26097, -79039, -6068, 45317, 67451, 10899, -2960, -30560, -46778, -75609, -22049, 4588, -46123, 85897, -46583, 82812, -20558, 
-42974, 75853, 21157, -96177, 14682, -15470, -6866, -79407, -63363, -79508, tail", "row-197": "head, 44944, -293, 30305, 72944, -60957, -63882, 3024, 5604, -45434, 72676, 17472, 17468, -97423, -83755, -47461, -74407, -17224, -47672, -18138, 63043, -39894, -31278, 40810, 23632, 13277, -60532, 76627, 25294, 57323, 91048, 50461, 58743, -13692, -62061, 65141, -65609, 88864, 28337, 115, -53420, 39591, -5966, -12808, 4632, 54212, -63161, 18890, -51697, 36296, 20497, -4732, -1225, -63063, -8199, -37137, 56629, -84936, 75489, 68007, -23647, 77397, -35477, -81148, -26776, tail", "row-198": "head, 14411, -84402, -15314, -5974, 99002, -73937, 18386, -49854, 82599, -20271, 8963, 76772, 16533, 84139, 780, 25214, -80912, -76118, -6228, 1155, 7029, -45204, 62349, -59511, -37152, 691, -13068, -16764, -11876, -19229, 77289, -60655, 3209, -18440, 92432, -15389, -55981, 49246, 71630, 15288, 91028, 21668, -27486, 62857, 9771, -49826, 24518, 80989, 35538, -74422, -73333, 45712, -56601, 32726, 28939, 60398, 15892, 68889, -87412, -41317, 18502, -91710, -20657, 61806, tail", "row-199": "head, 12653, 41192, 11482, -75547, -15677, -79676, -50894, -80531, 76643, 74060, 70425, -72688, 67606, 17237, 72415, 69094, 42247, 33562, -32605, -51737, 36043, -52759, 75853, -28436, 61502, 59835, 13156, 83454, 50590, -74382, -84701, -33202, -20598, -5949, 45022, 28346, -94890, 74231, 38423, 56929, 19142, -72965, 1100, -8714, 915, 96826, -43914, 9117, -52567, -50437, 68148, -85435, 74007, 63881, -55507, 17838, 23819, 38356, -87522, -54325, -64532, 5388, 76565, 19229, tail", "row-200": "head, -13299, -92792, -32367, 60674, -31835, -23402, 44568, -61330, 39108, 41670, -62946, -59869, 21247, 6623, -43753, 84561, 46144, 69842, 21236, 40784, -58599, 32294, 9131, -90131, 19494, -13935, -91340, 66635, 96328, 61862, -87340, -66335, -80238, -54778, 14539, -66818, 85954, -878, 10019, 26753, 49424, -53049, -13304, -79678, -6582, 69244, 73365, -1198, -98924, -27252, 52042, -78373, 56082, 74086, -38752, -50829, -94791, 7435, -67316, 96166, 35292, -50277, -91119, -67109, tail", "row-201": "head, -23842, 12008, 95597, -36554, -15053, -11102, 92447, -8518, 32510, 25274, 76292, 85534, 89555, 18429, -35805, -59016, -46385, -31227, 5051, -30177, -82150, 73399, 19033, 93500, 54775, 25032, -28030, 34359, 92689, 62650, -34569, -46134, 5835, -60568, 2599, -51010, 77070, -81475, -47391, 2278, 25864, 90814, -61406, -5504, -48334, 70970, -31655, -12317, 84785, -48451, -25328, -44762, 48136, -30468, 63851, 785, 59952, 22207, 95471, 17114, -92665, 37682, 49355, 3496, tail", "row-202": "head, 72674, -73296, -24042, -98803, -56488, -44419, -14790, 98070, 65309, -64788, -89431, 70952, 46568, -27274, 47650, 32279, -19982, -21061, -61442, -15502, -51823, -44507, -42644, -92297, 67410, 17643, -8331, -76786, -23644, -89490, 20062, -74858, 15460, -75482, 10045, 11352, 82928, -39214, -25736, 28737, -69188, 98234, -2185, 71227, 3714, 26121, 76650, 29281, -58959, 69631, -75427, -48558, -33760, 4771, -84897, -18444, 76302, 69089, -84152, -83940, -30377, 79291, 87665, -21551, tail", "row-203": "head, -18151, 56125, -85350, 40527, -58075, 57688, 92429, -19559, 72037, 3872, -31416, -14973, -89222, 78492, 21744, -86643, -72431, -80819, 86567, 52053, -66965, 99896, -92552, 61506, -49172, 12632, -70233, -56408, -7618, 65339, 26410, 94148, 12982, 39074, -20874, -31892, -67477, 26108, 63195, 32923, 25653, -39622, 12395, -69156, 83283, -29921, -43684, -24862, -41429, -7157, 95260, -69209, 49186, -74548, 34632, 84660, -15556, -35244, -86393, 46686, -68407, 87764, 90542, -11699, 
tail", "row-204": "head, 78247, 1676, 31804, 46092, 64896, -54333, -21578, -8696, 61435, -49741, -59583, -35760, -70923, -86512, -52734, 7073, 85079, 65027, 53357, 36026, -35726, -86912, 74509, 94485, 16224, -17108, 63412, 65801, 60149, 14565, 65291, 92292, -36534, -54750, 10196, 66118, -68059, 83439, -57188, 63141, 73105, 23268, -91247, 83807, 42645, -6648, 15647, 59971, 76702, -94975, 89806, -33885, -56469, -45390, -12070, 9607, -8048, 37747, -793, -10603, -58217, -59714, -91209, -71170, tail", "row-205": "head, 25248, 83723, -15849, 86467, -71065, 63687, -83402, -6947, -86002, -83671, -94231, 24744, -48456, 53804, -59334, -62468, 86686, 67029, -93329, -35023, 90157, 48666, 18192, 73533, 13113, 53550, 71184, -163, 86694, -18009, -35846, -32536, 53571, 33158, -14355, -64807, 37437, 81058, -68854, -86193, -73592, -99338, -89996, 18802, -68233, 6429, -4164, 98264, -2826, -92791, -77563, 64339, 68352, -74514, -22858, -78787, -1921, 6213, 95789, -78449, 13512, 69331, 69555, -61960, tail", "row-206": "head, 11485, -12402, -75804, 15477, -71667, -98810, -12049, 8744, -47218, -77094, 79821, -38348, 69440, -31384, 3174, 15510, 9730, -82689, -41210, -65059, 98574, 26100, 85046, -11649, -34904, -34164, 13743, 69922, -40524, 60732, 6946, 96193, -9280, 19255, 83032, -79976, -98306, 87443, 22347, -90536, -62352, 87866, 46952, -18716, -80686, 54531, 78080, -40151, -10729, -36812, 22557, -47238, -71383, -19993, -13761, -52080, -36856, 73290, -16402, 34680, -11646, -98833, -25420, -87541, tail", "row-207": "head, 89697, -71764, 84970, 5524, -6613, -64049, -22874, -96073, -73298, 64008, 64797, -36439, -76849, -80124, 10893, -32372, -75907, -9800, -45221, 20351, 63000, -40938, -85591, -35303, 29417, 2314, -71336, 47968, 43231, -72525, 84948, 29645, 92354, 32415, -62657, -26354, 57391, -28729, -40469, -473, -31424, -9410, 51194, 32994, -43198, -24432, -54170, 79675, 40314, -36890, -95357, -28580, -33316, -55146, -44118, 97912, -77683, -46507, 10424, 58893, 10382, -47071, -59222, -3888, tail", "row-208": "head, -63443, 28232, 86734, -70058, 80487, 84033, -80193, -8175, 39982, -83462, -44841, 82241, -36648, -49541, -41916, 36308, 12916, -19041, -87602, -77482, 62175, 52015, -39273, 23597, -41200, -75481, -74724, -9882, 92852, -53893, 91565, 97906, 8641, 12370, -81652, 60193, -61787, -61072, -6694, -31217, 86858, -34072, -84337, 21420, -77280, -59283, 99217, 22873, -55528, 16363, 846, -83610, 70567, 65025, -18390, -5861, -36346, -5258, -55348, -89587, 29037, -20819, -57597, 91074, tail", "row-209": "head, -37603, 48439, -70144, 40491, -94545, -89093, 64401, 88648, 4381, -6227, 27879, 75717, 30240, 50855, 70494, 47192, -28488, -3390, -93534, 78717, 37089, 84163, -49929, -51256, 97415, 3x9101, -40370, 79482, 9753, 17342, 93812, -49245, -57505, -68688, -75210, -11868, -28865, 13902, 88745, 88162, -41584, -47460, -97243, 94061, 50252, 56795, 3725, 64121, 59932, -27794, -45028, 95029, 24357, -68769, 30527, 84973, -63002, -43409, -82377, 38494, 71371, 42187, -648, 82624, tail", "row-210": "head, 85337, 16128, -50970, 85629, 39153, -50788, 54962, -757, -13799, -42152, -34782, -56480, -88732, -31928, 24732, -91101, 9839, -91768, -68335, -70333, 13351, 765, 41107, -18845, 50051, 90749, -90729, 86561, 64345, 9770, 97062, -36653, 80701, -33234, 14980, -58976, 73426, 84475, -51620, 32867, 80265, -23475, 26149, 87312, 51841, -60432, 55340, 19568, 34449, -39511, 8869, -87638, 30479, 57078, -14216, 57176, -54250, -41936, -69731, -99202, -33152, -60395, -61624, -76502, tail", "row-211": "head, -4844, -79783, -42364, 99577, 
-86956, -6007, -14125, -37564, 5839, -45207, -71374, -53512, -48904, -19678, -12345, -1670, 71988, 49410, 26726, 10633, -31297, -97140, 59095, 18511, 81188, 69230, -36261, 25170, -24247, 77231, 12107, -79335, -9749, -54898, 50036, 22450, 5142, 27594, 98720, 3641, -56329, -41270, 78545, 94395, 1297, -57247, -41981, 71699, 52015, 37817, -79469, 46270, -29218, -24320, -34760, -98696, -90630, -8525, -93960, 15015, -34141, -91123, 46281, 14296, tail", "row-212": "head, 27862, -6698, 21249, 97076, 88557, -73973, 22162, 49645, 35369, 76520, 9342, 33928, 1208, -68062, -30807, -95732, 84952, 73223, 22316, -26075, 76960, -65510, 36932, -95819, 33115, 36653, -57084, 64290, -85232, 54846, 53531, 61551, -30031, -144, 65678, -68748, 4713, 45620, -97199, -21067, -52032, -46688, -24576, -92402, 10455, 42557, 95005, 27782, 71284, -35304, 70744, -66511, 98971, 69936, 94729, -95864, 24468, -97458, -82434, -22316, -70585, -89428, 1510, 9959, tail", "row-213": "head, 60157, 67789, -71487, -65986, -73757, 89344, -38722, 28260, -97431, -92056, 95631, 4149, 1219, 18508, 61968, -23148, 91244, -90705, 92215, -8649, 84294, -16661, -33045, 26069, -27969, 36082, -11209, -69686, -51542, -21178, 43171, 90593, -65508, -94184, -66567, -69371, 61410, -20290, 71154, -96436, 75311, -79201, 38153, -43202, 83529, -20631, 85088, 28555, -29499, 82113, -89633, 43490, 40683, -95565, -9961, 46093, 91644, 63076, 25494, 53886, -94647, -33109, -75073, -81189, tail", "row-214": "head, -1023, -45195, -774, -31645, -88946, 84141, 64678, -16265, -64432, 17145, -37559, -77332, -5862, 18025, -63331, -64268, -72595, 59577, -82145, -33839, -13620, -46467, 64520, -69385, -31647, 93129, 33392, 63795, -81549, 80955, -96995, -31788, -40281, 58169, 82623, 20991, 62729, 25711, -3202, -31035, 75629, 5203, -86132, 9223, -99335, 25931, -14633, -78435, -94790, 4451, -75892, 88159, 3626, 34273, -2250, -73052, -85077, 68266, 53562, -83487, -95687, 66406, 72687, 70453, tail", "row-215": "head, -77073, 78788, -79070, -81904, 36084, -27209, -242, -19231, -40556, -44774, -77221, 32246, 65726, 86615, 47214, -84697, 33020, 73270, 41606, 93158, 24426, 90863, 37663, 54227, 1102, -66559, -42225, -22842, -52703, 22019, 85599, 51278, 68642, -18486, 9869, -90581, 15330, 69944, -83976, -36588, 55738, -62161, -73026, -79773, -91637, -39711, 60826, -49023, 33944, 53213, -17448, 4948, -22061, 76927, -76511, -11059, 59560, 52940, 60815, -69186, -42419, 66184, 53203, -45928, tail", "row-216": "head, -58843, 31684, -66337, 58031, 25157, -84615, -22798, 20100, 34613, 10590, -68696, -57745, 86832, -2213, -98435, 54498, 38486, -90733, -39663, 88194, -52485, -22642, 40674, 32055, -7685, 401, -7398, -43275, 47679, -2418, 19379, -50228, 83543, -32235, 26197, 48601, -73657, -14423, 6874, -84660, -88614, 76857, -92851, -45289, 96031, -71882, 20865, -55855, 76160, 73504, -59647, -60496, 91246, -84934, -39223, 57765, -7246, -47978, -99176, 76700, -5406, 50408, 72622, -661, tail", "row-217": "head, 88193, 94440, -82206, 83422, 63615, 15811, 83337, 98754, -1219, -4442, 39693, -8122, 58775, -98841, 89002, 9903, -3536, 10085, 42706, 90667, -86745, -9245, 181, 32264, -7676, -62652, -39825, -16752, 27026, 2123, -20126, -28911, -29535, -38721, 59225, -70483, 21831, -23666, -84260, -14604, -56737, 53732, -6345, 39041, 56309, -88793, -42199, 76173, -98527, 46734, 64366, 85574, 89116, 59236, 11702, 41528, -55444, 20435, -85581, 68231, -80173, -30985, 4012, 42823, tail", "row-218": "head, 29646, 94902, -44864, -57360, 17462, -39681, -15520, -72694, -62483, 91249, 83301, 31233, -74944, -43621, 
-65588, 79466, -12712, 13985, -79015, 16825, -10806, -55910, 94956, 74643, 44930, -67935, -85035, 91425, -90580, -44668, -29691, -43709, 35005, 68492, -58376, 30766, -7148, 45171, -18300, 98728, 79594, 85117, -59942, 75731, 27092, -97089, 13527, -87558, 52589, -91262, 40212, 3219, 50374, 88630, 69102, 84040, -87171, 18169, 9830, 22285, -55775, 34342, 69609, 77881, tail", "row-219": "head, 14404, 52864, -74055, -34050, -29538, -11916, -66319, 19904, -39300, 26963, -74770, 88250, -89326, -69296, 4012, 83026, -24158, -11607, 39473, -92309, -70539, -27617, 72925, -60758, 89780, -77704, -70869, 82826, 66074, -89317, -24598, 60154, -73695, -79538, -14033, 49870, -76604, -55036, 69466, 62240, -54241, -39002, -10218, -38057, 39254, 55904, -89692, -23400, -45474, 13029, -26199, 65108, 26593, 78316, 45655, 91507, -87205, 53555, 23848, -16607, -59822, 31832, 74169, 77778, tail", "row-220": "head, -61185, 17886, 47695, -56751, -38301, 73502, -70847, 22477, -96799, 43727, -67627, -38040, 51004, 57553, 56683, -57548, -74191, 15871, 43515, 31858, 76000, -25525, 2242, -13086, 59497, -47017, -50319, -45445, -59453, 73998, 61827, -32514, 3303, -7547, 80539, 71457, 66139, 9741, -54899, 97911, -98861, -52512, -12768, -1062, 44089, 83369, -29840, -10875, -98973, 11768, 30873, 72579, -1525, -95160, -38714, -8112, 83723, 14903, -23070, -13731, -93552, -49066, 71857, -5720, tail", "row-221": "head, -8221, -2216, -41072, 15399, -31916, -46729, -84456, 45312, -74657, -27752, 24809, -32254, -57737, -41224, 31151, 48186, 75820, -53999, -46404, -13, 67426, 37343, 48881, -39727, 77708, -17497, 51927, 58834, 11739, 36015, -17663, -68235, 31756, 74574, -46794, -762, -51000, -16619, 17130, 17553, -85954, 64470, 11269, -66677, -43376, -78265, 54686, 84352, -11968, 39852, 90874, 26383, -3741, -97424, 13868, -69075, -43576, -7029, 81316, 3825, -44250, 4087, 84258, -88570, tail", "row-222": "head, -91320, 83127, -41991, -17656, 85489, 64806, -79076, 24844, 94556, -7422, 60174, -89506, -42150, 8141, 62809, -71094, 61018, 50557, -75423, -94470, 31654, -22510, -71819, -32103, 52570, -38569, -84682, -25107, 19179, 13746, -10652, -38070, 23486, 59577, -59434, -93091, -27640, 38703, -32033, 26814, 19255, -65392, -93403, 67971, 22268, -91598, 81039, -84960, -8649, -82567, -42640, -31057, -49421, -90079, 82831, 89970, -88052, -56443, 98095, 49907, 13894, -63140, 40676, -47431, tail", "row-223": "head, -87859, 11731, -84875, -52274, -89732, 85557, -58564, -3991, 65368, 47842, 83932, -15014, -81020, -64455, 37144, 63034, -18152, -98389, -53374, 81007, 71849, -76080, 77614, 85971, -95909, 3954, -33073, -47690, -39395, 4757, 30335, -72224, 68267, 34405, 85471, 26807, 99160, 56320, 76394, 31405, -9459, -13557, -88287, 5626, -53452, -38149, 2199, -88983, -96400, -48977, -58870, 39415, -95146, -19496, -75083, 78727, -27498, 28421, -16900, -4202, -54528, 28458, -52043, -37739, tail", "row-224": "head, 60880, 8376, -84382, 53772, 64867, 13996, 45800, -84536, -20746, 21002, -84715, -52234, -97752, -61178, -201, -4486, 45552, 90980, 10709, 53446, -74123, -12348, 62178, 85747, -67518, 86989, 79427, -14527, -47209, 38834, 88069, 38619, -41188, -83889, -27227, 94006, 77651, 90424, -49957, 34156, -53926, 18990, -33974, -80627, 71023, -79129, -43149, -54918, 89969, 437, 23677, -67623, -56047, -33316, 53978, 94016, -88908, -52389, -2881, 95238, 74071, -22477, 42664, -47448, tail", "row-225": "head, -73356, 71858, 92466, -75742, -44909, -64119, 80467, 10259, -10831, 19635, 2407, -87205, 93527, 48350, 52996, 85627, 3145, -7685, -22709, -70012, 33202, 
47989, 72303, 61772, -99083, 68358, -95980, 4980, 61613, 23308, -90549, 96509, -51427, -61099, -36128, -62332, 74999, 11769, -90208, 29579, 4065, -22524, -78493, -53485, -59262, -53367, 60352, 95923, 21616, 99941, -20287, 71375, 53866, 69578, 96035, 62079, 84042, -22368, -77372, -19069, -16560, -35871, 68051, -59282, tail", "row-226": "head, -74931, -8895, -14020, -89588, -11502, 72231, 76081, -53953, -99259, 33323, -936, 9236, 64287, 30508, -16396, 8202, -59321, -97819, 29679, -93943, -24542, -31111, -31297, 35425, -51569, -12912, 65354, -90275, -53804, 55076, -54763, -12729, -55765, -32610, -60291, 4178, 40766, -28487, -33447, 2295, -82336, -57928, -58019, -77254, -4777, 24773, 76953, 49639, -22874, 74451, -43156, -77475, 71030, 8773, -84034, -88368, -84453, -84784, -38843, -29093, -8488, -54728, 84022, -89482, tail", "row-227": "head, -5398, -40219, -44299, -61762, -21899, -43446, -10885, 80732, 82865, -34329, -33312, 5488, -26235, -58885, 43229, 85283, 15507, -77963, 74884, -63376, -1733, 14118, 38136, 1878, -27496, -346, -93196, -58869, 82583, 69636, -28336, -73073, -35979, -68965, -41915, 58378, -34962, -45352, 40673, 49797, 19266, 11795, -35187, -50470, 31421, 61238, -38829, 1489, 27304, 62520, 92127, -18486, -19662, -98660, -27094, 56951, 3988, 83331, 47246, 8376, -89053, -99206, -72335, -40473, tail", "row-228": "head, 59048, 7622, 36188, -57677, -72746, 54898, 75353, -39710, 32036, -31079, 71636, 52710, 3869, 21917, 65603, -48571, -24905, 1630, -68748, -58473, -10779, 75109, -15332, -34585, 74459, -11044, 39743, 78438, 82417, -63680, 19898, -35204, -68251, -30857, 61847, 44087, 72821, -60817, 1477, 61850, 13718, -52569, -63957, -96625, 36133, -82784, 38264, 15373, 82531, 64673, -20349, -84606, 87535, -55321, -99281, -43639, 74247, -21521, -28090, -41713, 35847, -64738, 22351, -3534, tail", "row-229": "head, -15318, -78762, -76434, 23376, 5802, 16240, -10130, -79624, -71667, 18722, -95389, 80862, -77754, -59845, -83557, -51057, 45011, 56573, 76746, 77711, 81896, -34030, -53601, 39781, 34706, 88030, -8301, 99193, -78519, -24779, 52428, 81887, -14298, -20083, 86530, 21710, 32176, 94857, -51145, 58815, -69286, 36039, 38230, -1655, 71055, 21283, 59025, -68596, -61931, 47839, -2912, -56480, 26573, -85275, -32842, -98212, 34353, 43741, 52688, 14211, 31535, -66938, -41973, 58917, tail", "row-230": "head, 36865, 54151, -71878, 59993, -8687, 73625, 12648, -4266, 99104, 17644, -16314, -662, 47048, -5605, 36190, 83959, 97115, -49661, -74595, -86497, -29953, 20855, -47501, -74393, 85718, 67905, -69194, 49388, -81809, -77559, -40649, -88291, 99791, 86959, 39548, -47401, -13900, -89603, 29091, -40644, 34632, 41758, -25037, 31860, 28, 30074, -85297, 7360, -9770, -15060, 21435, 50245, -7162, -84838, -25299, -81390, -9131, 59236, 2013, -27429, -6321, -68123, -77459, 98778, tail", "row-231": "head, 89503, 61472, 49273, -83329, -7758, -75115, 4181, 46830, -7785, -55218, 93011, 722, 9223, -75897, -12216, 77459, 44339, -61241, 57841, 85641, 92214, -48585, -39233, 62213, -32062, -19084, -84991, -77170, -42867, 82609, 47510, 70031, -22305, -77144, 78908, -7820, -4767, -41250, -54825, 61177, -66406, 76584, -91276, -93714, 50747, 23226, -42286, -38669, 37220, -1280, 3657, -31315, 76725, 38807, 10160, 91106, -36053, 93935, -15399, 68495, -75495, -57594, -94141, -9903, tail", "row-232": "head, -93000, 60044, 60377, 97763, -27111, -45637, 69295, -57488, 94619, 74318, -84653, 84875, -77959, -82697, 24387, -3834, 61261, -63704, -51183, -8794, 81064, -8272, 52201, 66250, -6802, -9234, -11589, 97174, -69199, 
-85016, 7949, 52022, 26229, -70044, -56898, 97565, 85711, -20724, 63712, -98177, -90108, 28801, 61759, 17754, -79581, -35023, -55346, -52702, 15825, 62386, 82846, -19478, 50404, -24020, 40290, 52677, 19538, -55374, -75645, -52812, 51127, 64446, -79756, -88035, tail", "row-233": "head, 7593, 33598, -24294, -69132, -72577, -68770, 36474, 14220, -75046, -59020, 16825, 54005, -35881, 37481, 35407, -77492, 26262, 7162, -84166, -51009, 61069, -29441, 88281, -96012, -72220, 58156, 56184, -40966, -36963, -82662, 48516, -53456, 55092, -23723, 64696, 8817, 24597, 53470, -14088, -57871, 91114, -50038, -28227, -31146, -91184, -44944, 27540, 10503, 18852, 88600, 91812, 43064, -32375, 46431, -65265, -14246, 93175, 48557, 56701, 84353, 89875, 36343, -38732, -70892, tail", "row-234": "head, -1761, -33784, 59048, 36640, 93948, -83241, 74636, 4146, 83089, -19929, -74839, 858, 50627, 26150, 46411, -94903, -63117, -14485, 42542, -69984, 21752, 38625, 78198, 60, 20641, -96051, -23435, 32879, -99429, -79968, -65349, -78876, 13051, 72272, 27467, -49142, -84357, 44113, 22483, -51546, 5928, 6487, 40694, 72364, -76571, 30756, 78150, -50912, 78917, -31982, 55571, -21398, 97847, -57316, -77730, 66899, -68801, 55051, 85945, -7763, 43958, -91298, 37498, -31196, tail", "row-235": "head, 26879, -49015, -24756, -69972, 14316, -82496, -4189, -8834, 85167, -79094, 53648, 55472, -54703, 219, 78759, 91340, 51806, 20983, -42206, -19868, -2955, 25207, -86718, 14021, 20378, 25932, -75867, -49229, 80420, -98350, -33335, -44127, 18844, -35103, -7910, 56982, 38754, -5786, 18362, -90464, 31831, 74155, -43296, 15812, 88570, 65848, 87042, 42455, -14592, 37495, -51108, 55350, 87653, -13506, -48647, -90680, -23000, 79476, -78530, 71986, 38758, -52305, 8375, 35540, tail", "row-236": "head, -24697, 27092, -47230, 1752, -1381, -70124, 13148, 5046, -32903, 29893, 92932, -2404, 45267, -5531, 95646, -59653, -10687, 58259, 82502, -92982, 52095, 86460, -95357, 69325, -10455, -90500, -72558, -89918, -90520, -13462, 40746, 99890, -67655, 3855, -47758, -40448, -57860, 81479, -71102, -22502, 78960, -60184, -98511, 61611, -29272, 79188, 22197, -17530, 31527, -60921, 59079, 96159, -82983, 27135, 20004, -78711, 25352, 22337, 57240, -47046, 93465, 97689, -36432, 6940, tail", "row-237": "head, 78643, -67196, -24832, 91340, -44534, 61697, 1839, 33143, -4281, 82064, -62138, -1738, -41310, 8023, 7421, 77117, 67581, -51038, 33475, 41779, 44662, -62910, -59834, -24136, -18076, 7118, 25856, -64340, -24465, -7621, 98560, 62618, 89747, -64392, 44876, -30537, -41727, 48877, 36816, -20516, -27297, 91210, 26442, -11935, 51897, 77453, 39754, 49687, -76096, -57300, -86134, -99584, -62770, 26387, 16629, -84643, 57060, -44932, -44221, -82029, 79596, 72781, 64331, -93420, tail", "row-238": "head, -51205, -56160, -45940, 67402, 92614, 82414, 77784, -50613, 62271, 20950, -52842, 27184, -25725, -81576, -58104, 83598, -77610, 8334, -98900, 24223, 40486, -99261, -58794, -84780, 41073, 29857, 54249, 90202, -89558, -50893, 94588, -77425, 4201, -14701, 69584, 52739, 58974, 89900, 25343, -72106, 88996, -77142, -94922, 50942, 79755, 456, 70373, 70914, -3381, 10821, -76814, -66431, -81916, 86135, -50316, -23887, 12118, -47218, -26782, 34462, 26467, -50075, 40994, -39323, tail", "row-239": "head, -19866, -57298, -91294, 93517, -22935, 81880, 13606, -46669, 55516, -44597, 64314, 24669, 3310, -57121, 96417, 4858, 61377, -92530, -90808, -14196, 77542, 75598, 23280, 39032, 70287, 69386, 80940, 88647, -3067, -37105, 72638, -94210, 74766, 82705, -31412, -14624, -85722, 75040, 83824, 
-40134, -63985, -14415, -74628, -1944, -1152, 6605, -44821, 62897, -74529, -78299, -5140, -61926, 67181, 20910, -98778, 52525, -23718, -43704, 81415, -18647, 22703, 23594, -22999, 14054, tail", "row-240": "head, -35159, -49406, 46219, 89382, 77905, -40174, 95711, -12527, -86440, -93241, -35717, -99684, 37772, -68560, -81211, 25137, -37627, 65488, -9330, -84308, 70525, 85838, -69402, 53047, -17190, 60306, 71208, 32160, 19968, -9926, 3438, 81883, -90391, -39192, 15734, 39330, 48646, 78323, -85601, -15696, 98355, 62193, -91320, -95966, 72686, -22802, 15334, 51829, 44303, -54826, -26151, -667, 34802, -6766, -18259, -97529, 47545, 20138, -55791, -94735, 53115, 16949, 80153, -15578, tail", "row-241": "head, 69039, 53367, 18808, 4580, 99117, 55181, -84582, 25968, -73963, -78932, 4031, 22811, -16014, 20643, -7858, 59015, 80568, -95783, -75952, -28737, -61073, 17626, -97588, 85304, 30589, 22957, 71027, -63158, -4741, -14908, 35128, -63725, 44974, 41219, -65224, 87415, 27653, 25786, -86685, 10396, 25229, 57717, -91488, 41568, -73520, -33390, 21582, 65700, 6132, 55484, -17408, 67831, -41392, 56558, -65417, 88552, -6089, 83474, 17318, -13455, -86818, -2766, -74416, 26262, tail", "row-242": "head, 94210, 7472, 8566, 90912, -35704, 34097, 7610, -42922, 75956, 18159, -5700, 86581, 86533, -61695, 25161, -60359, 45360, -73411, 19917, 56873, 52664, 44789, 66866, 28317, 334, 91002, 40408, -66115, -56982, -16060, -76740, 11444, 24661, -13017, 53407, -22053, 68754, -29731, -42496, -73098, 63767, 11643, -38138, -45244, -84483, 78003, -25705, 12458, 43837, -32853, -41298, 88618, -51423, -4935, 85845, -4962, 59753, 11051, -27632, 53436, -78320, 60833, 8714, 17323, tail", "row-243": "head, 86424, -38410, 79465, 30195, 18764, -74573, -84857, 82935, -63607, -36194, 89553, 87179, 68147, 32163, 76399, 7868, -70941, -88262, -6150, 15153, -93959, -62918, -4647, -8032, 63631, 75365, -8768, 2354, 61417, -38798, 52151, -94730, 36147, 13337, 88864, -23366, 52994, -46263, -7837, 56898, -43451, 31376, 73512, 71365, 21983, -42147, 91958, -4348, -86209, -74615, 38723, 99240, 68206, 62404, -32678, -64323, -74781, 64759, 95452, 66319, -20878, -66364, 40343, 77345, tail", "row-244": "head, -78529, 17900, 52626, 64446, 11831, -39805, 26459, -98152, 1030, 71749, 10141, 56069, -76284, -99228, -53912, 6542, -65217, 89132, -43786, -46185, -95006, -7588, 39812, 99449, -47495, 25390, -69801, 66420, 88342, 19447, 72830, -91646, 87129, -9696, 33705, 85609, -1855, 1389, -92196, -20030, 56739, -99801, 63723, 94563, -65436, 14194, 66763, 77049, -35644, 8864, -7655, 21440, 82908, -34086, -63047, -95034, 82490, 71891, -47537, 66235, 28045, -92678, -19271, -36038, tail", "row-245": "head, -81357, 25915, -86061, -95688, -14501, 74633, -99226, 23192, -44376, -71258, 56673, -62298, 8677, -94071, -83321, -70208, 78307, -8929, 3605, -28106, 4099, 50059, -77420, -56532, -86564, -8406, -94793, 92126, -78318, 14038, -18169, 20533, -33209, 37561, -96727, 62925, 58012, 99263, -79787, -60070, -77507, 18143, -87808, -83044, 59373, -53261, 8538, -66354, 18003, -60, -36952, 85691, 9322, 77419, 83666, -32752, 34746, -69189, 69519, -35180, 45670, 78483, 59486, -13635, tail", "row-246": "head, -69053, -22386, -79441, -73803, -71647, -81997, -75098, -29509, -26687, -1612, 46396, -35076, 87085, -62411, 34189, 20265, 54651, -33385, 71599, 77836, -2592, -92248, -35210, 39626, 11236, 9788, 9830, 68616, 40633, -29933, 62157, -6683, -70460, 87521, 37914, -14148, -15896, -97460, -31517, 90417, 49560, 9226, 16221, 99764, 67985, 58189, -63587, 17288, 79863, -12490, 48275, 
4507, -93444, -17498, 51481, 60709, 8585, -55642, -25158, 12848, -25105, -3021, 69203, -91844, tail", "row-247": "head, 50714, -19680, 16782, 14684, 47443, 44581, -87346, 73766, -52915, 77550, 26834, -81684, 62433, -37196, 2126, 93792, -81500, -87888, -2887, -77768, 71378, -91593, -2784, 64363, -71352, -6008, 35236, -93052, -66372, -74887, 19988, 2389, 35695, 83262, -62458, -37684, 12404, -43093, 94697, -59206, 62299, 8637, 4956, -99171, -64185, -20355, 42618, 13195, 54981, 48985, 67861, 94906, -53172, 93491, -71760, 92014, -36867, -76887, 60645, 63160, -13341, 72573, 23469, -52849, tail", "row-248": "head, -69493, 52484, 15128, 12737, 42962, -95445, 17097, -5672, 26744, -28972, 41469, -99433, 32961, -45975, -58156, 64440, 9881, -67432, -61398, 64446, -13879, 20582, 9146, -77882, 52169, 79341, -39410, 92375, -91877, -24693, 62261, -50235, 40109, 41341, -42528, 63667, -98080, -11815, 89929, -62542, 67936, -92058, -24845, 84510, 60193, 58230, -72972, 82783, 49187, -43860, 39527, -35330, -63277, 47931, 66218, 82043, -17772, 29881, -28251, 25884, 23348, 8176, 78155, 48868, tail", "row-249": "head, 34778, -68668, -24794, -51287, 17303, 65078, 78978, -91396, -73455, -1101, 78111, -57101, 58376, -25908, -20347, -41625, -83116, 48645, -64507, -8308, 63754, -81780, -65785, -57608, 66592, 2389, 31509, -49247, 2498, -82059, 42924, 69463, -4647, -52241, -41648, 67403, -87822, -48062, 3180, 11075, 7745, 66124, -11706, -72471, -77812, -69930, 1266, -58906, -52952, 29423, -51775, -12111, 3919, -11535, -258, 48958, 50934, -31704, 27924, 81910, -33287, 5248, -75439, 98318, tail", "row-250": "head, 46598, 6797, -43933, 69071, 88710, 92875, 78421, -93845, 78807, 3213, -64794, 75080, 35405, -68628, -86531, 9290, 23031, 35831, 52614, -18734, -86907, 87556, 61345, 61999, 25135, 3055, 52381, 70764, -91876, 78138, 56423, 67294, -70105, -71955, 27127, -12826, 27197, -83857, -93448, -29388, -59261, -58470, 90490, 24023, 75370, 78594, 17516, 13494, 56436, -45967, -34954, -22471, 53065, 50979, -86867, -23742, 79291, 52700, 95493, 16091, -54566, 11594, 2200, 26964, tail", "row-251": "head, 11060, -26565, -42105, 86215, 48196, -36880, 10526, -64868, 35488, 45737, -64425, -46603, -64425, 5805, -88826, 89697, -15964, 75294, 16965, -29569, 52202, 71381, 16698, -61660, 80135, 82968, -45131, 54642, -89794, 42803, -45764, -96395, -92966, -32858, 15760, 62261, 89881, -16875, -16749, -25614, 20558, -64282, 60848, 89061, 73539, 10748, -82054, 36103, -60512, -75737, 78843, 94122, -59734, 10659, 16902, 45475, -67730, 31807, 28662, -47847, 89450, 37789, 45308, 21099, tail", "row-252": "head, 25898, 9734, 99760, 42539, -24039, -56740, -17410, 57493, -31904, 34521, 71637, -44096, -31134, -55480, -44031, 28301, 3246, -23891, 98918, 24669, -73775, -75485, -56201, -9765, -75420, 2714, 37146, 32165, 42800, 85133, -18393, -76552, 1644, 96374, -12541, 602, -11046, 73898, 38305, 2201, -50895, 31093, 78264, -17924, -80396, 38184, 97616, 6721, 78277, 8266, 43960, -19213, -3498, -5236, -80610, -69069, 25888, 49996, -14537, -30640, -75992, 16964, 10436, 31082, tail", "row-253": "head, -3484, -66577, -62756, -73103, 50035, -79256, 63658, -7114, -28556, -14017, 25717, -4141, 5105, -25434, 64867, 45023, 32807, 60523, 10674, -73961, 1045, 24189, -50348, 31857, -90676, 18514, 83492, 42061, 80844, 72870, 37824, -28791, 28660, -78970, 26179, -75284, 57276, 7422, -31458, 67753, 68368, -96489, 87179, -53477, 1658, -2834, 12671, 53101, -75335, 62332, -87591, 34864, -73198, 87358, -68026, -95797, 99132, 72591, -48014, 41128, -51164, -65735, 15347, -4455, 
tail", "row-254": "head, -46130, 58903, -51671, -20099, 27726, -70587, -46202, -19251, 84918, 67735, 55546, -98580, -92795, -93554, 62624, 19650, 78649, 19342, 7376, 57887, -80068, -40671, 11747, -23734, 15148, -56070, 36463, 80108, 14455, -43168, -6345, -66408, 73494, 15200, 1988, -38933, -77277, -94126, 46709, 17104, 32397, 64110, 23508, 11864, -86457, -29676, -71105, -21537, -3777, -78345, 47155, -18063, 91176, 5680, 49470, -52862, 66234, -89574, 28531, 17891, 40930, 34112, 42036, 63433, tail", "row-255": "head, 17194, 78977, 83999, 31590, -38331, 60, 87194, 49444, -43530, 639, 34284, 32518, -76547, -96672, -50445, 13696, 18770, -99167, -77169, -15324, -65884, -11212, -2516, 99001, -36966, 55887, -91213, -38349, 1899, 24799, 1089, 50820, -3224, -44049, 46817, 55996, 60007, -99922, -65836, -35325, 6138, 13807, -75145, 27862, -47158, 49730, 40906, -44490, 50510, -54858, -77047, -91166, 38680, 28841, -23731, -9842, -61553, 88236, -56110, -99571, 4618, 28326, 12121, -6162, tail", "row-256": "head, -62528, 58249, 37641, -87402, 94021, 60726, 44579, 8275, 35579, -5772, -60688, 85688, -89954, -49711, 98895, -30143, -82694, -79178, -67461, -86757, 44870, -60537, 93924, 31648, 7201, 43423, 71302, -67482, 93091, -51636, 58616, 67943, 17905, 87258, -72557, 69414, 38602, -96410, 75716, 61084, -42458, -4413, 15834, 27404, -70375, 42620, -66286, 68969, -77661, -8581, -11089, 58838, 34254, -94523, -21932, -60812, 25569, -3678, 96899, -13766, -855, -44964, -90188, -7672, tail", "row-257": "head, 77947, -13988, 72, -26952, 77148, -10717, -90691, 48660, 13720, 64937, -44918, 55030, -5243, 50568, 62162, 47876, -9589, -14714, -19986, 41243, 23922, -60941, 6993, -13276, 17441, 65081, -68271, -29682, 51968, 59750, 27251, -65637, -45881, -13118, 2485, -66910, 85662, 68376, 10918, -86089, 51159, -45886, 80019, 90672, -69044, -16568, -81206, 34469, 28540, -99152, 71152, -70164, 54941, 23826, -6086, 68116, -40776, -95645, 82007, 31515, 98836, 84936, -51882, 13139, tail", "row-258": "head, -28385, 34269, 26436, 87865, 78432, -63715, 452, -33435, -51931, 50817, 90128, -20454, 37501, 48378, 51451, 59093, 17335, -22846, -95578, -59954, -12262, 84536, -48284, 23158, -9027, -94606, -36153, 31788, -38115, 47712, 65631, -67244, 98355, 36650, -56609, -57156, -85562, 35339, 66242, -64127, -72924, 8197, 93264, -91096, -25884, 17229, 11449, -2535, 56380, -10752, -19427, 54528, -52318, -73125, -28619, 96612, -19057, -46632, 47501, -55814, -72528, -55800, -84827, -476, tail", "row-259": "head, 57337, 59925, -6584, 75315, 64397, 48258, 13044, 12733, -59630, -97771, -98249, 82722, 30894, -10531, 5249, 87989, 38907, -33663, -78639, -59564, -96543, -69719, -24161, -79948, 96983, 64298, -57851, -28354, -29294, -90289, -59832, 92918, 33717, 39301, -12244, -24827, -25461, -52651, -77654, 94690, -89624, 45999, -8740, -34266, 93054, -40902, -59203, -18689, 53811, 50656, 37879, -62395, -19078, 10199, -24929, -53296, 64910, -75646, 96574, -77853, -42034, -22995, 83220, -27957, tail", "row-260": "head, 72670, -95154, -50843, -78868, 29490, 22764, 62759, 94367, -16014, 23311, -50157, 56376, -48680, 45651, 12138, -75560, 32569, 23394, 8691, 13705, -83431, 53131, 90212, 95756, -57845, 34070, -69752, -93776, -80528, -2216, 52780, 62164, 95135, 23047, -5546, 53273, 3278, -41949, -44809, 33398, 75191, -89752, -70086, -78475, 71576, -94275, -94499, -97475, 71626, 64731, 66355, 89752, 55756, -1950, -44191, 47317, 46615, 51821, 4390, 91335, 90986, 52298, -18946, 56598, tail", "row-261": "head, -12800, -50330, -75598, -37660, -35763, -12425, -635, 
-65122, -12565, -40619, -61040, 11863, -90876, 88266, -81168, -58217, 26931, 61324, 42415, -91372, 45493, 37953, -44353, 71406, -99926, -3796, 15480, -52569, 2436, 25812, 7425, -32249, -52780, -9149, 50432, 39354, 97668, 15500, 86917, -99429, -3006, 96601, 31007, -3116, -72591, 74737, 35304, 79406, 86867, 57946, 61360, 26264, 53497, 10533, -73658, 83414, 95252, -60564, 43157, 19713, -70787, 66659, -28989, 36909, tail", "row-262": "head, -32830, -15080, 84954, -85770, -61594, 25050, -56571, 22540, -31677, 63107, -33913, -89760, -68313, 21871, 99694, 3423, 96206, 9230, 84683, 85078, 75097, -46946, -60856, 89362, -10104, 24944, -82896, -79366, 18675, 11062, 78851, 29862, 78386, -3902, 51822, -88372, 11739, 74251, 91129, 95861, 45804, -7622, 13793, -22311, 54681, -457, -48347, -18709, -24147, 85606, -92445, -54256, 46514, 36396, 25862, 33354, -55133, -69318, -27449, 86041, 60151, 66915, 560, -10396, tail", "row-263": "head, -60472, 24686, -25688, 49177, -27840, -64902, -65909, -95812, 87788, -83561, 69311, 129, -43775, 83351, 6434, -16454, 72043, -42499, 71182, -66140, -34938, 64296, -47807, -93604, -27415, 45773, -6004, 84394, -89970, -98493, -10263, -38034, 95818, -95151, -24176, 7863, -25635, -66908, -75252, -74207, 60988, -38992, -5401, 4359, -34075, -1481, 14119, 91619, 57393, 48520, -46415, 23831, 68492, 11937, 1593, -8358, 76264, 78725, -37648, -7723, 17837, -96024, 91263, 71834, tail", "row-264": "head, -14969, -50769, 203, -32987, -14176, -58538, -888, -12412, 48312, -56552, -41051, -32999, -86983, 14671, 87163, 32128, 68912, -22374, -24914, 99693, 7469, 64358, 56225, 90320, 10854, 48042, 64768, 83124, 23307, -37654, -7611, -90396, -71310, -70608, -29094, 75279, -93068, -73461, 60002, -22905, -79521, -43810, 13989, 34095, -89576, -78365, 93814, 46920, 98700, 25794, 13351, 80440, -29139, -97326, -53517, 15453, -43200, -79969, -25633, 71411, -163, 24769, -76657, 2860, tail", "row-265": "head, -22205, -90398, -21810, 45605, -74893, -30334, 48164, -60817, 11096, -87196, -17221, -59852, -58517, 15081, 61577, -35075, -49216, -3383, -94935, 26273, 21029, 13143, 48239, 93001, -27124, -66191, -73839, -43006, -79456, 42192, 81887, 12697, -5548, 40262, -59910, -71951, 80554, -27707, -17756, 27460, -87926, -4886, 6802, 75468, -92065, 48028, 43793, 90235, -99371, 82125, 51689, -70832, 46034, 92778, -81142, 70991, 58176, -57377, -765, 14233, -74079, 54746, -62494, -76691, tail", "row-266": "head, -67033, 1839, -97706, -4363, 83319, 80380, -29235, -89004, 72860, 21629, 54815, 81127, 58500, -81707, 59692, 15680, 52838, 9852, -66587, 52858, 25664, 82954, -96305, -62975, -65489, -82439, 35053, -68231, 90342, -15900, -10050, 1783, -75329, 52708, -9144, 17557, 39899, -82103, 18580, -33896, 13627, -68834, 20637, 34810, 33276, 76033, 3773, -99247, -51082, -23167, 38016, 75860, -32243, -80670, -59392, -71379, -15045, 48916, -18966, 99608, 65393, 72595, -38360, 50895, tail", "row-267": "head, -39703, 83702, -41194, -63160, 86032, 63774, -11283, 41037, 36287, -60734, -16654, -71309, -29693, 45121, -17264, -74383, 29543, 47611, -13494, 25499, -18219, -86684, -16582, -91763, -90636, -17634, 26128, -6706, -33142, 36487, -15323, 13544, -99308, 81969, 37280, -5878, 85458, 71989, 98891, 81575, -41609, 76949, -48298, 29934, -6705, 49690, 82599, -86601, -87052, -61652, -68575, -82289, -98207, -38673, 30539, 74116, 18012, -90876, -63528, 96107, 23011, -99483, -67117, -49813, tail", "row-268": "head, 48740, -85203, 78338, 45477, 22323, 72582, 76022, -7857, 15, -8548, -88416, -14339, 52951, 50231, -95422, -11276, 58711, 
-50861, 18643, -28648, -17262, -92966, 26862, 71480, 51691, 39661, 35422, 29371, 45918, -14229, -77037, -26045, 86549, -7010, 99356, 96715, 27055, 20187, -23400, -82865, -8816, 49624, 96690, 18237, 61914, -67956, -28385, -29356, -70821, 2471, -2720, -87564, -22910, -92840, 12029, 62469, 76423, -26890, 90444, 69977, 3136, -49958, 2039, -77357, tail", "row-269": "head, -94115, -30543, 30341, -84493, -36180, 37416, 27932, -36899, -50159, -951, 99223, -14045, -61105, -88329, -47173, 22308, -18309, 25051, 52111, 14059, -98300, 78264, -22412, 82546, 84619, 47336, -15663, 68406, 26038, 18459, -35245, -569, -53137, -27420, -38615, -42442, -16560, 60883, 17636, -5792, -21704, -58139, -34852, 17585, -68453, -7746, -83069, 99831, 416, 86194, 45348, 31177, 54057, 81546, -32550, -65309, 90792, -54057, 12933, 24544, 70233, 24214, 16298, 8106, tail", "row-270": "head, 19574, 61934, -58347, -2213, -71534, -70110, -93743, -11468, 27563, -99107, -84346, 15607, -40546, 27575, 41505, 28497, -8908, -43519, -54085, -71512, -45214, -62839, 86857, 49328, 79724, 63368, 45287, -15839, 71444, -79818, 60404, -96895, -19270, 8128, 92167, -59442, -90476, 39330, -67673, 57890, -15269, 20822, -47086, -59784, 13607, -86442, -19339, 25021, 43715, -17002, -14381, 63813, -66119, -19834, 8154, -65291, -56728, -51367, -80621, -34967, -58721, -34284, -84696, -63408, tail", "row-271": "head, 48154, 825, 72522, 44988, 92353, -16488, -65006, -48394, -2163, 79420, -38866, 9956, -36834, 70818, -51772, 61036, -78041, 66071, 64331, -25643, 92028, 50496, 33181, 12753, 75814, 31173, 55039, 13365, 7843, -78098, 80034, -48608, 3347, 91871, 91905, 60623, 4753, -72264, 60092, -62404, 33086, 15453, -62399, -58620, 20081, -78110, -57914, 50311, 46383, 98304, -49284, -98498, -73561, 4310, 33083, -98084, -34664, -45242, 40187, 45208, -88420, 65225, 59519, -11409, tail", "row-272": "head, 45715, 7806, -99418, -98803, -15855, -89901, -58490, -84697, 59454, 74091, 23587, 45123, -94886, -84979, 44906, 50698, 94981, -67609, 14826, 96586, 73079, -54654, 281, 75506, 96060, 76820, -49140, -49159, -24060, 39451, -58563, -80453, -58425, -55958, -1581, 44033, 45676, -68697, 67868, 42322, -98232, 49421, -77891, -86558, 11026, 52047, 68320, 3518, -67790, 52932, -43884, -57565, -61920, -47497, -84287, 57364, -74113, 22111, -59337, 7348, 69058, -79966, -45214, -47309, tail", "row-273": "head, 45924, 30595, 22222, 64409, -76235, -16725, -16292, 13465, -75496, -97383, -18467, -81351, -80695, 66848, -30172, -54952, -15641, -96739, -97673, 80518, -94628, 76619, -48002, -41119, -99078, -88269, -67908, 4702, -73955, 73843, 28064, -7925, -66725, 51768, 63149, -86086, -99394, 55928, -37693, 7371, 44662, -33784, 73172, -57608, 46124, 74190, -51192, 44711, -92805, 54958, 54245, -47940, -18105, 96720, -8076, 91836, 97613, -30462, -36865, -37129, 30407, 76433, -91794, 47290, tail", "row-274": "head, 1410, 7381, -46158, -18406, 74076, 68454, 55148, 76601, 88936, 76334, -72549, 97042, -11196, 1230, 59970, -74759, 12443, 42032, 75045, 49579, -7722, -72328, -13537, -53585, -29216, -21438, 10804, 43490, 18235, 28022, 12661, 19535, -48204, -28097, -66352, 91225, 74566, -69050, -19668, -17601, 75360, -47547, 20839, 44707, 40742, 60211, -56058, 19654, 2751, -73206, 60868, -90523, -70484, 9296, 29589, -37543, 48077, 42269, 88391, -18021, -56381, 83746, 19011, 40009, tail", "row-275": "head, 43029, 31885, -14592, -52358, -40159, -32498, -33163, -57274, 19968, -89927, 78059, 10881, 60045, 53558, 54056, 57437, -4893, -85782, 84057, -51484, -44893, 24564, -76152, 2074, -48326, 7819, 
-88944, -6936, -98834, -67441, 62022, 76445, -74852, 6170, 52441, 90902, 84769, -97771, 6853, 34045, -4745, -68533, 18843, -49549, 20121, 90754, -44381, -63227, -25801, 77210, -96095, 32891, 67519, 97579, -8925, -78102, -95098, 89722, -53742, 55210, 13278, -64015, -93377, 16759, tail", "row-276": "head, -49346, -6214, 99827, 26970, -49619, -88756, -12219, 18113, -99538, 74708, -85659, -23124, 26040, 68392, 4843, 90200, 22233, -19212, 78766, -90049, -31127, 81158, 11879, -87374, 58896, 31271, 65101, 20757, 71826, 95039, 24767, -28571, 92856, 41348, 34385, 54996, 97476, 35465, 81371, 72645, 42448, 76152, -93606, 33136, 17989, 41764, -71136, -62431, 77274, 60406, 63638, -20060, -25712, 40557, -70205, -94387, -35761, -89233, -89523, 7431, 39373, 61678, 44755, -9281, tail", "row-277": "head, -49739, 71738, 28412, -21583, -64559, 77577, -19356, -73385, -99769, -6347, -94265, -84679, 79017, 14095, 3726, -81658, -44851, -57388, -73037, 4137, -13767, -81737, -80278, -90079, -72047, -22553, -61865, -23193, -6349, 31152, 76591, -7257, 51780, 78242, -19302, 7681, -97815, -42565, -7901, 95186, -97934, 27938, 55779, 57498, -22325, -31811, 71155, -41409, -85001, 98197, 18295, 10968, 97795, -96947, -56393, 61747, 10498, 61649, 82274, 21740, -20925, 63902, -91754, 18295, tail", "row-278": "head, 75538, -78698, 87714, -10714, -47754, -82524, 76059, 79926, 95821, -25490, -29276, -53866, 68816, 29595, -86815, 70495, 93820, 64927, -15434, 4687, 99011, -22551, 32081, -39711, 84361, -61742, -31558, 15528, 57030, 30021, 98778, 50800, 58978, 52570, 97085, -23070, -19885, -72575, -49620, 14231, 58046, 19100, -10212, 67949, -97137, 84242, 61967, -74644, -48258, 29957, -68888, 98532, 76263, 15170, 78940, 57848, 42919, -63872, 52274, 75133, 81929, -69893, -86911, -47306, tail", "row-279": "head, 9297, -35425, -22700, 37827, -44399, 68306, -37969, 33579, 69910, -92343, -63619, -83753, -34299, 6396, 13498, -39091, 70785, 84323, -8961, -83568, -59929, -51924, -72574, -85070, -38913, 17500, 58870, 7383, -39418, 57721, -60212, -77295, -84182, -3932, -73622, -43054, -10673, 3889, 86799, 69628, -56833, -39931, -26789, 79726, 23948, 13440, 11800, 30219, 57638, -14236, -14567, 72164, -46222, 49063, -20773, 12037, 78223, -13312, -75906, -90218, -61552, 57126, 38896, -54681, tail", "row-280": "head, 13783, -61688, -85194, -55155, 11962, -30377, 75997, -73917, -93068, 84032, -65647, -28394, 98890, 36192, 72081, -85605, 97579, -15304, -4231, 44084, -76640, 31047, 92952, 75308, -55206, -2427, 29954, 52381, -85767, -46467, -53111, -12217, -77146, -97359, -38905, 15879, 76599, -82858, 67215, 29974, -90837, -74819, 11730, 17889, -60805, 7302, 89425, 64713, 98884, -24314, -34561, -794, -72117, -58383, 57654, 26187, 2896, 88335, 50819, -63707, -89868, 68387, -53466, 71918, tail", "row-281": "head, 9975, 72888, -4909, -80624, 44931, -5592, -33013, -53868, 66024, 12434, -47835, -16488, -30040, 644, -43326, -55573, -29333, -21104, 20807, 68320, -28609, 87709, 80368, -76665, -73642, -56359, -70976, 78362, 79346, -78802, 20616, 10652, 42206, -83336, -3875, 54045, 88850, -19798, -37123, 96515, 96417, -27983, -4565, -884, 69613, -83811, 26450, -9899, -19684, -40447, 75268, 52807, -68539, -10338, -19130, -80980, -5064, 77126, -65266, -86317, 99620, -51883, 20417, -15128, tail", "row-282": "head, 54273, -74465, 65774, -82539, 99594, 46537, -46287, 88090, -19684, 27960, -35723, 90256, 14262, 38778, 16649, 53021, -97909, 4281, 69824, -27374, 95546, 858, -21145, 28535, 38891, 70156, 88747, 62998, -59481, -62618, -92773, -44139, 2529, 6766, -70738, 
-7486, 43022, -10676, -66972, 74180, -89064, 210, 92161, -38288, -65777, 41576, 36636, -5634, -42835, 28502, 27158, -24255, -44174, -21367, 73794, 23925, -18133, 12216, 62000, -19843, -77971, -10832, -26856, 16036, tail", "row-283": "head, -45043, -21780, -54825, -96590, -32646, -6599, -17006, -84446, 9768, 24984, 28376, 69821, 80268, 57817, 63951, 40542, 84231, -98966, 57819, -30926, -89729, 81108, -85788, 19594, -33398, -16739, 45533, 91219, -38104, 61231, -81532, 92178, 31637, 94991, 42720, 44748, 67786, 47287, 7802, 36436, 69271, -83903, 42319, -26377, 29740, 97596, -42838, 2466, 60291, -36636, 26643, 98291, 77580, -16473, 81432, 52060, 68253, -55962, -40632, 34007, 81383, 50325, -76804, 79471, tail", "row-284": "head, 6952, 45718, 63921, 49968, -54567, 52132, 33039, -16911, 92579, -59698, 10551, -79607, 836, -97340, -28936, -85848, -54818, -4074, -99510, -17937, 52973, 59669, -5587, 36330, -100000, 64717, -25237, 59063, 57070, 73147, 79253, -48747, -49781, -62451, -27007, -50113, -3561, -29143, 50546, -6305, 35705, -79859, 4075, 34736, 30074, -8904, 9444, 91265, -69697, 25026, 15593, -60105, 54261, -83027, 77147, 70368, 51990, -89338, 87215, 7163, -1541, 15550, 74249, 95058, tail", "row-285": "head, 73191, -59586, 42819, 54169, -3036, -9199, -4991, -95639, -43064, -3775, -35325, 48330, -59778, -14283, -91645, 19640, -2327, -73145, 41963, 2513, 54746, -68376, 68249, -27828, 33467, 49613, 84417, -38380, -26714, -22968, -88379, -15336, -44256, 29574, -87317, -48215, -89210, 81397, -41589, 83983, -89732, 4096, -45648, -88203, 16840, -12843, 96978, -18219, -94209, -85514, -34530, 81231, 79207, -34721, -79519, -25473, -59842, 57306, -16479, -7708, 47523, 2039, 72192, -96501, tail", "row-286": "head, 60034, -18715, -90789, 47251, -21851, -56283, 58191, 29094, -12803, -43240, 10329, -73439, -89715, 64525, 63723, 89743, -63165, 4306, 38395, 75791, 12581, 15958, -88332, 34280, 76295, -37668, 73944, 13688, 45368, -81702, -26333, 84404, 59324, -38878, -75286, -55050, -19126, -43818, -55592, -34988, 4853, 43028, -44276, -79079, 33594, -62469, 10425, -56992, 10351, 82617, 87406, -29415, -10142, 18016, -42300, -15844, 96512, 9658, -22946, 78365, -89503, -5204, -80709, 97267, tail", "row-287": "head, 67209, 2829, 18900, 49365, 13899, 7931, -21613, 89057, -3024, -79887, 40613, -67913, -18275, 1934, 12358, 36068, -5107, 82429, 25885, -1365, 90316, 97613, -40871, -63399, -36186, 58335, 33699, 29704, -76734, -8400, -81092, 83528, -51115, -85438, -25698, 31854, -76813, -662, -32982, 90947, 23205, -926, 59629, -62031, 62023, 58280, 5676, -31718, -22297, -8829, 78649, 68230, 8937, 49369, -31820, 89744, -55468, 39902, 82979, -60746, -24935, -44939, -98786, 18594, tail", "row-288": "head, -81635, 31388, -1612, -39422, -25870, -92669, -61727, 1027, -87302, 76630, 51536, 66492, 82894, 45064, 35331, -23933, -88908, 27972, 69255, 51465, -38595, 51805, -7672, -52197, 75000, 54991, -40944, -60764, -4198, 27940, -45943, -46397, -71679, -5069, 50868, -99893, -96235, 82757, -1389, 6193, 47499, 98936, 48051, 15696, 49307, 86177, 83466, -61928, 12404, -53987, 17799, 27561, 24933, 78057, 3710, -26634, -50680, 45499, 65540, 62154, 25318, 98354, -29549, -96610, tail", "row-289": "head, -44366, 97486, 23829, -73996, -76621, 15764, -6486, -57540, 31046, 24795, 10451, 89045, -58872, -3585, -92085, -27462, 32394, -64791, -5022, -27278, 47671, -76302, -75598, -48890, 90940, 77573, 54336, 47254, 48483, 11862, 15869, 5661, -21465, 31686, 84574, -45805, -85985, 38005, -17578, -68457, -4396, 5634, 51548, -22982, -66972, 14168, 
-99086, -98215, -88230, -27042, 34457, -16317, -78614, 36506, -47010, 85569, 86928, -62084, 22809, 21795, -69300, 66566, 49607, -16130, tail", "row-290": "head, -43636, 4179, -92873, 56810, 55185, 43236, 20113, 96659, -62199, 91052, 39232, -42276, 31954, 23410, -23202, -10374, 91902, 80360, -22289, 88162, -82208, -79290, 14811, 19814, 53282, 99684, -82445, -88251, -40071, -76658, -38498, -12752, 95795, 65679, 63696, 38509, 30151, 27665, 56654, 3258, 97259, 99742, -87306, -94874, -28204, -51353, -86977, 50986, -28458, -12878, -78532, -51362, 60811, 33672, -85759, 62325, -50710, 35432, 79547, -10755, -18731, -91618, -92656, 33576, tail", "row-291": "head, 46174, -66071, -62927, 5442, 1098, -40444, 41553, 73969, 46204, -36393, 82944, 9213, 53726, -60478, 483, -97746, 18064, -14924, 11957, -46559, -74911, -45155, -88405, 40032, 25275, 87410, -60523, -19683, -49565, 62838, -73767, -57787, 72310, 35437, 39764, 75020, 22290, -67421, 19100, -73257, -58824, 41579, 20663, 35349, 55725, 74745, -78818, 91520, -52558, -62535, 5472, -93089, -14845, -2529, 66498, -69441, -83724, -39745, 39827, 47540, 35297, -94070, 99317, -39117, tail", "row-292": "head, -14146, -69252, 63295, -40765, -38689, -35579, -99946, 15253, 62956, 60384, 17569, -16798, -26970, 54954, 99358, -59823, 97734, 84986, 56038, 64681, 23994, -33212, 85767, 31633, -57253, -41923, -32138, 75794, 21853, 32876, -63219, 21467, 31260, 75329, 93024, 96053, 69161, -2685, -23915, -17504, -72218, -61736, -35149, -1683, -82729, -23972, -74001, 61736, 75418, 85808, -90973, -70696, -67174, 23442, -63752, 81633, 96613, -16862, 19960, 38674, 95309, 38174, 53706, -64878, tail", "row-293": "head, 43683, -93264, -71380, 64518, 65002, 73039, -45308, -68169, -43250, 29802, 18986, -58975, 5768, 62155, 72600, 51719, -37322, 49008, -68534, -25469, -98425, -66111, -32818, -74509, 10863, -14697, 71960, 93280, -98453, 71264, 28657, -17302, 99831, 6042, -87896, -32191, -28581, -61758, 14945, 92794, 25626, -67218, 35352, 53827, -46926, 66214, 15734, 1661, 27069, 95670, 70964, -30251, -5419, -47015, 45483, -35882, 12112, 99769, -33782, 34068, 23382, -74748, 64939, 8247, tail", "row-294": "head, -92484, 14753, -28736, -61757, 10007, 19840, 67130, -44062, 79552, 59392, -9122, -29917, 26233, -14921, -87847, 42611, 99103, 68029, 77239, -67603, 79955, -81493, 8212, 30767, -1449, -93515, -40020, 78143, -53635, 81601, 96139, -60033, 32182, 40054, -31556, -11505, 80168, 41630, 8672, -97906, 15171, -55350, -71361, -39691, -82503, 49829, -69340, -16896, 17483, -38170, 9772, 4627, -80484, 72556, -79032, -10431, -95769, 29701, 69243, -72661, -76115, 5182, 2664, 50237, tail", "row-295": "head, -47593, 14364, -50779, 48711, -41373, 15894, -75534, 3084, 24399, -57281, -75260, -41655, -69078, 33956, -548, -74936, -49267, -58678, -74675, 66882, -96406, 29292, -75661, 71480, -34476, -1908, 46349, -94865, 60186, -98322, -70554, -30402, -83797, 9643, 64052, 4592, 73997, 14656, 17641, -84334, 9950, -30135, 27421, -81004, 75155, 38568, 72478, 62687, 5408, -46617, -53277, 36218, -97867, -98294, 48988, -34701, 31211, -94155, 86072, -17466, 26087, 95964, 59759, -75392, tail", "row-296": "head, 5163, 54860, -63376, -17127, -27593, -57765, 88567, -48694, 99896, -71750, -48743, -35649, 52347, 77779, 73540, -78791, -30507, 74875, -90094, -75736, -82530, -6150, 71498, -83655, 88751, 35799, -7845, 85535, 81925, 3531, 79342, -64188, -84145, -76488, 46316, 1654, -46267, 16542, -63567, -38775, -40489, 66162, -62773, 68521, -18978, -60116, -45167, 26940, 17987, -3052, 42341, -49677, -48856, 80724, 
-47644, -4570, 41487, -37665, 97461, 17999, -86440, 37977, -60393, -50750, tail", "row-297": "head, 71101, -88908, 85784, -49886, 91838, -26119, -98946, -74295, -35911, -99025, 17809, -41408, 32516, -19855, 74426, -31637, 91598, 62739, -26040, 50033, -57503, -90809, 71852, -35962, -93835, -87440, -51216, 8061, -76166, 16911, 90483, -45869, -41429, 51480, 43102, 20181, -86055, 1185, -2853, 47135, 1692, -50593, 59464, -52523, -32876, -53305, -90730, 549, 41974, 35291, 37063, 4205, -60438, 74586, -45542, 60849, -74896, -58954, -68606, 21181, 93099, -42504, -12199, -60340, tail", "row-298": "head, -16329, -78753, -74329, 49649, 55517, 91316, -15608, -20267, 66471, -14208, 4799, -63770, 82237, -16394, 70441, 16514, 2785, -75948, 89324, -43136, -63311, -53444, -47646, -14293, -21736, 57398, 53228, 1874, -98475, 87327, 12407, -8597, -61286, -70362, 32015, -20985, 73309, 86723, -19960, -24441, 67653, -63426, 48223, 11568, 50463, 52992, -51577, 26125, 90930, 16398, -3235, -8712, 34439, 4768, 75507, 48680, 23787, -57927, 99421, 86910, -61323, -99328, 7397, -23899, tail", "row-299": "head, -3995, 66960, -53712, -78307, 3752, -57174, -32866, -43054, -90710, -83556, -52083, -12908, -92261, -37994, -85836, 6380, 29810, 82741, 31962, -43245, -4783, 68017, -49341, -31855, 59113, -9952, -53746, -23883, 69913, 21530, 21890, -81754, 75075, 69876, -65283, 46858, 94991, 46037, 54897, -72313, -37117, 69341, 1981, 92423, 76923, -20411, 681, 8673, 31540, 1573, -78167, 30341, -63216, -97893, -43164, -95873, -40808, 77831, 67853, -7100, 85664, -90963, 16487, -77853, tail", "row-300": "head, -83481, -16809, 44016, 7244, -36098, -98018, -85037, -15645, 83184, 35318, -39385, -19769, 10613, 84742, -50229, -22990, -30217, 85511, 98987, 34636, 47380, 24944, -29107, 43058, 35816, -39004, 16599, 68385, -76842, -63770, 93548, -11497, 53126, 94363, -50763, -45522, 6829, 43886, 59012, 40418, 94329, 44778, -55116, 69734, -81827, 75400, 96885, 50059, 89510, -82401, -31349, 86448, 69672, -58808, -952, -52284, 36250, -82919, -48846, -9870, -50035, -80426, -72384, -10139, tail", "row-301": "head, 85950, -71779, 71799, 95340, -31186, -62793, -95592, -81399, 49555, -7229, 58993, 55463, -93628, -58068, 87045, -15972, 57880, 7827, -87546, -9329, 31737, -12675, -14244, 74541, 40665, 60016, -89401, -50107, 37470, 17166, -53473, 48942, 81578, -19983, 51411, -85511, -73435, -30205, 30135, -73772, -98180, -6649, 148, -88807, 27727, 52716, 29334, -10901, -95794, 20550, 38331, -55642, 71016, -91491, 91278, 78022, -78110, 77829, -96455, -50536, 57412, -39435, -86594, -87404, tail", "row-302": "head, -12753, 98401, -99203, -777, 7168, -89415, 27793, -70416, -56091, -63188, -78941, 10557, 82457, 98035, 81891, 77917, -35659, 22360, 54937, 17916, 1661, -2870, -29968, 30118, -13459, 42070, 46591, -99025, 96194, -89231, 48233, -87463, -60057, 22857, 84824, 11314, 14330, 74400, 78020, 92093, 19766, 53325, -8605, -78469, 67266, 10640, 98692, -14550, -44533, 6496, -89902, -72973, 60145, -52702, 22742, 1876, -41749, -26211, 17828, -79075, -60204, 74464, -95591, -54067, tail", "row-303": "head, 74141, -91883, -81598, -20603, 7578, 52938, -24442, -48263, 77574, -96405, -22327, 5595, -54730, 12582, 29050, -54134, -54431, 15083, -50416, -99217, 87588, -16479, -86889, -41297, 29498, -7639, -64144, 56379, -39610, -62843, -61653, -17881, -86245, 25963, 22213, -10076, 75943, -76873, -84606, 52928, 17051, 18570, -56961, -93859, -41494, 83609, 10598, -96791, 51553, -41683, 15428, 64474, -37683, 71904, 2811, -1685, 47595, 83514, 54828, 52200, -48198, 39461, 5033, 
90740, tail", "row-304": "head, -73810, -32055, 12503, 27097, -43690, 77266, -25549, -3523, 75967, -14156, -29790, -24520, 67123, 57184, -71065, 15389, -62655, 65874, -62847, -8524, 6626, 38196, 44592, 20380, 69174, 70456, -95605, 53498, 95036, -98754, -58484, 50379, -89583, 98492, 26406, 25337, 37212, 1458, -29040, -13364, 90016, 55299, -57687, -12689, -96871, -88874, 99526, -12151, -24161, -42212, 25059, -19330, -39046, 9734, 22346, 35549, -10822, 4297, 41602, 97220, -70313, 68202, -37013, 44732, tail", "row-305": "head, 59604, -56622, -72629, -36175, -62272, -7301, 21053, -55405, 18868, 32289, 79435, 7741, 5413, 28649, 51217, -32128, -80222, 82582, 17324, 69353, -9927, 80413, 81408, 90609, -85965, -71507, 62501, 86946, 54184, 79754, -71676, 57625, -56790, 86142, 85082, 16019, 26848, -37361, 19493, -44829, 64997, -59746, 22376, 38842, 10738, 37897, -66924, -19845, 96248, -8493, 21994, 70282, -84792, -93996, -3708, -4101, 4604, -26644, 18060, 4802, -87466, 44575, -56841, 33768, tail", "row-306": "head, 81923, -36682, -97191, 28504, -87381, 4043, -9013, -16720, 56790, -67467, -56573, -39115, 82268, -3788, 27886, 29990, -84967, 32296, -87525, -11913, -26118, 82523, -60391, -52, 87395, 54065, 99641, -35650, 35435, -95277, -6309, 91256, -33258, -48906, -91883, 12076, -41099, 38702, 71819, -61538, 21747, 79249, -73175, 68820, -68341, -54983, -56267, 37111, 23792, -85700, 76373, 97989, 85123, -18097, -61406, 58177, -94837, -82767, 56917, -8439, -59348, -24754, 79830, -5418, tail", "row-307": "head, -38084, -24304, 63975, -98748, -80854, -22536, 85700, -7670, -56626, -36977, 56438, 51436, -14009, -68051, 49925, -81183, -78986, -41107, -17550, -94010, 31801, 15020, -34899, -34885, 9532, 46850, -6768, 20775, 1017, -66204, -82296, 70875, -83606, 82018, 34143, 53651, -71729, -87802, 60840, 82002, -57007, -64438, -16565, 5850, 36175, 57717, -81169, -27630, 11873, 7940, -94432, 33585, 24954, 84936, 7734, 79707, 66043, -37016, -32490, -99820, -79623, -9695, -38060, 2796, tail", "row-308": "head, -50706, -88480, -49911, -41723, 14748, -70760, 45562, 10151, -88866, -98908, 61290, -37497, 11032, -39864, -88339, -85560, -75847, -69990, 43304, -7150, 86005, 40071, 45566, -26009, 52770, 47881, 84102, 74052, 59203, 6967, 70629, -43866, -21276, 61283, 34178, -31213, -49624, -93016, -39990, 5414, 8143, 38444, 52959, -77858, 68958, -87962, 66868, -14231, 67425, -34527, 70243, 44229, 37105, -77939, -59475, -17837, -64038, 15147, -50621, -24173, -66734, -92331, -26533, 56683, tail", "row-309": "head, 67319, 66254, -58005, -45632, -9083, -1806, -68604, -61224, 27282, 46063, -86575, -19491, -29226, -53613, -13214, 25960, -69550, -32463, -43172, -51781, -90805, -57270, -54158, 73406, -62105, 8449, 96357, -52053, -11034, -39177, -43497, -58749, -26299, 57964, 39460, 15103, 13422, -6190, 78835, 56915, -39053, 7503, -92009, -72942, -34304, 48229, -93873, -36398, -45028, -82390, -33446, -89917, 26302, -22474, 20026, -10348, -21889, -28647, 21761, 56048, -42556, 72886, -28731, 98497, tail", "row-310": "head, -48852, -89114, -92555, -96969, -39862, -63164, 29982, -89653, -3221, -46412, -94938, 78209, 91754, 75590, 64278, -8941, -28232, 93066, -48348, -65241, 32667, -91009, 29564, -42380, -55146, 43290, 96458, -72736, -37533, -54800, -4037, 99665, 30868, -93954, 2927, -24188, 69570, -14788, -2985, -11811, -93988, -2075, -73209, 24196, -24291, -77128, 76143, -16284, 42239, -30691, 34410, 7984, -63701, -17342, -37154, 25056, 78579, -70903, 19323, 49012, -20834, -55673, -72376, 54457, tail", "row-311": "head, 50487, 34708, 
-85698, -34666, 65641, -10897, 88200, 86842, 7748, -88029, 71836, -21657, -67663, 41865, 47613, -20514, 1159, 73127, 10190, 75788, 2078, -39788, 93401, 75633, 21034, 1007, 61576, -12779, -5589, -2662, -77119, -74086, 18549, 15721, -46004, 33668, -71209, -56581, 58014, 66362, -48281, -33599, -47267, 39466, 43394, 63482, 79458, -46459, -25553, -58734, -12118, -8332, -6331, -16775, 66879, 43195, -11266, 36118, 96964, 77167, -82337, -83616, 79030, -13631, tail", "row-312": "head, -8927, 19938, -51077, 54430, 71070, 22862, 76460, -18586, 31793, 8846, -67972, -3386, -69337, -23204, -40931, 77470, -69188, 60993, -50105, 66233, -90114, -65730, 79401, 23990, 54073, 71059, 94418, 22947, 21385, 67312, -5227, -7150, -27220, -80991, -73622, 23818, -26335, -66215, -19620, -5583, -86958, 94087, 13726, -55588, -82803, -74445, -80984, -62539, -61539, -37465, -77609, 78745, 50067, 76309, -96092, -25037, -59678, -90186, 9141, -31646, 20877, -61883, -95028, -99812, tail", "row-313": "head, 23288, 66028, 94742, 15877, 13249, 14017, -94856, 66418, 52814, -73501, 62394, 79305, -91408, -24256, -34536, -83715, 3639, 45206, 43250, 83392, 93384, -24932, -76682, -91980, 49537, 69614, 11259, -32534, -92409, 82463, 74375, 20907, 46746, 31510, 82456, 66364, -65934, -12379, 23933, 62829, 16503, -69435, 28030, -63136, -57559, 99712, -60746, -79445, 64439, 84238, 7098, -78575, -68182, -6499, 48610, -64350, 78229, 32861, -43066, -66494, 64281, 97267, -80386, 63705, tail", "row-314": "head, 80111, 52039, -8297, 23150, -68917, -97456, -50675, -56180, -82920, 84502, -51535, 86980, -27255, -23100, -3614, 76685, -33856, -33740, 50459, -76013, -57882, 61421, 19101, -12768, -6955, 67382, -37342, 49519, 7919, -55441, -72328, -23189, 33106, -13171, 39483, 65504, -62008, -18549, -62688, 16656, -18000, -26463, -20147, -93416, -69518, 28594, -95950, -38670, 87505, -7706, 15155, -87421, -95986, -63110, 32696, 57956, 1209, -18842, -16648, 54440, 9097, 5209, 28718, 64788, tail", "row-315": "head, -80282, -72055, -54770, 55430, -83390, -71528, 11590, 81305, 39721, 46348, -63195, 69706, 35389, 70697, -12956, -81853, -3941, 78264, 76073, -7543, -94, 17712, 27305, -13221, 52739, -74539, 51873, 18930, -31546, 72716, -17572, 66658, 67452, 35831, -77144, -47273, -3813, -55300, 57733, -55974, 82084, 73334, 53364, -89624, -97621, 82255, -48387, 32819, -56813, -86158, -73643, 79156, 78031, 19916, -43299, -56977, 49376, -59504, -42813, -73178, 36478, 7802, 14523, -15709, tail", "row-316": "head, -11317, 4948, 1136, 8694, 83159, -4488, 19332, -41621, -54714, -28241, -76934, -25893, 45771, -99297, 91663, -32259, 85127, 21906, -81199, -81855, -71170, 49737, -81785, -74194, 93678, 10849, -56920, -22366, 69841, 11610, 63764, -10896, -70449, -61676, -29368, 63189, 11476, 37185, 60863, 31081, 25317, -90130, 47689, 15480, -8977, 21047, -12899, 11110, -12672, -97362, 29132, -53582, 57607, -43598, 91855, 56071, 74613, -93650, 91152, -17574, 6957, 27576, 41971, -57881, tail", "row-317": "head, 32745, 82839, 1551, -86786, 53405, -44261, -78225, 50690, 41626, -30794, 77930, -2267, 70747, -56198, -88743, 82994, 84895, 97392, -84210, 95944, 18731, 12562, -4889, -43993, 17089, -16222, -52195, -99809, -49530, -38061, 3279, 87506, 2935, 68833, 99458, 14951, -67988, -48626, 89174, -38709, -69511, -83681, -8961, -38735, -93999, -39639, -25805, 55668, -36754, 42649, -11563, -93132, 82089, -88519, 30720, -66240, 20176, 10939, 37304, 65857, 16422, -25718, 64853, 59687, tail", "row-318": "head, -30185, -63969, -22013, 13688, -97393, 55015, -61082, 55446, -22677, -31710, 
15177, -12974, -4677, 14079, 82256, 95372, 34989, 95313, 51506, -10657, -60334, -50113, -68480, -19742, 29214, 63022, 56839, 19927, 24923, -68694, -51477, 38497, 81339, 98310, 70578, -30962, -41810, -24439, -59334, 9696, 53240, -63291, 93568, 17055, -94687, -54726, -63560, 51026, 81724, 95761, -60352, 45555, -81792, 53129, -90834, -94816, 36397, -81139, 93162, 47748, 65843, -59275, -93995, 38621, tail", "row-319": "head, 95435, -71178, 54096, -83108, 3130, 21325, 76610, -39704, -60099, -39059, -81590, -73127, 63639, 64498, 5626, 85828, -78705, -18301, 67131, 96229, -2072, 14578, -75484, 68145, -1058, 54715, 92594, -49648, 52954, 16367, 7948, -56357, -55082, -36274, -59324, -311, 77156, 20514, 27416, 42989, 87841, -51217, -10649, 78332, 53709, -83993, -60530, 39994, 28074, -75311, -28799, -75648, -29146, -87664, 8110, -70467, 18416, 13121, 7435, 77444, 79010, -70334, 76638, 36654, tail", "row-320": "head, -80716, 76169, 22725, 58875, 98138, 33778, -78923, 54944, 48285, -49152, -27762, 90287, -7305, 47786, -44386, 93190, 37581, 71141, -86180, -17726, -88340, -42650, -89174, -22144, -62737, 57789, -67936, 40039, -2639, 38504, -53762, 66216, -7560, 64085, -19344, 76420, -87184, 43200, 72994, -84309, -6972, -30749, -11526, 56960, -3691, 78620, -68896, 72216, -11560, -63143, 58964, -53376, 85165, 61213, -19062, -1778, -57247, 52004, 1505, -99555, -62515, -29376, -33083, 14233, tail", "row-321": "head, 77543, 86099, 20056, 31872, -46233, -98667, 96841, 94236, -46435, 56081, 78362, -87973, -21489, 97313, -24062, -65807, 90390, -18286, -23561, 12550, -91969, 96036, 47120, -7984, -82302, 76314, 38071, -44831, -26108, -23555, 97493, 16497, 88390, 35690, 13326, 71033, 74315, -92176, -55992, -61353, 72909, 32128, 53337, 49778, 26990, -81806, -43658, -23843, -79264, 7554, 70522, -29778, 17103, -33382, 32731, -20075, 70054, -59079, -20310, -40012, -16483, -37234, -68766, -18988, tail", "row-322": "head, 82925, -13853, -93386, 6625, -20028, -70480, -12180, 43212, 81177, 37046, -86154, 15158, 40926, -7866, -68204, 3647, -42318, -28519, 8854, 4304, 70969, -6700, -41825, 58649, -8724, -62272, -74775, 51620, -58382, 36686, 64341, 41744, -97347, 39883, -75596, -21179, -29747, 54859, 58645, 65522, 6757, -14589, 38884, 17260, 24583, 17859, -17317, -949, 63286, -87531, -18642, 10147, -6442, -43016, 82078, -39597, 87883, -15221, 67042, -27559, 19350, -54937, 86904, -83270, tail", "row-323": "head, -60651, -37479, 3513, 11010, 5209, 57849, -74885, 11316, -10801, 63434, -37744, -77849, -5090, -66251, 82966, -60077, -31438, 52041, 20931, -64981, -85572, -53864, 47271, -50023, 37162, 17691, -53838, 10375, -79497, -23033, 48094, -21370, -44869, 78950, -27190, -89096, -91200, -11901, 53494, -2837, 94946, -99105, 99223, 74632, -89558, -43096, 81362, -53907, -82999, 75956, 58435, -14865, 63631, -76703, 96524, -16479, -47423, -47326, 53921, 34562, -84807, -71217, 45526, -15915, tail", "row-324": "head, -83035, -8597, 91629, 47487, 78957, -92888, -56981, 13990, -21215, -56571, 9748, -13755, 96203, -9151, 34038, -803, 45240, -64926, -48344, -19418, -41218, 40484, -67015, 46055, -94014, -38237, -4096, 69239, 70173, -97040, -76250, -22735, 22439, -72472, 9464, -63721, 16769, -73416, 21837, 11719, -46005, -44735, -32990, 19266, -86142, -24926, 89142, 33566, -90737, 78528, -80340, -31431, -63801, 93406, 14169, -2048, 74043, 52353, -320, 89828, 8094, -25603, -20143, -59482, tail", "row-325": "head, 22777, -89963, 20190, 59596, 18039, 40774, 99498, -67993, 85708, -55672, -67392, 98904, 20565, 52110, 46531, 66648, 57136, -43166, 
-86275, 25760, -31516, -55679, 59752, -97356, -14807, 98450, -1633, 78643, 35829, -92063, 95058, -52671, -50921, -37183, 64346, -20274, 32015, 34443, 70452, -45507, -33050, -10870, -9165, -58751, -50028, 22890, 42185, 71462, -49537, -7232, 61462, 42304, -17431, -28021, 36733, 29912, 9282, -25414, 66, 50176, 32883, 79623, -43623, -6665, tail", "row-326": "head, -52312, -96735, -6082, 84549, -95635, -78908, -73143, 52169, 46132, 51645, 27737, -72955, 30349, 19793, -96892, -74581, 14452, -91875, 40542, -73319, -77961, 6476, -83295, -95030, 59371, -18398, -67729, -3845, 98831, -37492, -98101, -13461, 56133, -33969, -93810, -97517, -37684, 9616, 82031, 66848, -59845, -61556, -65280, 43109, 68298, -57983, 91070, 83665, 32673, -93474, 83840, -61838, -54313, -64434, 93851, 77530, 35858, -42442, -39934, 25721, -12358, -94677, 22447, -95029, tail", "row-327": "head, 35479, 31547, -43812, -26678, 6534, -9230, 35887, -63279, -24083, 39736, 6976, 72126, 49801, 95109, 32908, 80565, 55954, 70090, 15153, -7499, -47606, 52140, 99230, 35999, -54643, -43046, -92312, 68329, -23759, 55734, -12433, 31199, -96890, -99783, 35741, 78655, -78757, -37109, -80149, -105, -12791, 22044, 5453, 59792, -11305, 15601, -40756, -36120, -64913, -49492, -23905, 67961, -95467, 91824, 59469, -74491, -74028, 75929, -55156, 47650, 70923, 61247, 81324, -29348, tail", "row-328": "head, -43300, 98720, 51789, -50497, -69450, -28795, -37657, -66853, 47230, -82065, 40828, 14592, -61683, -83468, 50329, -32746, -514, 52941, -27527, 17197, 48948, -76149, -98154, -27696, -80049, -75728, 91709, 26989, -53260, -48224, 2449, -70751, 3177, -54444, -99849, 99226, -76133, -38726, 14468, 17100, 87499, -24111, 5624, -92330, -52248, -53188, -86550, -29081, 94709, 2902, 82610, -574, -74397, 84853, 26074, -66472, -67707, 8495, -5165, -35969, 2765, -46601, -4024, 42223, tail", "row-329": "head, 42758, 72214, -35901, 99078, 14830, 46261, -2897, 9187, 53233, 1612, -91531, 63513, -58596, -39592, -2221, -56567, 92620, 89802, 27168, -93435, -73646, -10363, -42763, -20334, -41402, -1133, -78592, 90025, 73214, -78234, 22228, -52441, 43338, -69950, 46683, 25904, 49830, 68917, -15799, -66495, 8201, -69506, -75179, 38198, 43481, -37, -97290, 27813, -18006, -19240, 27875, 18891, -11829, -36710, -99271, -64129, 84081, -41657, 51912, -80908, -70044, -73446, 34937, -45717, tail", "row-330": "head, 417, -6929, -54247, -11251, -73448, -8542, -28843, -9379, -53531, -62180, -95732, -96269, 74856, 42320, -53782, 59584, 76330, -90982, 69806, -61170, -49326, -17027, 48995, 10530, -27790, -69045, 21542, -28475, 13339, 15279, 32275, 54661, 62953, -46481, -10618, -13329, 24208, 13078, -27011, -34018, 62500, 53993, 53526, -18378, -2620, -90595, 30525, 3422, -84617, -18386, -70664, 38233, -77810, -15159, 15131, 68091, 38339, 43427, 32946, 97701, 47769, -49964, 49458, -99342, tail", "row-331": "head, 93056, 70533, 98672, -37981, 98074, -39798, 86146, -63826, 758, -86883, -89713, 91916, 53660, 5748, 31154, -22867, 1057, -87414, 68655, 43883, 31970, 66101, -6488, 86629, -55555, 17522, -48391, -57041, 66388, 53540, -24259, -78787, -95000, -18131, -26097, -80326, -24740, -26726, -61862, 67166, -6882, 85176, 29905, 9420, 8875, -33754, 54837, 53580, -73945, 90551, -70369, 22239, -41832, -82063, -66296, -86095, 7171, 76038, -99703, 34132, -55143, 19969, 93567, 41019, tail", "row-332": "head, 37475, 24178, 33700, 61674, -77757, 84982, 45015, 52218, 79310, -67821, -81343, 98416, 7254, 98209, -90924, 28960, 63724, -14702, 53263, 69109, 54261, -36521, 58793, -85797, -23658, 60031, -12074, 
-95683, -68768, 73275, -40070, -57092, 92259, 44759, 61462, -5006, -36576, 23942, -21087, 81240, 77469, -21399, 70225, 3874, 55114, 22648, -24191, 9438, -27566, 85449, -75306, 81407, 69165, -3655, -95915, -44110, -6617, 43275, -81894, -5311, 69590, 80056, 21740, -16545, tail", "row-333": "head, 87919, 52414, 52943, -69949, -59012, 25164, 48646, 31265, 36545, 73273, -63704, 33563, -63899, -14219, -50121, 53270, -46438, -18415, 23013, -6958, 23557, 42701, 66445, -71472, 50207, 35998, 60153, 61489, -46520, -8501, -43895, -60562, -93865, -15296, 54431, -25063, 62029, 74582, 29130, -84295, 10486, -11865, 27014, -44178, -79416, 34728, -71255, -50652, 56973, 43982, -34871, 36818, 84239, 6965, 44463, 91175, -8712, 78872, -49670, -11082, 9663, -66566, -18571, 79279, tail", "row-334": "head, -47549, 55675, 81332, -32237, -91949, -42474, -37726, 61375, 75661, -97502, -83183, 69041, 61406, -28213, 11877, -69520, 33722, 10953, 92962, 47260, 17878, -6757, 10207, -55282, 14304, 76632, 66793, 33731, -96192, 90299, 35480, 19873, 66565, -26633, 22618, 98405, 1106, -91335, 26625, 38106, 92825, -90816, -97310, -70197, -4831, -26223, 94962, -82367, -61918, 21549, 43382, -6571, -23759, -57385, 50687, 3543, -6860, -76594, 55924, 80680, 76349, 7565, -94758, -42999, tail", "row-335": "head, 25527, 46650, -41957, 90421, -39221, -47093, 12578, 64113, -1667, 37453, 37331, -53612, -13389, 22821, -83620, 30692, 98076, -53339, 89806, -3146, -96705, 53129, 18736, -90803, 62710, -65424, -25588, -46898, -93092, -64235, -17511, -70250, -7862, 28731, 55177, -42231, 75029, -26647, -57118, -54182, 76526, -18602, 84984, 43103, 94060, 36149, -84047, -62884, 60041, 83311, -89791, -58922, 98520, -94039, -5066, 74970, 47991, 86524, 8126, 44446, -90374, -91133, -97020, 2901, tail", "row-336": "head, 45698, 98838, -83817, -36657, 20881, -33480, -35629, -79268, -77022, -57302, -60295, -2718, -2340, 96131, 73868, 21145, 58368, 99429, -58404, -40144, -25501, -28659, -11473, -53268, 4710, -8704, -60794, 78080, 56550, -20255, 35850, -49049, 71331, -46567, -16487, -10308, 35388, 73172, 81475, 86032, -22557, 92388, 95118, -30507, 13051, -61124, -52943, -46989, -96237, -46281, -7993, 44887, 96880, -33852, 8020, -99433, -45540, -49327, 38500, -18804, 86460, -15641, -55005, 51434, tail", "row-337": "head, 48724, 87778, -89860, 838, -80143, -44951, -50704, 96341, -6786, 45136, -77019, 15861, 35280, 52912, -90341, -59403, 14617, 18661, -77962, -36326, 19922, -65115, 68228, 44231, 5193, -3502, -19672, 89287, 45200, -79187, -89393, 28674, 79023, -99646, -93991, 46535, 37502, -42961, -23537, 73468, -30099, 18372, 11726, -37334, 34699, 97113, 56004, 15405, -83333, -76962, 36000, -38215, -64058, 91278, 75355, -90140, -44874, 8200, -86905, 67371, -59052, 84068, -37716, -60049, tail", "row-338": "head, -7113, 33128, -34368, 44296, -63297, -54004, -86055, 13241, 79095, 92521, 35705, 9039, -48933, -81702, -8591, 31398, -45778, 67065, 64065, -27087, 17837, -39573, -30869, -65308, -41807, 37282, 52685, 41665, -21060, 10011, 71307, -71934, -90089, 17893, -96696, -19846, -45797, 72812, -59441, 25100, 59359, -69259, 3182, 74064, -48400, 52273, 89382, -38999, -96092, -46305, -75547, 19795, 88404, -75946, -94657, -55067, -77016, 26575, 94082, -53911, 37336, -15463, 81884, -83072, tail", "row-339": "head, 45619, 74175, 27042, -99826, -18361, 74903, -32676, -30454, 49084, 41947, 66540, 3700, -39717, -6778, -31998, -19298, 99149, 78745, -83901, 45404, 30830, -72714, 95713, -16553, 14222, 52024, -92461, 97651, -68661, -25954, 73853, -35765, -19610, -24792, -7363, 
86430, 64829, 32380, -49191, -3278, 46247, 56113, -28407, -2688, -82159, -58477, -97269, -45773, 54237, -47596, -85797, -79713, -28013, 93267, 60639, 43594, -93715, 18618, 45474, 78071, -68409, -16843, -51835, -98488, tail", "row-340": "head, -17586, 7005, 8372, -61205, -47848, -39579, -61634, 75938, 47259, 44925, -75481, -56075, -41598, 37446, -7955, -24514, -83328, 41755, -51458, -80047, -48865, 81639, -13916, 26498, -63729, 23692, 10818, 34022, -55901, -37241, 76267, 53587, 17774, 95229, 47176, -535, -78962, 14218, -74380, 46817, 96371, -43872, -44690, 35724, 98136, 58554, 89838, -69279, 89204, 39090, -21174, -72152, 46905, 34424, -86089, -58589, 30906, 27497, -62610, -7964, 73362, 91396, 9529, 93278, tail", "row-341": "head, 92536, 87004, -98699, 99020, -46280, -24491, -10059, -51800, 94861, -59139, -21202, 26187, 78318, 79775, 22118, 50929, 6661, 99142, 31650, -54829, -57286, -71976, -76399, 45158, 26739, -42781, -57241, -58785, -86141, 24769, -62635, -21567, 79455, -1189, 81023, -18436, -94124, -16794, 18628, 66337, 42393, 83096, -59547, 31336, -88380, -36971, 35737, 6120, 15094, -23965, 15889, 80177, 4671, 14185, -82469, -79093, 48574, -2616, 71997, 81326, -49541, -62641, -99221, 42598, tail", "row-342": "head, -38808, 9407, -71986, 55250, -16697, -45003, -51277, -38270, 54153, 98337, 67337, -58472, 17429, -53928, 81769, -53723, 80058, 49340, -79643, 27635, -71428, -2739, 18882, 29814, 36053, 21554, -53478, 50174, 33158, -49819, -90576, -94447, 60162, 48275, -95848, -18559, 12368, 97592, -74422, 64776, 7884, -16144, -92709, -8652, 27666, 57226, 70674, 12398, -85997, 25044, -42837, -64886, -74389, -5076, 42273, -56709, -21600, -45040, -29154, -76863, -33962, -31499, -43576, 62526, tail", "row-343": "head, -98229, 4711, 13898, -32265, -5062, -88800, -57009, -911, -92848, 83435, 95930, -21113, -48453, -67630, 45974, -78292, 44534, -6177, -2067, 48502, 5742, -31029, 87669, 59807, 56052, -75825, 71675, -75300, -89647, -79237, 40748, -54745, -96086, 96763, 1493, -83612, -90017, -33274, 53325, 47592, 18660, -76206, -11026, -42000, 21575, -52386, 68989, 82847, -10927, 47764, 26633, 11682, 46102, -17595, -54586, -7938, 18143, -78526, -56311, 29593, 18705, -30538, 48565, -46156, tail", "row-344": "head, -7308, -55149, -38991, -81182, 35816, -79647, -10188, -67256, -61817, -65042, 85798, 14683, -27492, -52679, 8588, 85425, 50807, 78173, 16658, 55543, -62981, 56132, 27045, -62128, -80297, -2692, -7575, 85204, 8310, 67033, -96234, 83338, -75456, -37041, 79466, 45048, -1419, 17968, -42639, 18004, 12576, 80941, 97437, -89106, -88555, 79438, 89502, 73088, 87355, -84406, 90899, 26692, -24221, -40247, -63182, 69655, 70901, -97574, -56264, -20022, 91275, -28627, -53314, -66935, tail", "row-345": "head, 95012, -26663, -36744, 82060, 24334, 16932, -15084, 12425, -75590, -37179, 57252, -31640, 39550, 17467, 94412, -81155, 45882, 16286, -69606, 98508, 2645, -43532, -19772, 29086, -19318, 90468, -8418, -56340, 38001, -55957, -50212, -21928, 70131, -24065, 33335, -32732, 96964, 88062, 19135, 64682, 40444, -78157, -30547, -30490, -83068, -97673, 81626, 56345, 4177, 84042, 74267, -25461, 32091, 92951, -50244, -23827, 44383, 55906, -9262, -85093, 90213, -50981, 2856, 40816, tail", "row-346": "head, -94516, -65736, -5170, -91251, -98623, 78112, -53636, 51120, 93646, 80150, 1986, 89783, -53385, -11995, 46122, -9158, -13614, 43376, 67149, 41813, -77022, -36287, 80834, 10741, 39636, -9457, -41097, -35346, -42267, -18258, -78150, 77344, -47394, -70394, 1035, -52527, -42471, -471, -86316, -81558, 31808, 22933, 
47976, 71467, 76576, 46061, 74544, 5881, 51301, -54729, -94251, 16433, 90631, 15170, 40023, -42350, 13050, 15365, -24801, 37038, -57279, -58393, -56967, 57033, tail", "row-347": "head, -65331, -69880, -3688, -49268, 39617, 49720, -14564, 48764, -49597, 51999, 39573, 25851, 56623, 92197, 5233, 24695, -73460, -58506, 7105, 83937, 19310, -71974, 97204, 85443, 70347, 30549, -64724, -19628, 84386, 90278, -70950, 25647, -49197, 16144, -92918, 52727, 36218, -14304, -33658, 15072, 16739, 50899, -53408, -29112, -19003, 7497, -56592, -49825, 71086, -30882, -23580, -46304, 98319, -461, 91236, 73083, 22897, 20024, -33421, -66625, 27440, 68844, 48493, 62352, tail", "row-348": "head, -80269, 12055, -24187, 71015, 92109, -67057, 16336, 66437, -58791, 72924, -5666, 92394, 22741, 10791, 81786, -95694, 78069, -97704, -23162, -68249, 76550, 41854, -45169, -77658, 2904, 70075, 3255, 28972, -19692, -86585, 94185, 5530, 94496, -68521, 38578, 43282, 55785, 85188, -55157, -86310, 50735, 15010, -31361, 56161, 42367, -65744, 70294, 68925, 51835, -47011, 24417, -17122, 67143, 12956, 98437, -2930, 83171, -84675, -53941, 38571, -11013, 11665, 9973, 14622, tail", "row-349": "head, 44929, -6311, 56762, -10209, 93874, -10801, 74396, 79833, -3447, -84162, 90532, -67870, -10564, -90997, -38630, 42829, -40603, 78561, -96533, 54697, -84662, 81288, -97957, -26867, 91237, 49657, -32999, 22318, -26156, 65313, -61191, -75271, 40291, -46364, -94973, -11901, 62302, -75920, -89942, 27588, -31680, -90947, 82665, 37000, 89411, -52762, 57652, -97094, 77690, 34446, -21522, -74591, 81142, 11364, 44840, -68280, 59843, 69897, 19206, 46840, -43251, 26445, 25469, 51378, tail", "row-350": "head, -94941, 50785, 92812, 65396, 19795, 12110, 79731, 71689, 31016, -66389, 24162, 18266, -38151, 17259, 44924, 86628, -92668, 30319, -77621, 60960, -20641, -37886, 77200, 27000, 92723, 15898, 7372, -70702, -61615, 16725, 75829, -75977, -42301, -25348, 34565, -47420, -19251, -8765, -40589, -27429, 87118, 62595, 58030, 59009, -83317, -19584, 17276, 68510, -63581, 39809, -70919, -1682, 8954, -15190, 65043, 19211, 31454, -89697, 52716, -57606, -2235, -44801, -747, -73093, tail", "row-351": "head, 59404, -91597, -4490, -26365, -71029, 63043, -18814, -29601, 45906, -35733, -70558, 79877, -50225, -55524, 42175, -50621, -88025, -20525, 15485, 29812, 56324, 95211, -53037, 16951, -70746, -34669, 7028, -70122, 91977, -57458, 6360, 76686, 6389, 8982, 82869, -11189, 36080, -51753, 19143, 72694, 52817, -36114, -86473, 77056, 76475, 10929, 63408, 93540, -28705, -24102, -39152, 58987, 49061, -63542, -53667, -29241, -52565, -77516, 75320, -71470, 18394, -9801, -24220, -67049, tail", "row-352": "head, -60682, -64141, -63586, 78324, -1618, -79981, -65411, 61246, -76138, 9600, 28766, -17715, -88816, 66094, 93960, -57410, -83403, 95057, -64820, -77294, -70322, 64734, 60740, 65854, -96908, 41439, -50301, -8158, 6412, -81057, -31811, -52262, 27610, -1182, -56690, -92608, -33178, -8749, -97615, -27482, -25297, 54724, 79273, -3158, 73462, 51678, 27806, 21231, -57648, -59436, -87167, 7906, -76521, 58865, 95432, -23933, 90766, -14586, 41765, 22656, -58266, 38358, -91352, -94476, tail", "row-353": "head, -86338, -7672, 80377, -55902, -38539, -53552, -9532, -97903, -92309, -30373, 36854, -48281, 64091, 99785, 92750, -24344, 8651, 40602, -87126, -39799, 28548, -81682, 58788, 57337, -74787, 62579, -90093, -91754, 93647, -97897, -44528, -39972, -55130, -88377, -18347, -94955, 34221, -81137, -82800, 25539, -54478, -84758, -26540, 42633, -11489, -36438, 82587, -82970, -41789, 89379, 
-76338, 58634, -46608, 91769, -81282, -98514, -86094, -41587, -24348, 43642, 26610, -36481, -82785, 65999, tail", "row-354": "head, 14144, 83611, -67359, 38170, 73732, -320, -3001, -68565, -71041, 25470, -56601, -59714, -69651, 13808, -29388, -56881, -40883, 8317, 31452, -13842, -12693, -62105, 62243, -47535, -36367, -57441, 54839, -98615, -39656, 2425, 17875, -92081, -91070, 11376, -93252, -3432, -91255, 8667, 80822, -75257, 79070, 97728, 78240, 48973, 11503, -85831, -8827, -33737, 52155, 88885, 11038, 36521, -38240, 91672, 75493, -88673, -49310, 14912, -99101, 93388, -46414, -28476, 89826, 68050, tail", "row-355": "head, -90386, 20162, -93230, 46586, -39679, -61881, -72252, 82841, 39704, 98252, 95109, 7650, 71031, 95855, -95449, 59009, -73896, -28403, -23196, -14660, 57170, 12411, 67447, -80529, -35672, 3534, -55859, 32480, 86263, 13991, 68301, 3876, 95454, 98538, -2350, 18534, 38498, -47776, -76287, -65511, 94032, 19656, 24107, -28612, 21404, 29133, -80093, 91198, -81336, -42616, 94457, -71855, 77930, 48919, 42029, 61954, -65825, 58552, -45970, 96223, 95168, -92935, 54588, 72983, tail", "row-356": "head, 56026, 58780, 45232, 48380, 68945, -52447, -24830, 85364, 60516, -8687, 27581, 73356, 48960, 28377, 9704, 9772, -72392, 71020, -95050, -61943, -68987, -50992, 44542, 80892, 95911, 17853, -92850, -31145, -60228, -76836, 4381, -65630, 76476, -70491, -30775, 15294, -6847, -83275, 35100, -24747, 1553, 47332, 5077, -31761, -77666, 46864, 55975, 22875, 48144, -9920, 81003, -39386, 90700, 93795, 1843, -39030, -83078, -97228, 53621, 45513, 11673, 45948, -86891, -60971, tail", "row-357": "head, 19030, -73414, -8115, -34131, 84441, -7019, -22084, 64825, 47914, -75084, -98674, -35746, -13799, -417, -25886, 13087, 2638, 71604, -825, -92416, -55613, 17601, -55682, -68636, -14584, -46602, -83111, -24257, -50874, 39138, 50847, -84270, -41224, -60328, 42305, -79153, -77731, -79593, 37434, -59280, -83056, 46870, 14620, 83699, -23097, -3965, -94925, -30686, -34305, -77968, 52727, 21070, -1456, 64824, 50820, -233, -97730, -51046, 57659, -29564, -13812, -69346, -31353, 3076, tail", "row-358": "head, 68447, 36371, -17152, -88444, 48259, -41284, 97559, 46004, 4830, -49750, -89005, 61797, 86235, -57028, 79457, 41493, 66629, -33161, 99007, -46699, 61750, 95212, 34235, 45213, -81001, 8544, -47015, 11224, 92589, -89137, 82086, 35594, 1297, -37157, 74088, -51020, -26981, -3922, -61158, 35161, -45313, 82525, 49955, 77428, 84913, 4793, -61878, -74479, 61084, -4575, 57875, -6947, -29704, 13423, 19291, -91622, 51277, 77507, -40264, 72422, 79517, -84839, -83436, 50418, tail", "row-359": "head, 42118, 48831, -26892, -31751, 38348, -55061, 81858, -45228, -85498, 68449, -78451, 87135, -89915, -79029, -98067, 27995, 60851, 46640, 90057, 93801, -80380, 18448, -24720, -49429, 42260, -81512, 13828, 23458, -56304, -75287, -60169, -35268, -3800, 9720, 8543, -29941, -59395, 27717, 20163, -67257, -87400, 88743, -54696, 399, -58125, 1618, -59593, 33013, 67813, 26860, 76313, 39770, -94603, -53574, 41781, -24436, 75054, 17569, -57614, 13164, 70807, 69355, 61871, 42056, tail", "row-360": "head, 63660, -15435, 32058, 58357, -67024, -3957, 82360, 40808, -84278, 61285, 62599, 38404, 77516, -8199, 61609, -40424, 7637, 42817, 93955, 21496, -2060, 96160, 66845, -6572, -45428, -19768, 14117, 29651, -83252, -701, 52601, 27531, 82347, -36476, -69462, 33753, -3660, 36911, 2967, 19914, 67859, -38234, -51774, 86180, -20866, 55090, -23422, -22819, -90891, -3017, 57612, 38309, 38860, 79559, 13982, -78877, 68533, -67292, -53101, 18370, -14259, 
-77930, -6062, -63315, tail", "row-361": "head, -39293, -35250, -21995, -90969, 13749, 27607, -65275, -12817, -99996, -47145, -74996, 98439, -45822, -71228, 63825, 46239, -83248, -78363, 79484, 87832, -8937, 78902, 5087, -67980, -85459, 69184, -44283, 22029, 26067, 50393, -63887, 87121, -13505, 25730, -76478, -84883, -47669, 28932, -84500, 88535, 18461, -7423, -74212, 53328, 10907, -28848, 96236, 76990, 38725, -59565, -88402, 28116, 81959, 10727, 14181, 91587, -94857, 6702, 99225, -30784, 32505, -45726, -31819, 35738, tail", "row-362": "head, -62909, 24117, -53641, -99835, 62483, 51708, -85988, 55968, 80943, -99746, 70346, 85140, -91658, 62083, -5574, 64825, 66379, 71710, -18748, 32778, -54654, -73834, 23644, -87357, -10490, 27871, -98684, -35271, 2611, -78183, 39290, 28128, -47209, -14633, 35871, 13574, -28469, -69315, -41837, 83185, -13103, 27450, -53743, -69952, 78126, -78750, 73193, 12174, -30532, -1353, -88381, 56069, -7326, 94971, 56889, -93760, 49260, -73583, -41535, 26416, -9673, -79703, -60934, 41696, tail", "row-363": "head, 63847, 79423, -83931, -62797, -55555, 44596, -90564, 31407, -22726, 50982, -96158, 84471, 51640, 89898, 13761, -60700, 3951, 70806, 85979, 14590, 97388, -10223, -60557, -22323, 14845, -61116, 96165, -50826, -14279, -2080, 28629, -35817, -62863, -8919, -39994, -5172, -47699, -35341, -45061, -90108, -1796, 93272, -17051, 19122, -77842, 86069, 50094, -34570, 30061, -62120, -94120, 12480, 55725, 3514, 77860, 67876, 27145, 94995, -30869, 67798, 91616, 27445, -27473, 7242, tail", "row-364": "head, -73213, 75342, -22290, 15277, 69503, 91695, 51216, 17474, 64458, -11849, 5211, -77273, -23258, -86187, -36899, -279, 75624, 96931, 6986, 77017, -77937, 94164, 23071, -28101, -58873, 598, 21709, -63496, 63122, -39611, 31519, -69194, -67630, -75028, 5436, -64332, 81823, 78076, 76931, 10142, 68276, 84971, -2403, -91188, -69184, -30170, 80043, 44813, 94802, 63043, -54739, 60123, 23135, 28640, -75530, 62606, 27776, -39448, 58846, 69351, -13863, 79611, 6870, -51813, tail", "row-365": "head, -74107, 44705, 42560, 61201, -44075, -49578, -92175, -91647, -85192, -71304, 77057, -2469, 63169, -52020, 31093, 10018, 3699, -26151, 33610, -62421, 75729, -50066, 48623, -43322, 50253, 97192, -51805, 23642, -82177, 57501, -13850, -27697, 24347, 1684, 52460, 2875, -56027, -22152, 39586, 89796, -90571, 89197, 58887, -56850, 45938, -99655, 56233, 75832, -4603, 67981, 74816, -71916, 38615, 79524, 77982, -74936, 85178, -38443, -90874, 36832, 44967, 86960, 14263, -13731, tail", "row-366": "head, 46052, -35674, 34905, -20811, 33870, -70507, 82826, -47978, 26133, -57418, -90534, 78446, -11973, 50359, 51420, 9946, 4817, 44946, 47052, 9791, 43525, -51622, 39512, -9474, 54103, -46939, 18974, 14763, -65079, 63175, 37691, 86000, -21442, -25581, 48018, -53265, 25012, -3720, 24923, 78954, 57603, 44168, -31858, -37931, 48666, 55498, 80726, 33655, 40695, 5486, 44356, 59508, -21087, 41427, -93591, 50352, 60229, 5900, -46863, -50716, -37116, 12338, -23282, 3175, tail", "row-367": "head, 43729, -74621, 39528, 10930, -95065, -41275, -41318, 94469, 34485, -25121, -82963, -87468, -41177, 30463, -12200, 76267, -2372, -346, 58968, 19531, 47592, -55014, -53796, 23701, -76400, -42544, -33193, -91470, -17755, 7874, 40738, -24447, 84280, -18773, 30747, -11837, 71598, -29489, 29658, 85525, -73424, -57876, 66438, -20532, 45587, -27984, 46992, -22259, 28796, 55133, -28275, 70690, 29454, 5815, -1553, -43596, 22838, -93265, -55525, -31556, 83638, 8250, 63920, 26401, tail", "row-368": "head, -72208, 10135, 50048, -94997, 
26050, -16037, 75180, -27481, 35343, -71246, -37015, 95727, -88984, -35290, 35604, -99451, 50871, -88973, -62878, 17054, -39875, -65444, 70658, 73206, -82244, 97703, 51774, 7214, 10589, -53087, -78636, 96345, -90451, -3506, -75917, -90988, 86495, 32659, -36052, -53636, 23217, -86561, -61657, 19618, -27326, -63828, 7647, -40069, -65342, -95179, 55258, -10249, -69542, 7525, 47792, 41222, -10215, -54349, 46149, -3116, 16058, 27055, -9823, 16913, tail", "row-369": "head, -62801, 66529, 40744, -42480, -6091, -60453, -53751, -83717, -80397, 26842, 37327, -99390, 36854, -54241, 10614, 58288, -67174, -38005, -16328, 95212, -66069, 28716, 12498, -2534, -41345, 57052, -78667, -17255, 87886, 83329, 20126, 2177, 51553, 17322, 64135, 10523, -18003, -81020, 62425, 1467, 46674, -26058, -43842, -44136, 36883, -25433, 77480, -41191, 92874, 94302, -86668, -9451, 4208, -85404, -33562, 63425, 48520, 56582, 24327, 89658, 98839, 56199, -41549, 37083, tail", "row-370": "head, 25110, 23239, -89454, 42679, -30381, -63059, -94441, 50629, 27167, -97864, 81127, 81131, -59943, -95359, 97727, 96472, 2552, 63600, -85687, -23200, 65401, -87086, 37169, -67993, -96831, 4825, -60336, 86502, 4397, -78360, 28286, 84968, 42982, 87267, -12924, -42430, -26328, 81476, 77028, -58957, 80411, -69773, -74877, -76014, -25749, 73915, 16372, 87986, -15057, 7448, -33539, -61842, 7340, -60592, -68519, -69986, 4269, 62403, 69626, -36893, -25418, 89614, -58128, 38548, tail", "row-371": "head, 18476, 20454, 34935, -13438, 3308, 55476, 8199, 93364, -34378, -14477, -14742, 85845, -54684, -68054, -60193, -72767, 75628, 77240, 2332, 12413, -96184, 91161, -83994, -14, 2713, -80906, -69651, -88830, -7338, -47312, 98701, -20555, 1436, 15996, -62042, -26050, 6106, 19658, -59129, -89162, 75085, -75249, -69294, -68514, -58893, -49550, -1133, 23098, -7466, 41466, 31101, 74662, -97133, 30882, -95807, 21202, -93333, 80529, -30552, -75973, 72266, -18065, 14559, -91160, tail", "row-372": "head, 84085, 33126, 91977, -59809, 8595, -256, 75810, 57733, -53888, -30335, 49117, 17716, 84206, 86743, 306, -78989, -35426, -12785, -8493, -54720, -71006, 78694, 63986, -73000, 10303, -14461, 88130, -46104, -65173, 26845, -80023, 5398, 24220, 22610, 17094, -78294, 1972, -28772, -70249, 74534, 83728, 53294, -56426, 91638, -89679, -32695, 45450, 76221, 27154, 68330, 46068, -61230, -30928, 12085, -97189, 9249, 84299, 97723, 66059, 50424, 89092, -77469, 9312, -49326, tail", "row-373": "head, -4163, 6986, -84298, -2620, -57815, 55146, 10806, 27376, 24504, -34659, 39365, -32787, -61368, -34061, -76595, 34971, -61760, -77455, -47439, 21982, -68981, 76601, 33634, 83600, -59185, -70901, -51409, -87939, 39489, -54088, 38562, 58330, 33293, 49739, 10574, 44611, 3897, -14707, 42085, 65364, 6444, -14378, 56934, -67373, -80582, -24228, -39295, 74127, 14136, 4008, -17060, -94085, 9786, 98944, 9352, 22873, -67171, 84127, -46996, 22726, 55759, 1031, -96902, 43102, tail", "row-374": "head, -41062, 48872, -29045, -57815, -55787, 87008, -44508, -73479, 51006, 28548, 99368, 50878, -64810, 54893, 6252, -26718, -13985, 50587, -54116, 59687, -41922, -35298, -45320, 98605, -88676, 91595, -82003, -67497, 73097, -47369, 38838, -21427, -97440, -39991, -64516, 39523, -64389, 38031, -92082, 80029, -88491, 67244, -22767, -53709, -33372, -99571, -36519, -66396, -17932, 93406, -47210, -60044, -72766, 48833, -83387, -73619, -50974, 21623, -27449, 12504, 45830, -22439, -67823, -16614, tail", "row-375": "head, 30013, 84400, -62897, -20327, 34570, 11516, 29754, 19902, 10598, 78000, -37664, -46167, 3920, 
83934, 5407, -31147, -91542, 30923, -56096, 22552, 77064, -69018, 77499, -91981, 98373, 11582, -88702, 31746, 5878, 35315, -5229, 34583, 37071, 21722, -5675, -41864, -86351, -60669, -78473, 4507, -37924, -26225, 24868, 36504, 47033, 91064, -93647, 1734, 16193, -81275, 50511, -25998, -54893, 89913, -32124, 95116, -73685, -75093, -55351, 53167, 83917, -41588, -79507, -70302, tail", "row-376": "head, 23753, -40255, 77073, -88978, -7174, 66742, -38808, 95996, 47525, -63889, -44392, 38339, -54827, 81066, 76972, 10016, -99137, 49206, 44989, -58739, 78001, 9694, -39286, -59244, -15535, 53181, -48102, -52841, 21354, 25421, 2367, 36560, -78341, 55515, -87710, 44940, -59636, 55681, -78929, -63883, -99049, 80170, -45599, -55201, -85381, -25324, 51613, 81170, 68049, -59991, 49837, -9297, 91813, 27132, -67210, 39642, 63281, 92802, -32889, -53575, 50523, -49910, -93476, -86375, tail", "row-377": "head, -2057, -11104, 55359, -5558, -20393, 85223, 58225, 72752, 92736, -63169, 1149, -44702, -166, -17513, 93261, 52498, -94172, 9358, -18849, -47077, -40247, 72053, -17433, 88824, -30457, 57962, -46757, 96083, 95969, 15115, 53471, 33364, 15019, 20769, -68840, 16520, -73918, 25898, 82201, -90334, -58389, 54400, -29401, 51671, 52004, -67903, -83490, 85150, -17474, 89132, -98680, -47348, 26653, 85165, -56806, 85989, 30755, -25320, -23077, -54364, -91636, -76319, 87398, 82070, tail", "row-378": "head, 19525, -26659, -45301, -98756, 64436, 46419, -1776, 32382, 60372, 49286, 43544, -87993, -19592, 8514, -67312, -60851, 69501, 90142, 52441, -93729, 24710, -63579, -12271, -97072, 38830, 49248, 48281, -79023, -6369, -86867, 31428, 70212, 26340, 37322, 78400, -33577, 26185, 81931, 4868, 70434, 15912, 68067, -64795, -34282, 27077, 61110, 46246, -98748, -70590, -46939, -42350, -48071, 35186, 35208, 92235, 64586, 55374, 6873, 56190, 31595, -16089, -85294, -48549, 31768, tail", "row-379": "head, 60589, -85840, 27996, -44562, -86778, 96987, 90001, -90437, 98659, 13969, 33257, 67960, -3743, 38365, -247, 43902, -59122, 95322, 14292, -85174, 99140, -43232, -30365, -37546, 38903, 70845, -74109, -50822, -58029, 67409, -65609, 17609, 41050, 3044, 40676, 87652, -51118, 10049, 26534, -24990, 99943, 47675, -39950, -64504, -17794, 27870, -72785, -24057, 51938, -54106, 80168, -66685, 10703, -81302, 82257, -96641, 38863, -83202, 88446, -99906, 41553, -72410, -88225, -52200, tail", "row-380": "head, 32641, 60366, -26740, -41364, 8635, 97629, 57802, -23851, -5575, -81845, -58650, -70767, 41418, 84728, -7493, -30463, -28752, 59880, 85159, 53626, -60692, 9378, 81713, -59165, 41588, 51879, -27368, 12467, -22287, -20895, 67090, 15524, 12669, 30216, 66371, 2280, 67029, -16553, -71842, -59727, -5030, -3315, 68644, -20472, -94912, 69342, 51091, -21036, 47299, 41219, 49858, 75870, -21815, 61907, 15651, 53364, -39060, 70374, 61275, 94906, -55439, 86992, 97776, -20702, tail", "row-381": "head, 6505, -21274, -7493, 46118, 99546, -21902, -12793, -247, -61274, 90328, 45093, 42917, 8932, -34849, -66782, 46026, 2210, 1186, -72458, -12154, 40097, 81653, -62877, 68342, 29241, -74635, 47107, -42318, 77065, -14354, 91604, 14621, -46105, 90835, 17501, 32665, 62404, -24543, 77920, -97577, 85889, 92436, 32589, 26354, -82641, -2950, -91160, 84830, 69235, 14338, 64460, -60957, -56853, 85481, 12615, 77293, 61778, 27557, 36527, 38031, -12302, 53432, 38746, 93338, tail", "row-382": "head, -17571, 82155, 96759, 25085, -9795, -9179, 598, 67091, 72647, -57142, -69596, 40611, -27891, -48388, -61496, 46881, -27502, 55253, -56933, 25751, 43757, 66708, -51741, 18530, 
42596, -19491, -58963, -41800, -62675, -3868, -94797, -24852, -11560, -63794, 23858, 69008, 91791, -79745, 27338, -98671, -72217, -31728, 46978, 4791, -75383, 17928, 46827, 3123, 31825, -60873, 98856, 70980, -68427, 50568, 65635, 11155, -58297, 76946, 87956, 24979, -57048, -19370, -95127, -22957, tail", "row-383": "head, 2361, -96119, -42181, -83968, -66137, -31810, 84793, 1906, 88292, 57832, -76305, -57208, -31986, 27020, -50872, 24392, -72578, -23504, -15078, 54283, -76909, -13154, -74114, 80687, 46298, -76940, -40105, -58313, -18591, -86409, -96155, -1024, 17624, 93282, 50020, 66998, -86408, -21922, -18663, -76789, -88354, -70740, -18949, -90621, -22660, -49201, -91930, 17526, 69367, -84199, -69474, 48714, 65458, 72455, 91720, -1394, 58941, 29762, 53317, -73955, 43692, 8972, 17568, -27277, tail", "row-384": "head, 29816, -65216, -95257, -51072, -28636, -26184, -62555, -3310, -744, -55660, 38242, 73909, 81313, -97955, 23053, -46182, 95427, 34349, -81150, 15118, 58921, -93967, -63510, -73461, -64584, 18449, 23148, -16300, 71928, 31232, -57358, -92134, 2242, 89572, 72854, 85015, 19292, -32554, 52670, -92129, 65937, 23399, -25178, -53896, -98667, -15374, -50529, 21053, 91657, -72891, 10990, 57099, 69805, 16554, 35954, -87296, 98166, 31367, 5256, -44287, -10046, 47164, 80781, 55185, tail", "row-385": "head, -53530, 13855, 71075, 16884, -22998, -36242, -55883, 55100, 68976, 11158, 98583, -23839, 19168, 52855, -16861, 62926, 22333, 32231, 95285, 41427, -94277, 18722, 50167, 43108, 26162, 9060, -14543, 32744, -53172, 47391, 66880, 27057, -79798, 13198, 67588, 80003, -33881, 97949, -56936, -95300, -99976, -86540, 43747, 9865, -45810, -69563, -37618, 71900, -31067, 35019, -60101, 27505, -85763, 61908, -97330, 93745, -89734, 29349, 27210, -95131, 31763, -42262, -21320, -20411, tail", "row-386": "head, 48484, -46122, 53374, -34757, 82519, 23013, 97906, -90625, 34380, 66260, 51335, 30153, 90420, -83011, -4698, -53112, -39889, -79165, -49985, -44279, 29463, 22234, -23773, 21779, -68359, 34574, -65245, 88877, 35721, 75306, -41407, -2042, 49784, 52946, 86388, -57497, -55654, -85011, 1437, 59475, -13495, -38487, -20103, 19427, -55954, 40922, 45340, -24442, 77047, 63006, 91096, 68495, 3106, -71721, 67482, -62119, 60022, -75899, 29845, -82368, 79814, 40833, 27295, 99156, tail", "row-387": "head, -61570, -19337, -3988, -79390, 76362, -44132, 99423, 47445, 22022, -1686, 84689, -43030, -48014, -85693, -82551, -73060, 61991, 69454, -74665, 4242, -26847, -1337, 11596, 1652, 35141, 66007, 76086, -92085, -41703, -69360, 48979, -62133, -34064, 90955, -66218, 95014, -55730, 74622, -29306, -55241, -28584, 44409, -90569, 72986, 24560, 71905, -33392, 81988, 35709, -79120, 89249, 14641, 43234, 67542, 79503, 74300, 41979, 26870, -23254, -67326, -86257, 55023, -36085, -98530, tail", "row-388": "head, 82273, 25121, -51403, -31479, -81205, -72371, 47238, 77881, 26643, -68333, 38782, 34222, 5780, 46432, -32189, -46903, -24022, 99334, -34643, -11474, 78131, -91082, 55113, -81591, 26045, 47284, -87289, 88017, -81067, 18548, -7128, -19973, 45323, 33290, 61372, 82842, -59, -58111, 76002, 13216, 2393, -8117, 29368, 91984, 59815, -44716, 72842, -81910, 61156, -93995, -89784, -65237, -12425, -37572, -99196, 93534, -11460, -42281, -58115, -44448, -50367, -5499, -21233, 27312, tail", "row-389": "head, -22857, 54569, 18498, -71554, -44980, -35819, 23009, 38223, 6975, -5659, -66708, -13727, 74760, -87663, 55987, 61905, 46046, -38754, 23638, 79129, -56749, -64616, -95031, -69474, -82986, 41487, -87691, -14493, -90749, -64603, 5016, 
-57990, -44187, 36993, 20395, -60819, -48570, 66619, -44412, -16594, 13898, -53940, -62881, -47084, -50908, 91433, -66575, -25770, 31425, 48517, 53765, -78907, -67787, 49543, 6798, -60111, 17007, -9006, 38920, -38959, -26118, -39897, 55455, 52074, tail", "row-390": "head, -5411, 35673, 52468, 78569, 22668, -26113, -29001, -43542, 71601, -21811, -70470, -21983, 67704, -26955, -51518, -25185, -81445, 45192, 33255, -27111, 97360, -81468, -14093, -42822, -14686, -7295, -63832, 44850, 4229, -31166, 20725, 18908, -56643, 62512, 12501, 14961, 96893, -15326, 57909, 92486, -77472, 655, 29443, 44759, -43131, 54460, -10991, -94165, 88459, 48884, 28861, -51061, 25494, -52351, 82261, -63392, 19087, -58220, 73187, 81873, -46524, -13024, -52277, 94433, tail", "row-391": "head, 97943, 88919, -85664, -7963, 70885, 86571, -12082, -31217, 71853, 72963, 99760, 12181, 2067, 22911, -59411, 59718, 51903, 51969, -57789, 50092, 4618, -54501, 46948, 6008, 64789, -88213, 23070, 10594, 82358, -44359, -46109, -63773, 34797, -96175, -26011, 42458, 24826, 93431, -57915, 45730, 61147, 64769, -61009, -28268, 49034, -70902, -65808, -74350, -51398, -6092, 63113, -44891, -23524, 56607, -28138, -85054, 70101, -12691, 83759, 69555, -76665, -37772, -18466, 96916, tail", "row-392": "head, -63461, 36210, -93800, 87876, -59079, 62414, -70663, -35825, 70532, -57070, 20412, 44127, -80674, -91841, 77799, 57617, 84394, 44686, -18729, -89467, 73233, 11975, 37731, -93383, -1595, 81488, -31937, -86461, -91547, -77470, -13501, 89227, 63675, -68625, 88836, 94335, -80441, 31534, 93954, 19178, 12232, -12643, -8218, -60724, -8471, -73184, 66836, -64800, 97929, 82335, 50636, -78895, -53421, -24499, 77982, 44960, -95376, 1466, -40317, 26589, -94460, -26880, -26205, -94220, tail", "row-393": "head, -5394, -59028, -68258, 67007, -52528, 95223, 63471, 89096, 41677, -78797, 13889, -75226, 99714, 91253, -17009, 44735, -70850, -94700, -3820, 19942, -59610, -12452, -13536, 94955, 7111, 2115, -78480, -60421, 73801, 42362, 46406, -41240, -88537, 72638, 70023, 60145, -29984, 56738, 57417, 28268, -53331, -61554, -82446, -73164, -78600, -10640, -39140, 55067, -62712, 29493, 87751, -56488, -71706, 44024, -69011, -22981, 14819, 72324, 14928, 49574, 98326, -8780, -14135, 56129, tail", "row-394": "head, -25656, -37950, -47859, 34062, -28521, 99469, 63180, -62060, -26201, 40208, -1346, 81349, 79868, -70571, -95870, 71765, 52659, 36679, -20924, 30681, 25966, 33492, -68596, -42059, 29504, 16747, 68744, -86796, 94726, 645, 73962, -62460, 64685, -5073, 35956, -20821, 26425, 68669, -18123, -67741, 97478, 43629, 37728, -93006, 50067, 95649, -64832, -57664, -63006, 52515, -68558, 7550, -66341, -58973, 33150, 33886, -76936, 51047, -18689, -48085, 84752, -20485, -35862, 15030, tail", "row-395": "head, 20819, -99043, 37852, 91571, 57445, -45249, -81728, -46616, -94359, -90392, 74446, 26428, 16477, -3807, -61021, 56357, 18871, -72136, -59043, 23196, 67888, 37556, -45164, -2738, 32396, 3053, 88534, 17169, 33079, 40984, 14740, 30920, 30170, 66984, -98963, 78885, 26045, -89390, -74203, -94541, -71983, -63460, 11855, -58036, -71141, 29150, -2512, 616, 98504, 86677, -52635, 85913, 91797, -40961, 74349, -64137, 26760, -25093, -66529, -45417, -22055, 94538, -32142, 36237, tail", "row-396": "head, 53485, 99843, -46828, 36216, -36810, 50976, -99964, -60583, -40900, -96549, -69726, 59927, -24133, -70411, 49467, 88117, -60917, 42002, -85311, 78927, -58901, -27598, -80782, 17098, -52285, 22597, 13098, 93980, -33294, 27826, 77488, -4674, 31393, -34487, -38073, -74139, 60417, -34333, 
-95441, 41866, -43456, 66914, -79669, -62042, 56740, -29097, -41567, -90005, -29542, 29782, -19616, -87020, 87131, 9258, 74779, 31960, -83081, 15952, -73248, -51829, -9660, -76136, -39483, 65496, tail", "row-397": "head, 73547, 21431, 62052, -92861, 46377, 55885, -85808, 19915, 65517, -33137, 25603, 64960, -33959, 70799, -74544, -11118, -34062, 5395, -90186, -51127, -89669, 52100, 60490, -6536, 76956, -28102, 13445, 65103, 35615, -92696, -78892, 19217, 69608, 60968, -40237, -29025, 92241, -41185, 42728, -25059, 90570, 49590, 38121, -53151, 32521, -92035, -69673, 52674, -75760, 1602, 32517, -75910, 83806, 2616, -69232, 49059, 40894, -99802, -9404, -61614, -22669, -67158, -58858, 66733, tail", "row-398": "head, 22564, -99313, 46078, 2269, -99817, 87342, 76363, -35164, 67611, 82022, -34739, -22810, -71931, -61123, 37768, -4598, 14467, -46351, -26247, -894, 25773, 38108, 93151, 62821, -68036, 11107, 79051, -39392, -90459, 32921, 24273, -713, -71096, -72223, 16949, 63908, 78142, -93011, 16203, 55089, -66319, 80171, -78054, -89589, 64632, -80836, 53523, 53438, 98904, -85552, -3300, 71593, 2523, -44646, -14250, -74621, -73048, -95938, -46026, -73066, -43727, -16049, 78868, 9154, tail", "row-399": "head, 14123, -6031, -98931, 34917, -12084, 63856, 80682, 59196, 8504, -4625, 72744, 81529, -34179, -10806, -7738, -37272, 38743, 7437, -78593, -10841, -53378, -51575, -57489, -66491, 15698, 68982, 21772, 61393, 10148, -23361, -40813, -6123, -80318, -21395, 75217, -36291, 18468, 97730, -79328, 48492, -62110, -59267, 69519, 54239, -92928, 16159, 96293, -71518, 41916, 46427, 52486, -84423, 78754, -78386, -83124, -64234, -6342, -76395, -3588, 81843, 91340, 74757, 1650, -14711, tail", "row-400": "head, 33906, 34576, 23683, 48135, 87690, -2451, -74657, 66357, -35386, -80022, -24576, 9696, -52005, -69241, -22113, 34235, 32888, -80357, 56948, 33723, -18126, 12336, -98136, -47502, 7523, -96733, -83684, 72114, 74705, 50852, -48780, 92374, 22434, 71449, -35389, -57856, -33459, 23304, 17095, -29419, 87092, -31527, 24059, -46886, 40158, -3075, -3141, 64725, -65165, -60215, -77558, 71003, 14201, 3635, 65466, -49694, -21511, -732, 39173, 29332, -63408, -90647, 26602, 51660, tail", "row-401": "head, 78936, -23408, 26803, -20926, 9802, -84166, -32127, 62498, 56180, 47590, 17307, -59553, 47665, 73755, -98673, -4487, -2846, -36476, -85856, -50091, 21777, 75376, 64867, -75617, 70255, -30276, 13667, 40513, -13089, 89906, -90948, 55295, 12056, 51032, 78977, -61561, -31952, -82326, 86248, 18586, 47253, -64070, 68744, 8287, 51931, 13362, 4690, 48764, -59470, 40550, 4859, -60355, -50565, 58551, 14148, -11945, 75874, 29579, -93605, 8771, -89933, -30856, 18377, 70870, tail", "row-402": "head, 88071, -68350, 75668, -57130, -66944, -28080, -71377, 28078, 82862, 26807, 39556, 63538, 14435, 42552, 79650, -48973, -641, 46571, 51159, 98059, -67670, -70570, -91222, 8898, -3524, 44111, -77783, -71907, -16507, -57604, -67220, -20114, -13142, -83350, -15758, 62153, 49270, 12422, -68956, -3882, -28342, -31782, -17078, -51187, -8203, 47174, -33591, 86126, -15922, -31160, 26248, -76757, -14159, 96038, 52751, -49332, -66609, -94794, 84416, 17216, -11724, 84268, 80881, -98139, tail", "row-403": "head, 25987, 55524, -85945, 59278, 8996, 75592, 62497, 25305, -33500, -10022, 24973, -15800, 86875, -89157, 2259, 14951, 78249, 55131, -83657, 48498, -8251, -17583, 59141, -12806, 33446, 39296, 72761, -6855, -3844, 16039, -51016, 12347, 94692, 57274, -64070, -27406, 72890, 30837, -78575, -65098, 55344, 99284, 14324, -63773, 75163, 97365, 18268, 7932, 
-68573, 17585, -78001, 95455, -69564, 83923, 28358, 1147, 28749, 73882, -67349, -89482, 14823, -61183, 28490, 83176, tail", "row-404": "head, -737, -70888, 86260, -34482, 72314, -85989, -35598, -75131, -35097, 75136, 10101, 53379, 99825, -49617, 58812, -65482, 91057, -32028, 33531, -99491, 91541, -22401, 13675, 41034, 12183, 44983, 87567, -20644, -52370, 32200, -68509, -8186, -40738, -19192, -33246, 92573, -12125, 86381, 49246, -2690, -95993, -35040, -38200, 4011, -86954, 51444, -71037, 80950, 46297, 67066, -48611, 35437, -15450, -69570, -20015, -78572, -12049, -5557, -50861, -25071, -78832, 11149, 68322, -5300, tail", "row-405": "head, -16791, -95882, -90768, 87260, 88413, 97584, -58173, -19280, 73343, -852, -90546, -27246, 31940, 43282, -38285, 45080, 15235, 11271, -22295, -10777, 25745, -99598, -55687, 44573, 96649, 99234, 96664, -79654, 99298, 18329, -65214, 72075, 89009, -85453, -73536, 87993, -72012, 80584, -7407, 19745, -17036, -10769, 60089, 39732, -79390, -53127, 97159, -22809, -15839, 61582, 31453, -77148, -91774, 80016, -97616, 91136, 95469, -46252, 94403, 44911, -25370, 11592, -20857, -11081, tail", "row-406": "head, 98429, 679, -54892, -25383, -23396, 99534, -63731, -54885, 27632, 58275, 30712, 1970, -54059, -95968, 32472, 14719, -68912, -86514, -88666, -63161, 59479, -74164, 47057, -47003, -56210, -1955, 21749, -2386, 65616, -16976, -73693, -79940, 73856, -13311, 52666, -91815, -658, 42643, -10834, 33642, 44074, -4235, 76143, -38077, 38787, -92868, -88720, -92941, -10721, 98354, 57212, 28862, 72774, 75810, 7272, 56068, -42480, -72140, 58845, -36306, 31361, -18210, 43117, -56000, tail", "row-407": "head, 45961, 94137, -67226, -62627, -2998, -90102, -52509, 46005, -54407, -16546, -31183, -79138, 38920, -10122, -49962, -46218, 49867, 36016, -25490, 83493, 83109, 38178, 66642, -46125, 44707, 17558, -11679, -15410, 78064, 48203, -18954, -77772, -80719, -9244, -93455, -88011, -76627, -31049, 45268, -42270, -98538, 46959, 33112, -2229, 91621, 89525, -63950, 44661, -49702, 66925, 75428, 71551, 94444, -65148, -57265, -50185, -37175, -24823, 61120, -77572, 35688, 39650, -20998, -73377, tail", "row-408": "head, 8647, -43604, 43264, 12151, 39367, 19285, -23423, -95406, 65992, -41534, -23779, 50325, -33005, -29902, -7724, 97636, -2117, -4899, 9821, -63054, -93492, -74794, -84030, -52338, 60150, 83798, -9329, -69721, -35798, -63779, -75785, 3383, -33593, 89261, -64132, 25437, 10476, 71798, -44887, -20435, -86223, 71113, 50746, -91499, -54669, 30424, 3423, -21928, 35965, -86150, 25570, -43542, -23930, 16931, 50015, -52266, -2334, -5295, 9147, -45103, 10843, 55211, -82950, 90792, tail", "row-409": "head, -22611, -19967, -9824, -57386, -22516, -36685, -84850, 314, -52597, 22402, -52678, -67833, 49295, -61021, 33045, 93498, 86265, -84412, -15743, -84654, 98622, 36362, -16540, 8929, -64302, -61392, -39755, 92159, 6966, 45560, -88671, -93635, 32214, 9335, 60728, -23893, 14981, 71954, -81304, 10296, -38160, -13178, 23819, -70648, 32987, 81423, 43252, -94866, -97579, -22232, -61220, 48540, -23377, 78592, -73676, -77015, -2539, 42762, -55797, 37072, 38628, 94951, 48447, -19412, tail", "row-410": "head, 89485, -42958, -8506, -36363, 3254, 42647, -97388, -6650, -10992, 16169, 52665, 13002, 42240, 97912, 78791, -13066, -87315, 74220, 22147, 46268, -43562, 34447, -60587, 53987, -89184, 89, 25103, 79645, 49105, -47650, -92985, 59786, -51977, 5916, 1474, 4234, 25288, -48701, -37844, -67093, -77220, 96139, 89204, 65572, -71990, 636, 88165, -17618, 71800, -85223, 38621, 50254, -43801, 58290, -12497, 19604, 
-73230, 77594, 74729, -24116, -51725, 91531, 45372, 48716, tail", "row-411": "head, -96441, -21271, 26907, -95917, 74254, 87208, 48388, 39422, 57761, -38487, -32774, 34011, 20881, 51910, 96093, -60907, 65596, -29844, 78862, 45204, -67071, -53198, -48564, -92119, 17154, -3067, 1017, -8556, 8968, 64080, -19699, 60817, -62256, 10647, -11593, -58389, -8110, 7957, -77364, -37837, -96136, 22460, -34283, 79377, 26197, 34163, 12302, 76697, -9989, -60617, 1993, -77747, -39909, -54294, -1949, 20308, 66348, -78844, 2817, -29514, 88375, -92812, 32140, 30401, tail", "row-412": "head, -14224, -82294, 92159, -72053, -89494, -82154, 32648, 72701, 82433, -60363, -83452, -65767, -85749, 13194, 51520, 50412, -70356, -62403, 31931, 93973, -98948, -49985, 93566, -15623, 31894, 28510, 14918, 7638, 64798, 40946, -60063, 28583, -97730, -78411, -74576, -44520, 95908, 19710, 2757, -78027, 89534, -22482, 62027, 35467, 17517, -36648, 17220, 41767, -39807, 86379, 61441, -14001, 61929, -99835, -51582, -13673, 55657, 80291, 31131, 66504, 21709, 2645, -47224, -57086, tail", "row-413": "head, -50966, 19803, -41502, 36225, 25398, 71048, -95795, -53290, 29929, -37021, -23757, -36220, -4068, 96825, -13073, -58168, 28741, -12589, 69484, 3659, -82834, 87434, -21543, -26330, 60911, -86860, 62698, 54854, -98897, 90417, 63281, 65100, -9987, 916, 94704, -64735, -16791, 14517, -77891, -36618, -45840, -49034, -92063, 22778, 14822, -46383, 83582, -16785, -40891, 48678, -14241, 4356, -37117, 9856, 6046, -6181, -13433, -52978, -91063, 84026, -20554, 17730, -48078, -36361, tail", "row-414": "head, -33609, -5488, 24197, -10262, 77125, -85372, 3224, -76955, 44640, 99080, -69013, -31332, -5003, -79348, -18513, 12520, -68096, 39332, 13437, 70572, 4227, 25990, -8462, 58231, -5413, -26034, -65397, -57278, -16237, -77768, -60766, 71579, -45182, -27099, -17707, -8360, -35036, 2066, -36399, 65282, 74856, 27807, -7598, 82406, 6380, 67537, -81928, 65285, 59347, 29853, 54720, 7375, 20693, 70902, -86697, 60737, -88451, -50847, -28173, 76900, 45132, 93440, 97048, 47419, tail", "row-415": "head, 18990, 63181, -45725, 25506, 56530, -3697, 61183, -87600, -74137, 59729, -57836, -23689, -13748, -82113, 21391, -13681, 92205, 54803, 59250, 18276, 63900, 32809, -2456, 66405, -23372, 75790, -89751, 75154, 2742, -85160, -15101, -41519, -84800, -33607, -77121, -99637, 7528, -81667, 58930, 38244, -42394, 34589, 54417, -58117, 95112, -12617, 9660, -67884, -64739, 27677, 28807, -77956, -67300, -37492, -52000, 46099, -83223, -9218, -21646, -85314, -27680, 89183, 70561, -47977, tail", "row-416": "head, 6116, -10316, -9064, 47505, -50090, 84238, 74560, -48261, -47092, -30680, -65359, -54255, -59157, 13897, 79363, -60638, 96390, -568, 81939, -73096, -10640, 8418, 48261, 62264, -73747, 99453, -13455, 23109, 12623, 55548, -10253, 16016, -94197, -55955, -46463, 14500, -3838, 46223, -56825, -23630, 20339, 69760, 37676, -65592, 20718, -56319, -86687, -60512, -46918, 68831, -54137, 10575, 74765, -19280, 84103, -83539, -94360, 83853, 6162, 8762, -25743, -78652, -85235, 56398, tail", "row-417": "head, 2169, -18214, 86312, -86879, 65855, 87650, -77403, -24413, 11180, 79192, 33663, -11961, -56379, 45495, -42077, 94084, 87737, 73190, -98369, 56271, -6004, -6880, -82103, 99091, 89706, -58035, 86069, -86329, -2197, -7483, 5607, 74670, -71487, -15344, 54778, -90346, -39131, 15770, -43821, -3801, -46230, -77388, 51904, 96131, -48547, 99536, 67090, 78526, 33972, 11500, -85331, 33228, -1053, -12192, 90158, -96226, -75896, -21670, 59359, 83172, -16760, 22925, -57671, -97984, tail", 
"row-418": "head, -56085, -63778, 10959, 44536, 116, 22509, 61029, 36578, -43421, -70502, -41642, -91368, -71789, -45463, 14559, 39808, -80407, -2175, 86833, 81631, -47167, 50554, -8761, -7511, 7287, 41363, 80805, -52366, 20544, 50928, -23393, 28000, 64489, -33762, -24951, -17487, 4356, -89013, -15749, 76686, 99595, -86188, -20538, -31644, 44442, 81158, -74247, -87865, -30322, 97078, -35860, 37295, 31038, 79008, 56440, 14901, 76847, -96326, -82619, -10957, 51571, -78401, -19361, -70274, tail", "row-419": "head, 50678, 33764, -12973, -8841, 52100, 11176, -84320, -91424, -900, 83861, -44139, 95199, 47763, -66276, -81984, -19006, 8602, -41796, 37214, -15901, -91224, 71292, -82808, -53976, 54159, -26460, -2262, -25954, 85708, -83974, 69510, 65766, 7357, 19360, 18973, 3663, -7029, 36192, 64786, -55702, 79126, 3313, -62156, -79998, -41830, 42446, 31525, -17631, -43557, 92635, 93736, 39049, -4938, -7606, -3525, 82816, -1258, 68189, -93636, 36076, -33586, 79827, -39521, 33623, tail", "row-420": "head, -61462, -84411, -84810, 25063, 83943, 56345, -30260, 29888, -70020, -76365, 93093, 46271, -156, -70752, 5180, 36143, -4170, 53956, 47288, 7462, 96298, -18417, 99483, 25541, -98623, 92053, -70115, -65170, -2274, 13943, 85016, 61262, 65836, -91492, 93720, 34332, -61583, -91812, -36064, 29879, -93216, -36259, -22622, -36399, -53859, -207, 8528, -8663, -97343, -27397, 26928, 3558, 64402, 98815, -80721, -45329, 73287, 19, 11633, 79675, -6552, 97824, -19230, 16525, tail", "row-421": "head, 23811, 98942, -12660, 61292, -50671, -67469, 12084, 93036, -89811, -185, 2819, 96605, -96236, -66784, -2728, 4625, -61836, 68386, -21360, -3240, -78886, 69365, -81174, -48377, 88063, 62986, -37525, 13513, -66118, -38640, 77873, 29486, 93807, -38251, -68946, 33650, -18339, -82126, 85608, -89623, 63434, -43355, 6082, -83123, -72960, 22941, 13183, -34599, 61629, -14584, 40015, 85848, -39856, 68241, 10187, -80913, -93598, -44671, 14417, -13941, 34612, -84401, -4548, -43023, tail", "row-422": "head, -90344, -68958, -82359, -8419, 75177, 34075, 90296, 44356, -188, -18225, -25596, -51169, 69258, -60942, 40192, 66618, -35383, 90650, 5747, -15882, -47741, 75370, 48237, 75913, -33123, 23280, -24657, -94403, -74369, -72884, 65046, 25278, -24686, 45499, -76034, 22678, -36234, 1892, 31755, -54531, 96407, -3615, -51386, -26159, 32154, -78203, 42880, 66190, 17586, 58217, 24385, -1411, 4466, -35119, -5328, -19492, -47327, -34439, 32979, 59374, 80097, -9860, -65692, 57718, tail", "row-423": "head, -98903, 89020, -54067, 88597, 97769, 27021, -41653, -91643, -26956, 68411, 14136, 80332, -22642, -61029, -20936, -67960, 11845, 56398, 17085, -82700, 89943, 93965, -46628, 32271, -39383, 14465, -76041, -98733, 49282, 48494, 78716, -74970, -49524, -86779, 96765, 89552, 29601, -65308, -44153, -83703, -62161, 88808, -66330, -15467, -83496, 7303, -89786, 63090, 13090, 15439, 39512, -6880, -13484, -3512, 56254, -72991, -29888, 5301, 84347, 58163, 1434, -68172, 51639, -20385, tail", "row-424": "head, 93963, 96667, 65642, -32134, 4863, 72617, 70009, 16246, -55699, -87523, -90117, -20168, -40504, 97529, -2928, 57731, -46857, 38700, -76786, -22406, -32988, -13948, 59132, 10834, -94857, -53668, -18791, -58407, -92176, 57361, -7330, 40460, 14984, -5703, 21870, 17905, -53115, 70909, 58820, 24409, -86419, 74905, -52353, 68345, 66570, 55735, -97286, 61057, 27962, 28566, 14668, -19802, 12263, -48758, 52429, 68054, 87244, 78890, -37068, 54297, 83765, 55458, 65009, 84403, tail", "row-425": "head, -25189, -7032, -93634, 15739, -39371, 32482, 47478, -80672, 
-50106, -15296, 46513, -44816, 86741, 26870, 31747, 97353, -66180, 95592, -21200, 84196, 43428, -15909, -43621, -89507, -48949, 90225, -53934, 32678, 6275, -31321, 21471, -88700, 70923, 17344, 29066, -64889, 9532, 11121, -48049, 36140, 67417, -24214, 50969, -54789, -28877, -18754, -32909, 21130, 98560, -88355, -52803, 28865, -66698, -14594, 55413, -61221, -30187, -48010, -6212, 82874, 82613, 52314, 77067, -19808, tail", "row-426": "head, 3776, 81776, -12741, 76239, -89209, 1717, 37675, 51345, 70648, -34340, 43776, -58866, 76010, 84761, -89559, -3322, 31271, 83117, 2208, 56517, -27451, -6268, -15710, 55105, 20104, -75242, 16971, 1036, 80529, -86097, 4699, -64815, 62604, -27928, -79753, 17373, 91439, 95624, -98027, 53103, -63255, -43599, -20628, -2883, 85503, -93551, -38110, 20417, -95107, -60622, -37119, 15543, -41266, -78110, -53615, -49594, 73740, -76859, 22014, 21467, -70327, -28965, -90093, 60466, tail", "row-427": "head, -13141, 32297, -49728, -44980, 27936, -52490, -66472, -79858, 65682, -77320, 76915, -69553, -56664, 75084, -30169, 71912, -86497, 51900, -41511, 40186, 54260, -31740, -99442, 93779, 74253, 58977, 81336, -13306, 73225, 46557, -94430, -37737, 37056, 51025, -68948, 25448, -97293, -24561, -79654, -20026, 18287, 30167, -18261, -40270, 71274, 89917, -95010, -58198, -77790, -99032, -90719, 73061, -98345, 61790, 53721, 67086, -57676, -56455, 71003, 52225, -89487, 73178, -26299, -20109, tail", "row-428": "head, -2, -13351, 18537, 52148, 35396, 35812, 20334, 97835, -57193, 99564, -16921, -44455, -35862, -51723, 51007, -96364, 1188, 62169, 14422, 67737, 72562, -84161, -21798, 32891, -45344, 69968, 36387, 49714, -81045, -57870, -55001, 34458, -57935, -85588, 55134, 41597, 16045, -87112, -17283, -95020, -44732, 78781, 19116, 23085, 75133, -44292, 39839, -48749, 29541, -97089, -37201, 41807, 52887, -39283, -87552, 22043, -93928, 70681, 85235, 46253, 94586, 25929, -53458, -67434, tail", "row-429": "head, 18483, 56544, -88398, 53777, -50268, 71495, 67886, -50552, 77749, 60882, 51338, -87335, 94610, -37792, -96672, -13358, -39485, -55250, 69525, -42512, 32993, 16626, -62324, 83346, 85670, 38113, 19975, 66204, 47175, 98095, 15549, -27925, 17376, -36502, 50005, -47576, 97188, -44837, 9913, -16759, 48814, 89799, -31117, -95698, 51580, 33266, -94206, 9091, -70895, 56187, -91833, -55831, -62941, 88802, -13648, -81334, 54389, -35730, -66456, 33434, 68138, 86865, -89558, 54136, tail", "row-430": "head, -56542, -6047, 15249, -82217, -55034, 71608, 73125, -59935, -56912, -82387, -54266, -60606, -35653, -74024, 42900, -15406, -92630, 35790, 69268, 95521, 54832, 92635, 44334, 8291, 91247, 35821, 7637, -60726, 68366, 31921, 44465, 62212, -89201, 95140, 17572, 39433, -30676, -45635, -44224, -88114, -55903, -82852, 94758, 15725, -95491, 47856, 92068, -51825, 40108, 74546, -68199, 94986, 33934, -96192, 18753, -29367, -40000, -72722, 18508, -16799, 55100, 54352, 71417, 4148, tail", "row-431": "head, -86580, -52389, -8570, -98421, -71483, 48069, 30255, -47177, 75317, -36569, -47252, 29736, -61601, -44714, 85672, -17956, -20827, -27158, -2853, -7317, 61162, -28646, -55206, -13617, -14964, 77829, 27657, -18903, -60134, -77233, 13598, 63724, -25060, -11897, -7305, 16595, 42850, 40775, -67401, 73309, -6282, -25666, -61007, 45094, 13563, -89922, 34978, -70628, 17864, 35348, 21958, -7810, 80182, -76565, -9996, 54606, -50427, 33402, -41064, -99374, -27013, -84321, -50343, 39096, tail", "row-432": "head, -46002, -40354, 21978, -3205, -23929, 41309, 17639, -26895, 4620, -87459, 57529, 16661, 33421, -63425, 
-19409, 55779, -19503, 22448, 3751, 75662, 94461, -30901, 26207, -45564, 73128, -32441, -24715, -32407, 53443, -90780, 60607, 83222, -97600, 14340, -49756, -72995, 23586, 97932, -59398, -48987, -87060, -10856, -41114, 79153, 2544, -76095, -8262, 95852, 12202, 9419, 61510, 93002, 30256, -38824, 82666, 73813, -91957, -9147, -76690, -11871, 83791, -45332, -70710, 80629, tail", "row-433": "head, 62565, -17664, -90802, 18852, -83179, -20001, 46710, -205, 45607, -4323, -27985, 93389, -85738, -6164, -35668, -1100, 87785, -8731, 44601, -60231, -68728, 64190, 32612, 69701, -15277, -51083, 87890, 78338, -39446, 85525, 77851, -73614, 62760, 49488, 1013, 23548, -72152, -73387, 97585, 21765, -50518, -71476, 69010, 95307, -82502, -28995, -5018, -15572, -11853, -37356, 22440, 84030, 48889, -13123, -13923, 13622, -70975, 51366, -1564, -80697, 3346, -73028, -67856, -4824, tail", "row-434": "head, -32418, -45744, 53459, 6808, 39813, 6146, 50391, 9229, 8034, 75060, -60307, 78034, -53162, 20082, 16496, 43625, -74458, 87121, 75579, -12464, 11241, -84315, 8727, 30393, 7180, -80098, 49680, 32344, -71731, 84609, 72474, -20505, -95158, -11543, 10235, -36810, 98393, -193, 19006, -35561, 83410, -52954, -9569, -91683, 80322, 53591, -831, -15678, 56088, -36999, -25156, -35190, 83174, 5970, -84152, -39865, -68799, 73172, -54460, -32159, 8451, -71051, -55901, -49326, tail", "row-435": "head, -60962, -99649, -97134, 41173, -84804, 78473, 18019, -62944, 34111, 9919, -70021, 54622, -38263, -47854, -42263, 11431, 63479, 59696, 96940, 66891, -90476, -76256, 47321, -62229, -75267, 21182, -46047, 37991, -15026, -39423, -64321, 23990, -54451, 76791, -27446, 44918, -5461, 85643, -23389, 60506, 93021, 62356, -28174, 18659, 75811, -99712, 37924, -42655, 74736, -46664, 73562, 7584, -84010, 22010, -86425, -80737, -4782, 66475, 16704, 65713, 48791, 12409, 25295, 96058, tail", "row-436": "head, -81179, 60767, -4065, 2119, -89429, 24612, 63843, -78005, 78086, -13684, -73089, 81901, 30684, -12714, -92453, 56313, -98528, -55735, -33938, 53619, 25607, -3552, -89321, 65944, 41951, -28635, -54816, -68419, -85357, 4877, -51028, 20582, -28950, -31266, 22250, -62283, 84156, -36686, 84631, -14314, 96230, 50225, 67475, -40340, 1122, -77948, 82958, 92876, -20449, 64369, 47109, 66580, 65484, 5511, 476, -8624, -10656, 53047, -14242, 39244, 44513, -43916, -28778, -6949, tail", "row-437": "head, 41049, -61310, 29198, 61560, -8750, 12425, 94773, 99993, 26495, 3019, 35268, 55370, -94670, 63774, 9532, -68847, -54064, -98882, 66363, -60894, 58234, 1875, 92193, -70426, 3567, -45008, 61562, 76956, 97015, 50473, -96052, -10702, 76434, -34243, 1072, -87658, 93769, 47080, 95811, -56688, 57708, 45136, -26945, 67467, 276, 74909, 83051, 2295, 17327, -3772, 37580, 62594, 65477, -70232, -25395, -50735, -71120, -54311, 34319, -41635, 90152, 39244, -7998, 45415, tail", "row-438": "head, 11742, 30812, -34000, -3810, -23117, -40460, 81480, 9278, -30073, 66266, -36740, -14413, 74073, -68434, 50388, -71188, -91479, -1530, 73949, 17814, -71590, -1716, 85165, -30863, 51932, 50363, 18093, 77691, -54483, 56963, 67065, -69109, -81164, -65012, 37629, -98336, -80873, 60875, 40891, 19394, -56879, -64949, 40688, -33558, -76379, 40728, -86637, -75859, -76219, 99302, 70816, -83413, 72855, -91259, 1961, 64597, -74853, 18679, -27841, 36211, 34356, 39104, -947, -2416, tail", "row-439": "head, -84604, -17557, -60340, 48202, -16009, 31029, 6965, 75108, 60133, 57437, -39291, 32585, -15235, 8611, 52130, -82348, -34881, -28942, 11173, 55724, 89273, 80152, 86814, 95914, 36590, 11435, 
-31609, 37292, -35915, -38739, -33880, -77136, -55941, -65128, 95120, -76446, 56529, -9722, -40480, 49841, 78067, -48403, -90438, -14901, 26330, -33572, -60673, -28027, -21238, 77867, -87007, 18100, -7594, -53298, 46259, 19095, 26752, 24508, -28932, -47078, -18975, -42676, -96886, 8022, tail", "row-440": "head, -40614, 18769, 86913, 11764, 52164, -29865, -71982, 78142, 77170, 78499, -92067, -91291, -8095, 53968, 25901, -88620, -40727, -60352, -22815, 16896, 94116, 52177, 41661, -65561, 81456, -49033, 33283, -65745, 81953, 2385, 81040, -44793, -40487, -13383, -14698, 64737, 40876, 65780, 29410, -59441, -94608, 4063, 67843, -71746, 6368, 21034, -22531, -69250, 84046, 90554, 43633, 88804, -68857, 58312, 72320, -15228, 66269, -75060, 61240, 96110, 92286, 12142, 18230, 95034, tail", "row-441": "head, -49038, 4619, -68960, -9514, 81428, -59934, -2923, -80345, -45802, -10336, 85711, -54913, -56208, 805, 64249, -97069, 9109, 50839, -11229, 52477, 86813, -3016, -15411, 67355, 59157, -62390, 55953, 15108, 38984, -82865, 58184, -52533, 30309, 98044, -25721, -2093, 43464, 8165, 85938, 31305, 35060, -35767, 25294, -97398, 44808, 70112, 52166, -71237, 58307, 60915, 22891, 88026, -2605, 1746, -84219, 84633, -63448, -18570, 31186, -67323, 23451, -17820, -75994, -10385, tail", "row-442": "head, -91987, 61591, 38618, -73979, 82454, 21880, 40614, -56134, 84098, -4857, -83846, 75791, -84224, -47975, 38354, -69983, -88197, -47480, 4451, 81030, -3255, 58277, 72466, 31428, -49010, -65671, 59946, -56817, -67732, -75177, -58179, -68012, -54322, 65335, -33852, 49314, 47569, -65021, 52685, 82574, -70524, -86744, 90404, 45694, -81871, -26185, 98221, 24907, 56203, -23970, 23475, -33121, -72088, 43284, 58062, -72704, -91019, 59590, -39494, 52412, 94971, -8516, -78882, -81902, tail", "row-443": "head, -47084, 24144, 25548, -4353, 87596, -72984, -28084, -17680, 21786, -82753, 1782, 19361, 38422, 30601, 10294, -97684, 39125, 83415, 49802, -6613, -42759, -62114, 33807, 235, -60772, 67933, -29899, 26072, 94256, -62279, 71758, 24748, -96324, 64804, -98768, -22655, -2698, -75791, -7949, 89642, -5019, -78659, 56197, -89680, 87589, 37697, -46212, -16516, -74028, -96625, 58752, -48674, 3205, -4521, 42012, -14038, 13110, -17636, -81783, -11528, -95179, 71208, 99456, 64187, tail", "row-444": "head, 96477, -80764, -40225, 24939, 2343, -86198, 47181, 56536, -60928, -59439, -54120, 17943, -62354, -64231, -57866, -34571, 29511, -9532, 23637, -60535, 50639, 32131, -3556, -94786, 26941, 77221, -32604, 52957, 43461, 98059, 87082, -38827, 56747, -52533, 57270, 31036, -32424, 97589, 76518, -27111, -21158, -90373, 45034, -28140, -85414, -17015, -5985, -15266, -9138, 9972, -59057, 85953, -41299, 32868, -87193, 19945, 60143, -43851, 30073, -66464, -84447, -41164, 4470, -46776, tail", "row-445": "head, -58303, 24143, -91293, 92976, -19550, -11097, 25676, 70406, 9741, 84524, 97320, 91662, -20451, 96914, -48786, -29672, 34836, -78197, -34136, 68518, -50968, -32876, 68953, -79529, 60059, -3348, -49430, -11507, -32618, 65422, 96265, 4218, -63672, -66048, 45859, -30601, -66942, -32235, 23843, 48544, -28732, 7608, 70077, 43056, -23236, 27398, 21059, -88611, 33887, 7613, -80261, 546, -36991, -46378, 20376, -58933, 53691, 97452, 18845, 8024, 22016, 39275, -58647, 24074, tail", "row-446": "head, -58264, 62767, -2190, -85585, -57262, -75786, -79544, -25365, 66083, -46919, 79760, -90538, 67469, 5189, 67891, 90668, 74158, -56055, 97140, 50297, -98311, -1944, -22518, 58615, -37258, 26406, -72948, -58522, 81668, -56651, -19595, 83216, 32745, 99794, 31409, 
12395, -62089, 59914, 53741, 91271, 59367, 96076, 72800, 20201, 18812, 12581, 53623, 45289, 93800, -99110, 23195, -24327, 41561, -12944, -44847, 90518, -34108, -90687, 55305, 82634, -89515, -18577, 55439, -27670, tail", "row-447": "head, 87259, 40318, -4931, -42689, 36451, 72960, -76398, 7889, -19752, -14404, -85420, -53641, -5915, 66687, -81834, -6325, -53415, -12440, -90087, -38071, 78656, -72621, 8745, 74448, 32141, -7214, 42307, 81604, 42560, -77263, -94582, -96614, -78911, -21654, 15057, 44176, 11964, 22565, 16306, -28889, -22686, 68156, -90739, -75469, 25467, -93170, -57062, -58229, 59890, 99300, -88824, -30122, -62936, -43453, 77431, 27249, -72938, 25146, 82509, -96352, 94698, -51508, 74402, 95206, tail", "row-448": "head, -28050, 87155, 81275, -62025, 31632, 84915, -55789, 40485, -55953, 51939, 11602, -69244, 3364, 61328, 4444, 15273, -59551, 36184, -99744, 40517, 34104, -92386, -18185, 69663, 29113, -84450, 39650, -26215, -21467, 90705, 93042, 39661, -27433, -98690, 48925, -24184, 50609, 61346, 32231, -37973, 75736, -77668, 46253, 52590, 18653, -55624, 61505, 87111, -94804, 22284, 71576, -43585, 44012, 25553, 92232, -59944, -18931, 26962, 95078, -24762, -21961, -33320, 73059, -45879, tail", "row-449": "head, -92910, 71196, -77220, 15740, -16953, 23186, 7655, -21451, -16223, 20344, -9713, -58044, 49841, 66318, -49534, 6157, 35077, 93719, -3646, -70977, 49046, -82513, 40272, -55020, 60658, -58060, 546, -72639, 66925, -59878, -70013, 8739, -48867, -16979, -68368, 79850, 33860, -371, 22867, 60498, -55473, 39916, 59350, 32370, -79336, -79176, -74459, 48807, -56118, 81049, 47183, -9729, -66211, -22603, -76398, -43415, 91186, -35882, -33282, 19038, 73963, 24250, 99530, 15366, tail", "row-450": "head, -21397, 11514, 18429, 26290, -81842, -40529, -52991, 22123, 9383, -79665, 83285, 33960, -47520, -73137, 42052, 52454, 64287, 14223, 57833, 53513, -2964, -85459, -67752, 23125, -22588, -82321, -10401, 28107, 45978, -5446, -63330, -86068, -86858, 15848, 27162, 36962, -10502, -6991, 17922, -75364, 31240, -32133, -31912, -16131, -81677, -5520, 15597, 7669, 17365, 15168, -96753, 94369, -56544, 58803, 77991, -20114, 37142, 99411, 48079, 24649, 89173, 12725, 11851, -61447, tail", "row-451": "head, 86675, 75295, -79104, 81541, -51945, 60077, -49460, 46580, -68264, 79985, -59875, -61329, -59743, -39385, -36974, 50830, -30104, -2349, 74202, 97813, 98754, 16015, 8161, 89377, 85841, 9269, 68117, 51048, -39099, -42153, -69661, 80580, 7312, 53015, 84887, -40424, 87905, -26782, 27706, -17909, -17168, -19590, -8505, -35881, -7895, -87042, -47345, -78314, 50509, -63579, -11322, 9797, 57923, 42196, -36298, -51077, -34798, -41968, 50105, 18246, 52293, 53748, -20376, -87189, tail", "row-452": "head, -87227, -25835, -86364, -43170, 56649, 72674, 26248, -22351, -14378, -17788, -12939, -51280, 2673, -32628, 34281, 81570, 15553, -95724, 39784, 77762, 31017, -86098, -86898, 75691, -87646, 87746, 94904, 52059, 13816, 59826, -65638, -86370, -49755, -55699, 80473, 51675, -63167, 91581, -48785, -5605, -43944, 93576, -68433, -9958, -98160, 20391, 41672, -4979, -37160, -46016, -38776, 75320, -8704, 45271, 28388, -16469, -35535, 1847, -30939, -96276, -99580, 28865, -59900, -38970, tail", "row-453": "head, -60584, 28216, 82381, 47084, -21675, 72565, 58178, 9982, -46286, -68086, 19779, -25512, -24634, 88646, 3298, 67867, 2631, 15648, 75345, -98541, -96745, -34397, -24172, 21066, 2452, -2496, -84971, 16175, -46291, 77504, -36378, 27353, 70602, -66275, 29465, 50431, 41617, -91427, -68257, -60720, -59037, -47916, 27531, 68802, 
-15472, -74385, -47957, -30613, -15478, 50581, 43025, -9889, 32682, 12394, 47424, 47050, -43907, 97374, -8350, 14859, 42007, 39038, -68699, 43062, tail", "row-454": "head, 82624, -43368, 21064, -12771, -6662, 87715, -54763, -20360, -14089, 90444, -25190, -79117, 56410, 71798, 40170, 75248, -20596, -28696, -70208, 67630, -83969, -46125, -18528, 64908, 47887, 58672, -89583, -97784, -3744, -61348, 86478, -98730, 16752, -25365, 43323, -53889, 19244, -25835, -1917, 70854, -11632, 76175, -7406, -83340, -86859, 81899, 6896, 95574, 43414, 65023, -54491, 81680, 36586, 8857, -31471, 26333, 36608, -76017, 8675, 23959, -95622, -38696, 63279, -67168, tail", "row-455": "head, -65964, -77856, 76324, -89363, 91696, -17402, 58741, -72479, -95531, 31676, 61646, -71817, -2034, -32328, -22676, -25137, 85710, 68237, 54264, 26993, 89358, -40941, 80303, 88448, 38625, -97892, 50240, 51553, -3682, 69497, -22512, -54314, 51401, -60586, 95744, -21711, -19965, 66160, -40464, -46418, -88098, 23630, -48882, -14779, -51089, 48268, -76507, -16832, -5623, 39733, -21561, -79648, 86996, -37001, -33772, -30554, -54808, 80522, 80765, -39007, -41649, 67907, -5710, -26624, tail", "row-456": "head, 73322, -4370, 47743, -5193, -41685, 97958, -29490, -34634, 4304, 23956, 4057, 9297, -6218, 78976, -67138, 56224, -22301, 55475, 95905, -34563, 42577, -71505, -61867, 87426, 94854, -13826, -77891, -49033, 82374, -78113, -50399, 29995, -57453, 26425, 83220, -47435, -21908, 55099, -13101, 52779, 4454, 57487, 96651, -38719, 34675, 3493, 57473, -69964, -77801, -32396, 91258, -79314, -17266, -35780, -8180, 49335, 3051, -63291, -35076, -2556, -24145, -40046, 2903, 2486, tail", "row-457": "head, -87894, -47249, -29866, -8217, -77553, 11005, 55415, 68730, 109, -95970, 76835, -87239, -14699, -82144, -71003, 60797, 96629, -47488, -2902, -58330, -53470, 44395, -73691, 17237, -86610, 19631, -61516, 69341, 9308, 85712, 14083, -7557, -22418, -43842, 84991, 22653, -62178, 52820, -80058, 3767, 28196, 42839, 63282, -40693, 25437, -93374, 25354, 53479, -3783, 66308, 76154, -55029, 90814, 43779, 66968, -77975, 63946, 52996, -96718, -67043, 52981, -53735, -55706, -72414, tail", "row-458": "head, 52168, -88114, -38004, 54481, -85889, -75693, 62961, 10869, 24070, 10278, -72498, 80887, 41021, -2497, 9158, -70469, 74890, -27192, -70466, 27768, 58903, -50238, -43352, -92328, -60441, 60389, -91607, 23325, 36002, 91652, -92315, 39868, -48559, -71650, -38893, 76612, -66816, -50085, 70887, -66788, -84054, 83816, 21054, -60786, -59631, -48573, -33218, -34603, 22196, -63918, -38877, 70212, -87432, 78686, -11293, 85722, 83256, 47670, 62470, 60755, -11194, 14864, -38472, 68864, tail", "row-459": "head, -16253, 14591, -33635, 28408, 46485, -7771, -89652, 12765, -99477, -88530, 88494, -37918, -91043, -33509, 4494, 70974, 99162, 83137, -25123, -67210, -97409, 38876, -73222, 83241, 17057, 76362, -64002, -25561, 37400, -54654, 30157, -73004, -28311, -18342, 34650, 21506, 63276, 64092, 37224, 21681, -90203, -59770, -70045, 53616, 33962, -28921, 27103, 11596, -35742, 33675, -75821, 40982, -11643, -60091, -5903, -9655, -91957, 98298, -11676, -28660, 60690, -16569, -51648, 91701, tail", "row-460": "head, 94368, 43350, -69767, -95887, -8650, -38159, -85543, 6355, 71198, -36049, -36968, 59494, 89069, -54321, 592, 39671, -74521, 55844, -32655, -79525, -52076, -8086, 24130, -8373, -80312, -93314, -7057, -7613, -64664, 95437, 23196, -30633, -42775, -59485, 8493, -73947, -38717, 34571, 60830, -77504, 72004, 96803, -34448, 62357, -90570, -18028, 1611, -50809, 43933, 50104, 34268, 
-93883, 22548, -93881, 7014, -95999, 88463, 53338, 98465, 87182, 86298, -77061, -48956, -28833, tail", "row-461": "head, -87135, 48334, 15085, -74881, -43356, -12339, -54312, 42096, 604, -58542, 33362, 71254, 30203, -74381, 58230, 65387, -45200, -19987, -37335, -11771, 77001, 70875, -56014, -19671, 73409, -23989, 38231, -79161, 53994, -60646, -25309, -70943, 64787, 14005, 60983, 19122, 29167, -51756, -58733, 67477, -20686, 25203, -35353, 36546, -8050, -92172, -1895, -17543, 32073, -65789, 41364, 2747, -78391, 84584, 83632, 5119, -90915, 22286, -87565, -79886, -25733, 98608, -87533, -49483, tail", "row-462": "head, -68180, -65648, -63794, 98188, -40402, 69274, -90677, 72681, 44890, -34594, 84796, 26388, -95307, -53864, -90889, -95279, 44692, 26334, -58923, -62605, 98494, -49120, -85326, 55558, -66136, -87247, 33305, -33101, -46400, 30326, 1505, 23585, 93438, -38039, -48383, -26029, 98830, -83567, 32458, 76762, -92969, 40129, 10643, 2666, -83504, 87061, 77889, -87400, -72326, -99439, 53607, -55727, -69187, 56915, -15727, -37879, 65780, 83829, 6981, -82962, -34012, 67459, 28079, -86264, tail", "row-463": "head, -13135, 81890, -43491, -30775, -34561, -97781, 71411, -11191, -38037, 13253, 24219, -17436, 69232, -34757, 57835, -65663, 29888, -72595, -15827, 62721, 92613, 90702, 55730, -56700, 16078, -9065, -63574, -12919, 20217, 58862, -34975, -95459, 22352, -7149, 93680, -41223, -92084, 30612, 80335, -99822, -86919, -701, 65736, -29055, 36284, -98900, -82703, 40071, 47013, 25090, -12038, 61544, 75355, -35375, 97271, 94311, -16561, -8257, 27069, 36035, 45995, -51055, -98254, 55543, tail", "row-464": "head, -51720, -84053, 44960, -5872, 93925, -92377, 20188, 9664, 78403, -75094, 69576, 59263, -90849, 46234, 19104, 61738, -80636, 73917, 3984, 66180, 83829, 10403, -72714, 77800, 74239, -1235, -66583, 28499, 52359, -31830, -28141, -38786, -41043, 51436, -68660, -99012, -54626, 28506, 27638, -15862, 20885, 87115, -19783, -52640, 13889, -24147, 65385, 73428, -86408, -41372, 33568, 46066, -63164, -63739, 779, -2149, 50039, -84581, 79337, 72390, 89442, 58728, -42657, -6466, tail", "row-465": "head, -8358, -83879, -63042, 34676, 38914, 75940, 12712, 20490, 40487, -93205, 4698, 4102, -30965, -5500, 83412, 26208, 66558, 42255, 27793, 59033, 95073, -92997, -14517, 56169, 8407, 46790, 36059, -47213, 77132, 19338, 92888, -19443, -96876, -8711, -71112, 86124, 41836, -21448, -35035, 57108, 50269, 51793, -43419, 88530, -51623, -59322, -48515, 61297, -99495, -49616, -53521, -40981, -70220, 1562, 44431, 67223, 57697, -93179, 81130, 80650, -52649, 45797, 77126, 87458, tail", "row-466": "head, -52717, 84581, -43243, -31849, -96919, -83283, -77365, 66219, 48058, 68497, 17768, -77398, -70135, 18105, -44837, 93465, -93618, 991, -48666, -25446, 87841, 88947, 64575, 75367, 61669, 44473, -74150, 13647, 39342, 38338, -77121, -27459, -26238, -52336, 30504, -73941, -84484, -88053, -67266, 50905, 23537, -11708, -38695, 82830, 31297, 2356, -19082, -14526, 5243, -12679, 89999, 23778, 2827, -44275, -43218, 80962, -20955, -29503, -18980, -89469, -32073, -17514, 46965, 86807, tail", "row-467": "head, 27728, -29907, 89167, -10866, -30625, -59527, -23719, 8678, 39199, -83076, 88342, -91006, -24048, 11014, -69803, 76751, 26340, -87464, -58414, -42708, 49115, 91009, 34933, 23472, 14478, 55656, -25158, 10232, -56340, -42658, -54352, -22817, 32850, 41019, 50320, 28682, 50376, -21303, 21026, -91217, 71157, -80631, 18973, 41184, -71427, 6744, -90840, 97106, 85370, 97709, 63426, -27880, 97927, 14198, -25877, -53384, 25211, 73985, -39298, 
21857, 41557, -89202, -79350, 25948, tail", "row-468": "head, -82428, -10047, 62309, 20245, -92025, -95989, 12622, 7495, -72833, -16413, 33034, -67207, 88351, -94520, -86756, 18976, -90031, 71666, 73207, 8394, -57440, 36923, 51684, -45652, 62989, 25563, -36559, 83174, 35946, -32880, 43445, 20823, 63907, -55784, 80100, -73246, -30308, -86661, 87111, -50831, 78559, 54224, 82906, -37493, 85407, 85193, -22930, -44552, -28854, 30060, 74144, -91150, -30900, -55692, -51980, -7121, -8695, 37957, -45451, -62926, 41772, 44284, -5841, -42004, tail", "row-469": "head, 57907, 61139, -66760, 32041, -35648, -68224, -92961, 81580, 39843, -55771, -23544, 32981, -2063, -1048, -75659, 25912, 79204, 26142, -96299, 60837, 54252, -81652, -20331, -45331, 49139, 3512, 59565, -47163, -66383, -83616, 72675, 18300, 15106, -9510, 21689, 38139, -27158, -45556, 39410, -24370, -52163, 93535, 227, 56725, -71549, -24384, -42084, 63459, 98416, -95102, 84684, 85311, 48585, -49170, 59645, -82545, -68783, 806, 99765, -57489, 98289, 60143, 70826, -77411, tail", "row-470": "head, -31391, -54197, 66804, -73279, -87475, -59627, -62237, -23306, -96284, -78246, 43406, -55087, 87697, -91700, -59699, -84809, 23122, 69779, -58192, 91143, -19606, 68303, 12926, 39156, 89723, 35073, -5706, 71941, -67957, -31386, 82482, 30145, -87225, 57715, -27460, 55665, 24819, 57846, 41902, -83796, 37910, -89827, -78213, -2140, 66932, -58509, 49542, -6767, 59549, 75056, 38095, -48556, 78436, 46532, 70871, -24442, 29630, 26679, 98509, 27627, -11126, 54359, 43563, -51179, tail", "row-471": "head, -53175, 60979, -53259, -87168, -68946, -52209, 51560, -81952, -92254, 26448, -87084, 86128, 46004, 57347, -26949, -96670, 7328, 54515, -58118, 25413, -64549, -57755, -85368, -42724, 47212, -36240, -62280, 88661, 25661, -34127, 380, -72292, -51044, -84615, 38649, 33987, 60033, -49909, 91338, 80898, -48902, -64921, -60261, 25666, 97558, 24377, 32071, 5064, 25564, -48092, -43907, 61201, 75608, -6976, 92962, 17360, 90528, 72319, 57600, -42712, 5431, 76212, -22281, -83223, tail", "row-472": "head, -77962, -44197, -5057, -65663, 29223, -19205, -36200, -62694, 46554, -46755, 29443, -52680, 73371, -76419, 65158, 34293, 98072, 76645, -27389, 18255, -52404, -69644, -37749, 47786, 66367, 14864, -52620, -31717, 81741, 97977, 86634, -2872, -37290, 5607, 54596, -78450, 99626, -70854, -70342, 76158, 34410, 8995, -6691, 77524, 94914, 24290, -71725, -50575, 93372, 86757, -74582, 1387, 31953, 92853, 4591, 93081, -9866, 3401, -18225, -95296, -83890, 57733, 24780, -26117, tail", "row-473": "head, 26231, 23411, -47303, -30369, -48312, -26997, 58041, -51851, 9477, 64634, 75622, -57173, 26547, 23349, -23077, -52519, -22094, -64735, -87837, -16174, -56346, 87054, 39363, 86136, 98327, -99117, -93420, -23566, -40267, -21534, 1198, 5507, 75830, -57409, -41084, -27532, -88793, 40055, -44913, 69648, 50048, 86370, -94200, 77173, 49496, 35922, -60538, -2711, -59648, -55154, -6079, -9545, -90140, 4715, 75111, -99950, -20275, -70155, 17356, 25527, 4496, 78090, -88052, 60254, tail", "row-474": "head, 69679, 96571, 88228, 61662, -82340, 20332, -35487, -80132, -73666, 91897, 90864, -36253, -33740, -70411, -49281, 59417, -72775, 23277, -71778, -89517, 75200, -90751, -31463, -5711, 72527, 59019, -58243, 71421, -73822, 75065, 15178, -31792, 58615, 87906, -1180, -99542, -8129, -66399, -9085, -78238, -72144, -38150, -31247, -83017, -55352, 21493, 2175, 41898, 39319, 14857, -86484, 45914, 91073, 19425, 76438, -74656, -67951, -5377, 12479, -21512, 42403, 43248, 84432, -68039, tail", "row-475": "head, 
-80897, -29660, 3726, -16741, 4799, 70771, -33148, -68779, 30048, -31665, 19313, -89994, 24776, -10599, 50176, -95190, -46803, -94363, 33016, -26567, 7930, -9882, 13658, 37723, -69268, -51939, -82638, 30522, -26041, -11521, 32105, 67806, 37992, -83667, 88999, 5200, -1875, -59488, 48338, -5779, -52455, -71018, 30921, 36664, 75110, -57814, -25286, -82345, -193, 14745, -29543, -43602, -77723, -63738, -73619, 68565, 94375, 61623, -93254, 21949, -53764, -40156, -3432, -9843, tail", "row-476": "head, -34121, -93643, 68023, -76835, -13746, 1686, -49677, -90604, -37263, 8996, -43557, 82879, 87842, -67797, 72604, -93848, -83444, 33015, 45130, -83480, 6733, 82734, 78006, -915, -95335, 72042, 30243, 53096, -746, -21686, 37044, 85532, 63670, 35795, 83550, 34616, -71094, -49877, -11015, -23382, 61164, 7426, -2618, 94109, -64908, 63852, -69142, 40547, -73916, 67966, -46246, 45127, 86826, -94612, 25210, -92897, 17251, -51525, -11504, -54335, -95805, -55973, 27067, 9103, tail", "row-477": "head, -32924, -57870, -51523, 16369, -10745, -5988, -98233, -97761, 28721, 37967, 28876, 63214, 69694, -12085, 77562, 77008, -66208, -27687, -51742, 64063, -59521, 27356, 6181, -3185, 48984, -83244, 8054, 69167, 23169, 16553, 97573, 31076, 14525, 49410, -66260, -43474, -71174, 1014, 46499, -60895, 14754, -85758, 63344, 4742, 66994, 76440, -41939, -90232, 40415, -100000, -18243, -65894, 45543, 35924, -64881, -51867, -72438, 89179, -50757, -69415, 81272, 2782, 11035, 41365, tail", "row-478": "head, -26815, -51913, -78471, 32435, 5881, -79735, -60795, -24086, -60852, 37583, 82021, 71989, -59242, 98569, 52747, 76206, 37950, 63968, -85896, -75452, 55392, -11907, 85577, 43541, -58872, 19371, -45621, 84458, 28555, -9613, -36474, 80608, 46063, 6206, 26890, -26432, -47431, 2249, -67090, 47810, 26944, -89132, -89538, 45151, -43408, 77820, 85074, -6857, 4719, -29607, 86769, -47735, -57034, -39713, -65641, 22787, 3769, -15921, 18375, 33665, 36536, 94179, 98052, -24082, tail", "row-479": "head, 77462, 3062, 82323, 79465, 47872, -31933, 10385, -32708, -80382, 17110, 1181, 70549, -82823, 75109, -69801, -24690, -98692, -10547, 78800, -92048, 18732, 2686, -76630, -30034, -86193, 52280, -48238, 80972, 76082, -79662, -61923, -3344, 91639, 96660, -84888, -39196, -96531, 93220, -78651, 13023, 27324, -43055, 57180, -3372, -85135, 61283, 70258, -48207, -93897, -35326, 18671, 54278, -75616, -81872, 81399, 99370, -30686, 52192, -68793, 3259, 59170, 16529, -45271, 29111, tail", "row-480": "head, -73638, -67290, -66721, -83068, 62736, -68022, -80421, 24294, 28110, 44995, -98037, 51541, 84726, 95928, 36159, -84766, -42244, 64971, 32594, 29277, 76568, 54337, 31991, 56959, 22867, 74364, -52122, -55321, -89164, 62395, 65759, 82872, 43515, -50656, -11174, -7528, -47676, -70368, 36086, -63127, 64739, 27622, 55574, -63610, 91800, -85519, -81924, -40666, 83431, -87599, 40157, -83319, 55240, -35360, -56031, -82409, 30772, -93705, -55184, -76956, 6255, 78838, 93043, 32226, tail", "row-481": "head, -67092, 77413, -55426, 58168, 56237, -48363, -69163, 30303, -94126, -87598, -66156, -79392, -94013, -97289, 62156, -63585, -67948, 1923, -92994, 5773, 63810, 51168, 26869, -51773, 44475, 66365, -58531, 56092, 16868, -89543, -52837, 16283, -19615, 464, 1995, -73968, -48965, -64984, 53651, -3960, -54731, -90468, 41980, 95411, 84983, 15760, -42857, -33656, 53731, 1202, -59233, 8998, -64032, 39641, 57029, -71713, 78410, 15643, 12615, -45397, -15822, -56030, -67130, -50094, tail", "row-482": "head, 15040, 43205, -88497, 40996, 74175, -39445, 5622, -46446, -13105, 
-1061, 19237, 38138, -51052, 84162, -17737, 46195, 4069, -52976, 86696, 25155, 62159, 29025, -89514, 54444, 65982, 27133, -58662, -47522, 71100, 84305, -55516, -37591, -61640, 34370, -18945, 7101, -65015, -86963, -53090, -76062, -25191, -13183, 95820, -77824, 37115, 97248, -70079, 9532, -15815, -33687, -41943, -9188, -62431, -98066, 3139, 18733, 27625, 25505, -85014, -76955, 68822, 53128, -25456, 11678, tail", "row-483": "head, -27808, -35514, -11669, 49067, -19164, 15230, 80521, 91409, 64901, 61248, 80940, -51248, -25344, 89641, -77745, -17197, -50088, 9213, -37290, -40488, 82660, -83706, -60606, -57465, 84433, -95315, -9586, 71602, 85825, 43737, 44448, 36406, 54027, 86365, 75521, 60239, -15074, -14043, -93723, -39720, -89108, -58407, -29274, 44485, -74865, 57049, -48290, 88100, 54697, -40130, -26404, 62305, -50067, -83915, 54670, 63553, -54039, -52025, -27811, -93879, 70281, 12404, 52695, -93133, tail", "row-484": "head, 53764, -31909, 21285, -32976, 40085, -22076, 22316, 8751, 83698, -55116, 98241, -33973, 88515, -53805, -35817, 40259, 42518, -16250, -95017, -64665, 85907, -47917, 72276, -51873, -53700, -57973, 69870, 35798, -37079, 17977, -39674, 53968, -77599, 19655, -3244, -41853, -33981, 76678, -17066, -96586, -89539, -55200, -28310, 52255, -52193, 36866, 96625, -69498, -71132, 98922, -90912, 66800, 88405, 13547, 74230, 48732, -2348, -14838, -40663, 30523, -60232, 93649, -75224, -34892, tail", "row-485": "head, -53649, -74066, -61439, 56166, -81433, -72503, 96826, 31837, -92881, 45398, 33445, -64622, 2535, -65145, 14663, -68296, 83505, 19750, -31015, 92105, 47126, -86293, 32250, 13423, 57207, 13662, 73774, 84014, -38689, 46873, -28755, -51526, -57932, 77177, -78199, -29626, -82813, -8260, 43434, 29022, 15719, 29681, 76878, -88205, -27666, 18595, 2427, 70088, -40556, -57433, 36169, -96633, 17979, 9351, -7192, -11171, 90804, -83834, -64700, 62500, -44262, -28435, -66942, -86792, tail", "row-486": "head, -71159, -97934, -27599, -21375, -90934, 51804, -5077, 99560, 10315, 99669, -54098, 59240, -48180, -77066, -41864, -93223, -76415, 427, -91604, -35304, -38553, -42589, -64816, -11699, -9796, -51179, -39125, -16662, 84031, -77179, -93010, -40242, 65640, 90586, 45023, 41282, -35685, -29363, 20356, 17279, 65478, -48798, 3295, -6840, -52311, -1741, -46434, 4306, 38316, 45125, 67257, 72017, -80761, 79246, -14827, 35157, -8903, -49739, -21206, 34424, -9726, 55557, -73711, 41179, tail", "row-487": "head, -56867, -55551, 47526, -85598, 57306, -52806, -75122, 10109, 21697, 20816, -16621, 7184, 26261, 12469, -20742, -99168, -86559, 54512, 86749, -68799, 69722, -87180, -1915, -47618, -81013, 54423, -39941, -5243, 31548, 88570, 85360, -34786, 66386, -45293, -94151, -23332, 41740, -60280, -92964, -59898, -42644, 23440, -28510, -63206, -36140, 15349, 51551, -40505, 5485, -52136, -14381, 95845, 64854, -62301, 94167, -49176, -67352, 4219, 54865, 74505, 99017, -87886, 22339, -77837, tail", "row-488": "head, 66377, -93522, -95291, 76170, -32270, -93833, -66187, 79131, 7307, 20451, 76867, -71309, 64340, -67957, 71696, -30340, 92734, 39990, 17817, -9853, 14205, 30825, 86555, 49986, 96547, -93466, 86702, -25021, -89968, 7965, -26800, 99513, 95597, -74926, -90364, -44044, -23568, -15567, 23229, -23890, -22040, 22477, -50718, -62855, -67409, 26085, -54870, -4326, 70889, -2372, -50761, -21574, -34735, -16682, -43370, -4219, -36306, 73710, -83873, 80216, -91547, -7296, -83081, 72404, tail", "row-489": "head, -15535, 97609, 95433, 50826, 8301, -62290, 28180, -49279, 54946, 62930, 41215, 77945, 42577, 98723, 
75840, 99006, 52425, 43863, 18413, 20614, 95104, -38795, -50614, -75698, 10527, 73726, -3480, 82132, 32458, -89621, 81350, -53828, -92875, 38236, 96452, 26414, -87678, -91951, 1035, 85373, -61885, 57324, 47906, 90859, -71674, 9475, 7592, -80686, 1021, -12956, -12247, -17901, 88934, 54142, 35686, -43871, -34895, -36659, -97586, -22500, 58005, -62287, 68244, -70731, tail", "row-490": "head, -66426, -96294, 27724, -51358, 43612, 15753, 90569, -56624, 80273, 19499, 45986, -38346, -26104, -40960, 47348, 37233, 56759, 74846, 30099, -9749, -19834, -75462, 4750, -51120, -48822, 8356, -53825, -86695, -37744, 22376, -99109, -41025, 40980, -30901, -88201, 84754, -95037, 33360, 47288, -14270, -89974, 49073, -29877, 38201, -83164, 52229, 15555, -26555, 56208, -82927, -2593, -97963, 75521, -46818, 67512, -60963, 87107, -81097, -5792, -5734, 82943, 20660, -34051, -3350, tail", "row-491": "head, 69248, 3564, -26380, 92136, 85108, -72483, -4184, -9029, 48799, 5330, -26259, -26451, 55201, 11514, -76262, -78322, -3754, -96595, 68026, 83402, -85775, 13774, -93498, -91959, 10073, -84108, 40080, -88034, -28954, 31771, 4074, -38356, -89671, -14710, -79944, -17729, -95106, -84039, 59729, -83992, 70737, -6869, -8378, 93642, 50232, -19709, -74933, -1791, 35, 6255, -40369, -88080, 96873, 71189, 8611, 57303, 89641, -88547, -84884, 75993, -87998, 5509, -42413, 24647, tail", "row-492": "head, 13579, 30331, -50651, 2994, -52346, -10512, -12839, 51413, 79526, 84897, 80631, -90041, 12951, 51977, -57429, -3486, 9019, 95220, 80048, 23006, 81424, -51535, -43118, 43610, 43273, 75679, 75324, 5912, 32588, -48917, 40794, 54860, 83113, 21864, -78028, 53796, -7991, -58208, -55527, 85905, 22622, -54166, 99992, -30693, -52521, -69363, -3689, -46815, -73698, -97593, -73925, -69802, -71567, 18228, -39594, 43772, 92306, -10480, 20120, 3736, -23388, -78360, 875, 44749, tail", "row-493": "head, -18057, 87658, 1572, -96546, -20549, 22400, -24327, -28662, -86947, 70191, 16979, 8251, 41750, -92401, -47157, -74283, -66748, -58350, -76539, -89626, -94588, 53693, -51629, -69791, 36365, -61898, -90729, 51974, 89131, 36791, -62229, 13812, 64613, 19893, -20888, 34452, -45555, -31877, -20068, 90323, -28989, 34800, 80540, 59912, 49793, -8953, -76967, 91624, 47612, 86274, -59336, 14819, -10980, 86165, 16779, -56626, -56794, 7352, -82038, 26908, 21340, -86926, -18247, -81757, tail", "row-494": "head, 28304, 40233, 81463, -87688, 87210, 82340, -32385, 65730, -53181, 24795, -68047, -75702, 9738, -5315, -43853, -94961, 70437, -3825, 42076, 27562, 88709, 37203, -83289, 28900, 34551, -55133, -21585, 45374, 66241, 88663, 50497, 42004, -59420, -67606, -5718, -59595, 32757, 69200, 36537, 97285, -79441, -56269, 10612, -4483, 62205, -49956, 83568, 62289, -37094, 34135, 99882, -11078, 39067, -98192, 18831, 60952, -34213, 18403, -29646, -90613, -14747, -50743, -98898, 99486, tail", "row-495": "head, -82536, -9337, 7669, -86134, 96399, 72871, -82646, -37461, 22202, -64537, -68866, -63060, 43018, -55522, 69320, -14984, -65556, -80411, -11132, 28535, 4198, 68155, 98186, 4057, 86891, -89140, 39613, -28974, 83672, 17607, -42451, -41200, 4225, -44436, -27980, -83667, 13104, -483, 64236, -75884, 9068, -15732, 99018, -21493, 4336, -6690, -99920, -79337, 2883, 72158, 5431, 6475, -67385, 25323, -18210, -88769, -87733, 86350, 938, -7704, 59327, -20079, -47964, -55396, tail", "row-496": "head, 85369, 32779, -22137, -87317, 35255, 8667, -98551, -10952, 48181, -84431, 89249, -20956, -6919, -74420, -63613, -6532, 24788, 72464, 64290, -63623, 99261, 76278, -18606, -4950, 
-27814, -28886, 7518, -97616, 87980, -83831, 60623, -21189, -37177, -62668, -12379, 36277, -57333, -9302, -80506, 175, -99068, 44725, -48291, -84233, -66146, -88679, -57377, 17231, 12796, 56467, 77275, -17722, 30238, -75259, -33791, 64188, 58261, 46794, 76343, 79565, 62127, 32805, -60649, -96302, tail", "row-497": "head, 43923, 74468, -69112, 8335, -62125, -38038, 47323, -85004, -25745, 14699, 43635, 45340, 78374, -70885, 46851, 13316, -90909, -39501, 95517, -753, 16415, 79294, 59828, -67295, -16552, 3478, -38964, 71323, -14887, 80510, 18094, -70003, 130, -56793, -29211, 23941, -86339, -74702, -8533, -52001, -79913, 36324, 35878, 39843, -24972, -66651, -10181, 58889, -38113, -33468, -39788, 30913, -6438, -11403, 4038, 14789, 43849, 1994, -14288, -31762, 92614, 51272, -88626, 50856, tail", "row-498": "head, 80923, 42353, 94303, 85990, -5510, 47923, -54172, 43906, 72648, -87797, 20824, 92961, -16849, 47976, 18756, -6297, 81317, 26353, -44608, -32304, -31283, 29919, -82997, -69895, -12587, -51117, -73244, 10336, -74648, -80260, 47699, 10387, 90457, 48108, -45708, -88557, 24442, -69214, 26032, 89739, 16312, -85370, 3908, -22746, 77174, 65485, 60684, 6392, -26971, 72400, -88959, 97552, 50454, -86357, -99903, 37707, -26516, -44251, -48447, -92176, -92207, -70307, -74286, -50309, tail", "row-499": "head, -4037, 13066, -14291, 14784, 95721, 18185, -63834, -18715, -93206, -18740, 61985, -78290, 79054, 87209, 11912, 15401, 23722, -60859, 43586, -89983, 10111, 48908, 3370, 53515, 7971, -11348, -11850, 11774, 21104, -90658, -94305, -53559, 90910, -68320, 56013, -19545, -99533, 7472, -28450, -52378, -46372, 77784, 2156, -87899, 15892, 62425, 15573, -2639, 81180, 1579, -82154, 94801, 35206, 64476, 80209, 65814, -23257, -75235, -69347, 66212, 34544, -39039, 63162, 47610, tail", "row-500": "head, -31733, -48297, -91712, 19635, -33086, -15583, -62049, 53559, 49985, 78852, -52569, 91490, 24357, 53561, 89039, 85349, -77442, 63826, -33028, -28335, -50990, 74887, 52004, -68312, 16030, -71425, 40186, 34640, -77805, 96818, -5792, -29622, -37851, -75473, 18045, -72575, 4339, 61047, -59778, 15335, 72896, -16815, 43629, 45080, -60570, -39720, 53936, -14398, -49785, -59462, -57994, -46990, 25712, -81568, -67745, -16731, -94936, -35822, -43067, -64174, -28046, 79663, -39191, -1422, tail", "row-501": "head, 8735, 79488, 5179, 3436, -32272, -63147, -18126, 22256, -76783, 10769, 80342, 59341, -64404, -2524, 96006, 30335, -54141, 66830, 65823, 96639, 53754, 83394, 70163, -42973, 32808, -70737, 87143, -9543, -67636, -61141, -78988, 69300, 32470, 1517, -84375, 11580, -42152, 98540, -61154, 65808, 36914, 7337, 80584, 42880, -56125, -80451, -19142, -42408, -25846, -30295, 41791, -71575, -79910, 5749, 55589, -21955, -46755, -58889, 18385, -86969, 44689, -4503, 14835, -56076, tail", "row-502": "head, 66752, 31046, -45065, -47872, -59377, -40940, -16183, 81657, -60938, -89512, 55008, -24261, -17598, -60139, 37800, 98263, 12849, -5534, 48670, 87021, 96361, -76800, -84928, 8954, -38749, -58741, 51564, 88726, -79800, 23872, -69453, -2687, -82545, 74714, 74426, 48354, -23623, -82417, -1217, 97240, 37460, 40354, 36421, 9574, 99235, -64188, 39691, 4366, -29996, 5529, 75049, 16464, 98165, 13139, 24178, 38836, 73672, 70441, -55346, -94272, -93088, -99849, 17, 37928, tail", "row-503": "head, -85406, -26618, 6870, -19925, -79598, -88840, 12003, -87404, 68998, -88530, 9348, -20499, 86174, 26029, -631, -56620, 37242, 56021, -15426, -42554, -99924, -43009, -99525, -4319, -30427, -13998, -90404, -54007, 82546, 38851, 80918, 66860, -93503, 
6615, -57484, -67735, 72787, 99931, -71986, -90663, 62459, 2371, 98906, -78769, 2606, 6611, -99827, -63894, 94542, -98385, -75716, -51359, 76699, 85755, -84307, -85810, 1883, 31299, -62177, -37705, -37262, 16998, 59109, 56413, tail", "row-504": "head, 81384, -29192, -82806, -80199, 17424, 85365, 3388, -96381, 79048, 12768, 70137, -72901, 63063, -47940, 22775, 19970, -90762, -12699, 69696, 87487, -17628, 8352, -6494, -55650, 20827, 60560, -9841, 68805, -97802, 1577, 88368, 26110, 11491, 6916, 60850, -97425, -87755, 38286, -83308, 19611, 40147, 23262, 89261, -62583, -75485, -43994, -98776, 23931, 83121, 92035, -7939, 76616, 87101, 60314, 51529, 80516, -77422, 89226, -83390, 3051, 62626, -76238, 75325, 39563, tail", "row-505": "head, 91249, -62805, 76236, 83666, -21359, -8401, -40594, 12729, -93679, -65590, 85000, 30633, -99090, 28176, 35347, -75838, -49706, -57479, -79452, -13469, -56493, 17132, 45261, 75614, -37517, 18529, 91179, 25573, 72801, 38883, 72627, -25977, -552, -6621, 4834, -87744, -82082, 8433, 77663, -41941, -47739, 7076, 9550, 90437, -33006, -26796, -24971, 36939, -4806, 65221, 6737, 12989, 14747, 79653, -67305, 52084, 67873, -9521, -42459, 64423, 12523, 28216, 54746, 97276, tail", "row-506": "head, 43907, 15743, 80262, -44250, -18929, 52558, 31235, -24773, -98925, -81095, -69079, -44521, -67042, -95264, -51446, -85788, -92429, -76345, 55452, 50429, -62042, -26765, 99533, 79330, 92135, 80922, 78918, 86412, 6040, -46753, -89406, -89422, -99755, -68580, 117, 83608, 76444, 51601, -26994, 83645, -58003, -32257, 78655, 1577, -22168, 85205, 5971, -5103, 82391, -23601, -80227, -32299, 90173, -44761, -34916, 665, 87519, 46682, 41198, 77738, 81294, -43611, 24191, -98151, tail", "row-507": "head, 54475, -55574, -31713, 34299, -93857, -74148, 72691, 42593, -65501, -5642, 98105, 99556, -89631, -70396, 89070, -40142, 64407, -4619, -25821, 51358, -64712, -37337, 39172, -95994, -84428, -8431, -72426, 70683, 99713, -3950, -14320, 16439, -61801, -19805, 49510, 48507, -79581, 76037, 57890, 50912, 70820, -28826, 94198, -43888, 88319, -30945, -82633, -82791, 79958, 19589, -17833, 50986, 17007, 94267, 26779, 21447, -53733, 84736, -76560, -78827, -74657, -8223, -43545, 93730, tail", "row-508": "head, 9638, 51522, 323, -96081, -18017, -98044, 63670, -52107, 77631, 93636, 24497, -4686, -77468, -14142, 41718, -30919, -6596, -70880, 88345, 48402, 10458, -15799, -18260, 84738, -9771, 88275, -69312, 73055, -73183, -75683, 65313, -93173, -38397, 60840, -19307, 58589, -13634, 86242, -83043, -2850, 97124, -58554, -7707, 51870, -340, -32340, -10704, -78499, 1204, 97817, 71181, -89114, 89078, -57643, 15194, 24929, 88692, 65431, -31546, 63552, 76522, 69679, -16801, 9908, tail", "row-509": "head, -36315, 66005, 33772, 21693, -69420, 85441, 16136, -63683, -65161, -67210, 1687, -10117, 67445, 84711, -90835, 28837, -91589, -45131, 53015, -21219, -51174, -57753, 27166, -54504, 42589, 95390, -42732, 71772, 50715, -36913, -76637, 24953, 19290, -37521, -19246, -5707, -57358, -49347, -58847, 78053, -19474, -37692, -33725, -2462, -60077, -36728, -70708, 40535, 75389, -48415, 23158, 38959, 48895, -38407, -96474, 76019, -38926, 99964, 89758, 86630, -21839, 21966, -23347, -81128, tail", "row-510": "head, -64816, 30021, -72869, 15000, 43508, 7835, -25973, -74776, 70774, 8462, 71071, 63123, -63923, -81399, 35820, -85004, -49720, 33102, -51134, 96508, -35494, 85647, -5037, 22121, 94794, 18151, -72558, -98870, 99547, -68111, -52020, -16619, 80479, 11845, 66828, -15328, -57566, 2384, 49985, -37585, -97171, -9470, -49467, -54110, 
86334, -25133, 11713, -80427, -93732, -52429, 19508, 77783, 33303, -75481, -1429, 63806, 78238, 94089, -6803, 59388, -29993, -30014, -32032, 95913, tail", "row-511": "head, 91527, 21874, 30902, 74280, -72305, 53312, 98728, 67177, 87277, 73644, -522, -98725, -17158, -86341, 36186, 87425, -39614, 26546, 30327, -19334, -29523, 1286, 10375, -22662, 250, 83567, 14888, -41901, 75308, 62888, -79287, 51552, 12059, 86562, -88458, 18222, 5602, 55590, 68406, -84697, -77510, 64019, -44251, 15503, -38173, 37571, 91679, -85460, 82947, -43323, 41773, -10407, 17993, 30700, -55497, 32124, 49133, -58236, 92620, 64663, -59761, 27813, -22753, 70687, tail", "row-512": "head, 16041, -92817, -16799, -54186, -98791, 99455, -74756, 86575, -14079, -29894, 63809, 12290, -62540, -44288, -9564, -27700, -95165, -5735, -59606, 39826, 41650, 23016, -80951, -3848, -83967, -60879, 41580, 71786, -99951, -86620, 24993, -98050, -81408, 32345, 47319, -8547, 39066, 25011, 99632, 81906, -75954, -51839, -62373, -80945, -16332, -59460, -45004, 30324, 74221, -922, 11360, -64844, 24809, -79205, 99105, 95646, -61692, -10212, -1010, 57781, -71283, -79296, -47687, 78699, tail", "row-513": "head, 68763, -19356, -96653, -87823, -12157, 669, 83194, 14575, 65663, -96958, 90053, 89150, -56725, 4063, -67933, -1881, -28356, -71017, 29902, -40131, -29608, -20568, 29969, -86144, -59530, -82848, -72828, -10821, -5911, -46177, -22597, -94229, -66853, -64551, -39289, -14491, -29070, 67462, 59845, -83774, 13151, 8007, -63578, -5491, -15280, 36007, -70732, 66862, -55012, -1594, -58103, 22135, -55730, -50263, 78618, -38785, 20124, 87545, 78123, -53049, -75626, -77841, 82907, -90489, tail", "row-514": "head, -59531, 86171, 89134, 96098, 10422, -36806, 83321, -77552, -29485, 68522, 6605, -24110, -63628, 25536, -26644, -12203, -70934, -13206, 57475, -41118, -49730, -10341, 7748, 85335, 19606, 54933, -34151, -1778, -98287, 75767, 96037, -20031, -75688, 91850, 78547, -9691, 59503, -50947, -45436, -79732, -73671, -32395, -8660, 44558, 44391, -7387, -59989, -1164, 36468, -82959, 10239, 98957, 86191, 50588, 62437, -37049, -89486, -89936, 71024, 19880, -71549, -19827, 54478, -69759, tail", "row-515": "head, -73013, 93049, 69940, 24863, -97469, 30134, 52292, 4168, 46090, 19238, -92445, 67573, 81368, 12446, 24423, 41862, -93314, -40133, -8330, 90644, 35320, 34091, 61686, 22520, -11228, 45471, -23324, 85461, 27942, -26525, -95129, -35818, 35863, 4650, -83998, 10370, 50656, -32409, -28599, -75143, -92393, 35191, 87781, -30068, 44341, 18805, 28663, 73210, -80282, -1501, -35867, 11679, -69152, -96534, -55799, -40974, -34484, -399, 14753, -78499, -93346, -6298, 48734, 87716, tail", "row-516": "head, 323, 34236, -37217, -6873, 87229, 335, -93671, 44300, -78644, -3347, 54032, -36121, 91181, 82694, -70866, -56504, 18492, 24688, 80436, 88807, -61385, -92202, -25830, -26456, 86563, -12978, 10442, -16888, 8469, 38773, 2794, 19594, 96478, -63336, -96094, -78378, -24948, -55847, -30055, 86118, 96009, 57025, 76750, 58162, 92259, -59036, 21477, -95765, -69004, -59845, 80493, 92252, -28089, -82092, -63772, 56971, 33721, -71795, -42809, 89215, 55977, 98219, 53364, -46686, tail", "row-517": "head, -67638, -9690, 33910, -32402, 55854, 86535, -86073, 80378, -12139, -97284, 96557, -86731, -27371, 21105, 97034, -39450, 43951, -11586, -49498, 97211, -44827, -29760, 88783, -5571, 24927, 90292, 79494, -74500, -84500, -17266, -42474, -93746, -13552, 87223, 23760, 73114, 65814, -67211, 3768, -42548, -13767, -13012, -91936, 38315, -86764, -22164, 59970, -26105, -79740, -14336, 17650, 
8873, -67003, -71919, 88939, -8685, -37374, 67397, 77770, -34806, -29465, 24567, -49288, 63230, tail", "row-518": "head, 28595, 70151, -20163, -11770, -97529, 14613, -67995, 72918, 54031, -88828, -83789, -76930, -48426, 3889, -21120, -6595, -31840, -16786, 74481, 66885, -33493, 1064, 42878, 52348, -57493, -81227, -45982, -21658, 86209, 43982, 89158, 4266, -16257, 13015, -83511, -2270, 66619, 63706, -96490, 28027, -36730, -24596, -76206, 79325, -63807, -60055, 11619, 85517, -11615, -39098, -42746, 62609, 47789, 61198, -92746, -84517, 9616, -49549, 11495, 90838, -75860, -74835, -25065, -52782, tail", "row-519": "head, 7747, 18474, 58100, 22409, 73279, 63198, -13074, 8463, -46686, -85710, 67209, -98378, -53102, -74059, -29930, -52793, 8584, 66667, -25074, -77878, 80949, 22631, -55994, -31900, -77521, 28696, 78391, 7604, -83338, 14536, 35330, 77131, -37830, -81358, 83920, 14226, -72263, -74354, 8730, -15900, -65739, -14738, -7351, 98520, 36637, 26239, 33684, 6652, 25279, -57074, 28766, 10973, -99784, 61017, 34164, -81381, 16435, -36600, 99503, -17221, -75691, 56751, 74995, -10465, tail", "row-520": "head, 87135, -93874, -56117, 86397, -63199, -8419, -61011, 63873, 33108, 19350, -44216, 34614, -11421, 46176, 54957, 82967, 25920, 39978, -22931, -10187, 7715, -6034, -21584, -38128, 47525, -76621, -83335, 17200, -71551, 27239, 8037, -13302, -43266, 5903, 21820, -76070, 25632, 91220, -79863, 75096, 24623, 29688, -59511, 88517, 37152, 63988, 64029, 95842, 79817, -84761, -14174, 51514, 23637, 29647, -22792, 21122, -49299, 49836, 20673, -42280, -74645, -42963, -56208, 84905, tail", "row-521": "head, -73063, -79764, 3401, 3600, -55846, -92035, 28517, 84360, -89314, -66967, -95810, 71910, -12703, 62333, 76369, -85426, -66734, 89081, 65149, 117, -30892, 74097, 44060, -58560, 32624, -3221, 4, 16173, -91359, 8492, -78279, -79360, -75480, 76947, -84858, 78043, 7808, -69203, -67586, 40189, -68314, 37336, -80210, 59500, 3750, 7988, -2937, -95780, 83890, 86633, -46512, 17893, 54873, -65372, -29568, 89105, 11972, -63102, 82496, 84607, -71109, 2668, 43039, -96569, tail", "row-522": "head, 72019, -76320, -42912, -60798, 64340, -68039, -84580, -97086, -47547, 70482, 42628, 59811, 4585, 91148, 93925, 18129, -96480, -85385, 78918, -51611, -41454, -5820, 68767, -97745, 92166, 42917, 96208, 74498, 6556, -24020, -89130, -51685, 71936, -14624, 29398, 99423, 4305, 34010, -60767, 25228, -1226, -27736, -68729, -20995, 18270, 10603, -72185, -26286, 58198, -74233, -38273, -35028, -10272, -30444, 35172, -63594, 99767, -37767, -92783, 30682, 24203, 58528, 47947, -82329, tail", "row-523": "head, -20988, -60717, 680, 18440, -89558, 278, 25881, 73355, -42325, 72345, -27818, -47648, -56032, 70716, -48829, 86928, -24065, 22239, -49431, -26110, -12098, -66641, 21358, -42021, -52544, 75315, -48121, 16791, 23444, -68309, -59392, 3097, 70385, -62780, 39484, -22560, -29842, -32741, 13804, 72145, -50717, -69504, 50011, 7469, -89489, -42387, 95187, -71285, 89781, -91788, -93115, -16641, -22033, -77687, 61240, -2642, 90093, 37026, 19539, -56933, 21862, 38511, -39083, 44832, tail", "row-524": "head, 66458, -35879, 45224, -89163, -83109, -47244, 60546, -54661, 74236, -90143, 34961, 69096, -38025, 18123, -41858, 44845, 8355, -61406, 74739, -49867, 70006, -56504, 10375, 76288, -20103, -49527, -79184, -62524, 43819, 58691, 31114, 80644, 66538, -37579, -51640, -73426, 28066, -16584, 96948, 26192, 6586, 63736, -72028, -48433, -78494, 76998, -75231, 20024, -70243, -94521, 12615, 83409, 96902, -22402, -34502, 96796, -98891, -38533, -70098, -59136, 
-71974, 36201, -18409, 86233, tail", "row-525": "head, 74462, 95676, -33699, 28260, -82151, 54697, -77811, -93938, 48593, 95649, -89753, -47117, 67161, -40845, 33972, 61332, -717, -12165, -39294, 66338, -79980, 58028, 21187, 88944, -91034, 18781, -55509, 21694, -8351, 7167, 88652, -47023, 11075, 53026, 96172, -69194, 43117, -16151, 87800, 60138, 56416, 68904, 6758, 31161, 71704, 87722, -83725, -49528, -32496, -52506, -16853, -66803, -81890, -28343, -42824, -57667, -49183, 98799, 2661, -37716, -62409, -30742, 3403, 98575, tail", "row-526": "head, -91958, 62836, 76476, 99953, 10197, -6586, 82963, 47668, -24959, -55458, 76382, -72317, -19269, 3871, -73933, 14546, 46706, 29490, -84029, -67525, 14255, 98788, -23910, -77941, 3087, 23747, -18450, -78061, -56191, 87335, -42405, -57657, -42915, -77310, 75629, 76237, 21502, 641, -84642, -39882, -7208, 15157, -1732, 8612, -43947, 61977, 52821, -13428, -19488, -93090, 39267, -91750, 73042, 20116, 50308, -99421, -22010, -36817, 95177, -45742, 69832, -27723, -28742, 38546, tail", "row-527": "head, -51931, -49192, 19004, 73184, -69686, -13353, -84915, -90444, 63378, 35848, 93413, 41645, -6085, -18070, -93530, -51943, 49063, -12867, 10530, -12727, 84246, -95483, 70779, -358, 77871, -93352, 55325, 74870, 64262, 20386, -91714, 92055, 47545, 49402, 68281, 5830, 68324, 19179, -87789, 10122, 11789, 74736, -80778, 13448, 16167, -11808, 55482, -99038, -71265, 61358, -31967, -42536, 22525, 52053, 15500, -41596, -50166, -29760, 41064, -32068, 57696, 19333, -58337, 2248, tail", "row-528": "head, -21670, -82791, -61170, -79680, -70027, -13188, -16154, 26515, 85276, -4120, -62250, 45701, -43506, 23138, -10876, -63736, -118, -19520, -31483, 92560, -72454, 90791, -72985, 71320, 52078, 59289, -32106, 73179, -84834, -73301, -16635, 4721, -38017, 43249, 52541, 75563, -68009, -63450, -25312, -30070, -58293, 91935, -33124, -82548, 36537, -52422, -24910, -32227, 43406, -67982, 51672, 41408, -52060, -42474, -1850, 58535, -65173, -22287, 87224, 12585, 29419, 41194, -11533, 87886, tail", "row-529": "head, 43914, 59401, -12852, -33650, 851, 67609, 15591, -1179, 21780, 95798, 6792, 96462, 38415, 13533, 93643, 3410, 17334, -96969, -73668, 92658, 67571, 56853, 7651, -73784, 55438, 45369, 425, 40884, -83588, -32308, -8466, 53605, -64458, 6663, 76860, -68550, -39950, 23747, 87003, -75504, -96144, -79749, 35044, -95465, -38966, 64567, 86055, 46881, -71746, 19652, 72308, 52566, 74249, 83485, -94665, -83115, 83304, -80120, 87897, 77496, 49606, 81398, 70060, 67178, tail", "row-530": "head, 98916, -82709, -83576, 32009, -78015, 7437, 1267, -36542, -1654, 45998, -97048, 50344, -67699, -1474, 7091, 95172, -88376, 67995, 80848, -90073, -44481, -47401, 99801, 23145, 48503, 27298, 48608, 49583, -55304, 52665, 20899, -65144, 5307, 56440, 36049, -36330, -28299, -87727, -38481, 41983, 67742, 24895, -40259, 25730, 22265, 75868, -10345, 76399, 82347, 81039, 25796, 84544, -23561, 42645, 344, -88884, -53547, 95968, -20611, 82794, -73577, -94091, 31279, -82722, tail", "row-531": "head, -84495, 90533, -18927, -31556, 73517, 34357, -48361, -58306, -9471, -17343, 74467, 1304, 26149, 46413, -37818, -27188, 23620, -25233, -5654, 33528, -99121, -9, 52999, -42801, -44071, 89519, 99672, -25295, 52794, 37153, 37915, 68092, -58382, -25813, 14383, 79127, 86563, -52964, -59938, -3749, 64079, -29827, -15004, 55418, 21022, -64685, -96420, -60049, 15493, -82475, 31327, 39002, -97002, -44112, 71715, -59848, 20379, -74940, 11890, -35374, -86314, 50922, 62135, 91561, tail", "row-532": "head, -19741, 97180, -62503, 
84369, 35660, -98180, -19305, 45202, 6976, -6052, -53096, -42088, -85237, -71243, -22396, 62287, -84379, -58655, 57250, 89729, -50974, 80809, 45296, -86464, 73643, -49220, 37160, -31784, -98916, 23660, 74994, -65564, -75318, 12567, -34092, 88157, -29845, 67058, -36415, 80076, -88161, 39875, 65861, 96263, 79833, -22230, -17995, -13381, -63901, -95536, -14103, 22036, -81088, -484, -29294, -76585, -94324, -55101, 53940, -42561, 11405, -91951, -62083, -85422, tail", "row-533": "head, 3182, 59825, 28075, -64256, -36443, 14887, -87345, 57292, -54857, 75412, 26173, -52263, -77201, -52637, 85490, 15788, 97788, -97436, -71632, -93700, 85173, 7645, -71565, -87696, -88558, 70133, -49406, 21158, -15175, 43212, 46107, -40015, 16877, -7475, -42095, -6276, 36092, 87357, 20124, -45707, -20776, -80984, -53409, 25848, -54233, -44705, -64037, -86691, -53182, -29230, 71781, 37623, -57442, 61277, 65435, -96675, -68849, 20380, -34921, 57635, 18238, 44028, -67466, -64215, tail", "row-534": "head, 28391, -99133, -7885, 16128, 34244, -84014, -34318, -41963, -53303, 2112, 98712, 90178, -1590, -36029, -16116, -29252, 77379, 18889, 66587, -45504, 58083, 68115, 67561, 80092, 12940, -29284, 48267, 51126, -782, 27075, 15876, 31244, -5484, -24760, -44092, -21212, -85814, 50715, 42910, -45878, -43768, -27719, -49050, 30326, -5266, 43940, 12344, -34198, -83411, 12571, 49260, 71748, -55120, 99311, -67795, 57969, 72562, -81413, -4778, -69690, 91605, 74751, -54789, -75421, tail", "row-535": "head, 57839, 1017, -29645, 54306, -49593, -97703, 40777, 28237, 83210, 62448, 2586, -85779, 43571, 56734, 33147, 7897, 95634, -66561, -83574, 31996, 73269, 49500, 64481, 96800, -59646, -68520, -81838, -5031, 91759, 3593, 52344, -22034, 20771, -47231, -11045, 26024, 7347, 92632, 2544, 34279, 82365, -3994, -70584, 12529, -3022, 7601, -68728, -71659, 1085, 20106, 11321, -80152, 79076, 16132, 68800, -5967, 73413, 93992, -17170, 76794, -15351, 68169, 52594, 4982, tail", "row-536": "head, 17860, 59437, 70777, 9883, 4062, -52499, 81216, -98198, -76864, -7607, -75120, 86264, -87841, -17776, 48764, 94830, 2888, 61601, -86555, 40844, 44452, -84201, 28780, 36634, -25373, 44716, 53566, -59623, 65470, -9140, -83498, -62466, 69240, 68172, -15813, -82017, 40769, 69814, 75517, -6588, -21109, 14635, -35262, -4768, -84136, 58119, -59169, -29946, -81968, 69601, -86281, -45616, 36045, -8517, 89815, 14108, -28940, -53167, 82553, -91147, 319, -97868, -35280, 7828, tail", "row-537": "head, -75363, 84763, -30689, -11545, 89643, 94377, -47715, -38042, -45262, -6354, 95853, 40026, 14255, -40573, -17112, -47541, -8149, 14458, -19215, -83945, -59586, 91849, -91612, 60909, 59178, 61657, 47463, 52600, -74267, -35484, 23189, -12494, -43202, 49894, -7722, -22278, -326, 97164, 47806, 62844, -51352, 36367, 96917, 54206, 36779, -19141, 61727, 6500, -32102, 65926, 37212, -41009, 53547, -53426, 66852, -92485, 26541, 72423, -83874, -43334, 25120, 67120, 77337, 30852, tail", "row-538": "head, 34479, -20576, 47987, 12612, -92357, -89903, -76303, 74708, -32367, 64209, 24047, -96078, -11902, -57928, 89177, 67761, 15592, 44991, 11972, -988, -13764, 83169, -68081, -87770, 39129, 36141, 92392, -5668, 71455, -17625, 17873, 31999, -62522, 39266, 40628, 65858, -38173, -84406, -32229, 15553, 45881, 99691, -61739, -2991, 71465, -41106, -28304, 451, 55666, -55357, -69587, -54998, 93999, -31552, -40981, -74738, 99065, -96394, 35127, 7504, -65610, -2924, -23452, -22239, tail", "row-539": "head, 86111, -22788, -83133, -8773, -57407, 41465, -64668, -34754, -52679, -86240, -84211, -7159, 
-37718, -66410, 53063, 81826, 25984, 79481, -61547, 22962, -38631, 53404, 42274, 52779, -64866, -63461, 44737, 22941, -87441, -40002, -57278, -63879, -16094, 36819, 12977, 32731, -94854, 61600, 61243, 94222, -46323, 45374, 58808, -32435, 90940, 39217, 50675, 71828, -2598, -81616, -73970, -89421, -32268, 6532, 9379, -13592, -52073, 62057, 78197, 67911, -41354, 73057, -24403, 84461, tail", "row-540": "head, -82687, 92679, 34230, -45384, 66356, -44012, -3055, -71361, -61962, 70292, 82178, -27646, -96908, -35260, -97796, 5550, -52910, 35056, 58245, -22621, -91672, -35121, 35400, -64333, -55194, 31480, -70675, -18617, 23692, -12399, 83684, -1764, -66233, -27437, 90403, 47986, 95777, -23433, -17220, 30337, 402, 72288, 13288, -57226, -35316, 6632, 61983, -35534, 24509, -95860, 74226, -72028, 41847, -79555, -79126, 82108, 16336, -80417, 17996, -68373, 14629, -89574, -61136, 75831, tail", "row-541": "head, -60035, 94460, -53369, 36031, -3458, -38736, 7327, -14786, 90056, -60157, 23388, -55833, -67772, 7137, -78676, 69365, 5703, 20613, -92618, 20804, 74858, -57196, -66064, 15759, 97092, 19803, 24563, -94838, 30319, 89259, 65027, -25797, 38203, -89746, 52332, 28372, -84986, -90876, 69148, 21215, -95719, 11772, 78707, -18467, -7607, 79849, -34000, -93640, 37055, -97454, -56079, 37052, 39770, 15398, 23825, 68890, -44499, -44700, 24260, -46000, -18729, -55632, 36348, 40034, tail", "row-542": "head, -25412, 26166, -33614, -7949, -91217, -41587, 93264, -61297, -66583, 35963, 51600, -57048, 46971, 17933, 95072, -84915, 8385, -21518, 90455, 50161, 16982, 20633, -59787, 77428, -21570, 94769, -43859, 11885, 36724, 86720, 76267, -47729, 50756, 34419, -56648, -43974, -1983, 81162, -93272, 87244, 70987, -87912, -70779, 89002, -85939, 18338, -30570, 17832, 74947, 86185, -36293, 21485, 82870, -77408, 69974, 22210, -42518, -96324, -34710, -72632, -98296, -30259, 23525, 58403, tail", "row-543": "head, 99342, 248, 38484, 39530, 76232, -81254, -57239, -32752, -41760, -70289, -72646, 32413, -19319, -41794, 59880, 31083, -65438, 36231, 63376, -49149, 15596, 40239, 14772, 32820, -35495, 66867, 26837, -8546, -83705, -84300, 20957, 11893, 5754, 28028, -21793, -34646, -90193, 91434, -98646, 49455, -5105, -71688, -59477, -96012, -21712, -75957, 60603, -34803, -43516, -2870, -10281, -17321, -86229, -68458, 59670, -23816, -56384, -90040, -88296, 35195, -21395, 53873, 74633, 38574, tail", "row-544": "head, 58992, -52598, 67899, -74577, -76392, 72573, 93062, -65661, -12249, -64541, -42069, 72320, 31416, -78479, 69603, 73761, 17472, 27560, -8729, 49200, 94789, -20143, -60783, -68751, 50208, -32005, -19088, -72744, -77547, -88401, -34872, -17484, 94717, 68164, -98866, 56791, -27316, 44653, -97658, 73272, -81219, 40519, -69288, -15134, 46206, -11486, 63100, -14670, 37196, 38929, 67818, 66662, -97072, 36513, 11251, -57771, 10318, -48844, 17188, 58173, -29671, 50255, -75641, 88557, tail", "row-545": "head, -13768, -27306, 65224, 71459, 80215, 53853, 26176, 73405, -42936, 85157, 3317, -76239, -54991, -91814, -63902, 35074, -82016, 31856, -12312, 94147, -414, -67060, 93264, 35227, -62759, -13292, 29123, 60810, 80417, -61380, -84503, -53319, 22724, -24643, 35777, -53056, -38600, -10735, 33206, -77586, 18521, -34367, 98471, 38282, -21286, -5688, -8206, -15625, 76932, 44234, -71510, 62715, -64176, 25808, 80995, 85817, -64442, -37199, 24474, -51826, -2249, -92153, 66888, -61197, tail", "row-546": "head, 56770, -66751, 55665, -54930, 66176, 16471, -48333, 69500, 463, 79670, 41898, 1234, 8362, 22361, 66332, 14022, -71451, -53838, -70545, 
-51462, -50050, 66978, 79192, 31151, -74079, 82135, 37345, -8637, -62395, -62372, 44511, -45064, 62446, -20110, -90464, -93826, -79957, 44051, -38183, -46800, -56819, 48415, -27068, 12951, -3785, -67591, -34072, -95010, -8790, 43651, -7894, -71079, 62707, 29616, -67901, -96356, -28757, -32919, -28099, -59107, -36351, -99195, 65384, -72044, tail", "row-547": "head, -54431, -32573, -18812, -5221, -14922, 16402, 77149, -84231, 65679, 35282, 25468, 87278, 26156, -68954, 42731, -51961, -16427, 2125, 95273, 83500, -10526, -74291, 98405, 277, -30597, 81333, 17476, 27373, 28728, -5179, -31042, 59496, 25478, 47193, 36806, -55687, -59243, 51493, 76486, 89179, 91138, -33367, 28132, 96845, -93067, -24964, -92728, -25402, -42732, -82828, 97992, 66054, 24118, -443, 10612, -24465, -41624, -74190, -49964, -35777, 58765, 22828, -48633, -95974, tail", "row-548": "head, 691, -97786, -48551, 4558, -19748, 60919, -70888, 51475, 594, 85933, 60654, 80967, -46902, -21277, -18280, 37821, 5200, 86518, -55838, -41902, -56856, 67757, 68430, 50156, 5650, 22277, 16455, 88355, 40884, 20077, -42496, 8457, 44566, -4368, -97477, 94855, 471, 99529, 14184, -16382, -52975, -4087, -80806, -5445, 65755, 57301, 64389, 41562, -73244, -75468, 28272, 79214, 65027, -11321, 9934, -74335, 46061, -16238, -68754, 94978, -62947, -24792, -16762, 90046, tail", "row-549": "head, -87025, -92172, -57470, 26931, -78380, -9654, 15569, -96855, -49164, -27863, -83293, 45983, -11888, 78719, 7793, -45824, -25702, -86813, 93998, 29266, 21758, 18334, -63787, -31287, -83837, -34770, 16633, 54005, -5691, 31775, -88829, -98162, 12974, 87216, -34925, 98530, 71973, 73506, -800, -97229, 65312, -60087, 21471, -80831, 34451, -51081, -89417, 773, -27573, -57822, -67042, 41670, 76030, 86713, -74291, 18017, -84500, -62322, 39069, 90622, -25365, -64731, -83892, 10170, tail", "row-550": "head, 88257, 68289, -37059, -52837, 38206, 32878, -69249, -30490, -55310, 96697, 32191, 23977, -90551, -26027, -83232, -70453, -25732, -20516, -89956, 31294, 10084, 69088, 29293, -39123, 66514, 87780, -65750, -75403, -42842, -15406, -24984, -88341, 39284, -26982, 38112, 84316, -39996, 52893, 62074, -86732, 5314, -36972, 12398, 58252, -90488, 93192, 53781, 38804, 15533, -34806, -52758, -48721, -59894, -31563, 32830, 19495, 5736, 88539, -48628, 739, -79127, 60442, 6022, 35957, tail", "row-551": "head, -27207, 11776, 2032, 43813, 94988, -1337, -18653, 3293, -6541, 53956, 39571, 52777, 13226, 7624, -55239, -52541, -31872, -14169, -50976, 83521, 64723, 70552, -37396, 41326, -85707, -45705, 25723, -93962, 6612, -32593, -40626, -26307, -18469, -52396, -9165, 51064, -97912, 1207, 70553, -36213, -95794, -61921, -19105, 72709, 96691, 20892, 91395, 39033, 43436, -39907, 6536, 71671, -26274, -64097, 88192, 72389, 76864, 9977, 23143, 85184, 87432, -33724, 14063, 34124, tail", "row-552": "head, -88954, -77547, 61126, -14054, -11110, 3823, -65831, -94427, -28076, -93366, 47836, -52742, 15678, 38237, 50429, -65447, -24980, -30300, -48978, -8426, -69328, 50861, 14545, -60903, 31821, 24163, 79490, -32733, 56342, -50429, -47853, -43484, 59337, -26534, 17004, 77954, 74241, 46631, -7163, 40029, 48227, -92029, -20548, 16471, 16737, 37009, 40463, 77896, -80613, 29935, -42937, -79510, -69352, 16998, 42428, 98786, 7805, 76043, 15644, -35908, 99526, 50921, 95931, 58441, tail", "row-553": "head, -78884, 78111, 1364, 62818, -31025, 69160, -66925, -11151, 97036, 23647, -70707, -255, -48544, -32921, -71196, 39640, 73803, -86005, 39830, -75935, 71812, 64983, -19039, 77241, 60771, 47061, 92824, 18754, 90140, 
14910, 70580, -12871, -94762, -30636, 51080, 73939, -26474, 85912, -28859, 97235, 73886, -76863, 56646, -65171, 39664, 93869, 85844, -18287, -8409, -35992, 86780, 34190, 72568, -47357, -84874, -60151, 63064, 57391, -47482, 45201, 22688, 75205, -86956, 79632, tail", "row-554": "head, 62490, 60017, -71696, 46616, 88816, 67435, -74921, 35739, -32182, -95093, -76215, 87939, -87163, 288, 45133, -27077, 56155, 25853, 47379, 11680, -96134, -20913, -24917, -59416, 8042, 88129, -32798, 45844, -88655, -18708, -67654, -87520, -26700, -23144, -1697, -93954, 72370, 76630, 37085, 70395, -95373, 64552, 98796, 29935, 68763, 65828, -47918, 81249, 57504, 48409, 6107, -53531, 37486, 91932, -49039, 27808, 28443, 20685, -77710, -7906, -84619, -68074, -88469, 5141, tail", "row-555": "head, -86878, -12712, -85362, -75132, 22865, -51315, -43666, 25158, 53038, 90892, -25119, 99747, 22940, 7478, 75118, -85077, 10314, 26659, 56201, -90260, -40933, 91987, -2743, -7935, -46201, 36277, 35072, -82285, 87891, -72469, -36116, -3237, -13487, 21831, -10761, 12999, 81092, 91755, 79803, 61509, 62062, 32362, -57488, 15632, 7394, 10685, -66868, 25940, 6720, 86184, 99306, 4306, 83283, -67600, 36213, -5525, -25420, -55948, -81933, -9618, -20273, 77052, -68728, 48267, tail", "row-556": "head, -67171, -11167, 94535, 35826, 14179, -65060, 26765, -45966, 69967, 8038, 96289, -26009, 7735, 373, -2885, 30174, 84301, 6296, 18066, -61921, -51495, 6910, 59227, -54341, -89453, -61749, 69200, 80878, -74287, 97786, 36750, 72259, 40722, 3928, 90430, -75145, -18066, -74874, -1636, 10490, -27328, -70596, 89699, 44056, 78291, 44534, 19065, -58716, 95102, 73565, 82581, 49705, 27339, -40848, 75511, 62455, -66518, -27227, -4138, 56532, -61393, -43996, 82266, -33611, tail", "row-557": "head, 48146, 72274, -53402, 43927, -64582, 74495, -57727, 69764, 75194, -23133, 31641, 9128, 56227, 29486, 17317, 74917, 48168, 3590, 20184, -80627, -73412, 40956, -32693, -77632, 62681, 98510, 49445, 81343, -89291, -45576, 72536, 32404, 89255, -3627, -59952, -27482, 655, 38795, -64442, -3036, -1286, -46689, -3027, -78801, 56562, -54872, 19250, -1697, -42910, -47029, -7291, 69540, 18909, -24289, -67811, 42182, 73205, -36520, 96295, -2558, 50013, 86507, -45115, 91047, tail", "row-558": "head, -82407, 81846, 73092, 20532, -4017, -87466, 64536, -45234, -22854, 67564, -33149, 15954, -38831, -97351, -11419, -57879, 80739, 3300, 28116, 58121, -25265, 40600, 37047, -69411, 29573, -83260, 20802, -97941, -80135, 36571, 92192, 13956, 72962, -23201, -53517, 2268, -86259, 13378, -69270, -96300, -42630, 17464, -84313, -5808, -21873, 47953, 20305, -86270, -72350, 44477, -39939, 12226, -69132, -25156, -47876, 85989, -967, 5874, 89182, 38595, -20248, 86321, 16256, -24990, tail", "row-559": "head, 50921, 55849, 6279, -20936, 31280, -36608, -13725, 24387, -65490, 31987, 79638, 48075, -35260, -40851, 92850, -6429, -47039, 74947, 10101, -11231, -76148, 56127, -46670, -38044, -37518, 87805, -75333, 19909, 35765, 21528, 56291, -92621, -81753, 416, -63460, -63056, 52278, 37528, -98551, -1527, 68275, 55938, -87681, -70702, -10193, 20665, -24591, 65621, -50848, 87521, 66953, -36059, -66051, -79170, 34218, -44996, 73004, 69011, 98185, 13649, 6493, 79198, -52605, 17255, tail", "row-560": "head, -22178, 77777, -60361, 29267, 4981, -21362, 95785, -55109, -9952, -42412, 36338, 14986, 42304, 7960, 56448, 87099, 76753, -78048, -69804, -32240, -8106, -21262, 45369, -27980, -92477, -7332, -71970, 73248, 82981, 47771, 31778, 39605, 14805, -93655, 56366, 21334, -76802, 20772, 21693, 50884, 78734, 90721, 
43790, -22250, 41629, -83733, 89995, -10268, -36405, 76412, -4483, 72711, -67283, -87215, -87289, -93631, 99735, 70147, -66244, 64479, 10585, -92582, -6394, -30878, tail", "row-561": "head, 86228, -48014, 38766, 80577, 43083, 95186, 19371, -59287, -6815, -72443, -1834, -23060, -55340, -4071, -26990, 25805, 9339, 27332, 24443, -82346, -13002, 77307, 45947, 80898, -56813, -20372, 88449, 91878, -30896, -5715, 32702, 27357, -96090, -81546, -50172, -51436, -38756, -70481, 13362, -99637, 4639, -16950, -24390, 69333, 62471, 44214, 85498, -39919, 57661, 44928, 3625, 97132, -68597, 91969, -26765, 62156, 29801, -52607, 72592, -5284, -42257, -85184, -9163, -27091, tail", "row-562": "head, -61617, -99610, 74628, -40857, 43171, 3849, -82444, 59565, 61033, -95128, -25610, 22428, -1780, 65616, -71851, -45665, 80082, -33220, 22771, 12939, -25139, 58751, -22668, -78543, -23317, -49779, -25463, 67433, 75547, -24738, -17655, -60967, -27247, 70241, -21161, -65861, 47060, 11858, -81584, -61881, 95490, 88518, -82164, 10799, -92428, 15594, 94523, 54807, 32185, 75281, -95441, 47113, 31523, -84878, -9256, -79565, -58460, -88471, 37239, -81783, -17838, 39081, -8059, -52397, tail", "row-563": "head, 36519, 33882, 90159, -45530, -42468, 78956, 12244, 28007, 50649, -65626, 46905, -36077, -15876, 69255, -408, 11559, 13071, -41229, 30087, 48536, -98202, -27830, -17887, -97296, -14276, 81420, -15856, -85185, 78937, 19334, -48260, -39708, -84881, 7538, 88904, -48493, -67109, 19908, -91511, -82700, -37057, 21609, 84222, -38108, -85760, 51764, 51308, 48420, 48517, 30929, 57760, -51606, 37698, 22425, -96204, 69051, -10839, 30829, -87485, -92746, 66425, 94451, 59420, -83304, tail", "row-564": "head, 9563, -60291, 92411, 95999, -12978, -91714, -85315, 74424, -24536, -24371, -65462, 39092, -95032, 80539, 43216, -31572, -33042, -33182, 99577, 22163, -50008, -75789, 3406, -4624, -61824, -80786, -54521, 4351, -45610, 52591, -43791, 77820, -55183, -57179, -81946, 60800, 49343, 87211, 59492, -91159, 50128, 90740, -46377, 22887, -59899, -53765, -55511, -81441, -62534, 40541, 98494, 94273, -16643, -29197, 1042, -74518, -52634, -86226, 41710, 16321, 54150, 81421, 98387, 67883, tail", "row-565": "head, 83480, -21997, 13288, -40884, 46209, 46213, -30203, -14929, 76781, -55514, -28950, 92612, -80031, 74453, 36529, 77266, -88532, 23579, -5080, -51194, 62501, -73076, -88553, 75203, 55422, 88485, -16544, -64873, 49026, -62512, -29393, -49056, 20133, 36711, 4896, -79522, -48753, -35646, -88078, 59573, 48728, -28148, 18310, -71486, -1655, -40864, 1018, -2813, 14597, -71610, -2688, -6866, 97082, 97152, -5410, -93229, -60510, 59302, 89750, 16570, 89974, -33466, 5824, -70204, tail", "row-566": "head, -13024, -42298, 14265, -61024, -84385, -58880, -63414, -54998, 29354, -91456, -53929, 59591, -16384, -88508, -52436, -7418, -10718, -67545, 25950, -35675, -66851, 74392, -25392, -72013, -78835, 28550, 57862, -52868, -27126, -14884, -86458, -39936, -88077, -77390, -91990, 85394, -75605, -78298, -9449, -79774, 73856, -20192, -48850, -78063, 23216, -92286, 96539, -23689, 76945, 27251, 45631, -94198, -89379, 73766, 36636, 24815, 56678, -14455, 27100, 64931, -95242, -14119, -202, 30504, tail", "row-567": "head, -16239, 87810, -5368, 74100, 62867, -89757, 78754, -19029, 44991, -96288, -50722, 1378, 28003, 85470, -23113, 93888, -66659, 54629, 41781, 7330, 22378, -84481, 80195, 53781, 56229, 23928, -64197, -27091, 29802, 10696, -7013, -27576, 98920, 45039, 97586, -46792, 83997, -24126, -76169, 88777, 59223, -89627, 73474, 9696, 23137, 97944, -64968, 44187, 
68377, 42225, 53107, -6019, -58172, 18386, 63593, -84208, 1349, -90091, -93191, -96812, 62579, -74896, 2863, -12372, tail", "row-568": "head, -22945, 29465, -14555, 69846, -91414, -5213, -30664, -6179, 74105, 61921, 32164, 6345, -82654, -87335, 40120, 25014, -78605, -7609, -60356, 88783, -64095, -13691, 24284, 87436, -92574, 3497, -11192, -49945, 49099, 77579, -32867, -41921, -18599, 72868, 76795, 38778, -4977, -53311, -9022, -20195, -5167, 55213, 90646, 86675, 6314, 39672, 82115, 8978, -33928, 20409, -47824, 48688, -63030, 46670, -96017, -16432, -22201, -27190, 50144, 83118, -40826, 77499, -51544, 7844, tail", "row-569": "head, 3945, 74151, -47052, 20918, 69888, 59684, 32113, -24646, 54348, -72492, -92627, 12555, -83492, -65891, 17500, 31646, -1119, -28346, 29440, -29098, -97087, -50630, -86340, 77664, 75403, -19830, 56311, -13784, 11057, 66779, 98623, -10266, -34012, 25499, -36500, -45674, -18720, -19557, 89864, 88529, 89315, -25446, -92249, -84177, 85473, 21015, 62616, -914, 33993, 2403, 75235, 12209, 6236, 74395, -96620, 77047, -65383, 17641, 73269, 46409, -73199, 73020, 41094, 61748, tail", "row-570": "head, -78123, 6397, 42445, -10803, 89501, 71310, 75413, -67924, -1737, 81914, -58858, 86416, -32169, -46024, -878, 60301, 54413, -6649, 36134, -63385, 25691, 35622, -96380, -68006, -36023, 49176, -36498, 86544, -82072, -77613, 84725, -86072, -61726, 36552, 78805, 4319, 86939, -16106, 5954, -55988, 28713, -56260, -11935, -63930, 78274, 54121, -60357, 68140, 6783, -78063, -23692, 27092, -66554, 52651, 51568, 24139, 2097, 82449, -30346, -6050, 70112, 71641, 31267, -7545, tail", "row-571": "head, -7928, -65048, -37867, 22775, 73459, 52987, 1207, 94863, -49356, -25735, -43376, 86699, -36427, 31815, 40436, 48813, 43266, 543, 56633, -19508, 48204, 11586, -16160, 78020, -94796, 2186, -2592, -86186, -2314, -73655, -85928, 43369, 19262, -44694, 16054, -60941, -27993, 99706, -90539, 89857, 36538, -69654, -74219, 46318, 83275, -63641, 97619, -76989, 1435, -24539, 61821, 82666, -74332, 81313, -42997, 17241, 23858, 95049, 58571, -75909, -86610, 86646, -65154, 75263, tail", "row-572": "head, 99631, 85438, -23154, 92690, 53723, 985, -3534, 51859, -81338, 25922, 48839, -54313, -2254, -41845, 92988, 12328, -78524, -80082, -32318, 66103, -37939, -51241, 89149, 21717, -10107, 87724, 81876, 21685, 79736, -20762, -91088, -68268, -59895, 55522, -2912, 12602, -37120, -66669, 10755, -85108, 17481, -83392, 74247, -22467, 62098, -56170, -85244, -42952, -88015, 14084, -83302, 94680, 29796, 11683, 51341, -95051, 21181, -82074, -98936, -96568, 86541, -53687, -75150, 94872, tail", "row-573": "head, 91535, -76813, -52548, -58574, 15949, 32035, -37656, -72376, -37560, 35523, -79811, 41635, -84877, 30823, -18060, -5782, 44417, -13787, 55141, 901, 84930, -41834, -52022, 9810, 16868, -91062, -62583, -27168, -25699, 38254, 31366, -25509, 80402, -23716, -47074, 68677, 53554, -1537, 93402, -58775, 32166, -79974, 17567, 75053, -10287, 99333, 66337, 30170, 57472, 38964, 43434, -75507, -69788, -36499, -71483, -73996, -10994, -49794, -93070, -3131, -43318, 6851, 87208, 59899, tail", "row-574": "head, 29305, 14881, -50477, 16610, 51556, 40330, -79848, 78936, 45896, -93950, -19911, -35920, 1077, 79121, -13202, 38777, 64509, -72980, -71689, -42832, 19430, 11428, 21768, 9410, 18249, -25561, -30008, -14628, -52998, -50523, 4986, 52966, -92306, -90278, -56286, 84435, 16232, 58581, 47584, 32132, -38295, -55673, 84401, -13281, 71174, 22956, -67972, 63559, -8208, 91503, 94909, -81744, 34338, 72063, -41425, -6579, -50531, -61388, -93445, 
-17328, -19024, -95944, 22167, 32226, tail", "row-575": "head, -61603, 45934, -54857, 43832, -19246, 63110, -37073, -25921, -10265, -69454, 73065, -19435, -48782, 88732, -99195, -12542, -97564, 3202, -4202, -59777, -110, -49051, 81789, 33944, -20029, -18521, -41992, -97129, 55179, 35610, 15323, 67860, 3014, -84720, 61155, 15248, -47782, 62339, -31642, -87877, -2826, 29102, 17964, -54070, -62989, 13631, 5890, -24275, -95345, 42459, 75538, -39355, 46245, -99163, -58672, -90217, 16465, -77565, -86152, -33412, -56437, 85874, 89662, 52874, tail", "row-576": "head, -10048, 7710, -50361, -86542, 36457, -47269, 85897, 49192, 97057, 7301, 67205, 54157, -3489, 64801, -74497, 14006, -71418, 42055, -51748, -17548, -90815, -61193, 14046, -95589, -82460, 5754, 96741, -81692, -59722, -28780, 68046, -2344, -76146, 93582, -29906, 31832, -48908, -32774, -28584, 18186, 43053, -66746, 72546, -86175, -17707, 1364, 61313, 29291, -19888, 34646, -47992, -56920, -48938, 61002, -82306, 85154, 83817, 5659, 27060, 10862, -97565, -23035, -76047, -90393, tail", "row-577": "head, 1937, 50487, -64473, -40980, 52513, -92113, 90364, -22606, 63218, -6857, -72980, 18168, -15309, -76598, -29675, -29223, 89148, -41938, -6365, -63959, 15164, -89028, -30006, -99897, -16629, -92002, -66269, 47098, 42228, -18344, 43343, 70349, -88898, -18817, -85803, -81109, -12676, -54801, 94770, 13805, 79857, -85961, 99665, -24363, -9375, 27996, 97120, 97898, -76485, 30798, -60863, -4361, -33861, -95581, 86185, -7179, 504, 10376, 18648, 88357, 99924, -7570, 19715, 61916, tail", "row-578": "head, 80651, -66551, 7359, 59188, -57864, 33483, 66015, 58393, -17629, -16510, 25702, 17360, 84151, 98223, -47870, 72905, 95461, 40758, 16562, 37221, -52202, -94829, -2749, -62875, -28858, 73016, 9283, 70051, -20529, 56625, 17421, 44542, -86850, -28497, -44629, 71165, 25873, 34968, -79177, 75437, 4482, 95583, -20648, 88180, -9424, -44373, 3238, -63075, 80004, 56090, -68627, 14441, 48747, 98487, -56590, 42794, 9646, 8287, -89614, 95709, 84517, 87468, -78593, 25782, tail", "row-579": "head, -9855, 77307, 6943, -1979, -89040, 47144, -41897, -24413, -71955, 37807, 30274, 39329, 98993, 52951, 33701, -83460, -12962, -6903, 54946, -27303, -14555, -43214, -1494, 58512, -53203, 23710, 73432, -57271, -29050, 94678, -90346, -73027, -46141, -41743, 5284, -83651, 50070, -64401, 88573, -30176, 60925, 44152, 80181, -45690, -48132, 19548, -35117, -64948, -21877, -14796, -34194, -44774, 19188, 34719, 87448, -20498, 15292, 23659, -94189, 8427, -25069, -90096, 56330, -62308, tail", "row-580": "head, -78425, 99794, 90453, -33509, -36831, -40086, -23635, -73238, 40912, 96140, 7546, -19717, 80062, -25868, 57992, -21061, 42992, -84910, -93731, -84911, 75364, 49271, 14754, -20235, -28904, -2644, 51253, -19305, 90008, 35031, 25049, -27382, -13673, -98843, -74477, 30556, 80421, 98036, 95882, -9973, 64947, 17931, 52417, -28297, -73671, -71746, -61316, -87884, -61378, -61553, -62794, -49834, -31349, -89512, -13255, 68803, -48198, -50600, -36312, -45572, 89522, -22251, 23198, -36116, tail", "row-581": "head, 92005, -30806, 93987, -44778, -50584, 41710, -30643, -57518, -26962, 78083, 67125, 4214, 74447, -73021, 46829, 5339, -10098, -65722, 72244, 80174, 25419, 69588, -39228, 71912, -55178, -96171, -19570, -95596, -29036, 93361, 8370, -51733, -54297, -48051, 55505, 15346, 2809, -65197, -83046, 36945, 73108, 262, 4607, 40366, 90788, -34463, 94731, 87548, 29261, 79807, -83535, 77784, -90726, 49330, 84741, 9998, -39473, -18375, -28994, 39746, 15110, 88926, 11756, -24271, tail", "row-582": 
"head, -36746, 91633, -87252, 71608, 49406, -19392, 99699, 55935, -88701, -76057, -33116, 62135, -60435, 30819, -66033, 3922, 57240, -40753, -10099, 31486, 29123, -95165, -23448, -13082, -18534, 58168, 22931, -31099, 23343, -74776, -99881, 41138, 7190, -66060, 37731, 7635, -84369, -73791, 66150, -16260, -57510, 48172, -60493, 7908, -57925, 5734, -51658, 26294, 2142, -24048, -20653, -94104, 13881, 97312, -15554, 60171, 22398, -53328, 53807, -32134, 43906, 76048, -41130, -40480, tail", "row-583": "head, 81798, 49810, 11110, -63522, 44050, 393, 66214, 11556, -87302, -47752, 75584, -23108, 73985, -8856, -24099, -89238, -57981, -2444, 41422, -42769, -97499, -21569, 85254, -70838, 69279, 97589, 98210, -24556, 97571, 23619, -57001, 65575, -31395, 14574, 99966, -85851, 44708, 51095, -69251, 15272, -87699, -26058, 29164, -50107, -44915, -49159, 79533, -7568, 88913, 7113, -80479, 19144, 21596, 93541, 87523, 79756, 5726, -96379, -37191, 98141, -82402, 96919, 19264, 42223, tail", "row-584": "head, -12304, 70823, 33633, 55786, -43994, 30024, 54212, 42368, -80587, 30763, 20445, -13672, -14903, 80529, 77626, -57042, 74491, -24637, 20569, -96802, -63949, -33944, -22808, -78000, 14865, -14295, -78181, 55194, 40847, -84866, -81595, -12309, 56034, -28343, -38036, -12085, -46883, -45932, -1548, -57413, 39087, -85251, -84375, 81737, 30597, 31237, 37267, 616, -40230, -64446, 26631, 41650, 3506, -12059, -28619, -95094, 78346, -94084, 49607, 67360, -60036, -45088, -72421, -9879, tail", "row-585": "head, 67887, -40551, 83714, 11534, 96977, 25330, 41228, 58350, -28439, 44383, 57894, 38339, -51740, 30963, -12567, 5388, 90093, 99352, -21425, -90843, 26625, 47303, -88279, 76349, -98986, -7864, -77389, -84434, -26579, 11277, 34852, -79580, 14474, -11596, 64365, 30171, -3041, 41641, -37425, -97111, 26762, 86029, 60862, 48396, 22542, -45117, 42643, -4671, 48165, -26145, 49887, 65371, 53889, -88240, -1535, -89877, -21982, -95697, -75690, 70094, 13970, -99958, -10980, 19453, tail", "row-586": "head, -40624, -50912, 10334, 55745, 87254, 88480, 63678, -95902, -99858, -55046, 76015, 53984, 56823, 23323, 92250, -41651, -75288, 5875, -33412, -59830, -57030, -36363, -27712, -36597, 58586, -93634, -80273, 22161, -4211, 48489, -60449, 90562, 99397, 50747, 15836, -41284, 5460, -32448, 55923, 2578, -51184, -95375, 67487, 94226, 60478, 26054, -33041, 5951, -21419, 69336, -64928, 60424, -22727, 16643, 30208, 96380, 14749, -53524, -60880, 77560, 34250, 5523, 36200, 66552, tail", "row-587": "head, -26215, 74869, -17014, -91567, -60230, 53350, -77280, -47426, 35647, -95261, -63724, -49513, -4295, 839, -70091, -46805, -58813, -21790, 66005, -70641, 25575, 64672, -75126, -91455, 64950, -11810, 99696, 77801, 75450, -5309, -73719, 46996, 45652, -36885, -29535, 86769, 8472, 73352, 6182, 81810, 94453, -83242, 33727, 73602, -52592, -65879, 67315, -31705, 16880, -82201, 40673, -15730, 54499, 77791, 64240, -6184, -58464, 51095, -85210, 80545, 33774, -39507, -73893, -38161, tail", "row-588": "head, -4525, -99104, 189, -68689, -8674, -13056, 42267, -70269, -81849, 95773, -1559, 51143, -52923, 58884, 31905, 93615, 96572, 21656, -1137, -77608, 7427, -3036, 44158, 29574, 48622, -72279, 52947, 1771, 47720, -8142, 83268, -4693, -34735, -59469, -96625, 7838, 55352, -70404, -40185, 5576, 73210, 83054, -95022, -32447, 50541, -93292, 65144, -36073, -71975, -98478, -57253, 4793, 83997, 18476, 11889, -1183, -91715, -63663, -84307, 36316, -24044, 12177, 64963, 53945, tail", "row-589": "head, 44898, -65118, 87743, 56139, -36498, -99239, -37798, 40064, 70712, 
59330, -14220, -13618, 26170, -65820, -89077, 66920, -18846, -24577, -35079, 18298, -69356, -29701, -69425, -23436, 60467, 7740, 72766, 23314, 74737, -1760, 44983, -99914, 91055, 77267, 59368, 62227, 20498, -95713, -3826, -35326, -73870, 37669, -19773, 33441, 25021, -560, 18655, -34123, -25593, 83994, -15230, -87070, 92873, -77335, 38002, 7096, -83653, 3444, -15019, -42552, 30763, -61043, -51390, 75286, tail", "row-590": "head, 83107, -62257, 35205, 66621, 51433, 18855, -93061, -59769, 99790, 89523, 46714, 26846, 85451, 51515, 78113, 32670, 9767, -37546, 78230, -90998, 95516, 34326, -96625, -36491, -47194, -97562, 4771, 3575, 62396, -82268, 45234, -17439, -23234, 94101, -23708, -55196, -88909, 24887, -29698, -77062, -97602, 10484, 54537, 49925, 77495, 57860, 35419, 64171, -17652, 4685, 13369, 19398, -67750, -76814, 61910, -50949, 38411, 70798, 30096, 72760, -22163, -1553, 70002, 82887, tail", "row-591": "head, 21624, -2751, 56317, -39945, -11248, 40390, -50553, 74670, 20715, 11038, -79141, -56970, -83452, 58990, -30940, -18321, 74724, -77771, -86699, -58924, 12014, -71102, -65516, 51929, 40491, 3692, -24222, -74848, 63085, 34683, -80131, 73282, -84773, 25503, -64716, 17003, 62622, 6505, -2664, 15346, 73884, 3124, -17480, -75239, -99612, 67894, 86673, -36953, -97799, -30798, -35751, -23676, -84045, 41115, -91556, 87408, 11343, 98672, -41406, 55390, -10082, -71874, 31401, 35488, tail", "row-592": "head, 33034, -19666, -45723, 28911, -76207, -85123, 19314, -18127, 5992, -69782, 71557, -18389, 10704, -8204, 95138, -22240, 73018, -17983, -78622, 78036, 65242, -15102, -12320, -65847, -11493, -92379, 95998, 20180, 68338, 39434, -13122, 8688, -11420, -66391, 14345, 3694, -60388, -72054, -51144, -52781, -19374, 29375, 39063, -11044, -69259, -2987, -96755, -48918, 9209, -49865, 96504, -1755, -55223, -24450, 147, 71035, 10153, 56696, 79371, 61741, 58019, 44915, -38348, 98076, tail", "row-593": "head, -78361, 13690, -41030, -92752, 47781, -68807, -11379, -22430, -245, 507, 12665, 92015, -90969, 24144, 39166, 85461, 47887, 74209, -12143, -237, -81554, -37648, 92126, -79418, -593, -318, 27483, -16580, -39939, 5104, 9488, -71017, -80148, 88636, -69667, 15786, 32138, 51452, -71459, 88007, -99689, 6522, 56460, -7748, 82507, -84840, -19970, 39426, 49356, -81058, -70081, 29272, 68343, -18520, -98950, -73328, -32841, -57142, -26433, 80036, 7350, 94914, 53822, -26428, tail", "row-594": "head, -66609, 84226, -31151, -50959, -69856, -60439, 22879, 22880, 89243, -9981, -79826, 28, -50528, 56715, 86074, -83451, 39762, 16511, -40038, -76448, -73762, -93546, -69040, -22415, 3203, 64496, -57140, 35629, -53929, -40594, -3663, 90693, 84523, 89124, 53294, 20111, 14706, 24138, 55082, 15082, 84279, -91798, 54610, -34284, -41270, -60081, -71713, 17879, -12077, 25573, -95952, 70120, 72524, 93829, -10218, 58324, 45101, 51990, -19048, -71031, 2551, -76742, -50337, -38572, tail", "row-595": "head, -6919, 87675, -49379, 81022, -68525, 41046, 60966, -22466, 29542, -5123, -68568, 200, -75763, 99296, 79984, 49430, -88415, -93405, 57756, -48074, -62144, 57413, 53410, 27957, 89248, -87437, 62653, 22048, 98718, 47870, 99949, 94881, 93240, -75588, -83718, -57089, -18219, -86086, 6270, -91289, -36286, 47076, -22400, 83634, -97840, 28512, 46930, 60160, 76726, 73350, -83771, 83796, 74704, -60248, 38393, -19376, -84956, 79056, -45657, -52761, 89925, 10133, 10126, 95557, tail", "row-596": "head, -8886, -91915, -27360, 16522, 16527, -44697, -23954, -59023, 26761, 39083, 96199, -36904, 88707, 69836, -4663, 20186, 62934, -70288, 28934, 32474, 
-47556, 43782, -40660, 64256, -95010, 21825, 90198, 44292, -58509, -14463, -23719, 64292, -94557, -32550, -39024, -89423, 23217, -97237, 60531, 35456, 58364, -76535, -32823, -33777, -27073, -19055, 61497, -9252, -39910, 51465, -17675, -11345, 66792, -92807, -65613, -77032, 79941, 84611, -42244, -51331, -54313, 67613, 94990, 22093, tail", "row-597": "head, -88496, 45212, 80354, -37410, -25828, -46234, 54929, 19096, 52139, 34338, 41065, 10515, -9444, 31688, -54609, 94619, 49992, -16196, 5396, -15445, 70971, 67632, -24189, 23442, 44400, -11826, 62285, 43185, -17855, 22522, -92790, 18705, -7234, -5594, 3020, -26361, -42285, 83197, 35737, -34099, -83384, 74342, -42903, -32001, -22681, 16280, -91758, 31104, 48836, 71315, -94623, 90867, -60492, 66504, -36087, -16132, -74396, -21954, -36418, 83060, 31183, -8616, -91242, -92179, tail", "row-598": "head, 56831, 16526, -38102, -394, 36530, -18028, -49749, -53872, -7070, 7747, 79894, 6325, 28702, 10691, -28811, -62509, 33103, 95465, 34717, -33286, 37299, 45331, -80307, -45237, -22871, 9854, -10099, 2291, -13400, 47300, -43700, 9527, -24296, 10075, -39019, 36405, 10413, 73412, 22936, 676, -71128, -17838, 62195, -83905, 35268, -17449, -30371, 42119, -48122, -31968, 51754, 12701, 78097, -23804, -32504, -47040, -94733, 55978, -17961, 23401, 56380, 23905, -78108, -68378, tail", "row-599": "head, -7850, -6044, 95788, -94272, 80169, 13381, 90095, -98083, 70611, -46431, -77399, 92581, 84645, -14383, -17488, -60749, 34964, -44144, 40638, 79413, -95749, 66816, 7174, -18768, 59117, -78690, 66076, 89898, 86625, -99321, -57522, -80194, -58770, -96661, 79679, 42509, 79728, -82571, -64527, 83032, -71119, -12878, 60947, 17467, 97169, -63832, -51565, -48072, -99091, 96603, 31219, 82506, -81944, 87984, -53093, -65805, -84436, 41586, -52665, 3734, -43907, -67127, -37140, -66825, tail", "row-600": "head, 91880, 94182, 15779, -42425, 53821, 37478, -99568, -27005, -62019, -72400, 79028, 84464, -34052, 25714, 63748, 54654, 5788, -56171, 81422, -29799, -67046, 61101, -84102, 94624, -90102, -93800, -34300, 14541, 45562, 42171, 21772, -97261, 95945, 69493, 48820, 40144, 63498, 16830, -33999, 98559, -56239, 63234, 95713, -79493, -36911, -66710, 98812, 3134, 15814, 66947, -63436, -30286, -44248, 46847, 34046, -23900, 12255, 58742, 43709, 24163, 16410, -93089, 39787, 6571, tail", "row-601": "head, 48366, 96755, -29884, 55093, 13155, -9124, -11978, -73003, 20778, -96867, -86809, 23503, 74318, -63123, -37966, 61142, 94138, 55825, -53833, -95078, -5361, 70373, -26018, -77739, -16311, -18756, 21868, 59764, 57353, -8712, 58285, -5078, -19851, 77453, 62520, 80754, 55279, 33793, -65505, -1978, -27171, 20281, 76560, -57386, 8826, -99294, -98485, -26827, -20622, 2759, -4331, 79172, 7375, 42144, 70297, 85504, 71354, 50076, 75563, -31350, -38885, 77892, 94570, 59310, tail", "row-602": "head, -82194, 5682, -82458, 84043, -81670, 1148, 99997, 62960, 61903, 85412, 31642, 57295, 36500, 31700, -36127, -91400, -28683, 69008, 65353, -48355, -46354, -74076, -70893, -98818, 7988, -44126, -10402, 62018, -61201, 37588, 16190, 46873, 21808, 35382, -89737, -24353, 28343, -48138, 45595, -59506, 68212, -50838, -87839, -16743, 50927, -98307, -95272, -38721, -7747, 31174, -66315, -27932, 61527, 37790, 53647, 6710, 26367, 1019, 18944, 39877, 61355, 74178, -9896, -94126, tail", "row-603": "head, 83368, -65546, 9373, -73790, 73656, -60685, -25432, 79524, -12385, 90091, 5319, -50211, 2958, 79460, -33904, -51627, 84756, 65343, -30528, 5008, 20100, 6298, 74155, 67087, -70808, 35250, -51737, 34655, 10008, 90425, 
92340, -61963, 734, -61645, -44754, 46835, 9919, -29540, -59853, 52159, -61894, 51909, -20604, 14129, -87937, -10039, 42424, -86972, -90230, -59923, 32894, 61451, 93946, -99073, -21300, 23116, -26403, -21287, -55228, -72458, 3680, -91178, -7568, 51063, tail", "row-604": "head, 50275, -90773, -60983, -47613, 88565, -42088, 5994, 90049, 96100, 9266, 69494, 29512, 95126, -3516, -78051, -7317, 37949, -55001, 62343, 86723, 37556, -46779, 33115, 68805, 46069, 57343, -66149, -83312, 51529, -70594, -17181, -73602, 31518, 13392, -28357, 24648, -14540, 79815, -99848, -91312, 35557, 26066, 36546, -782, -73347, 70142, 28473, 48491, 44889, 11261, -81488, 27213, 44947, -17125, -4894, -56817, -85254, -91000, 8132, 98921, -80978, 60572, 55743, -1918, tail", "row-605": "head, 42104, -17610, 90343, -23980, 8591, 31571, -24881, 87656, 20876, 10429, 20646, 65158, -61091, -91673, -70350, 66044, 53069, -62680, 97168, -67084, -80538, 49377, -12669, -84852, 17707, 84697, -88581, 29310, -92951, -10958, -59269, 5959, 59879, 98545, 97273, 67651, -28468, -60269, 64077, -54397, 53270, 89869, 56776, 64989, -97990, -86085, 10602, 25359, -27290, 54269, 98704, -18123, 60169, -6295, 48073, 78544, 11799, -31236, 15180, -44365, 26669, 7402, -76871, 17991, tail", "row-606": "head, 9746, 67008, 57949, -75576, 10068, -88518, -67027, -41936, -17339, -23637, -76394, -22015, -85372, -4925, -69044, -609, 85629, 94537, -87873, -70113, 179, 14366, 43832, -61205, -29479, -83594, 31022, -38915, 93289, 16637, 28975, 39514, -90150, -3797, 83118, -8844, -26572, -80699, 20284, -69611, -84514, 66895, -14623, 69605, 43984, -76300, -85911, 31178, -60256, 74933, -54410, 83605, 67177, -14306, 60937, 15973, 35028, -9831, 86213, 63688, -60237, 41319, -88528, 72409, tail", "row-607": "head, -53558, 44016, 78256, 87697, -45635, 15771, 94921, 86146, 31916, -66482, 88930, 5249, 49875, 78476, -38543, 37435, 47506, 42955, 94563, -93071, -26984, -67029, -28554, -51970, -41406, -49810, 39639, -19971, 10953, 81202, -5087, 64697, -26867, 51889, -7237, -35386, -2432, 90613, -32939, -53019, 74350, 68786, 10326, -25528, -20311, 47196, -90317, 55435, 44157, -45417, -56638, -33302, -71821, -40246, -68793, 77524, 11863, -44067, -40130, 51620, -94109, -31477, -63492, 77078, tail", "row-608": "head, -62204, -95961, -20509, 53799, 73796, 37816, 68233, -92140, -52918, -52809, 56752, 27290, -45295, -26736, 94215, 11469, -73858, 14550, 88402, 10791, -86021, 65918, -12942, -42606, 50383, -32917, -32878, 1878, -59486, 2179, -10820, 42530, 89625, -42471, 81901, 48030, 1117, -94210, -91987, -81980, 59389, -14728, 87863, -7308, -95341, 92605, -78107, -30495, 26246, 36754, 29888, 57080, 66264, 10505, -39101, -3028, 30991, 66790, 38265, -24689, 93052, 71044, 84855, 77812, tail", "row-609": "head, 97458, 4504, -25366, -38177, 44908, 11083, 99276, 6627, -16870, 95980, -27810, 28348, -18714, -52786, -59829, -98595, 70552, 56116, 29312, -45335, 71373, 47697, -14954, 2571, -55648, -44211, -51128, 96821, -51981, 84763, -95993, -14411, -81665, -69882, -83644, 32604, 79063, 11271, 54365, 8931, -11080, 81668, -8043, 79613, 63415, -5879, 26798, -72368, -94353, -38691, -77224, -57385, -55046, -86409, 19440, 56748, 22436, 73517, 5455, 7318, 26241, -69670, 21262, 17335, tail", "row-610": "head, 39930, -52775, -55447, 56631, -24873, 86409, -30454, -88098, -6714, 75779, 65543, 59716, -41052, 22361, -95468, 37322, -75249, -18688, 71363, -83990, 87280, 24611, -6921, -26316, 18554, -29297, 89781, 94592, 48837, 76413, 47813, -37206, -4124, 57479, -16570, -38839, -97704, -95476, -32956, -91753, 
-26022, -99322, 66319, 61685, 95342, 10908, -11556, 66125, -44222, 62019, -10725, 62630, -90317, 90507, -18328, -42516, -51081, -84268, -38571, -91658, -8928, 66936, -32616, -65607, tail", "row-611": "head, -34515, 30954, -84308, 15392, 37617, 33309, -91385, -88285, -82395, 54345, 309, 49542, -93497, -66285, 10986, 57319, 35372, -74543, -20634, 53896, 91032, -58644, -62455, -26647, 14974, -58222, -24964, 62468, -62167, -50593, -51165, 87019, 38626, -13400, 73950, -86204, 33597, 39062, 11690, -13124, -64488, 51369, 46803, -45883, -6749, -73941, 89591, 21096, 69302, -22626, 4957, -6910, 45688, 50070, -66181, 81815, 98433, -47130, 3441, 23250, -54252, 14501, 53038, -14402, tail", "row-612": "head, -69947, -87032, 48994, -98989, -93611, 91336, -78519, -52229, 71375, -83564, 4658, 57213, -36079, -88296, -15759, -51749, 70366, 83641, -49185, 88597, 48022, -6071, -64527, 90601, -32016, -76283, 78937, 20615, 7926, -45710, -85772, -14174, 52046, 27911, 76418, 24830, -41405, -46138, 35, -72300, -37844, 71509, 80271, 70982, 66946, 88483, 62984, 78631, -90355, 13580, 43323, -5017, -5543, -52887, 36262, -9509, 89513, 37435, 66981, -20326, -9339, -46689, -42076, -18757, tail", "row-613": "head, 15432, -93153, 48751, -36852, -24151, -1951, -23977, -57849, -16908, -80520, 44706, -8639, -91856, -24791, 57379, 24117, 56590, 44010, -62338, -91991, 40176, -45710, 93298, 75217, 51652, -46190, -21138, -44596, -40608, -80151, -20484, -75696, 33807, 86524, 95804, 24860, 29038, -81930, -88264, -4748, 89905, 78390, 93684, 69657, 12675, 12277, -21502, -39873, -32317, 27457, 40181, -46048, 10801, 38801, 17600, -95034, -33942, 49218, -99809, 937, 61094, 14252, -23292, 16649, tail", "row-614": "head, 79150, -97821, -53117, 93182, 15799, 6061, -71953, 80444, 92434, 83530, -51837, -9396, 35269, 43520, 5380, -77120, -77036, -65954, -71505, -81319, 57815, 68225, -99660, -21312, -24193, -49523, 1007, -56732, 96048, -97682, -26508, -73756, 10622, 87014, 93909, 2011, -11737, 84319, 86366, 99743, -75171, -976, 21347, -53694, 38958, 23276, 64680, -98999, -88751, 46798, 58405, -32120, -79422, 4159, -6122, 86686, 98255, 77029, 33552, -48996, 55137, -32572, -1468, 40847, tail", "row-615": "head, 82649, 89709, 51219, 85491, -33569, 2749, -61290, -68216, -59740, 84616, -54641, -89751, 84501, -82783, 78018, -62836, 53333, -40380, -50900, -86559, 89080, 22436, -28169, 38131, 18025, -31447, 39821, 5257, 12062, -81866, 84189, -85221, -62631, 67442, -48476, -22880, -37095, 64311, 52659, -24627, 46494, -17329, 32628, -32356, -77534, 78076, 33959, -87398, 95132, 59283, 60218, 29362, 41707, -71977, 83194, -4079, -50647, -78176, 6825, 68801, 65604, 19543, -45777, 27592, tail", "row-616": "head, 93376, 12452, -86573, 92499, -62322, 18255, 68392, 90737, -27882, -65061, 2154, -3298, -87777, 83752, -76332, -67719, 84574, -29362, -71962, -86166, -16602, 8875, 21888, 57738, 6701, 74667, -32406, 56116, 43469, 14099, -53372, 3351, 97894, 16473, -80331, -75131, -92420, 33263, -48284, -38929, -62825, 20040, -39759, 76549, 36222, -97703, -35603, 73583, -69128, 77267, -90667, 16931, -81769, -9221, -84411, 79105, -17463, -91317, 36704, 13287, 43050, -82485, -64182, 78137, tail", "row-617": "head, -48536, -25781, 85048, 44859, -72731, 97296, -20614, 46175, 34924, 38901, 54760, 87645, 28761, 62826, -39700, 46361, 55183, -64765, 71565, -37385, 24035, 17678, -18340, -54769, -91392, 3129, -7104, -20054, 81108, -2637, 24088, -55043, -32203, 51832, -93899, -255, -14519, 78395, 59141, -6024, -13164, 42245, 51505, 11901, 47118, 76125, -79522, -12869, -62609, 
-3464, 68087, -29752, 99825, 1016, -95960, 46797, 87077, -694, -93812, -58060, -69657, -31575, -54474, 31787, tail", "row-618": "head, 53789, 27731, 14624, -60176, -79679, -78603, -89415, 58347, -57945, -18783, 83396, 70049, 10230, 63577, -4809, 56334, -38217, 9238, -16474, 62461, -21677, 63100, -80602, -10450, 62181, -10395, -40475, 48476, 57311, 47396, 28942, -57112, 44781, 33993, -74675, -31, 88337, 52477, -75126, 36862, 56788, 25483, -48471, 59481, 79304, 11811, 81917, 58775, -65315, -95884, -15500, 72774, 95638, 23608, -5043, 16652, 43256, 80771, -19153, -82340, 41473, -20701, 74496, 96977, tail", "row-619": "head, -48166, 97669, 50838, -72327, 10083, 87304, -719, -67911, -66249, -84772, -70536, -83726, 20983, 43186, -48362, -43925, -4070, 54026, -87243, -4570, -2069, 94040, 38925, -25503, -53677, 87084, 66934, 25101, 62745, -89386, 36777, -83269, -71408, 18348, 49463, 59330, 1362, -1609, 65778, 62474, 7316, 64857, -74579, -70408, -90440, 2141, -79050, -41763, 48901, -92723, 99031, 10332, -3760, 56439, -82305, 55838, 77062, 63643, 79000, 4619, -21100, 9081, 35156, -54527, tail", "row-620": "head, -9960, 89513, 85921, -10548, 53738, 55788, 73068, -60222, 68679, -25437, -81402, -18362, -12414, -21978, 7007, 21755, -61625, -33169, 56962, -46931, -9561, -49602, -40423, -7, -32083, -89403, 98675, 32857, 33119, 27572, 67449, 1864, 52520, -37829, -38624, 29965, -29327, -56104, 927, -96364, -42619, -22950, -65395, -47199, 84887, 14692, 68554, -60645, 80402, 94354, -31853, 256, -827, -85212, -94236, 68571, 80680, 95652, -30740, 92897, -48783, -44356, -96989, -45470, tail", "row-621": "head, -56780, -90778, 13631, 61804, -78194, 94707, -84373, 53332, -31135, 17893, -41670, -48781, 95398, -49903, 5467, 48026, -37845, 40805, -30953, 80172, -21291, 7501, -81403, -93389, 69834, -10535, -68668, 45407, -91011, 78131, 18496, -45047, -42515, -43371, -43533, -41464, 89957, -84111, -73890, 86793, -95240, -87354, 21603, 3192, 70638, 38379, 94340, 37744, 31489, 53530, -21320, 64271, -42142, 46013, -7066, 80615, 88352, 10755, 28596, -8277, -92452, 58759, -15138, -9168, tail", "row-622": "head, -55194, 49644, -55551, 41888, 5836, 6795, -88733, -55584, 16330, 385, -21427, 69160, -54086, 81554, 48069, 39283, 11850, 11869, 66523, -4787, -20944, 54527, 35154, 95805, 91541, 78668, 79570, -11799, -37312, -58185, 3426, 49349, 97852, -38653, -74901, -23065, -40042, 3138, -27324, -16892, 68449, 93524, 25593, -94887, -43132, -7864, -28159, -82234, -10669, -86651, 18445, 8843, -4842, 53655, -7084, -16575, -26708, 62041, 36365, -48297, -34395, -61588, -47100, 86124, tail", "row-623": "head, 99180, 8733, 70069, 89626, 91463, -79086, 94328, -51994, 25869, -61973, -56282, -16504, -57382, -68011, 52996, 95338, 1920, -5995, -82631, -27804, 49429, -96196, -63532, -45787, 87044, -90668, -92240, 42862, 23962, -69098, 19064, 1665, 1961, 63862, -69632, 86852, 695, -55102, -52828, -97474, -61960, -44230, -69456, 48398, 16010, -73439, -58306, -76235, 57019, 58867, -57740, -55900, -6507, -43211, -68619, -59932, 8616, -919, 47111, -37400, -1133, -36474, -47227, -40193, tail", "row-624": "head, 18871, 26270, -44613, 26249, -39598, 33345, 1877, 24266, 10715, 89593, 68785, -15724, -20837, -29141, -44076, -85830, 10807, 54563, 73960, 63997, 74069, -55747, -84271, 46613, 13046, 96065, -82072, -37452, -24436, 21821, 29392, -69740, 93249, 14065, -71216, -88683, 33625, -22233, 56252, -19804, 80342, -5027, 96158, 1117, 14561, -28062, 33002, 23455, -21061, 88823, 89327, 99380, -8556, -49402, 69330, -78859, -66795, 41138, -13636, 4049, 33010, 
-7436, -81376, -9178, tail", "row-625": "head, -29121, 24204, 24319, 62007, 76572, -22098, -4253, -82676, -900, 61088, 93713, 28310, 18801, 33331, -35673, 48342, -34673, -61898, 93708, 62992, 49416, -58964, 10906, -24881, 76882, -66725, 9210, -57622, -20372, 441, 48604, 92106, 56174, -85440, -73647, -91333, 39378, -80457, -10505, 57418, -15833, 73095, -70490, 6782, 63893, 44367, -50813, 89105, -10785, 65882, 98900, 69273, -91263, 67064, 45366, 8980, 58391, 21432, 32268, -20263, 67720, 38341, 93744, 21006, tail", "row-626": "head, -20857, -92643, 90835, 37395, -54095, -46706, -38945, 21398, -83905, -11914, -6336, 90279, -67948, -13031, 35507, 80998, 7746, -36863, -82635, -24416, -13261, 40366, 86831, -96141, -32767, -59770, 69189, -94445, 8866, -2624, 82176, -74059, 44948, -98404, 36266, -48370, 76452, -28887, 46447, -90397, 86548, 9588, 90895, 49587, -49112, -95516, -17130, -61302, 85807, -44496, 80784, 71484, -45071, -64305, 34892, 10918, -57355, 42791, 43144, 3686, 35670, 90265, 6176, 16917, tail", "row-627": "head, 6693, 18314, -9413, -20940, 38545, -13559, 57622, 97214, 61661, -72356, -38609, 4864, -2078, 81854, -71894, 20979, 13184, 7810, 49068, 18819, -6570, 53307, 67168, -79032, 90782, 53255, -77454, 60709, -90734, -26029, -53371, 55684, -65084, 10790, -10267, 97909, 66927, 27354, -9059, -91685, 72964, -13018, 94491, 43553, -93146, -52736, -56170, -29865, 31187, 25079, 59105, -13141, -78903, 65967, -90755, -61300, 97674, 13873, -8579, 61905, -33327, 71536, -75803, -59921, tail", "row-628": "head, 12936, 56486, -403, 41499, -16930, -60830, -12808, -41167, 16774, -96869, -69791, 38900, -47380, -32471, 14958, 82556, -10125, 50852, -84339, 53524, 39643, -21373, 6621, 24430, 1155, -47434, 49326, 36160, 52790, -15907, -426, 40825, -1624, 80099, -14513, -15353, -85642, -12293, 62190, -45694, -87876, 39202, 40916, 804, 66407, -18286, 96253, 15474, 23806, 63877, -9091, 56747, 95817, -45792, -17016, 39717, 69400, -48773, -69618, -45418, 60288, -56803, 73529, -40773, tail", "row-629": "head, 79211, -47712, -8818, 19535, -88100, 37095, -17856, 25086, 35501, -1416, 70879, -69898, -14525, -23706, -45987, 44451, 50769, -21938, 75309, -3053, 68735, -18869, -12594, 7928, -29400, -4802, -95207, -27116, -5656, -21111, -54180, -15094, -9830, -50268, 20269, 51049, 6394, -8642, 53771, 56114, -64017, 17791, -69768, 97416, 43082, 85997, 9310, -63123, 60483, 64434, 99590, -46629, 57787, 70628, 12902, -72303, -83064, 79130, -84296, -39100, 62821, 73706, -11040, -38440, tail", "row-630": "head, 63673, 40812, -68081, -18227, 81822, -99179, 36114, 52970, 73218, 78259, 62061, 97173, 48046, 92999, -70609, 99238, -98393, 77317, 75873, 19599, 56409, 59387, -86176, 51439, 97570, -75736, 38844, -75052, -28363, 88970, -98798, -24263, -22767, 88864, -13677, -41023, 39802, 14061, -34037, -71107, -1726, -94887, 39551, 8701, 21868, 78477, 44286, -90134, -73104, -84861, -84970, -3279, 39695, -12320, -69048, -80845, -95835, 66013, 4452, 64029, 36033, 31380, 45773, -81218, tail", "row-631": "head, 59827, 37120, 16914, -24773, -15774, -89474, 15752, -17532, 64042, 16612, 47761, 25098, -90830, 76636, 70361, 79665, 79555, -8883, 67942, 87062, 56929, -24963, 37733, 35437, -358, 51083, -44503, -56343, -19192, -9420, -30170, 75133, 91581, -72860, 76843, -27383, 53329, 79046, 74098, -92291, -56999, 8226, 35034, 94190, -58614, -83794, 54957, -14952, -93519, -47124, 85148, 87783, -64148, -6058, -17132, -44734, 10810, -77029, -47397, 2981, 4106, 46138, -93141, -83860, tail", "row-632": "head, -86095, 44397, -85392, -86423, -92612, 
-59623, -49480, 48173, -52027, 81949, 18001, -74290, 66449, -88972, 96633, 51448, 72979, 18252, 12287, -68054, -7618, 57290, 39594, -48726, 46817, 3046, 18862, -7085, -8919, -54928, 12656, -58193, -72645, -78215, 46254, -64520, 2392, -54284, -57155, 48093, 97497, -38421, 66056, 63542, 28289, 41088, -94621, 49034, 19555, -62896, 97865, 91039, 17195, 16409, -99247, 91389, -96048, -34509, -54910, -26156, -48037, -88765, 1887, -20555, tail", "row-633": "head, -59528, -52047, 71709, -59745, -92673, -2286, -61989, -23321, 95071, 50498, 17568, 90302, -64678, 13977, -49644, 98703, 37528, 64727, 22865, 28898, -88779, 33263, -70879, 84900, -20325, -37122, -25423, -2919, 24145, 95132, 2181, 41259, -15887, -41806, 98905, -43284, 73888, 98931, 82077, 87334, 13095, -33756, 4093, 67438, -36177, -85656, -86710, 16357, -83054, 10627, 18, 57527, 74498, 74972, 74024, -35441, -89083, -57144, 14204, -93291, -91411, -73059, -77421, -99249, tail", "row-634": "head, 85465, 68707, 79657, 69777, 12136, -96235, 90567, -53392, 74673, -84133, -31274, -37436, 13437, -48717, 49682, -62062, 94784, 4578, -47094, 73802, -49466, -91587, -49785, 51352, -33094, -65409, -26011, 38679, -33224, 54985, 72781, -97780, -92247, 91625, -31075, 29279, 61973, 56927, -25762, -2390, -47194, -95114, 34058, 33459, 59620, -29626, 71164, -16965, 38908, 45293, -42726, -85414, -68242, 65301, 8933, 48725, 6849, -46728, 67983, 4849, -14327, 34648, 41219, 92871, tail", "row-635": "head, 5536, 45296, -59352, 5934, 24912, 39129, -74514, 84104, 98225, 89545, -79820, -98774, -72377, -27321, -75058, 66774, -14391, -21938, -10913, -40730, 12164, 37084, -80999, -3785, -50803, 75634, 13434, -1806, -3896, 55, -99803, -90116, 29641, -79445, -1894, -26237, 19834, -29467, -36915, -61820, 87534, 17099, 50720, 97659, -66251, 16113, -49128, -8159, 14357, -25522, -57911, 20183, 5366, -12194, -27693, -30592, 26362, 12970, -86199, -61261, 50362, -3387, -5469, 18661, tail", "row-636": "head, 34203, -72976, -63340, -69646, 9712, -39215, 10541, -94807, -13569, 98614, -28717, -7530, 27035, -86983, -39890, 89317, 10846, 75189, 25871, 3846, -89844, -5126, -77, -8283, -70059, -46482, 81798, -13281, -15146, -49946, 38127, 81048, -10410, -67417, 88013, 34342, 45415, -52326, -84738, 919, -70982, 2498, 12719, 81717, -67431, -60083, 38000, 19605, -98216, 34934, -93727, 76121, -1442, 59065, -83218, 76068, 35271, -488, -54933, 40984, 35087, -27459, 80513, -73084, tail", "row-637": "head, -75026, -74083, 55193, 31063, 45923, -51762, -57973, 39310, 23591, -61368, 93171, -65548, -8029, 72563, 86064, -25613, 5561, -29212, -94334, 19351, 60584, -93061, 33526, 97692, 21299, 87390, 21936, 83319, 67179, -81821, -72546, -70577, -4926, -94337, 62363, -67989, -66348, 19386, -95212, -41641, 82182, 52128, 63645, 54260, -95017, 25386, -92995, -81286, -70201, -25038, -4070, -62507, 55054, 57060, 59090, 30727, -75451, -51587, -52251, -11044, -35133, -53222, 28895, -46045, tail", "row-638": "head, 55347, -43145, -28946, 32202, -33227, 15010, -21954, -52491, -28831, 39519, 26093, -42206, -97127, 59242, 65550, -46219, -5080, 57476, -54711, 93428, -60107, 42306, -20122, -17345, 32940, 57811, -95363, -69432, 36351, 23388, 88936, -91696, -71641, -71602, -95420, 6567, 65933, -83812, -3151, 54181, 86795, -34472, -63820, 41896, -56795, -36286, -35914, -25071, -51279, 41995, 84662, -84085, 89477, -97070, -45773, -93324, 88581, -96530, -12696, 12512, 47534, -82129, -95455, 97575, tail", "row-639": "head, 89825, -42148, -87911, -34420, 60638, 52001, -47055, 27170, 74592, -53461, 38103, -55375, 70806, 
-67118, -40408, -47138, 21933, 90820, 13946, -54660, -38862, -42265, -27925, -8259, -89057, -53575, -43050, -13311, 49832, -89297, -34187, -15532, 71762, -94657, -55784, 85387, 99944, -71237, -28728, -53330, 68019, -61448, 87345, -76926, -39509, 79832, 23538, -30358, 2229, -28718, -24782, -31846, 3530, 5467, 935, -39011, -79900, -71141, -96676, 44511, 5832, -4840, -15172, -38435, tail", "row-640": "head, -60800, 44623, -7312, -94881, 47511, -67051, -27563, -44816, -58282, 36885, 79980, -21890, 5884, 50372, 20853, -54488, -28738, -83153, 93057, -35175, 64063, 96170, -9880, -71617, 70644, 76541, -86477, -97619, 18687, -60537, -81215, 31592, -43933, -16617, -18487, -87735, 59654, -32932, 65220, -57661, -22103, -72451, 73610, -29975, -45129, -30005, 10119, -21350, -41490, -5901, 78917, 46418, 10638, 39420, -44936, 76795, -76731, -2436, 31968, 21425, 28984, 7846, 23666, -35790, tail", "row-641": "head, -95981, -65176, -4447, 65151, -99435, -32968, 89242, -70578, 57428, 76901, -97000, -53858, -50487, 3045, 86773, -24971, -80497, 45309, -87146, 49511, -6435, -45540, 19283, -86840, 1864, -92438, 48498, 12150, 31997, -72378, -60637, 13976, 66962, -58080, -36964, 68690, -84283, 96478, 39822, -98141, -9612, 73232, 45439, -99205, -83201, 87832, -67559, 86741, -47964, 83967, 87799, 30142, -72888, -18061, -23828, 98107, -56479, -91164, -56930, 28211, -92567, -51621, 46462, -21007, tail", "row-642": "head, -10208, 7616, -92578, -93479, 18915, 54632, -72817, 26987, 95278, 84624, 87778, -93327, 27255, -55013, 4010, 23086, 54001, 35610, 81999, -50219, -47568, -83632, 94627, 62615, -67221, 20638, -53500, 89297, -1148, -54878, -38255, -36517, -380, 56241, 84519, -77002, -84550, 11208, -94301, -91540, 43700, 46199, 49691, -92903, -6259, 33106, 98955, -136, 30065, 40223, -29842, -88913, 15355, -94386, 17907, -62307, 26267, -72446, 32948, 14763, -45547, 41382, 50969, 77961, tail", "row-643": "head, -84997, -65844, 79144, 17016, 33675, 93007, 31406, -67819, -81516, -47103, 7743, -91648, 14658, -67659, 43270, 74812, -87892, 94046, -1467, -50034, 78248, -98522, 75105, -1881, 45738, -95795, -18687, 37671, 59028, 84350, 54656, -79323, -99728, -13785, 90321, -97044, 26374, -51862, 98379, 81880, -95137, 16739, -53046, -61138, 8336, -99551, 80217, -72933, -71250, 5895, 70406, -83869, -87542, -17581, -60430, -64299, -89561, 87480, -55557, 34363, 27053, -12855, 75136, 6909, tail", "row-644": "head, 29750, 70670, 61141, -5812, 28938, 3204, 30153, 2425, 26768, -78746, 60623, 91516, -99232, -86202, -92304, 2005, -75649, -32380, 89191, 43217, 1503, -82218, -48630, 46569, 69032, -54102, -98208, 15903, 99723, 26488, -56770, -97194, 34587, 90135, -38713, 27556, 65244, -16704, 68558, 74935, -48546, 95820, -90949, 58650, -26725, 56856, 40467, 388, 74847, -14345, -55603, 27720, 92012, -31992, -99586, -50719, -62399, -65801, 83987, 93812, 29162, 53150, 94634, 24791, tail", "row-645": "head, -29841, 57650, -9528, -90579, -85789, 54366, 11632, -50845, 95864, 28110, 3084, 28854, 74209, 79990, 34387, -44879, 32493, 65008, -49401, -19834, -80304, 32329, 49671, -3328, -7852, 63809, -53656, 51921, -14015, 85778, 36697, 59989, -95617, 10583, 50302, -13362, -82201, -16426, -46719, 26484, -33212, 9506, -89055, -69315, 85128, 50694, 21898, 59953, 1693, -82173, 64277, 55273, -9894, 81133, -71405, -42758, -13891, -30003, -70539, 740, -44990, 77970, 20602, -49440, tail", "row-646": "head, 58112, 36114, 51238, -17606, -94518, -82506, -5381, 48484, -24308, -60616, -10457, -39852, -70420, 46749, 78269, 82609, 9645, -55829, 85825, 29901, -63931, 
-37281, -80752, -14076, 33326, 58383, -44970, 60807, 7359, -46920, -4569, 75823, 51515, 24532, 37807, 53244, 80078, 39056, 39908, 99474, 25997, -85826, 7947, 99819, 51924, -7238, 33602, -29460, 73413, -33635, -87823, -6694, -34372, 17679, -19629, -28024, 70403, 97889, -18217, 66187, -56962, -41685, -66912, -94717, tail", "row-647": "head, -5806, 277, 30330, -2413, -27960, -63983, -9261, 10900, 39422, 63292, 1740, 6322, -3967, -72681, -68066, 16792, 54847, -21246, 67913, -53785, -89213, -74282, 22307, -58234, 10243, -23141, 95170, 77485, -65391, -55765, 85306, 70015, 26603, 31220, -44811, 66175, -3099, -69347, -99584, 55287, -10791, -87785, 58629, -72185, 58939, 97305, -19728, -59027, 3638, 94104, 69312, 49516, -33604, 40957, 56862, 77978, 35083, -61931, 67676, 21229, 21624, -79055, -80992, 21286, tail", "row-648": "head, 23440, -75860, -52778, -92951, 56424, 23830, 45390, -89594, -76462, -56732, -12394, -24908, 41454, 98543, -71495, -68191, -73268, -88633, 48528, 3360, -17018, -53811, 59890, 74548, -38006, -31592, -23444, -21058, -38224, -85514, 73810, 57699, -13702, 97763, -20814, 8752, -84346, 6934, 20136, 24841, -99751, 32202, -43137, 20098, 68281, -4107, 32805, -53753, -85102, 8353, 30262, 65062, 37482, 76515, 45996, 80419, 16330, 87866, -94348, 8161, 79566, -16493, 42767, 47547, tail", "row-649": "head, -42147, 33648, 31046, -12218, -82091, 92104, 63400, 10101, -82661, 24387, -43195, -52740, -93628, -78312, -95871, 43771, 55304, 24116, 53807, -91662, 79890, 44929, 89859, 37303, 51293, -54812, -9078, 66158, 66966, 55554, 40007, -14979, 44155, 47573, -52473, 67003, -12812, 87188, 19254, 21659, 71529, 9610, -7602, 83332, -30524, -99281, -75104, -26712, -23851, 55219, -64959, -76821, -41245, 28848, 70536, 17881, 19712, 93960, -7774, -40876, 8025, 9271, -18665, -41926, tail", "row-650": "head, -56200, -32722, -77674, 93755, -43201, -20763, -14911, 50529, -644, 29390, 32401, -14682, 54730, 30096, 48741, -69239, -65112, -47410, -55249, 8511, 7027, -99107, 57394, 46546, -20256, 82665, -22542, 10913, 95513, -69369, -42162, 93297, 2688, -40196, -41865, 61491, 58777, -47636, 44079, 58424, -91863, 17767, 65408, -76028, 63810, 79356, 43650, -22580, -34836, -39818, 34330, -95981, -86721, -89328, 72955, -38478, 46756, 68983, -66109, -78508, 53588, -34082, 68896, -25201, tail", "row-651": "head, 41986, 99454, -71974, -36979, 54316, 4906, 87489, -85139, -38418, -78035, 4886, 91371, -27821, 89838, 937, -64817, 69762, 89470, -59953, 93960, -81090, 99089, 34161, 35183, -60342, -36156, -80112, 80098, 7984, 43850, 63420, -14040, -30440, -71260, 60707, 9873, -53575, -33458, 21538, 45036, -87853, 13272, -56029, 13059, -34692, -84873, 24977, 46018, 53412, 33965, 2710, -24728, 86739, 35934, 89429, -34153, -16149, 56024, -41973, -29794, 28310, -21082, 38674, 98732, tail", "row-652": "head, -21538, 65363, -66591, 26075, -21436, -88240, -81340, -11898, -93045, -40161, 43801, 55841, 79863, 65627, -95340, 23865, -8005, -35012, 40382, -6529, -49045, -53950, -28392, 91930, 91082, -92702, 46916, -27337, 67516, 33593, -76050, 12527, 76741, 98238, -73313, -68886, 75125, 44318, -85426, -31374, 14659, 30105, -48006, -79768, 16500, 86083, 72465, -17638, 92310, 77430, 91190, 64418, -33602, 23101, -98663, 67582, 68103, -98190, 96858, 78047, 25021, 76297, -50693, -30564, tail", "row-653": "head, -93846, -76119, -42806, -27111, 72743, -69716, 74471, 10692, 91340, 48410, 38709, -4705, -51673, 39431, 68099, 43813, -74393, -25535, -14980, -25567, -7054, 51525, -2841, 64438, -64862, 11532, 92652, 9907, -2489, 93850, -25393, 
-61907, -4975, 4331, 89886, 4721, 51484, 34660, -16577, 25490, -50752, 56048, 33061, 92854, 48408, 44956, 64553, -14787, -69457, 60634, 94925, -33, 20976, 53958, 41101, -82028, -72181, -27843, 14903, 60749, 46236, 70055, -91582, -47336, tail", "row-654": "head, -62467, 95798, 40262, -58699, 41496, -3138, 1427, 63356, 65600, -83834, -38112, -70826, -8635, 8898, -48539, 3613, -27459, 15527, 15503, -58917, 2593, -82261, 13849, 56373, -30730, 98765, -30547, 91527, 93836, 18442, -82229, -34811, -22939, 32677, 96692, 66973, 72844, -46401, 77348, 39694, 20426, -78802, 70163, 89102, 53861, -52479, -54928, 23323, -98749, -94865, -52630, -28412, -56557, 39536, -9354, 26800, -20759, 93848, 32620, -74689, -19972, 26460, 76832, -95602, tail", "row-655": "head, 98617, 60365, -87816, 75342, -5645, -51509, -72711, -12571, -42094, -71093, 18566, -62369, 88087, -8772, -20772, 68397, 56084, -98528, 61420, 7107, -90955, -18537, -64341, -60023, -96779, 36751, 42534, 665, -92473, 68894, -71568, -74654, 67219, 16363, 77310, -55652, -43531, -43368, -1355, -46095, -73082, -90809, 42240, -47156, -33774, 68620, -17507, 25282, 97489, -55837, 81854, -26619, -29273, -7583, -36492, 25587, 99429, 97297, 46377, -68134, -24090, 28335, -70536, 97356, tail", "row-656": "head, -13074, 43901, 72889, -97765, -27311, -53257, 11370, 41245, -56693, 75022, -50182, -43126, 10467, 95516, -20594, -29450, -9161, -28562, -41954, -31367, 9690, 38635, -13533, -75315, -40509, -75832, 32475, 68081, 23782, -10558, 64684, 65064, 28438, 8559, 83529, 46653, 27541, -40190, 36695, -48262, -41501, 58709, -98561, 32695, 86066, 92953, 56065, -28964, -3530, -12426, -98018, -64660, 58608, -23612, 63366, -84648, -51515, 9088, -5750, -67858, 91883, -76759, -16543, -57521, tail", "row-657": "head, 31771, 68202, 11709, -33820, 82413, -65032, -4692, 46897, 73921, -16639, -86077, 72789, 81800, 92029, 63627, -82191, 26449, -85230, -75961, -62628, 70888, 51751, -44579, -65864, 39060, 68783, -55895, 97716, 53284, 13327, -3868, -42082, -67094, 45581, 46831, 81805, -33643, -88041, -3873, 27675, -94776, 68645, -77275, -68753, -10156, -17572, 40803, -66017, 14972, 63136, 35884, -4944, -56430, 17740, -63061, 29942, -33075, 66562, 16595, -12183, 1425, 50713, -54314, -25476, tail", "row-658": "head, 9158, 62020, 82736, 92345, 51881, 91057, -52940, -17177, -11586, -70717, -25945, -82934, -80711, -62616, 54201, -23966, 39629, 86093, -35899, 67520, -92405, -15264, 18994, 71214, -58669, -87748, 80641, -58521, 79018, 41103, 12040, 32573, 25514, -52860, -56732, 52082, 90309, -38180, -78519, -40962, -65157, -78236, 48001, 28160, 49064, -98339, -81616, -48334, -60865, -46804, -74520, 20570, -36427, -80202, -12570, 95497, -77781, -2452, 41507, 677, 69372, 33171, -19343, -11473, tail", "row-659": "head, 4242, 22951, -78831, 19233, -377, -44850, -48689, -20377, 81805, 2608, -63292, 31153, -32776, -57785, 63516, -25328, -62100, 55345, -91182, 39681, 13322, -8301, -89964, 38377, -84682, -1061, 31907, 96969, 76508, -8384, -79441, -10680, -99324, -5960, 7372, 76450, 44296, -20382, -73226, -91543, -16172, 89120, -93920, 37620, -65861, 83114, -146, 77477, 2747, -35623, 40089, -70080, -68812, -72226, 97745, -95139, 66365, 69234, -98607, -49607, -41984, 86759, 35788, -20415, tail", "row-660": "head, 71251, -33473, -1545, 28901, 8980, 77226, -1526, -74163, 88669, 7509, 56403, 93775, -20176, 66204, 8981, 61496, -2822, -76794, -23054, 44921, -73172, -91699, -72102, 48399, 36312, -7893, 83097, -76140, -50263, -86334, 75630, 65325, -96738, 78049, 20070, 65799, 79945, 62810, 38716, -10693, 
98348, -52549, 42791, 28403, 91825, 40289, 68511, 41921, -86583, -80365, -16291, 95328, 32337, -76933, 97955, 67630, -58721, -12546, 88486, 91799, 34905, 54980, 86393, 79318, tail", "row-661": "head, 59672, 82000, 22016, -68050, 69775, -40202, -63953, 43685, 77346, -52598, -30839, -91564, 27870, -15544, -19296, 90514, -68593, -90247, -12565, 50059, 19057, -62046, 97718, -67647, 58421, -9565, -20468, 62506, -90653, 87372, 59144, -41022, -1102, -66541, -10591, -4116, -63874, 43, -15511, 72285, 16144, -92087, 37285, 84221, 63795, -31933, 78019, 82677, -84059, -97277, -91892, -18927, 49066, -81070, 87724, 58372, -3026, -65426, 62582, 65307, 47735, -72329, 20569, -23394, tail", "row-662": "head, -4846, -23397, -19627, 28076, 75302, 15673, 9411, -76790, -56807, 48982, -76201, 98362, 77496, -22549, -39757, 61760, -71962, 19503, -18490, 25404, -43641, 7027, 97451, 66119, 97326, 68207, 33944, 20317, 72290, -31164, 21413, -1897, 34966, 7076, -10121, 83574, 452, -26482, 63590, 28573, -84055, -64827, -57339, 81124, 56080, 18664, -73493, -679, -24283, 45878, 33373, 73737, 12065, 57880, 52630, 28977, 50807, 87330, 33382, -9022, -26434, -87147, -53647, -13769, tail", "row-663": "head, -15480, 60292, -14978, -62571, -39155, 49769, -82481, 77721, 32112, 84399, 74878, -19267, -81374, 37585, -74501, -34343, 50562, 75740, 50961, -1946, -28208, -83125, -51980, -65761, -24572, -75527, 33154, -160, -39534, 41769, 23323, 5857, 78236, 74628, 74354, -9486, 60483, -91691, -3724, 47116, -41160, 65034, 72036, 21936, 41533, -57133, -39366, -90010, 32277, 14590, -87703, -54995, 76713, -50565, 20322, -75668, 26558, 72880, 80147, 48570, -53842, -59660, 54364, 10892, tail", "row-664": "head, 83401, -42346, 9228, -32077, -43631, 68233, 99413, 85971, 66790, -92330, 39532, 14589, 89654, 12461, -32712, 91080, 78015, -69415, -82465, -57494, -11179, -30219, 83045, -51049, -51456, 32204, -74166, 25081, -83840, 6843, 5508, 89008, 18849, 27793, -33817, -73782, -71183, 89526, 79719, 60283, 68639, -97328, 80598, 885, -85630, 86260, 55614, -61590, 92551, 28617, 44358, 2812, 87081, -36602, 81152, -97384, 54781, 67956, -42593, -7138, -92824, -16181, 92803, 44884, tail", "row-665": "head, -61966, 76031, -77927, -14939, -9882, 12712, 64446, -68466, -5597, -89793, 23776, -635, 34087, 13675, 81554, -35822, -15346, -56960, 95885, 87593, 11474, 55484, 1993, 4076, -27578, -89408, -33537, -93090, 86786, 80447, 3730, 7628, -88003, 32867, 37560, 83146, 95246, -78558, 75544, 60962, -55184, -57562, 69522, -52769, 67961, 38417, -80493, -26162, -51474, 78137, -34159, 99026, 59408, 65171, -32902, -74112, -29435, -10735, -21650, -94066, 94932, -58273, 20344, -43159, tail", "row-666": "head, 70038, 72266, -84959, -38801, -13108, 40953, 2558, 57456, 90117, -32292, 94860, 90197, -84434, 81395, 81503, -43428, -76419, -46236, 85793, 25273, 60820, -48824, -53143, -66575, 62456, 70912, -90053, 36414, -73289, 79284, -95171, -61588, 5293, -17863, 95389, 39032, 11136, 13351, -27014, 59011, -63274, -8822, 46236, 3560, 51064, -64162, 27569, -60048, 97982, 42049, 55475, 32641, -1130, 13957, 58587, -8105, 97219, 95218, -95378, -7454, 84675, -18486, 97405, 83763, tail", "row-667": "head, -90445, 50367, -79493, -6198, -32604, -1536, 21775, 97884, -88542, -41815, -19317, -28017, -6325, -73680, 59150, -47554, 39867, -68291, 41870, 27720, -42636, -15600, -5132, -20012, -48365, 46983, -39578, 4086, 51808, -69299, 70629, 46555, -90054, -70674, -69614, -61869, -18368, -84255, -22564, 82875, -42018, -27565, -36794, -88172, 56499, 93378, -57360, -63899, 68306, -3402, -86466, 
11144, 52432, -38352, 62437, -10437, 296, 64225, 49059, -18709, 19076, -11584, -98152, 53588, tail", "row-668": "head, 29765, 2048, -22449, 34291, -23846, 55672, 10137, 28757, 4875, -21395, -65831, -89035, 75592, 20821, -42796, 37010, -23809, -80720, 63765, 49184, 23456, -86017, -70673, 88283, 62969, 93074, 26862, 29487, -59697, 40432, 7583, 52595, -43765, -89515, -53914, -64537, 75389, 84240, -11153, 12168, 35045, 28097, -79905, -75136, 19398, -22362, -56842, 68280, 45722, -31659, -40150, -33970, 54805, -81453, -762, 61338, -58761, 91003, -99970, 15732, 72261, -93630, 20414, -43754, tail", "row-669": "head, 3314, 31576, 59030, -4851, 86789, -11666, 64143, 65485, 20647, -88656, -30398, 36658, -69911, -97972, 54227, -43734, 50912, -32394, -92078, -29831, -17782, 15374, 31068, 61454, 74191, 98680, 76988, 26723, -67194, 49008, 6705, -80216, 10721, -19294, -16458, 39662, 93804, -32169, 70295, -84996, 96187, -35698, 20423, 44756, 59033, 49742, 78639, -69401, 23402, 23194, -24292, 59639, 25822, 94523, -94039, -4581, -77291, -93077, 13506, 7264, -18644, 35338, -8392, -80472, tail", "row-670": "head, 68087, -78192, -34417, 62542, 40602, 7479, 3495, 89708, -66036, -57693, -41045, 90092, 1179, 98577, 98035, -37632, 93317, 84686, -94707, -36159, 11105, -52660, 33803, 16256, -43378, 65449, -70603, 44524, -91181, 67827, 10926, -62141, -37189, 87399, -14771, -6602, -55848, 63562, -20350, 47524, 42485, -56998, -30260, -86556, -62666, -47812, -79475, -11424, 35092, -59752, 10102, -21454, -6045, 15894, 83042, 42997, -42361, 50805, 11721, 64774, 48578, -89463, -91618, 14042, tail", "row-671": "head, -65703, -49163, 21888, -36256, -70293, 77448, 23880, -12194, -2602, 71657, 91360, -15047, 63245, -53822, 98154, -44040, 63253, -74365, -32176, 31982, 85500, 29284, 780, 62305, -72264, -78459, 4631, -99234, 46607, -28399, 10960, -87082, -39494, -89965, -20459, 85798, 50713, 19706, 53657, 88734, -59011, -9464, -4114, -30988, 90637, 25890, -31644, -79978, 59344, -8247, -95366, -31078, 33197, 84049, 69134, -38258, -38694, -22659, -46179, -48360, -45930, 75443, 89439, -19960, tail", "row-672": "head, -38835, -47731, -21235, -95944, -69310, -10883, -31069, -59435, -65241, -90627, 96356, 97371, -65982, -23636, -35788, 70127, 18077, -62586, -69853, 83654, -60074, -7097, -37805, -71388, 2353, 7017, -21927, 8469, -26227, -63195, -29130, 19865, -39705, 55095, 36722, 46512, 55696, -11591, 732, -17705, -99836, -36640, 2973, -94474, 15265, -94990, 42541, 55188, -47844, 92831, 87534, 4840, -47498, -24303, -93351, 69442, 18926, 60194, -20475, -57036, 1171, -98169, -5398, 44050, tail", "row-673": "head, 77527, 86874, -45293, -78009, -73384, -22408, -57288, -17489, -36655, -65200, -13145, 90924, -25460, 87085, 51758, 38372, 7766, 74732, 49560, 11062, 30852, -8208, -27929, 58392, -43253, 93272, -5166, 58525, 87467, -3473, 98950, -36201, -42402, 83181, -44720, 40542, 70758, 39848, 99175, 69886, 70470, -61330, 99903, -22361, 68437, -23873, -40746, 13122, 92356, -6900, 12957, -15409, -68965, 50601, -10781, 12262, -24955, -2923, -55872, -79599, 24853, -83396, 41153, 29144, tail", "row-674": "head, 67803, -52872, -67900, 68657, 64285, -23947, -73390, 14334, 86960, 44777, -35539, 73099, -1761, -27271, -90521, -11660, 41456, 13849, -94550, -62695, 75949, 17282, -85249, 40662, 48292, -31581, -63184, 21416, -40945, 31437, 27739, -97601, 17428, -34950, -22399, 48975, -88859, 9632, -75556, -73520, -6256, 13183, -63061, 47704, 72461, -78529, 86833, 72627, -67361, 41580, -14976, 56542, 59031, -18914, -65858, 24833, -29672, -7284, 91734, 36914, 
98189, -54612, 66431, 1183, tail", "row-675": "head, -51829, 71937, -16403, -23761, 8197, 55827, -63123, -74736, -42516, -22673, 53594, -76393, -67800, -98981, 62456, -66685, 88223, 72536, -12230, 14434, 27287, -46881, 5885, 19981, -57114, 11845, -52551, 20373, 99220, -91398, 4120, -19033, 48787, 75011, -29070, 2052, -746, -25971, -56933, -23886, 21562, 45628, 55788, 35123, 15138, -50949, 44145, -75208, -76983, -6395, 29396, -61640, -37507, -18809, -97850, -73316, 25511, -1749, 95961, 33996, -31195, 80486, 69108, 95719, tail", "row-676": "head, -9818, 60582, 71321, 32126, 95597, 59684, 11419, 94073, -19303, -28547, -21594, -13613, 73684, -80468, 16624, -89330, 25149, -98817, 65958, -76546, 59340, 9531, -10144, 94568, -31505, 81533, -27158, -9454, -49530, 86511, 93571, -19013, -15278, -39013, -38658, 41920, -73829, -37340, 39804, -56639, 65296, -88876, 71779, 42464, -12567, -5173, 1913, 21288, 51297, -73944, -260, 79213, 24077, 20132, 18639, 29300, -4329, 9260, -70507, 21855, -98449, 3442, -40653, 4277, tail", "row-677": "head, -11611, -82565, -50573, -95993, 9472, -29389, 10871, 62928, -54149, -66590, 192, -75451, 95678, -82105, -34540, -10184, 25237, -54752, -31667, -7244, 37103, 14536, -56522, 33722, 90135, 58613, -45639, 74322, -356, -37300, 11626, -79823, 98512, 92591, 8901, -56183, 64527, -18123, 13282, -69547, 73906, -99108, 62108, -54075, 83756, -61132, 15646, 40459, 86073, 21473, -77974, 83461, -48951, 35781, -75098, -64415, 96915, -75455, 33949, 98642, 42214, -72193, 33159, -66660, tail", "row-678": "head, -73571, -80977, -5316, -70253, 36814, -8337, -30966, -68861, 17144, -42089, -70575, 55925, 10855, 18246, 71170, 61561, 54669, 5306, -8949, 77190, -38797, -45165, -59435, 93444, -24873, 3386, 33927, -80289, 86655, -38862, -82800, -14110, -1215, 31791, -53954, 49160, -57999, 81533, 90177, 43000, 47206, -92439, -53292, 30133, -62450, 17447, -32520, -96042, -30931, -34377, -46978, 12641, 16036, 10326, 83335, 57922, -35856, -53386, 59028, -53188, -22655, 14233, -30618, 79330, tail", "row-679": "head, -37605, 29393, -96580, 97549, 52005, 16090, -57044, -89600, -39277, -33958, -30961, -76661, -56417, -71710, -54786, 5994, -91650, 29891, 90221, 63747, -58752, -72652, 63282, -43962, -87166, 2022, -84764, -35904, -92500, -56286, 27049, 78026, 54659, -62003, 65856, -60032, -93687, -38453, -76067, 87112, 99616, -45232, -11423, 23451, 70282, 55306, -30686, -58921, -71164, -98509, 1851, 17659, -83940, -80966, -27334, -28948, -163, -8037, -24407, -53221, 92397, 15770, -1892, 96720, tail", "row-680": "head, -30176, 31382, 61897, 31996, 54042, 32223, -52744, 64480, 33490, 40224, -11941, 5856, 86918, 51142, 84590, 98206, -57067, -7703, 48335, 87430, -18977, -99471, -20966, -83842, -66100, -52932, 46656, 46198, -74874, 52392, -51124, 41437, -70655, -37341, 2196, -45973, -36322, 50132, -29851, 38209, 58479, -32335, 59401, 40837, -68942, -5212, 26850, -26272, 91058, -55747, 30118, 32351, 18412, -14388, 13309, 91506, 11776, -83440, -12173, -71882, 87109, -70309, -33604, 73064, tail", "row-681": "head, 77921, -79183, -84270, 36522, 2691, 68004, -29545, -5920, 16178, 91203, 20537, 83713, -9551, -16739, 56784, 91939, -29862, 29912, -17655, -9656, -99403, -11282, 78811, -61251, -96357, -40970, 33752, -91264, -57245, 83079, -78143, -53853, 87946, 86286, -31893, -30736, -60790, -34586, 77730, -98303, 37346, 41024, -83863, -99375, -83203, 32534, -8307, -64694, 7630, -37037, -38918, 74939, -21945, 89345, 49856, 84038, -76869, -45586, -37931, -74760, -58477, -48022, 67876, 71634, tail", "row-682": "head, 
97126, 24669, 98097, 85542, 62640, 96334, -35695, -409, 76922, 92568, 48953, -54119, 66672, -57061, 43020, 11635, -84930, 46469, 91766, -63731, 40885, -9380, -55326, 32497, -97541, -4610, 57182, 99286, -45513, 41122, -89224, 81679, -54877, -65019, -44249, -83110, 35199, 94222, 86921, 7938, -97344, -77987, 96979, 61476, 8812, -8935, -14057, -2573, 31412, 49394, -8095, -87531, 98758, -31867, -63571, -3710, 69178, -21288, -57233, 23003, -50183, 72683, 49790, -64859, tail", "row-683": "head, 18369, -60832, -37249, 43140, -85878, 14564, 51528, -69448, -81531, 37291, -73308, -59317, -32072, 1404, 31152, -63160, -86796, -55565, -50393, -81932, -59792, -42399, -33535, -93, 86200, 13136, -76699, -60231, 83997, -21781, -36977, -83492, 67423, 33136, -54492, 3474, 94484, 69198, 75225, -26481, -25691, -90231, 16090, 71983, 23133, -44112, 28002, -70719, -41005, 29456, 68503, 30967, 47107, 61543, -71632, 93031, 14177, -69673, 54110, -71985, -8988, 62820, 73912, 24947, tail", "row-684": "head, 28280, -65044, -73163, 87229, 59910, 17996, 9684, 54156, -90442, 96325, -36689, 25017, 51524, 78771, -3358, 42520, -25230, -92461, 78878, 2682, -39644, -2925, 99031, 44064, -65260, -67340, -44829, 7086, 30289, -10565, 75791, 81233, -76818, 91348, -26549, 16764, -71152, -2742, -55149, 50208, 22571, 73937, -2794, -18410, -291, -19847, 6783, 87236, -59871, 35785, -11276, -37269, -38452, -86241, -63121, 42699, -45466, 10442, 34186, -5910, -85267, -4553, 65202, -72982, tail", "row-685": "head, 47171, 6984, -80416, -68757, -24857, 41518, 23053, -87144, -88899, 37500, 16750, 69710, -53979, 75388, 35208, -54269, 11591, -74986, 45026, 89465, 29053, -16638, -72745, -65726, 21073, -87840, -70826, 22311, -42696, 78991, 36183, 93744, -82612, 11304, -32091, 2597, -17893, -96994, -67397, -67505, 12631, 66197, 13454, 62587, 13861, -70776, 43598, 6043, 96797, -42145, -18320, -99358, 91522, 23093, 35532, -63283, -66483, -40876, 78458, 13767, 93209, 74843, -11250, 4108, tail", "row-686": "head, 28243, -57630, 15067, 41443, 94075, -93557, 40710, -9453, -33871, -15066, 13785, 38467, -32588, 98927, -56752, 46005, -48502, 56683, 39552, 80531, 8425, 65602, -3157, -27903, -22008, 85607, -85110, -11044, 8173, 88514, 70572, 71373, 12000, -19799, 46807, -15158, -41139, 14883, -95639, -68124, -31436, -25549, 60069, -98060, 30735, -61327, -13824, 96566, 95154, -2752, 86992, 29957, 46227, -14401, 22907, -4915, 14754, -74063, 86675, 9027, 33173, 80468, -25923, -86043, tail", "row-687": "head, 31871, -66743, 52214, -76327, 46476, 16187, -10122, 60397, 16957, -39921, -91961, 1064, 99941, -3617, -65429, 51177, 1094, 60975, -40752, -94334, 6754, 29003, 74423, -42820, 9294, -93865, -5477, -24940, 88732, 64962, 91378, 69158, 96506, -88415, -18028, -61585, -96375, -16347, 24788, -98019, -4621, -70805, 67965, 380, 48477, -33229, 83445, 50171, 31105, 60402, -35632, 45002, 49159, -85308, -57680, -47195, 14761, 37960, 47808, 59698, 42143, -97621, 72707, -87550, tail", "row-688": "head, -3549, 65097, 83339, -30773, 61632, 10671, 78981, 98535, -76732, 81570, 33666, 97646, 96537, 44047, -1343, -59225, 40307, 61861, -56781, -79124, 89451, -34828, -90379, 45162, -33174, 55748, -16160, 15799, 97425, -9001, -70647, 43501, -91393, -5938, -1709, -73535, -2158, -73113, -26193, 12100, -19368, 17641, 36300, 78321, -30570, -95991, -41078, -94797, -63379, -78781, 69470, 6530, 44737, 24145, 82416, 91922, 96873, 74642, -29718, -81362, 656, -40535, -30699, 60805, tail", "row-689": "head, -49846, 77229, -58315, -13844, 72381, 69914, -32368, -84338, 40234, -6956, 51063, 87724, 
38219, -29421, -19676, 97978, 90737, 41319, 95860, 83594, 83866, 33447, -96743, -96148, 71391, -52572, 71435, 2035, 41732, -95352, 5358, 79128, 6217, 71616, -53858, -41246, -97612, -29858, 72970, 71335, -89073, 85682, -24502, -33408, -97951, -65418, -71534, 83518, 9785, 60871, -30486, 47542, 46203, -44050, 27546, -12823, -62405, 44086, 40956, 88103, -20751, 67641, -90807, -56974, tail", "row-690": "head, -77544, -22968, 21248, 22010, -82926, -2041, -44429, 90234, 74085, 91664, -21747, 29330, 5410, 57295, 52171, -23296, -91968, 26100, 71684, 49165, -15450, -11747, -81223, 9190, -14333, 18197, -52328, 11109, -70249, -97321, 1969, -79648, 48094, -87421, 88918, 43067, -73090, -1267, -71931, -38045, -22265, 87547, -75823, 33042, -87671, 67794, -26320, 99935, -67518, 63950, -40853, 78465, 47263, 95117, -56721, -51370, -43413, 31955, -70236, 69212, -51386, -4916, -31703, 9268, tail", "row-691": "head, 15781, -1457, 27533, -31089, 40629, -1634, -29230, -66954, -91858, 39381, -23793, 18280, 89300, -99742, -75066, -26831, 53085, -66624, 11021, 29065, 15429, -10124, -56498, 39976, 17952, 69936, -61676, -46906, -13499, -8495, 61243, 64734, 92767, -82691, 52883, -77647, -8952, 89186, -21954, -63425, -4011, 92301, 10680, -15172, 79074, -35814, -74967, 13148, -1277, 84798, -99027, -48889, 58069, 74657, -42267, -85923, 23754, -87501, 96189, -64104, 28810, -41526, 25805, 34570, tail", "row-692": "head, -73306, 97679, 85495, -51642, -86327, 16281, 37686, -70333, 80177, -10693, -68723, 51892, 21635, 92061, 58542, -72573, -32794, 27578, -1102, -81676, 52780, -72685, 64343, -23484, 80470, -16091, 72622, 59940, -76481, 1620, -57981, -34166, 98470, -12764, 57549, -26943, 59234, 651, 80349, -69072, 83406, -1806, -37742, 88466, -74479, 4913, 13510, 32110, 48684, -18667, -42986, 54324, -36607, -57070, -71956, 30721, -42373, 70890, 15381, -70691, -20222, 77419, -46533, -85293, tail", "row-693": "head, -83619, 97053, 35686, 8622, -75634, -90705, -6318, 84517, -28356, 15250, -65111, -67320, -10315, 78983, -15096, 89468, -21827, 21656, -59605, 15332, 97759, -26006, 47299, 51678, -68703, 53761, 83191, -65142, -23887, 57096, -14972, -83195, 2047, 94102, 78672, -82517, -4324, -29707, -76165, -3986, 10544, 6997, -92030, -20998, 46641, -36406, 35610, -99589, 44875, -68788, 38809, 69938, 29627, -30882, 87413, -49827, -78204, 46805, 41052, 91378, 90132, -68308, -238, -80665, tail", "row-694": "head, 52190, -86676, -53058, -37811, -518, -33129, -73641, -4546, 68116, 70416, 12996, -10567, 67670, -39989, 53225, 82542, -78841, 14810, -21599, 15291, 69502, -73230, 83502, 42186, -20916, 92983, 95247, -43162, -7625, 49822, 33310, 87280, -77993, -41788, -12726, -27739, 40674, -32875, 87232, -5144, -8568, -18089, -31228, -71784, 51975, 92539, -21312, -54588, -12402, 25353, -36381, 89852, -74180, -1761, -39644, -12841, -21287, -16549, -60180, -68658, 93927, 83264, -54456, -61475, tail", "row-695": "head, -67781, 82077, 76589, -45122, 28125, -40102, 37243, -8278, -88701, -7134, 17648, -90196, 48103, 23628, -79758, 8002, -53842, -35768, -11311, 11994, 81053, 13682, 30102, -32385, 49639, 35281, 30665, -34836, -28734, 74961, 9381, 93170, 24485, -46798, 66108, -46579, 8500, 12739, -28043, 21820, 47042, -92713, 32471, -55307, -72054, -76136, 27303, 64573, 15700, 85469, -54181, 91531, -46459, -53817, 69593, -71729, 22785, -68412, -43113, -33223, 52724, -63437, -54213, -47294, tail", "row-696": "head, -42725, -68101, -6732, -74522, -39223, 98394, -92863, 45326, 23827, -14967, 54395, -85476, 42476, -66475, 73951, -9777, -24498, 31855, -12695, 
20840, 78202, -6341, -89926, -57006, -12349, 79704, 80574, 23345, 79767, -64736, 94860, 58550, 34710, -73117, -2439, -72916, 79611, 60652, -37475, 76272, 75224, 94609, 34329, 11666, -95843, 54542, 91719, -59549, 12648, 15925, -11693, -6500, 87380, 27298, -35769, -1129, -4592, -95351, 65427, -44411, -50424, -95487, -85028, -79515, tail", "row-697": "head, -15300, -88105, 86073, -48804, 54613, 91160, -43109, 35847, 92530, 65920, 30766, -20789, 98237, 77333, -83654, -74630, 29656, 7282, -88379, -72944, 82276, -87731, 81904, -85838, 12124, -62847, -98556, 71327, 58353, 51181, 78128, 72312, 31734, 78376, 11051, 16273, 802, 60826, 17032, -45642, 43483, 2728, -77031, -19531, 38594, -73150, -20767, -55853, -11744, -10802, 60278, 37221, 90873, -29402, 42394, 18831, 99815, 12271, -82714, -71901, 51490, -51496, -1189, -96762, tail", "row-698": "head, 7605, -82250, 11051, -75023, 72336, -45311, 17655, 66568, -12297, 84554, -61268, -5649, 9930, 92146, 40085, -31628, 32067, -34783, -30727, -8579, -26581, -17947, -29546, -46430, -43353, 6637, -20602, 18533, -62627, -17125, -36701, 34594, 13533, -42452, -78083, -54920, -59380, 47766, -64470, 55980, 17491, 40824, -29929, 67109, -32397, -39732, 96140, 6158, 40874, -51635, 5658, -10243, 16583, 87918, -89674, -21828, -5665, -57157, -37917, 58292, 28293, 54264, 84506, -74487, tail", "row-699": "head, -57877, 32558, -84598, 34917, -49964, 62712, 28592, 72827, -47381, 68834, 65742, -71826, -96254, -93100, 6949, -32467, 59668, 48180, 74208, 46279, 88809, -73270, 7968, 54074, -29828, 61450, 57650, 93213, 5652, -83832, -65378, 16478, -73434, -93074, 79858, 16025, -6403, -90205, 37743, -57875, -22782, 7309, 35018, -81645, 30888, -39483, 59267, 14700, -92904, -20913, 64582, -42438, 82912, -87402, -80056, -85988, 82437, -93884, 90658, 88209, -58136, -75378, -83173, 91892, tail", "row-700": "head, -52659, -70870, 29805, 37014, 12779, -88960, 97047, 46957, 93859, -17877, 28165, 95029, -48839, 41342, -74719, -70312, -72150, -95985, -50641, -3739, 16069, -13983, -14645, 6033, 69578, 58986, -37273, -40318, 94544, -95986, 4347, 91378, -53201, 37288, 73262, 63402, -97969, 91110, 24634, 72105, -9882, -79758, -24335, -32497, -24299, 74044, -39853, 21695, -19422, -83278, 18906, 76592, 81578, 41771, -73925, -16901, -53472, 36053, 41961, -63470, -92571, 37673, 91587, -52949, tail", "row-701": "head, -26265, 3971, 42004, -68298, -64400, -87523, 49914, 46437, -27954, 61808, 56961, -1508, 59097, 60570, 93714, 26640, 74602, -74283, -77621, -29427, -69034, -82814, 73698, 97422, -87319, 84497, 30447, -75716, 65316, -54619, -34523, -73314, 35972, 41748, -63491, 58757, -31422, 37972, 49033, -97741, 79884, 67311, 76665, -25110, -94669, 74041, 33367, 27267, -82817, 35545, 51625, -79420, 18614, -63647, -8506, 20280, -55321, 80258, 43415, 38370, -69650, 8154, -56633, -9476, tail", "row-702": "head, -60652, -52487, 30087, -77928, 10864, 61258, 37018, 22580, 11560, -3502, 62651, 27362, -72249, -58038, -16531, -63305, 6671, 81355, -25807, 34502, 18764, -26229, 91025, -97890, -26791, -17816, -44672, 3447, 1246, 24822, -99190, 54927, 11244, -31426, 36061, 75823, -19375, 32824, 61964, -92690, -27967, -70433, -56772, -85882, -11259, -20823, 91940, -61437, 19686, 86051, -90276, -58857, 30751, 365, 57009, -55226, -63256, -88006, 57942, 98822, -54743, -84968, -86930, -81509, tail", "row-703": "head, -80555, 47268, 58962, 4070, -52183, -53466, -64450, -71834, -84, 24185, -96799, -55236, -78110, -98324, -62922, 99654, -6738, -38628, -62060, 58324, -56193, 95220, 21763, -16032, 96627, -87212, -98649, 
-30735, -89598, -8785, 13882, -93183, 80936, 22205, 33529, 51052, -62143, 39552, 91666, 22418, -48913, -42376, -59399, 69388, 47290, -21615, 93462, 41312, -96167, 98105, 42247, -28228, 77402, 47318, 37201, 80344, -80336, -17751, 83992, -63486, 48063, 26551, -11370, -24158, tail", "row-704": "head, 11485, -17453, 17689, 13576, -22040, 99171, -70254, -78843, -42884, 47710, 8826, 29191, 81690, -30779, 93409, 58004, 57622, -51874, 51041, -55393, 25038, -10402, -61290, 21001, 25613, -68160, -32536, 19009, 18302, 5929, 77576, 98751, -500, 66362, -82447, 15248, -25331, -56976, -81355, -15438, 84646, -20327, 35156, 50734, -40161, -23074, 72263, 70896, -74624, -94247, 70702, 9488, -48596, 9863, 92584, -29967, -92924, 99225, 12827, -40336, -79862, -65252, 86163, -46981, tail", "row-705": "head, -6389, -76640, 6894, 44302, -23760, 32177, -59402, -12419, 56317, 86389, -8163, -13198, -2420, 29407, 68764, 36694, 99110, 48258, -30438, -65594, -81767, -28236, 38571, -89605, -95088, 26384, 39648, 98866, -58230, -54289, -60430, 70529, 39280, -7162, -63971, 24603, 66094, -28555, 61671, -97316, 93177, -85185, -50051, -91490, -83530, 27786, 25532, 53348, 64645, 13155, -11480, -13571, -45452, 38890, -74574, 42022, -6615, 33323, -34812, 80615, 93683, 50519, -25816, -72978, tail", "row-706": "head, 57587, -58247, -24038, -41664, -90049, -16918, 99023, -99691, -39505, -32220, -82004, 87329, -90516, 3813, -77983, -86257, 41181, -82227, -23555, 37648, 99126, -37785, -78694, 80846, 47578, 81933, -79663, -81385, -44694, 11944, -37557, 69101, -57537, 20479, 56844, -12865, -2079, 32589, -56904, 96263, -71798, 47994, 41848, 33121, 60083, 58244, 52084, 18674, -8783, -52866, 25926, -62760, 37522, 90541, -8087, -15103, -49716, 66992, -8017, -6773, -93703, 70981, 78640, -24768, tail", "row-707": "head, 36827, 23227, -56821, 67006, -79968, -27295, 78436, 9765, 36585, -8055, 73494, -43918, 74535, 82495, 81495, 64020, 72737, 31297, 87089, -72294, 15299, -4223, 28653, 35341, 24344, 83621, -97419, -37440, -70197, 8770, 18648, -68527, -51112, 89172, 95991, -22632, -35853, -68782, 19700, 26355, 48356, 28803, 33257, 68942, 90323, -14400, -23761, 52592, -21743, 77378, 85855, 70230, -58968, 74475, 99514, 70259, -98070, 33910, -33524, 94715, 77488, -98263, 99220, 15435, tail", "row-708": "head, -95843, 11861, -19375, 7676, 63449, 78929, -55120, 34268, 42350, -34173, -43024, 35279, 9353, 36488, -66609, 33888, 17542, -22276, -81366, -6752, -35279, 37822, -24922, 97566, -77916, 62434, -53302, 32685, -57575, -63015, 89821, 73813, -18024, -30759, 98479, 85762, 2440, -70732, -71603, 71184, 67507, -18270, 71571, 73439, 1634, -73873, -96232, 9016, 31302, -60751, -61994, 93433, -52747, 48235, 60828, 11856, -66342, -53331, -94340, -233, 54450, -48448, -3100, -34218, tail", "row-709": "head, -52348, 70862, -94933, -79586, 10656, 44540, 36630, 74152, 10708, -60142, 60061, 89371, 73595, 74442, 91278, 43374, -87209, 77955, -12128, -55002, -90102, 88647, -66019, 60325, 55054, 31009, -46089, -12312, 54264, -1015, 60317, 55803, 4663, 93673, 92923, 80213, 60670, 31185, -19664, 14549, -84392, -93570, -10527, -89449, 44506, 72553, 99500, -47010, 79224, -22338, -77707, -33125, 20585, 72508, -80430, -89348, 6437, 18302, -15563, 29977, -79424, 18751, 78054, 14333, tail", "row-710": "head, -3965, 63572, 64716, 1335, 8432, -93170, 98056, 93347, -36273, -90518, 95826, -22191, -7004, 53303, 48834, 123, 63215, 57846, -71483, -91093, -61004, -1831, 63130, 20395, -68023, -88191, 80191, 88616, 49870, 88879, -1186, -80787, 98733, 38156, -1953, 71746, 12236, -60128, 
-85879, -71344, -6298, -90552, -82249, -72415, -67625, 32940, 32528, 99310, 32053, 26596, -93997, -71107, -3188, -96118, -35089, 87188, -37386, -40272, 65128, -27038, -12706, 39628, -35009, 10643, tail", "row-711": "head, 86872, -43990, -60065, -85294, 15730, -70758, -47533, 14647, 58351, -87479, 27026, -4911, -80039, -48897, -69278, -23065, 37692, -39541, -60521, 39637, -6282, 21859, 95173, -65995, -30278, -16817, 14045, 41945, -75565, 77682, 2442, -77419, -42776, -31967, 98564, 97226, -31133, -72439, 97472, -29133, 82896, -47845, -99043, -28018, -22857, -15687, -45024, 9071, -16113, 38030, -26776, -40965, 80405, 2701, -16116, -86769, -82114, 57851, -66293, 9354, 6945, -44363, -5487, 29327, tail", "row-712": "head, 16865, 28640, -64283, 21449, 17193, -75233, -25593, 80994, -15743, -17954, -14895, 39747, -82474, 51161, 87197, -8213, 91085, -64742, -14196, -26675, -93597, 47734, -98763, -75852, 74107, -88854, 73112, -67727, -17116, -72170, -84301, -72298, 8607, 83517, 26777, 11986, 11291, 13261, 25455, 81077, -78048, -62609, -54742, -71731, -19314, 37518, 86675, 28900, -55175, -27625, -54908, 75782, -16418, 82849, 19091, -32150, 11766, -21274, 73321, 25074, -50102, -52889, -8677, 42899, tail", "row-713": "head, -54164, -61375, -10431, 71509, 12540, -43333, 98588, -99207, 25988, 6513, 98568, 69620, 30357, -20286, -7779, 56976, 52179, 42082, 42599, 36671, 97610, 34731, -63280, 49158, -22605, 95534, 47429, 28747, 3985, -56183, 438, 89855, -4746, 70829, 80251, -96271, -78896, -57990, -98943, 44592, -76853, 36092, 1924, -69052, -51992, -24500, -5136, -43669, 38642, -63638, -34000, 64075, -22269, 69221, -15500, 16826, 13307, -97082, -66675, 99891, -58487, -43335, 9092, 99501, tail", "row-714": "head, -82899, -35158, 70873, -94470, 87817, -8585, 56350, 47232, 85004, -28293, 59674, -12417, 53220, -11285, -46754, 55790, 87005, -51526, -92440, -45343, -49034, 98196, 46198, -53382, 74690, 32445, 1962, 75443, 49633, -39956, 19337, -48102, -14792, -75944, -9427, -4117, -85697, -96668, -80792, 84747, -98215, -40877, 15568, -36901, -32868, -6890, 1474, -53603, 73162, 82986, 73051, -98804, -41291, -29552, -31461, 40857, 32109, 25025, 73903, 92487, 98713, -42057, 65061, -48555, tail", "row-715": "head, 8128, 50954, -11685, 78598, -98100, -58905, 14686, -99695, 76577, 48751, 93988, -11105, -77160, -12213, 76, -3807, -91780, -89214, -59930, 46446, -29347, -28568, 24692, -88047, 85559, 62603, 91486, 92234, -39554, -9882, 62281, 71764, 64045, -75921, 86296, 72065, 44395, 51722, -50274, 57139, -39913, -2566, -19863, -97781, -89946, -42152, 17450, 89995, 85266, 56663, 22458, -93447, 62166, -7114, -58740, 57238, -13449, 94978, -44214, -39231, 48993, -11340, -80809, 92568, tail", "row-716": "head, 91645, -71378, 70754, 39580, -61034, 59365, 22524, -49155, 65391, -26825, 80138, -7566, -72030, -5663, -58788, -18620, 81318, 95326, -41821, 88295, 83186, 35459, -54526, -64454, -60363, 2541, -45828, 68047, -73122, -69028, 56650, 7205, 85510, -17162, 64086, -18138, 74085, 53737, 73026, 28483, -83356, -55695, -6048, 45154, -81440, -66909, -12698, 84325, -13030, -89011, -17795, -83359, -45935, 45946, 91711, 94255, -32051, -13660, -85671, 1455, -69305, 96437, 97524, 79577, tail", "row-717": "head, 14072, -51928, 11476, -8741, -19564, -74887, 2871, 59615, 59459, 41694, -32111, 96650, 41998, -23949, 20738, -62595, -33144, -46173, 32300, 71163, 31375, 42196, -9982, -1309, 27240, 25696, 19501, 22704, -25064, 56010, -23841, 86910, -80393, 80538, 76678, -57879, 39415, 86112, -86016, 5634, 66502, 19520, -73570, 10462, 44410, 7795, 
6891, -69396, -33060, 40654, 43387, 28976, 9199, -83656, 43898, -64822, 24182, 74683, -93253, -65164, -64508, -78828, 45909, 89078, tail", "row-718": "head, 87892, 3396, 51432, -99775, 21743, 60871, 96330, -22912, -35248, -17471, -27780, -17326, 22614, 55058, -54973, 72931, -7591, 25581, 86259, -68874, -2803, -49061, 40691, -68552, -17550, 89031, -39713, 3451, -21795, 23871, -25907, 16398, -52077, 80732, 78036, 99621, 16730, -54461, -46499, 88778, 95289, -34986, 21615, -6219, -89752, 86128, 47959, 26617, -53639, 81519, 65468, -60113, -6715, 96512, 51389, 60425, -30022, -46860, 49102, -45285, 22421, -44148, 52984, -21365, tail", "row-719": "head, 55454, 25122, -47041, -70587, 63600, 3080, 55214, -67461, -57848, 73783, -55625, 76543, 16483, -100000, 3098, -66481, -67224, -15103, 78730, -16534, 2508, -21329, 10265, -9982, 80152, 72252, -89980, 92973, -57629, -30645, 2050, -83963, -36079, 87599, 84904, -98530, 54883, -94905, 95084, 78038, -65503, -34997, 92894, 71215, -54739, 43481, -51195, -32660, 22343, -45951, 3844, -58055, -2036, -71251, 896, -86355, -45742, -64454, 27021, 32797, -35245, 31035, 96855, 33022, tail", "row-720": "head, -32681, 39221, -22771, 62387, -76317, -28946, -9324, 27183, -4128, 19132, -49179, 61575, 22146, -74158, 22350, -16027, -85288, 31883, -82016, 28382, 58415, 2775, -37927, -5716, 86887, -34899, -70202, -83139, 45442, 5840, -1228, -77465, -65829, 91419, -96596, 46810, 83056, 60020, -27682, -71539, -3206, 12218, -72933, -81536, 95694, 84851, 70878, -63676, 34461, -1183, 34277, 54151, -64733, -93020, -86144, 73995, -86604, 65802, -57386, -90126, 555, -40691, 6688, 67678, tail", "row-721": "head, 71504, -96354, -40416, 56071, 19994, 5802, -19960, -21104, 85300, -39053, 43124, -22612, -99640, 11745, -78146, 705, -86402, 29400, 26346, 98531, -97100, -57636, -47663, 60265, -52301, 20051, 28230, 64219, 91751, 3150, 61435, -69611, 79612, 44525, -4074, -5317, -6886, -28290, -35264, 3224, -57108, -94372, -79194, -80579, 39135, -12373, 92378, -36026, -75102, -87038, -13647, 65465, 28684, -16783, 92949, 70628, -19674, 47615, -85862, -11475, -63390, 61935, -71658, 85993, tail", "row-722": "head, 5754, -47103, 88810, -83539, 19796, 53829, -84795, -27130, 31694, -23697, 75668, 10818, -67059, 80424, -12763, 86367, 76637, -57100, 93391, -27398, 18650, -8281, 93147, 4107, 2876, 1725, -27585, 13284, -28994, -63476, -28745, -87884, -49870, -28638, -5306, -7427, -47315, 4844, -53038, 72730, 31382, -60588, 74238, -31212, 44608, 91292, 39304, -18511, -92837, 53673, 7042, -1411, -71488, 14923, 27274, -66005, 80752, 79511, 28016, -30117, 4688, -9380, 11972, -53721, tail", "row-723": "head, -27381, 26851, 7402, -42657, -76934, -82674, -34002, 68160, -53820, -64178, -16628, 71309, 91868, 24483, -50824, -15785, 69929, 43670, -69039, -74274, -6957, -78118, -26567, -26759, -60576, -37121, -87600, -51557, 70726, -50654, 54823, 89507, 10704, -46590, 78596, -78633, -86966, -28228, 98639, 27140, 36819, 92262, -443, -51737, -7198, 79638, 81074, -14319, -64239, 82567, 72474, -32450, 82284, -76430, 5031, -28779, 8579, -80783, -1993, 13362, -12395, -53015, 18156, 18101, tail", "row-724": "head, -37492, 36905, -30193, 90619, 2233, -91439, 53220, 60852, -34060, 56217, -71647, 5203, -43911, -37100, -7217, 85252, 2966, -62304, -63773, 84032, -56800, -60980, -2514, 1928, 60985, -64398, -27807, 59919, 60110, 25989, 57174, -19936, -26475, 81019, -32568, 35205, -48545, 45285, -1576, 66669, 39295, 37337, 47122, -37802, 68948, 8870, -84267, 50162, -89389, 20492, -83256, -4002, 51888, -67620, -58960, 14807, 
-285, -64380, 67147, -68460, -50308, 46225, 85244, 45712, tail", "row-725": "head, 78182, -97981, -34655, -52279, 43585, 89953, -78917, 54068, 8353, 92992, 64114, -21452, 2457, 36402, -71788, -55632, -20656, 38915, -25966, 78119, 39526, -39617, -86777, 8984, -89116, -48989, 33327, -62371, -90261, -9474, -73882, -92020, -65255, -94818, -45779, -78709, 18302, -94793, 45009, 23846, 68317, -7761, 60922, 5309, 34243, -17526, 15745, 50129, 83358, -58967, 58823, 92817, -21307, 6, -99058, 18445, -25615, -96880, -96331, -65908, 28004, 15199, -63222, -56042, tail", "row-726": "head, -88741, -81003, 16223, 32206, 5821, -48109, 68287, 1745, -65036, 91302, 24284, 54089, 13825, 20533, -1365, 59953, 20895, -61314, 87209, -7911, 42594, 8177, -45660, -49747, 71595, -74124, 26614, 94691, 78888, -1480, 4891, 66683, 31293, 19223, -30895, -13449, 33710, -99773, -34571, 53918, -26149, 55611, -98286, 1362, 28676, -44655, 24006, -32171, 55580, -9203, -48904, -73759, -52887, 57738, 60426, -53310, 15356, 3834, 46234, -22773, 64852, 68500, 59809, 92841, tail", "row-727": "head, -57439, 62336, 50398, 21669, -12140, 46562, -39971, 4733, -26378, 99937, -47621, -26699, -91719, -2376, -71569, -9120, 46029, 85718, -88203, -34671, 64353, -90145, -65736, -83922, -91548, -14054, 97492, 86447, -40636, -12410, -71124, 40768, 60444, -49377, 97546, 223, 60885, 90019, -99691, 41062, -11575, 68819, 313, -93267, 92111, 20879, 76688, 73826, 85070, 44249, 7774, 57845, 70138, 45405, -87405, -55970, -66373, 27471, 32911, 1945, -73303, 56643, -69783, 70109, tail", "row-728": "head, -45128, -87965, 26101, 69969, -56602, -45085, 46812, 8036, 9591, 48003, 18375, -63276, -42267, 10039, -65774, 70391, -49331, 84509, 23738, 26113, -61494, -87843, 64015, 54015, -64090, 34623, 60094, 90280, 98062, 27512, -7811, 654, 61613, 63254, -69506, -24026, -26778, -14036, 89193, 367, 7060, 9591, 1692, -52537, -18265, 75125, -74712, 62640, -65793, -63455, 25186, -77620, -5355, 80886, 98490, -70970, -85754, 11191, 28823, 15274, 27749, 786, 79393, 48809, tail", "row-729": "head, 34122, 9419, -6919, 94314, -14297, -7168, 93621, 30227, -15251, -36657, 95750, -694, -97337, 81260, 66783, 18375, 3479, 22300, -64436, -62219, -92047, 41789, -84856, -8571, -59057, 70512, 16143, -86747, -18598, -63895, 30180, -88817, 70471, -22243, -60313, 59273, 99209, 23840, -95209, 5287, 42309, -18837, -61918, 14671, 46297, 91527, 82383, -93365, 68270, 17499, 81967, 2798, -49170, 18526, -9987, -63256, -25403, -97416, 66984, 28065, -73517, -16862, 94119, 87844, tail", "row-730": "head, 92075, -36351, 60760, 12269, 19110, 248, 39908, -52931, -51757, 13725, 96483, -12255, 13674, 46788, -28070, 60233, 80758, -36934, 33655, -70897, 24085, -84253, 82034, -62269, 14971, -54682, -52523, 17041, 28785, -90121, 7886, 26851, 86900, -51451, 85223, -69332, 46811, 38086, 40654, 29022, 6332, 79191, -10652, 43943, 96353, 76916, -49617, 76819, 67688, -64966, 71828, 17643, -3386, 8299, 90008, -30967, -87242, -48379, -63874, -10256, 30207, 41803, 6757, -71035, tail", "row-731": "head, -62342, 90041, 93386, 75106, -59809, -15573, -44610, -38443, -22859, -79898, 30478, -23463, 4179, -38238, -25808, 90714, 13686, 34646, 82644, -57835, 6370, -30796, 36257, 60129, -9091, -91193, 94201, 52830, -50976, -76671, 92532, 27055, 84824, -85906, -97576, 38480, -64938, -61123, -11159, -44056, -94426, 7463, 31574, 12811, 19271, 23630, -94927, 61549, 25670, -52139, 87987, -30672, -68763, -5103, 32636, 19167, -57782, 35902, 30611, 64118, 26750, 16878, 70125, -23161, tail", "row-732": "head, 42492, 58110, 82056, 
-65587, 52232, 95614, -47485, -95671, -36593, 99059, -22275, 20075, 86108, -63646, -80313, -87075, 85799, 51950, -62160, -70211, 52375, -48992, -14999, -1654, 52990, -20889, 91314, 67773, 48805, 39258, 3015, 17780, -96567, -93254, -6875, 96910, 89198, -96327, -80187, 39588, -10698, 67227, 23465, -62164, 23215, 12216, -78596, 29930, -70667, -5300, -94210, 75566, -45566, 31990, -71491, -59488, -80042, -48115, -34126, 60037, 66777, 55995, 52970, 68379, tail", "row-733": "head, 90111, -86068, 59097, 22522, 96760, -96728, -47044, -17106, 53544, -97247, -64280, 71997, -87412, 35123, -16422, -97939, -11936, 27737, 23599, 83858, -49185, -92719, -66455, 62919, -85571, -51308, -40003, 98585, 13335, -49257, -66801, -99095, -61197, -51104, 49720, -37335, 57990, 42353, 91134, 91740, 35794, -75298, -18503, -20861, -60207, 95817, 61229, 87052, 19123, -38954, -33717, 10218, 32784, -72065, -95992, -12147, -39586, 90843, -42869, 14338, -63364, -78318, -14291, -62685, tail", "row-734": "head, -34873, -82126, -68682, -87890, 29032, 91518, -45344, 44863, -89703, -11293, 25875, 16882, -65447, 63909, -89993, 6610, 70450, 5411, 70733, 19094, -306, 83585, 26029, -30121, -35173, -69497, 95516, 15313, 16747, -63261, 5522, -83720, 2035, -27939, 67908, 81805, -94669, 43997, 9182, -61080, -46342, 48894, -11721, 80752, 20748, -14784, 48764, 44276, 47253, -73786, 68464, 52867, -76893, 35079, 44348, 99789, -51848, -36186, 49845, 99214, 29785, 40425, 6819, -31926, tail", "row-735": "head, 53572, -68842, 16200, 3936, 44196, 41334, 40291, -4306, 3664, -59669, -80645, -51150, 56674, -89249, 90763, -14890, 1406, -60988, 4088, -72277, 72781, -70130, -81086, 97457, 15727, -73803, -43367, -72159, -99836, -24977, -38962, 86737, 71319, -35029, 26357, 35968, 66312, 13715, -35497, -55683, -826, -13417, 50272, -22949, -57673, -60238, 17460, -57797, -16536, -17266, -463, 82812, 99259, -53859, -12578, 36886, 48083, -17773, -68738, 18757, 27465, 86709, 64465, -37745, tail", "row-736": "head, -89984, -9651, -95781, -27733, 67224, -38935, 42504, 57204, 59741, -79725, 30719, 31573, 61452, 14598, 52285, 51109, 72197, 98736, -20689, -49736, 148, 93189, 55885, -76054, 4663, -42872, -18024, 28138, 21766, -26456, 23143, -84447, -2131, -90652, 86559, 9470, 55947, 11405, 53097, 35680, -81517, 84448, 3729, 19067, 76445, -47209, 69797, 27814, -38751, -83284, 54956, -29919, 55650, 33258, -99117, -63099, 22735, 51307, 64569, 27217, -28119, -87053, -16112, -55707, tail", "row-737": "head, -97933, 70940, -47254, 53111, -88558, -83182, -97664, -86754, 87126, -75969, 59012, 62525, 51262, -61581, 90779, -41723, 9016, -19706, -14238, 1481, 11238, -185, 21062, -83924, 13761, 25145, -67883, 6785, 50741, -48168, 45746, 9503, 52711, 58354, -51219, 87109, 25219, -57182, -34555, 36191, -60259, -23438, -78516, -68334, 76054, -62206, -89608, 9366, -27395, 46210, -56200, 2948, -14311, -68538, -78452, -15215, 37570, 32789, 78039, -34853, 66644, -60386, 66679, 15167, tail", "row-738": "head, -73259, -91334, 6536, -98888, -93708, 56023, 76610, 47703, 3792, 52478, -42826, 210, -46224, -17885, -93904, -73282, 37279, 14384, 46641, 2874, -62451, -47367, -6505, 17817, 93801, 45013, 33270, -13106, -2316, 80770, -64865, 22786, 24631, 52685, -61527, -85630, -9862, -97635, -64607, 99384, -76757, 21681, 50533, 54734, -56188, 86560, -51746, 28522, -36099, 18364, 23616, 71149, -13438, 41236, -86081, 93788, -55120, 42084, 77244, -86847, 17529, 62493, -78875, -97779, tail", "row-739": "head, -16760, 34500, -68875, 83421, 30066, 61239, -63152, 1639, 21559, 1708, -50641, -93055, 
-33118, -12375, 90589, 30527, -52363, 10811, 58526, 43484, 45443, -89810, 38739, -52663, 42492, 88299, -25849, 63644, 85916, -53560, -11160, 3361, -32942, 63509, -62570, 59390, 66958, -97020, 18616, -55229, 1597, -42492, 39794, -41102, -30, -17960, 95104, 89158, 40028, -96378, 89697, -31507, 19044, -84094, -10861, 42509, 81894, 58693, 62982, -58800, 96631, 64750, 54182, -18211, tail", "row-740": "head, 54410, 11142, -60476, -90230, 46613, 99085, 46387, -96564, -40137, -49675, -54731, 37783, 7613, 27902, -60305, 66726, -63654, -21576, -41723, 94255, -54823, -61701, 67785, -87805, 78051, -772, -52885, -77085, -61356, 314, 55325, 19958, 95798, 65317, 97929, 5990, -82690, -98027, -45200, -41544, -14812, 43907, -83279, -19187, -6160, -53180, -19261, -23144, 78807, 25780, -92090, 39337, -22983, -78786, 98666, -7241, 65497, -72545, -72383, 65408, 12817, 61386, -93418, 63089, tail", "row-741": "head, 36856, -24236, 93797, 77430, 55208, -98952, 52446, 53515, -57274, -91417, -61879, -21733, -36561, -17318, 35126, -40889, 7110, -41298, -36956, -52722, 87276, 51077, -74171, -85746, 2108, 41043, -63343, 42385, -1371, -81900, -8845, -16430, -77965, -16295, 43394, 78440, 5390, 68117, -85500, 37492, -88020, 27212, -15930, 26443, 96610, -29763, 29346, 6333, -77679, -45007, 37036, 57536, 15847, -96721, 80312, 54207, -27257, -36815, -98448, 58189, -67263, -92640, -26429, 54381, tail", "row-742": "head, 77229, 49968, 99977, 29140, -27143, -13021, -49342, 93360, 27646, -62161, -93314, -14439, -89385, 87879, 52653, 3005, -9714, 14878, 92786, -93913, -21981, -48581, 81537, -43175, 94521, -38234, -29731, 96296, 8426, -39555, 98551, -61576, 53316, 50919, -69298, -11086, -75494, 81434, -90998, -67458, 91805, -43977, -59504, 39260, 25445, 92390, 51590, 69402, -58720, 69109, 25342, -70480, -8616, -69091, -26937, -62532, -15372, 93068, -47067, 46617, -95125, 46642, -95858, 8977, tail", "row-743": "head, 75936, 58680, -12113, 44861, 34701, 83128, -10371, -2695, 72711, -76352, -84353, 23565, 11532, -10396, 33033, -81966, 14674, 27525, -14656, -30090, -29308, -77168, 41314, 53346, -59821, 75471, -61351, 47729, 94675, 37231, 55618, 68227, 98931, 59053, 2659, -6336, 39819, -14385, 25292, -32557, -35256, 57521, -78150, 67248, 46784, -92081, -38872, -31282, 38951, -97001, 98990, -7918, -77765, -99052, -81457, -27054, 53709, -69906, 9313, 72407, -93882, 21812, 87265, -54823, tail", "row-744": "head, 44595, -34282, 31261, 57798, 80799, 27047, -80639, 37034, 73200, 1952, 97489, -23008, -36039, -830, 85884, 14128, -54221, 34395, -19447, -43542, -38533, 33189, 59679, -84657, 10420, -43211, 94243, -45527, 32723, -29123, -23277, -98601, 12937, 26821, -14562, 88630, 20314, -36458, -70045, 85327, -31112, -98649, 24097, 15030, -30274, 73867, -65388, 29565, -51145, 47389, 42128, -24806, 33531, 10072, -21473, 14822, 46931, 24473, 71948, 64593, 7570, -72756, -12340, 97416, tail", "row-745": "head, 47405, 56634, 88885, -64211, 37811, 93885, 33757, 15244, -8684, -80910, -99662, -48803, -11670, -61777, 84156, 94453, -67382, 33516, 86182, -4486, 82494, 38028, -80258, -89334, -25388, -99739, 56717, 45716, 45077, -26425, -18815, -17770, -32442, 2056, -91104, -75295, -53897, -19139, 34704, 94353, 55170, 70195, -49041, -55757, 385, 38102, -30450, 59750, 68389, 92794, -8884, -1024, -76785, 1124, -13055, -87846, 56464, 64708, 89484, -36104, -2517, -98068, 65717, -98594, tail", "row-746": "head, -38698, -26304, 686, -1307, 92250, -83651, -19366, 97599, 75147, -45681, -22971, -4204, 46517, 52374, -58946, 43765, 47645, -97572, 93022, 60587, 66025, 
31364, -7079, -76333, -70341, 26923, 97233, -88030, -6958, -52758, -2484, 90630, 24385, 75650, 67645, -10697, 93164, 37855, -3364, 34816, 28553, -97509, -21717, -89208, -67627, 72726, -11536, -86290, 74718, -57930, 16688, -75973, 98012, -75977, 72891, -61847, 22535, 24102, 24109, 77968, -41012, -77716, 71741, 82581, tail", "row-747": "head, -91716, -48943, -29904, -69882, 59645, -30319, -16682, 40843, -76825, -56551, -87147, 65301, 82091, 3740, -55880, -21100, 540, 45189, -40433, 61728, 16458, -33380, -103, -1495, 51565, -77100, -47625, -41566, 39943, 96372, 43771, 4793, -69125, 88496, -54018, -25732, -18674, -55417, 24358, -32852, 43645, 37551, 44828, 52230, 284, 23234, -46815, -51941, 37081, -21861, 51710, 12047, -86665, 66990, -66748, 37007, -84017, -70036, -3208, -60398, -233, -6777, -41426, 30870, tail", "row-748": "head, 24304, 43426, 48918, -112, -81428, 82026, 79720, -49048, -52872, -89648, -78579, -81638, 68487, -58395, -8047, -46366, 56878, -48929, -55662, 57812, -86066, 34284, 77836, -68914, -14076, -93256, -49782, 30449, 23213, 19663, -98162, -30774, 96729, 37043, 34966, -30880, -7714, 53384, 55050, -50687, 88425, -71454, 48174, -70360, 24068, 77954, -38027, -6827, -91762, -95968, -25284, 29129, -75419, -56065, -67751, 4447, -51094, 24645, -14281, -72462, 48289, -46234, -387, -85792, tail", "row-749": "head, 89260, 83850, 52950, -71217, 55558, -62989, -53660, -3001, 29301, -5199, 29557, -89265, 32111, -36036, -38338, -73806, -19689, 25249, -78649, -57403, 83260, -38659, -4894, -3065, -1219, -27118, -42390, 84313, 5590, 43422, -4878, 52161, -39133, 45408, -81401, -32828, -65493, -60206, -19847, -72034, 9038, -74562, -50962, -85223, -68208, 58463, -20688, 60501, 47284, -10398, 22003, 97148, 76186, -51466, -8808, 12351, -68439, -33591, 78480, 14884, -62582, 25875, 89051, 9238, tail", "row-750": "head, 76765, -54987, -75820, 88369, -38080, 45016, -14103, 65918, 39314, -79579, -17564, -93123, 17853, -75281, 80463, 46031, 482, -10317, 719, 3795, 54425, -39886, -85571, -72422, 97744, 51673, 20387, -46914, 57642, 93781, -93804, -97911, 59286, -72867, 62264, 2646, -82024, 78078, 6691, -7053, -5668, 10304, -10037, -59808, -65217, 20529, -95758, 15823, -23690, 24410, -6530, -55604, 96267, 21894, 17948, 33227, 60716, -72631, 7369, -8367, 88038, -51808, 54311, -56069, tail", "row-751": "head, -20083, -92335, -26382, 78697, 41895, -98487, 72474, 54071, -69896, 35847, 14902, 2518, 6092, -14991, 73397, -29222, -28919, 51832, 25376, 36285, 39114, -56919, 4930, 10947, -57454, -70008, 3125, 93799, 38755, 83493, -45904, -98462, -51673, 44496, 66180, -42308, 74198, 76505, 4150, 44971, 47776, -58231, -31929, -73068, -40661, -41805, -17257, 3910, -28524, -74881, -58242, 29972, 49458, 52425, -57447, 40164, 20081, -54343, -23288, -94661, 94820, 1014, 53199, -86842, tail", "row-752": "head, -74597, 93405, 77854, 71568, -81518, 26222, -35278, -58486, 24583, 49006, -6147, 46476, -74317, -73977, -69683, -31472, 66999, 67437, 91041, -29272, -59844, 48131, -29267, 8645, -49483, 57534, -67113, 98728, 64490, -29926, -20101, 29409, -47388, -20292, 92670, 74738, -98315, 51977, 1712, -29619, -35297, -29626, 41822, -59933, -83414, -20984, 83077, 9452, -21746, -56717, 93246, 4948, 97943, -25369, -46612, 17740, 45925, -40588, -64327, 83173, -54554, -21281, -88353, 41283, tail", "row-753": "head, -95861, 76563, -56797, 70776, -67479, 1401, 85077, 35853, 37349, -52073, -53973, 15676, -83459, 38192, 1810, -26578, 35115, 29942, -47760, -40469, -7169, -81531, 33332, 84410, 79357, 897, -53266, 30280, -35071, -17134, 1789, 
-89159, 97947, 67259, -37094, 80901, 51637, -73009, 18655, -13284, -1730, -57977, -96112, 39532, 55640, 64994, -98551, 34097, 39955, 48513, 88932, -65049, 33933, -20054, -16047, 8745, -2441, -3903, -60701, -98206, -28775, -26924, 4602, 81602, tail", "row-754": "head, -41014, 78136, 51376, 21437, 74240, 5095, -75189, -58090, -51341, -38719, -41404, 90177, 19327, -66141, 60683, -66659, 69215, -39447, -27997, 67497, 51339, 848, -90637, 41346, 90622, -46482, 71294, 4234, -35883, 24310, -84709, -98016, 87331, -43205, 61121, -56063, 9955, -71083, -90394, -64779, -29945, 30861, -88543, 41685, 16349, -44202, 81014, -70107, 51487, -99971, 6093, 24392, 95757, -51814, 20486, -95638, -13476, -69661, 40382, -18798, -68954, -47164, 67839, -68435, tail", "row-755": "head, -78887, -58197, 93650, 2726, 51539, 75362, -37267, 29297, -3632, -83621, -69371, -79919, 30911, 48336, -1382, -47858, 93922, 75283, 2289, 52058, -70281, -26871, -55170, -32149, 99455, 1701, 18832, -37511, 88113, -91852, 61133, 96100, 33544, 97973, 80225, -34359, 72954, 4124, -72832, 90692, 21504, 18291, 40775, 71293, -26730, -24665, 28657, -98877, -81851, 21479, -88572, 62566, 61710, 45444, -5528, -56462, -92409, -89090, 84250, 16844, -56185, -16263, -7880, -3253, tail", "row-756": "head, -8601, -30668, 19036, 70821, 37166, 57347, -99251, -48565, 12838, -70370, -34366, -66548, -79497, -22535, 8117, 67984, -16355, 91368, -84137, 33795, 70993, -51153, -17961, 70411, 91661, -1353, -76494, 55943, -89609, -78694, -19340, -3623, 38030, 9774, -93408, 79759, -46812, -94050, 23792, -8891, 9420, 7742, 74086, -79003, -70939, -91811, -99635, 24711, -92023, 19845, 77712, 9341, 75664, 95452, 84074, 42821, -22694, -24851, -98487, 30154, -3411, 69818, -46391, 96722, tail", "row-757": "head, -5688, 34347, -25766, 25200, -44298, -83637, -94194, -4781, -75593, 32767, 53843, 49659, -68814, 42214, 43168, -79737, -73464, 73777, 1157, -75754, -28023, -16891, -21786, -18035, -27867, 98248, 37100, 54527, 43800, 55149, -96001, -74001, 86116, 47013, 29686, 56467, -76945, -13335, 75598, 19973, 3438, -39537, 4380, 11562, 77986, 80090, 30414, -23234, 19669, -82166, 795, -1688, -78191, 68890, 78460, -73945, 21716, 86405, -63455, 44130, 31900, -17753, -77637, 53369, tail", "row-758": "head, 44678, -2772, -34538, 11924, -67595, 80136, -73795, 7452, 16471, -61223, 13694, -24800, 85524, -62422, 19006, 17783, -56374, -40862, -85748, -13153, 78585, 16674, -52646, -35845, -53656, -46868, 80977, 29867, -91990, 21683, -41698, 34563, 34287, -68551, 6006, 6874, -37100, 45930, 18060, -20743, 80286, -73082, -97185, 97286, 72022, -46895, 80900, 34753, -80713, 32853, 96566, 75746, -41546, -14623, 54341, 81642, 22891, -19909, 50618, 99552, 49349, 17911, -44536, 25375, tail", "row-759": "head, 70355, 13206, -8450, 78652, -70857, -97675, -58038, 96875, -38252, 24237, -69392, -17299, -77448, 61671, 12735, 79014, 15977, -28010, 56176, -94500, 15508, -30789, -91859, 81079, 68607, 47453, 46417, 55991, 64196, 77912, 15273, -54991, -97685, -12246, 84397, 64159, -58292, -40711, -55326, 11250, -94575, -82381, 78357, 97376, -45394, 30142, 5264, 85789, 17499, -56072, -24951, -35034, 20592, -74192, -12610, 99935, -56799, 91386, -18580, 13682, -35030, -24614, -60274, 59626, tail", "row-760": "head, -24528, 19262, 30164, 19736, 34166, -45303, 86904, -83086, 37553, 17308, 6230, -50119, -51914, 53328, 13685, 58615, 68347, -32134, 42969, 9434, 30724, 45401, -46391, -60094, -7797, -46393, -49295, 76124, -9760, -95861, -12571, 82426, -87836, 37854, 5785, 76765, -53334, -2218, 31143, 7615, 47038, 3642, 
14, 5876, 21021, -41142, -99666, -61684, -39514, -44165, 87127, 10217, -96924, 71224, -42618, -78476, 34415, 35614, -7448, -6814, -11476, -76325, -50228, -94947, tail", "row-761": "head, -21784, -94805, -58892, 51377, -70953, -32317, 91157, -84337, 40139, -34098, -93356, -81231, -48297, 67266, 7212, -42258, -67469, 37996, -84654, 4029, -96836, 16723, -38263, -25197, 53208, 82422, -28967, 61286, 94930, 44434, -45999, -43756, -36158, -26177, 20332, 92322, -65657, -90712, -36384, 1544, 68848, 91429, 40140, -70081, -19290, 80611, 80784, -41963, 99067, -46876, -28507, 15602, 57377, 15437, -77377, 41011, -79378, 93093, -92938, -32310, -31438, -49246, -31481, 84752, tail", "row-762": "head, 27538, 45149, 85600, 57673, -81247, -12031, -99865, 32328, 85208, 39509, -77548, 54223, -46763, 10367, -51918, 1499, -62368, 69629, 34652, -69675, 34280, -90398, 40645, -72562, 16232, 45066, -96315, -95598, -10477, 10449, -25496, 52640, -48515, 88839, -94724, 70700, -13089, 81916, 48521, -29071, -8046, -12368, -3501, -72330, 73277, -42443, 12350, -71668, 85492, -82654, 99008, -15195, 72544, -3448, -95361, 71568, 24445, 19251, -18297, -60476, -98976, 44307, 64831, 89472, tail", "row-763": "head, 60986, 2616, -40732, 78750, -89751, 22427, -43957, 66, -25799, 1668, -40771, -32999, -20020, -66002, 14543, -59965, 29118, -50839, 76656, -50240, 37503, 49255, -38264, 58317, 34337, 60414, -9183, -85953, 96141, 84537, 2280, 29966, -82980, -69157, 56208, -58879, -61035, 56760, -51412, 93386, -92105, -75187, 74480, -99784, 1539, 13690, 28611, -15942, 13995, 49563, -79400, -52736, -7748, -88281, -87882, -19853, 69563, -81852, 45135, 8173, -82481, -4427, -6610, 98889, tail", "row-764": "head, 25799, 94663, 63944, 54550, 32144, -54646, 66411, 18489, 52888, -28478, 17230, 85714, -32819, 94352, 65405, -11629, -4426, -99517, -3019, -46557, -8829, 38102, 32653, -72692, -10119, 9505, 29574, -54105, 40174, 66604, -51455, 26729, 47919, 61765, 19501, 51759, 4952, -90722, 4453, -70042, -17051, -3774, 63660, -53858, -98584, 23558, -71132, -65534, -52178, 73223, 37688, 19464, -88273, 54184, -23684, -34790, 83504, -41660, 62110, -3774, 93548, 17789, 49855, -7757, tail", "row-765": "head, 73956, -81128, -95094, -97871, -23732, 85298, -20699, -6975, -88176, 73944, -26474, -18034, -43521, 60662, 15113, 61599, 11821, 28673, 92863, 32570, 58359, -17782, 87270, 71575, 20286, -87547, -25508, -53856, -97952, 7242, -28425, 35323, -88362, -19167, -37484, -98536, 65102, -44608, -17584, 14581, -47953, -25830, 72092, 12886, 70772, 48178, 67891, -37449, -83824, -9769, 40329, 45879, -79102, -38536, 70818, -13695, 79190, -83773, 21912, -22429, 5346, 93843, -68012, -52632, tail", "row-766": "head, -33343, 91595, -69287, 5514, 14204, -61514, 80937, -94996, -28857, 21255, -50533, -13755, -43971, -55748, 60314, 70096, 54815, 90553, -86315, -37537, -57046, -29435, -22033, -13309, 17551, -78539, 20922, -60050, 32066, 77673, -43030, -95438, -23547, -2453, 79128, 63470, 49960, 630, 13109, -90853, -84550, -11356, -45783, -57712, -10732, -2632, 9013, 76729, 7841, -33835, -31730, 95745, -45981, -31858, -4791, -66642, -17403, 323, -2220, -28274, 67850, -89092, 96988, -73955, tail", "row-767": "head, 91789, 10112, 96494, -58980, 26371, -25308, -38569, 71116, 85767, 51529, -94544, 38889, -9464, -76754, 20502, -42063, -42466, 77300, -18062, 22423, 80078, 70669, -8814, -27403, 73012, -22940, 4037, -96188, 85331, 62200, -847, 7589, 64469, -71074, 44483, 73943, 62965, -63841, -31916, 12708, -58775, 17532, 57197, -17071, -46617, 96930, -59547, -97313, 57431, 74057, 
-89528, 61829, 19860, -48425, -37963, 22601, -73007, 94399, -71535, 74635, 33449, -74839, -18934, 41865, tail", "row-768": "head, 76781, -14156, -87723, 62190, -71492, 73681, -2808, 86574, -23421, -72727, -64229, 94827, -61657, 92788, 32153, 64626, 21975, 68261, 88832, -95523, 22216, -29081, -69627, -1381, -95615, 46331, -66213, 33214, 95071, 34504, -26192, 41382, 9286, 64565, 84120, 2185, -16338, -92948, -72486, -27108, 62232, -76889, 90767, -93840, -23213, -44664, 26847, 47780, -32227, 37452, 80129, -82295, 26818, 93796, 57081, -10110, 22842, -75948, -36444, 55771, -19763, 7302, 33242, -31264, tail", "row-769": "head, -63243, -64158, -44350, 97986, 92539, 72912, 75322, -36829, -16774, 36801, 94590, -16238, 83711, 23230, 76408, -38518, -22583, 24023, -31249, 47644, 41641, 6641, 63161, -22090, 99883, 3126, -37971, 21090, 91831, -12553, -51903, -3471, 58759, 97940, -32334, 39720, -95144, -59292, -63383, -1415, -46650, 15974, -66454, 43490, 85184, -80136, -34131, 71687, 52434, -57986, 60630, -77697, -95533, -31082, 14221, 97648, 89491, 57922, 97730, -7851, 82185, 16454, 25146, 52217, tail", "row-770": "head, 42763, 12857, -9566, 4311, -77336, -7612, -44589, -38466, -46006, -78880, 94753, -29461, -86504, -97881, 85301, -95100, -81346, 69248, -87387, 10923, 98760, 70245, 98054, -7894, 72699, 31731, -94599, -29348, 32021, -93895, -89361, -55338, 84287, -4426, 95281, 16796, -59146, -55638, 92212, 68982, -88192, -90200, 40897, 44191, 12266, 44240, 60792, 46020, -19225, -95885, -42403, 26281, 50735, 38669, -97093, -58568, 29128, 45599, 69228, -57623, 38979, 36278, 35328, -60597, tail", "row-771": "head, -25090, -60994, -36839, -26070, 76518, -71243, 8378, 18817, -12470, 16064, 17297, 51730, -23871, -40023, 77415, -1952, -16084, -52850, 38079, 62919, -22084, 6832, 15079, 95219, 62890, 76782, -71629, 90069, 48842, 70308, -90132, 52539, 79594, 89179, 20417, 38271, 74149, 92151, 62573, -43413, 94802, 53281, -1547, 6515, 97439, -60901, -64606, 37283, 15342, -91225, -60479, 30472, 76914, 39585, -31469, -34461, -86838, -48808, -1260, 47164, 97204, -87545, 54584, 95726, tail", "row-772": "head, -6292, 12290, -58063, 65967, -89173, 27418, 80795, 91024, -48823, -6940, 56501, -57337, 69959, -45465, -60138, 15576, -70865, 84307, -82699, 11337, 62575, 42700, 73667, 20543, -83754, 63482, 10645, 24881, -91725, 85671, 9357, -13753, 59778, -64517, -24215, 43155, 98805, -26154, -43533, -52773, -65606, 43746, -49057, 5740, 87786, -26184, 65975, 64219, 89187, 90828, 72348, -16216, -69821, -72415, -60324, -12111, -3888, -84279, -86386, -41573, 36514, 44669, 21665, -48272, tail", "row-773": "head, -68175, 60379, -62265, -56691, -49695, -15742, 2364, 12163, -57651, 38832, -5156, -3649, -60036, 83610, -64503, 75295, 98937, -97150, 41988, 29032, -41895, 57396, 27644, -96378, -22587, 98204, -98561, 26429, -8486, 84000, -89725, 2546, -65112, -8504, -87879, 99997, 8528, -75482, -97678, 21182, -54429, -58482, -87946, -47289, -90958, -21346, -58846, -40054, -96429, 57854, 78355, 56576, 57415, -20057, 95277, -94023, 89943, 77035, -12516, -56078, -53539, -44896, -50133, -80831, tail", "row-774": "head, 35318, 92300, -12082, 99021, 40085, -43208, 64207, 90962, -79675, -37923, -77986, -10744, -60888, 5770, -57640, 28190, -18167, 55207, 61474, 45646, -3408, -32153, 62937, 39855, -96579, -93526, -23355, -31971, -40240, 66813, 61336, -80791, -61051, -44025, 8716, -86637, -485, -60322, 18533, 28532, -76523, -86381, -164, 91121, 8309, 93342, 3219, 14764, 29668, 83043, 42328, 49093, 7053, -84001, 34100, 88417, 62874, 51755, 61159, 
-35773, -49634, 15733, 6853, -46007, tail", "row-775": "head, -7176, 98535, -7585, 68590, 10443, -8397, -95662, -12933, -18556, 75626, -3807, -16760, 43686, -92138, -41398, -91752, 93382, 2669, -86823, -75985, -14410, 12196, 23855, -43351, -77140, -31549, -74489, -90684, 18363, 44302, 85875, 86061, -51566, 58108, 82372, -89759, -78131, -11270, 60119, 77113, 68693, 17727, -73919, 12530, -46142, -39641, -7905, 82387, 10680, 21733, 98632, 5646, -51442, -89432, 16096, 16721, 24744, -57580, 38810, -83540, 76458, 91567, 60881, 33039, tail", "row-776": "head, 73594, -56832, 79323, 84747, 45007, -35157, -40524, -65879, 99140, -3225, 71015, 16236, 92224, -16229, 3305, 18611, -14480, -96019, 56458, -32599, -92063, -37874, 15820, -9531, -93731, -64395, -60762, -72981, -59211, 90051, -33681, -48198, -30465, -32390, -35939, -27577, 76175, 9361, 3506, 17548, 9156, 34587, -22948, 57141, -25299, -64859, 8586, -74263, 30275, -58592, 53905, -75547, -48374, -23092, 34233, -49880, 83284, -12295, 17211, -24285, 59249, -5416, 85066, 47228, tail", "row-777": "head, -13936, 2769, -91233, -39260, -26620, -61777, 3908, -37500, -56192, 57308, 339, -15904, -47589, -27997, -88096, -3176, -67268, 44888, 76367, 8568, -52373, -11498, -60110, 92713, -26682, 20159, -65475, -67683, -84551, -52080, -39493, 49638, -42254, 65311, -9313, 52582, 61779, 93873, -64492, -94190, 39197, -22156, 97515, 53800, 27641, 43689, 9565, -81002, -11685, -17238, 65218, 24221, -45177, -93244, 66007, -59977, 34178, 86233, 34209, 4333, -45815, -66433, -89096, -51572, tail", "row-778": "head, -50974, -23488, 55290, -52908, -87969, 86014, 63273, -34365, 82083, -33035, 24092, 90304, -89269, -30837, -97989, -50701, -58756, -24681, 62825, -31903, 71431, 77410, -47887, 44762, -88874, -63014, -89342, -26093, -7394, 4698, -41774, 46886, -76829, -60568, 83969, -55228, 35235, 48462, -26410, 86196, 17736, 67659, -98202, 66941, 78116, 7712, -67760, 48068, 84091, 33858, 42938, -62730, -64517, -69619, 70645, -3404, -91703, 7906, -788, -66012, -8374, 4609, 74559, -41484, tail", "row-779": "head, -26982, -65158, 40099, 37852, 92074, -9118, -37735, -41282, 98322, 79489, -33515, 34091, 16115, -39101, -36996, -85260, 61936, -9611, -13487, 22792, -330, 84085, -30527, 70027, -99102, -53512, 43692, -10087, 58744, 52386, 1579, 29754, -99976, 14681, 57776, 76656, 73430, 22939, -10453, 84523, 73039, 22367, -56936, -39301, 56608, 65767, 97258, -21340, -96889, 42080, -99008, -49575, 86876, -49659, 74878, -77975, 47985, -73937, -16925, 31064, -83938, -78951, 72251, -82798, tail", "row-780": "head, 86725, -68175, -98145, 35549, -82775, 23115, 7896, -2114, 41395, -19369, -53710, -39876, 98175, 81845, -88246, 28787, -89554, -76570, -23359, -34133, 79510, 57681, -78616, -4032, 48352, -86406, 8734, -15118, 81472, 50246, 81634, 9901, 33470, -99224, 70127, 22170, -21060, 39388, 83031, 69280, -11146, 39479, -14649, 52683, -87462, 90391, -2382, -10830, -43206, -22045, 37740, 80368, -94554, -19761, 125, 25874, 80105, 76452, -18306, -88626, -39725, -7707, -62578, -93201, tail", "row-781": "head, -24157, 74142, 15280, -70464, -72554, 10136, 83800, -11668, 22154, 42493, 70859, -33807, 33747, 62549, -93055, -88071, 22810, 94509, -44631, -38704, -983, 29129, -2591, 60614, 80042, 2864, -93186, 58677, 55847, -63132, 64746, 8428, -4466, -3304, 61634, -57125, 46170, -64207, 54890, 20075, 9835, 77206, -31373, -20305, -19585, -93975, -82253, 95862, 21326, -92592, -31443, 42590, -31050, 84541, 18959, -97420, 44713, 1679, -82926, 44837, -2963, 60548, 41459, 28539, tail", "row-782": "head, 61956, 
-20390, -52476, 88576, -74685, 94356, -64417, 26518, 27060, 3787, 84928, 59291, -42684, -59623, -24181, 45198, -81175, 9525, -47720, -28726, -95019, -70389, 97857, -91757, -84521, -64977, -1481, 11248, -64596, -57164, 35972, -91963, 62248, -4526, -25963, 47750, -5707, -40128, -12223, -84986, 5885, -4450, 45023, 88830, -70004, -34124, 67073, -12067, 73731, -28294, -60352, 75537, -25413, 67613, 69769, -14683, 4417, -53106, 4973, 26533, 62801, 2902, 59219, 78752, tail", "row-783": "head, -23517, -65336, 77127, -29608, 78044, -68113, -96406, -17392, 55675, -19931, -24900, -2313, -34442, -60166, 24725, 20194, -62520, 58953, -87146, -97635, 77212, -86716, -73033, 95444, -58028, 47670, -35465, 32328, -28513, -84499, 67963, -1424, -96449, 47088, 59114, 17984, -74363, 91192, 70506, 3478, 79870, 80765, 4306, -41310, -45120, -4659, 76135, 22791, -16431, -2082, 19182, -3265, 32255, -70381, 70419, -9496, 82004, 11324, 16946, -38393, 36559, 37147, 92035, -7274, tail", "row-784": "head, -22595, 61243, -76432, 47694, -86663, -98821, -567, -90479, -51307, 28182, -32084, -7576, 30289, -85976, 54417, 43360, -32663, -54327, 14761, 75312, 14561, 79642, 31377, 39032, 86316, 39666, 4108, -80286, 89717, -82940, -20323, -22025, -57052, 51801, -35192, 6716, 52789, -89533, 55134, 39375, -3306, 50507, 25740, 99795, 53852, 10622, 55461, -75197, 30048, -34944, 30956, 44401, -78406, -53714, 78665, 31749, -4166, 29060, 4878, 94755, -33730, 81940, 23580, -3862, tail", "row-785": "head, -24189, 7596, 4160, -38843, -41974, 83697, 49972, -56129, -97995, 76152, -55849, -75664, -67417, -76900, 65399, -68004, 44083, 27408, -43366, -34218, 14959, -91355, -91883, -32295, -35216, 41701, -14148, 23707, -90909, 76670, 12152, 21294, -32881, 9246, 89989, 64373, 5819, -47191, 30080, -8214, 30044, -50696, -48963, 51583, 82835, -37780, -1822, 89708, -860, -6851, -54866, 30011, 44353, -55574, -10697, 40779, 88382, 17061, -35819, -86378, -25362, -83471, 47386, -15679, tail", "row-786": "head, 98103, 33782, -11002, 41252, -57440, 26717, -72709, 99941, 39618, 20170, -91401, -77939, -90935, 81948, 54502, 78842, 40313, -73103, 33354, 52756, -72952, 2133, -8443, 44622, -65818, -7378, -92569, 65298, -99987, 20706, -12170, -56837, 3136, -91719, -73667, 4214, -83098, 16772, 99028, -28607, 8532, -65144, 28690, 73465, 23966, -99205, -37614, -45312, -62317, 3095, -90529, -68347, -95792, 3775, -15394, -32698, 33577, 7153, 45688, 2009, 96881, -46982, -1231, 87450, tail", "row-787": "head, -20184, 34456, 91835, 57618, 39746, 35628, 71807, -99581, 21025, -50322, 12195, -42262, -71730, -10380, -58433, 75746, -89574, -18222, 65848, 98082, 76660, 42379, -9145, -17441, -82622, -204, 65901, -53059, -65049, 28633, 46697, 3189, 85356, -46852, -5085, 38209, -37850, -39630, 2360, 48630, 1055, 21199, -33156, -97155, 70926, 10374, -53660, -25629, -71110, 16356, 6970, 99273, -21575, 80358, 65026, 96991, 71316, -93203, 16925, -62569, -44493, 81280, 95009, -64566, tail", "row-788": "head, -10464, -56029, -21962, -21914, -23262, 62235, 83156, -36388, -93167, 21878, 12090, 655, 6632, 22613, -81359, 60218, 9397, 19621, -94913, 93016, -60167, 15654, -29865, -58200, -5998, -60841, -38304, 8750, 61958, 92657, 92588, -98235, 15176, 17391, -26530, 5241, 19332, -37844, 47886, 36555, 98373, 87953, -79873, -97940, 67686, -78370, -8534, -22387, -9159, 3904, 16277, -99596, -36153, 2724, 8568, 92500, 85539, 79644, 49677, 90914, 77819, 28891, -51372, 73828, tail", "row-789": "head, -74112, 2811, 43997, 61580, -83606, 69869, 68793, -50811, -72566, -13481, -85447, -14059, 97275, 
-22527, -44274, -34221, 94050, -1981, -86450, 67741, 61894, 87879, -43170, 43899, -51241, -74251, 36569, -33533, -39716, 33024, -59684, -43090, 86482, 15305, -69174, -5032, 15762, 67058, -78200, 4239, -54405, 30379, 89196, 26829, -14726, -10986, 21589, -61683, -54209, -64826, -68109, 26900, 24438, 87416, 81850, -71799, 54057, -98406, -21049, 59579, 6674, 23390, 86285, 15462, tail", "row-790": "head, -79360, -38055, 21666, 74493, -95297, -37413, -91419, 6330, -49453, -86172, 26983, 14502, 10356, -80984, -65722, -65381, 42336, 18445, 22115, 20209, -41439, 8952, 42226, 4196, -5965, 71953, -72807, 53151, 2663, -68472, -45141, 39296, 56673, 82545, -95675, -29494, 51864, 18959, -46681, -90956, -56145, -16164, -41303, 43386, -71041, 36942, 92071, 38846, 83394, -3898, -23329, 69594, -64722, -67540, 90295, 95900, 76421, -53858, -27112, -15887, 18749, -89395, 46275, 61489, tail", "row-791": "head, -11150, -86434, -19528, 25968, -93408, -23030, 13300, -55189, -46507, -11094, 54843, 25952, 23432, -69536, 55441, 86608, -44288, -13458, 69401, -76685, -69199, -11374, -65726, 62242, -38761, -22426, -92688, -39912, 98768, -74136, -83564, -42383, 93396, -47847, -78192, -76973, 99131, 93847, 14474, 12339, -68538, 12108, 83798, -74724, 10176, -60445, -26996, 49162, -35984, 51730, 73565, 71306, 93703, -67194, 37565, 42188, 45610, 24405, -99389, 88439, 77973, -66799, 14907, 99321, tail", "row-792": "head, 76104, -91632, -98075, 32694, 62721, -40017, -37670, -4594, -38873, -55560, -27089, 33509, -65154, -8026, 38195, 24262, -94830, -58767, 47302, 93271, -15006, -33665, 83447, -51667, 34359, -45981, -39546, 96459, -26989, -10734, 46533, -2430, 81810, -46716, -79202, -1251, -59588, -76629, 63832, 13019, 43624, 44940, -46355, 87284, 62630, -9525, -38793, -61909, 2066, 32256, 50772, 24756, -67169, 26247, 49180, 57168, 64399, -80090, -67911, -59575, -30715, -5740, -52181, -45350, tail", "row-793": "head, -33346, -58400, -33225, -93356, -36895, 98658, -64197, 84001, 80784, -49637, -9229, -8216, 18210, -17307, 15107, 11012, 40741, 44709, 18000, -62513, 67331, -25052, -20313, 90292, 32540, 56827, -93064, -20101, 11771, 2207, -19055, -7258, -75194, 13252, -63483, 23995, 95293, -60395, -98671, -74717, -80276, -29421, -38589, 83656, 73753, 8590, 58094, 3089, -12275, 84523, 80479, -12100, -97592, -80988, 57679, -36686, 13235, 64989, -80280, -56721, 20452, -96460, 56786, 77508, tail", "row-794": "head, -16962, 85197, 63830, 68989, -56965, -86538, 23950, 24433, 47409, -53370, -68561, -67802, -68778, -11697, 76791, 78278, 4537, 63837, -28107, -1841, 27465, 61535, -1455, 78097, -42664, -8890, -4475, 61432, 21218, 59445, -24461, -17421, 93037, -5992, -88100, -13221, 82678, 72417, 65404, -71116, -73874, -78137, 53740, -15109, 61912, -64453, -36752, -93396, 12141, -36130, -48661, 36309, -42284, -78654, 43758, -42550, 42249, -61172, -92411, 7597, 66467, 10256, -29169, -3297, tail", "row-795": "head, 81246, 82192, 65685, -99599, 2012, 18255, 4172, -39065, -91545, 71361, 72529, -26203, 64996, -64514, -1345, 84093, 16285, 58066, -38156, 78691, 32226, 90006, 44616, -95106, 73, 68532, -9625, -23108, -88901, -74763, -14412, -46721, -65533, 13232, -72339, -88911, -98405, -84383, 57082, 89518, -64644, -96144, 28225, -11451, -19799, 86732, -5828, -79830, 96462, -69163, 29745, -53475, -16975, -30370, 92387, -29798, -24693, 23881, -53109, -49633, -95434, -8773, 98548, -23388, tail", "row-796": "head, -4389, -1156, 62987, 76236, -45833, 51465, -34382, -85199, -97628, -10492, 32748, -47473, 45534, -48903, 33769, 64522, 26791, 45372, 51059, 
47418, -17893, -88459, -74479, -73599, 28077, 34480, 21256, 86547, 8606, -76982, 8191, -88975, 36067, 76545, -87727, 22289, -18642, 89590, 17411, -33327, 51356, 43436, 24912, 63584, -25898, 36169, -66795, 28737, 73987, 13158, 13719, -16589, -79371, 41343, 80956, -35064, -1297, -35256, -44675, 12883, -70360, -36219, -3431, -61561, tail", "row-797": "head, -38207, -28514, 3995, -14046, -79367, -37423, -28350, -92312, -94654, -575, -41719, 54526, -17526, -64278, 26760, 89210, 53932, 94726, -23480, -87229, -92867, 36240, -74617, 27963, 10765, -44344, 71074, 8477, 92232, 39046, -15332, 72065, -14405, 18340, 28152, -80260, 47079, -12261, 63373, -96990, -24966, 7518, -15880, 37744, -30922, 36728, -6661, 57965, -73387, -82207, -2177, 58132, 18383, -9776, -42399, -14711, -68553, -90477, -19058, -5850, 18732, 32617, -40177, 36031, tail", "row-798": "head, -22593, -65565, 64674, 39094, 3741, 57746, 41783, -21794, -19787, -43152, -11738, -30622, 20102, 76557, -74993, 1869, 20032, 8476, -53220, -2650, 95027, -68724, -55164, 17212, 35648, -69588, 93659, 41778, -81420, 77147, 60458, 66343, -53383, -70946, 30088, -1976, 8292, 85035, 12880, 90113, 96954, -2041, 8078, 622, 86119, -64596, 1781, -51688, -51690, 18415, -84151, 31777, -8641, -64805, -50627, 73816, 20805, 95737, -49700, 13654, -49240, 79707, 81133, 4512, tail", "row-799": "head, -44842, 96463, 88188, -19210, 78567, 9092, -40896, -15533, 29821, 75422, -93430, -41078, 18011, 56809, -39588, -39996, -47214, 93376, -98133, 23660, -66024, -58499, 75608, 22117, -83272, 71032, -98209, -68809, 89254, 58875, 3196, -22412, -85230, -45551, -93740, -69900, -79569, -79214, 96068, -83887, 55034, -52723, 87804, -8390, -62963, 37545, 96633, -41997, 55965, -31898, -17771, -56113, 28341, -51134, -79833, -56228, -30547, 50504, 88793, 76869, 18290, 77791, -16977, 32567, tail", "row-800": "head, 15052, -20091, -67627, -83958, -93519, 2197, -9368, -12618, 5702, -53072, -58550, -71651, 18107, -39496, 2941, 23038, -69104, 37134, 37300, 63762, -98274, -55875, -91542, -37262, 23608, 78242, -46058, 3455, -4296, 14263, -6545, -29195, -25568, 72137, -96060, 35073, -16087, 29406, -62723, 37822, 33318, -16361, -30231, -42795, -42163, 1606, -31758, -68621, 17843, -96113, 61778, 70473, 9554, 153, 80960, 53806, 58649, 41049, 9864, 82667, -34661, 87834, -14818, 32204, tail", "row-801": "head, -60073, 91374, 17622, -64656, -99922, 70137, 8389, 2589, -20913, -32513, 754, -46648, -57340, -76798, 75971, -43049, -39471, 58046, -28615, 95930, -58332, 46822, 15623, 45056, 50402, 3637, -10830, 53681, -68028, -37487, -55620, 86219, 81900, 5041, -1798, 14809, 87487, 33294, 22051, -51627, 88058, 76736, -14746, 88128, 22804, -48589, 42301, -41578, -30925, 77849, 13288, 62653, -83534, 16364, -7686, 31754, -82063, -10479, -64938, 74508, -10457, 46679, -14552, 77155, tail", "row-802": "head, -9467, 61560, 71816, 38943, -58290, 21590, -32248, 64943, 36810, -18256, 22431, -23377, -32087, -37142, 10867, -78815, -25615, -41332, 84148, 49405, -6093, -2759, 63802, 50011, -29068, 24133, 82163, -89896, 11233, 13362, 36889, 54034, -67037, 11239, 47171, -35461, -78731, 85901, 61044, -8501, 86398, -95571, 74299, 44620, -41128, 22832, 9053, -7062, 18120, -12803, -40076, 87124, 9489, 81098, 43091, -37288, 84963, 17588, 35331, -62397, -2515, -48203, 12099, -88973, tail", "row-803": "head, 94533, 96256, 10394, 11012, -93204, 20228, 59171, 39003, 540, 5870, -66936, 79807, -32478, 37094, -38444, 78760, -7275, 32141, 48953, -5032, -8097, 63954, -67854, -72649, -52606, 31009, 37380, 53936, 50066, 67698, -57917, 
46790, 6443, 35046, 6857, 27587, -12465, 24040, 53148, -71825, 81875, -22302, -24406, -94460, 35600, 19107, -34049, -34039, 53025, 36111, 30641, 7292, -34725, 99521, -84890, -31982, 82948, 9087, -26569, -69436, -95285, -87846, 68741, 13747, tail", "row-804": "head, 63755, -60177, -20592, 92737, 81148, -92658, 62387, 79785, 92978, -67221, -54100, -79373, 79519, -5368, 33158, -84978, 66245, 32143, -13320, -38268, -70105, -68855, 890, -71008, 78194, -77959, -82035, 67048, 89106, 91391, -26826, 39941, -20649, 13818, 12638, -3097, -3245, 73980, 3315, -85577, -14389, -16005, 42352, -21103, -87589, 96770, 47395, -10130, 10986, 13512, 24152, 52168, -73441, -85001, 10685, -19344, -75070, 2532, 19737, -91240, -8416, -77903, -4167, 40281, tail", "row-805": "head, -66295, -19675, 65738, 51713, -92482, 51314, 78769, 99207, 4020, -10092, -42173, -72804, 98565, -47868, 36651, 98168, -29600, 23720, -58119, -66539, 63571, 67916, -10883, -81458, -19370, 19584, 97978, 50721, 365, -85515, -36596, 27413, -21523, 21002, 44814, -67908, -34330, 98006, 21915, -29104, 19980, -53077, 91480, -28276, -66611, 88315, 12532, -84278, 95204, 74424, 79229, -58889, 28997, -10106, 84953, -4137, -2272, -22417, -48626, -74333, -65807, -78565, 20186, -14842, tail", "row-806": "head, 16003, -76408, -57054, 42027, 24862, -80783, -10636, -95344, 84158, -59839, -35593, -27539, 63811, -42390, -6751, 84857, 67122, 73520, -44573, 36517, 77449, -93115, 89637, 13992, -4746, -87159, -95043, 6651, -71977, -72902, 69849, 12774, 8328, 43730, 66389, 42581, -20930, -78966, -34730, -14102, -527, 27227, -78116, -5245, 22998, -60463, -48590, 31393, -9150, -63476, -27847, -8745, -43875, 58175, -74811, 49290, 82712, 57385, 64771, 64901, 66296, -8581, -15282, -1611, tail", "row-807": "head, -93278, 23686, 95184, 77907, 82035, -85508, -16791, -1215, 75694, -12160, 7260, 96822, -47444, 55183, -53945, 33212, -36627, 86725, -56208, -70960, -80983, 36819, -80384, 42499, 7756, -8666, -99077, -31181, -16757, -27038, -40949, -12102, -14496, -4631, 55023, -27313, -42476, 91347, -20914, -90150, -6789, -18099, -91291, 46238, 79379, -3681, 18640, 46294, -73295, -48936, 80451, -30939, -2121, -76174, -45666, 5588, 41412, 43675, -86287, 87494, -17218, -76221, -62160, 40984, tail", "row-808": "head, -98509, 86823, 75155, -23181, 15239, 68018, -2984, 15453, -69836, 6100, -30575, -50446, 34932, -46791, 39938, 71723, -6944, -17360, -18996, 75822, 66530, -11688, -2107, 46374, 47852, 28498, 40644, -37976, -82087, -87102, -44582, 34980, 96800, -79190, -63705, 15159, -40957, -62663, 75076, 8956, -58820, -39173, -10306, 67992, 88782, 15437, 48521, -76287, -56062, 21795, 63054, 6910, -40638, -23727, 10427, -67874, 70052, 16854, 10282, -16738, 70626, -66147, -12808, 9468, tail", "row-809": "head, -25006, -50740, -31807, -49897, 45580, -12985, 19973, -81350, -24654, -43605, -86431, -77673, -8915, -54540, -20248, 94399, 67133, 5620, -20213, -34197, -93320, 83892, -28737, -25308, -33477, -66356, 35275, 552, 63975, 52392, 67252, -10446, -11691, 53264, 67424, 56951, 58254, 93598, -67873, 17671, -88570, -34054, 28687, 29172, 94996, 22236, 55467, 51683, 30907, -41486, 53026, -18019, -35132, -18008, 36319, 72984, -75662, 66395, -29385, -45854, 60143, -7357, 18836, -92104, tail", "row-810": "head, -40830, 44756, -96250, -25332, 59206, -35464, 21914, -35330, 31692, -40301, 69304, 15854, 16226, 10166, 81316, 7161, 45656, 95685, -31324, -9605, 69151, 96486, -9061, -61334, 70546, 66658, 91017, 805, -32690, -10102, -67984, -8796, -97997, -67040, -72169, 74480, 94788, -39614, -56699, 
-99898, -5255, -63368, -21785, 93105, -66140, 93125, -70783, 50918, 29286, -51081, -67369, 60642, 83070, 30234, -37970, -39019, -31164, 68794, -92227, -85461, -87502, 31462, -97771, -70755, tail", "row-811": "head, 58289, 10680, -63759, 28235, 6159, -92998, 36254, 18889, -57088, -67244, 20270, -64573, 8297, 31562, 83016, -10858, -17621, 69803, -58323, -17261, -7582, 13016, 84427, -13778, -11545, 8362, -45029, 34841, -78370, -45489, 93454, -99556, -49981, -75211, 74831, 43616, -68836, 48625, -51501, -68876, -60489, 93040, 95234, -39042, -46347, 1582, -35644, -47822, 89512, -77325, 70069, 74302, -45691, 26684, 16674, 9685, -34198, -55579, -47147, 71233, -59523, 50229, 35937, 23207, tail", "row-812": "head, 50171, -33554, -35142, 597, -91439, -85963, -50970, -29858, -90756, 58743, 75543, -25536, 55287, 22483, -22634, 3963, -90110, -13245, 80503, 92725, 55106, -74077, -26966, -89567, -14315, 55998, -34579, -76026, 8826, 9842, -53569, 10731, -77499, 47323, -15727, -29364, 35937, 2854, 9276, 83289, -33921, 4647, -70507, -82735, -94674, 54510, -55356, 25008, -51521, 70776, 33632, 61735, -63152, 59719, 89597, -26585, -71141, -82615, -30325, -80887, 44612, -41300, 39117, 90318, tail", "row-813": "head, 42193, 47347, -97389, 97693, -52995, -56657, -30099, -39151, 21904, 70037, 27972, 32508, -15989, -37711, -18038, 44571, -2394, 70483, -54619, 38259, -72811, -53110, 22950, -31004, 99993, 4962, -47239, -51506, -61868, -17952, -54551, -80574, 6015, -17278, -55734, 40614, 33565, 52861, 58467, 95604, -49669, -22864, 26659, 19970, 571, -48639, -29487, -49084, 38530, -80292, -41864, 69793, 51287, 34474, -98429, 36012, 54968, -47669, 35737, -38965, -90749, -6243, 50553, 14977, tail", "row-814": "head, 95069, -32047, -92583, 31869, 79061, 40981, 81104, 56900, -52995, -14409, 6920, 75565, 18402, 44698, -79466, 17342, 30089, -25496, -30186, -21585, -67674, 29168, 41447, -66558, -16339, -16459, -25134, -55567, -37570, 78986, 2450, -18848, -97912, -45687, -22839, 5419, 8897, 83730, 59187, -19103, 27220, -97476, -62797, 8948, 42157, -72911, -89292, -76829, -81528, -62559, -46181, -19997, -85617, -12983, -5669, 69063, 56489, 98235, -67886, -52320, -84559, 70219, 97680, 73013, tail", "row-815": "head, 30520, 58364, 68182, 80585, 15782, -77007, -90540, -51233, 680, -8063, -62246, 61221, 11308, -44172, 35185, -82398, -32384, -28846, 3051, -83551, 37668, 45479, 65956, -1359, 65685, 38666, 5582, 27607, 99435, 15817, 50111, 75667, 74268, -44559, 94016, 5968, -4334, -28278, 18570, -5065, -40647, -75020, -3579, 19138, -68138, 35263, -81627, -86583, -54099, -19555, -96692, -31422, -79961, -11401, -90700, -61972, -56775, -59829, 87842, 2770, -28549, 14503, 94801, 11969, tail", "row-816": "head, -91848, -46679, 50823, 74318, 63716, 86075, 85998, -22795, -44862, 3001, 15741, 29040, -58007, 58998, 8514, 32813, -85103, 84824, -70230, -6973, -85158, 75689, 15078, -71424, -16055, 92093, -90482, 86573, -81903, 6001, -47448, 10724, -76374, 94083, -41463, 45383, -22315, -37592, -93955, 55202, -18582, -62131, 34538, -16847, 59384, -75416, 15139, 78228, 5951, 73583, -54956, -31124, -6081, -14763, -3926, -3437, 33213, 7356, 63479, 61397, -37591, -1604, -51980, -95528, tail", "row-817": "head, -5436, -60475, -96339, -82963, 19604, 88621, -40053, 97463, 62611, -5506, -47281, 4559, 29037, -16119, 72011, 64286, 14188, 30290, -4905, -63047, 88205, 71713, 10149, -40158, -40207, -58910, 9107, 20015, 95793, -22017, 60751, 85174, 6564, 5891, -37803, 82917, -51573, -49437, -19315, 95095, 22748, -81568, 61721, 15130, 95180, -18818, 59493, 12900, 
-89457, -97315, 62725, 37186, 94224, 34958, 40897, 70720, 47971, -92325, 34000, -32282, 95383, -58317, 70166, -15981, tail", "row-818": "head, -35232, -82187, 64496, -56172, 41128, 34002, -48693, 5714, 97726, -65601, 12984, 8972, -7652, 41834, -78741, -40037, -36390, -62432, -80380, 8266, 37419, -78618, -602, 2566, 89555, -37337, -66432, 10932, -20328, 59484, 27544, 62846, 64775, -34067, 97958, 25545, 22311, -4482, -33965, -45532, 87983, -3217, -90704, -1040, 23939, -65139, -64339, -37343, 73558, 21164, -99388, 27610, -46077, 84998, 59853, -6774, 79595, 13680, 1547, -54592, 18570, 99667, -66430, -64660, tail", "row-819": "head, 93348, 46646, -9436, -26014, 9744, -41045, 99873, 80919, -43282, 78968, -43470, 38723, -17618, -24701, 63902, -42212, -84321, 62610, 49418, -49317, -40767, 94552, 37349, 38349, 64094, 23616, -52326, -89354, -16707, 6322, -69139, 70641, 37460, -72021, 15815, 40051, -83231, 9750, -93523, 53555, 59917, -83456, -56426, -34476, -5645, 84534, 87300, 84288, 93243, 41597, -6767, -80207, 97812, 13857, 54425, -27393, 21562, -37702, -77662, -25738, 38493, 84702, -35275, 54363, tail", "row-820": "head, 31636, 50602, 67703, -40013, 54886, 73047, -68335, -47488, -41307, 99390, -47539, 96749, 61017, 55533, 23285, -95737, 62035, 84699, 57408, -55658, 58398, -55993, -76994, -7787, 23862, -63343, -96313, -91394, -48578, 51123, 57627, 74850, -92375, 75587, 6417, 1859, -70295, -98329, 22849, 61207, 7877, -12238, -29291, -10882, 28970, -4942, 7357, -9229, -91444, -83723, 39184, 83569, 96442, 60481, 73161, -84590, -52684, 28062, 45039, -47218, 84751, -63712, -53614, -5329, tail", "row-821": "head, 97309, 15113, 26051, 68685, -34531, -6504, 64818, -57625, -12599, 80344, 66425, 26849, -99549, 5463, 49285, -18958, 47837, 71982, 65139, 83098, 48682, -60982, 2751, -95476, 89178, 87243, 24562, 95132, -40675, 75543, -37168, -42561, 38277, 52944, -37631, -29448, -49401, -26044, 87193, 65985, 80963, 10234, -79175, 82311, 98277, -2583, 82687, 36361, 87347, 62657, 18885, 68544, 75568, -13719, 74766, 28624, -93145, -47289, 93001, 79701, -6522, -83628, 45167, -87618, tail", "row-822": "head, 49014, 42826, -28817, -38070, -59383, -56169, -66713, 38174, -46602, 71319, 59015, 24856, -59735, 12668, -67800, 41047, -84731, 74399, 31521, 35866, 61812, -3284, -54174, -16660, -36574, -18102, -26818, 6050, 93389, -65238, 10326, 89294, 69714, -62321, 72696, -655, -18508, 86826, -19494, -64061, 1425, -2787, -58416, 76108, -53274, 11955, -65974, 37707, -9942, 90645, 77502, 67682, -53586, 15980, 74224, 5577, 13867, -46267, -92637, 58210, -80875, -43867, -56636, -53228, tail", "row-823": "head, -57494, 87843, -93389, 6553, 8699, 86768, 29300, 10949, -69285, 14091, -79049, 43700, -5100, 44434, 74989, -89602, 34537, -42358, -89318, -50870, -34135, 94005, 70154, 26078, -51967, 52935, 83911, -73897, -80311, -13778, -80640, -2861, -18910, -10256, 91608, -64817, -43548, -26271, 18784, 70053, -50229, 44777, 29654, 16598, 89886, 23797, 81757, 93901, -27363, 94558, 80255, 25513, -93672, -11403, 70976, -22220, 33077, -39983, 31132, 24058, 38327, 2011, -5354, 10445, tail", "row-824": "head, 4732, 57564, -58262, 89094, 34053, -88741, 45855, 80680, -75953, -66358, 6796, 22757, -83957, -66397, -12343, 15362, -25468, -46318, -72679, 49783, -69710, 59576, 61443, 8556, -2670, 21935, -84177, -78591, 56460, -34813, 3677, 75163, 10368, -34553, -74886, 28816, 2728, -25650, 13643, 44291, -32517, 10133, 66071, 11685, -44378, 90374, 12369, 92086, -14538, 85507, -38747, 28745, -89391, -31677, -65433, 19400, -6489, 53789, 84098, 56776, 
17344, 97488, 12401, 76684, tail", "row-825": "head, -94960, -53220, 27831, -89362, -25731, 88234, 13083, 56488, 16825, 25905, -81775, 86279, -83140, 26886, -6829, 5357, -25797, 63741, 40359, -43198, 62225, -52764, -27576, -73022, -78010, -99572, 74148, 38494, -76454, -50329, 73249, 10534, -57140, -78510, 55752, 47739, -71833, -96781, 8730, -48514, -70586, -77803, -4001, -30553, 88641, 92828, 75193, 40478, -74289, -79582, 34843, 1402, 30663, 82562, -16726, -25811, 50621, -65581, 53380, 42648, 65755, 91540, -12915, -70261, tail", "row-826": "head, -22158, -98964, 28978, -15704, 27648, 11743, -2698, 67345, -59353, -49325, 42936, -96696, -52464, -22056, 19033, -42321, 44035, -59262, -66879, -13088, 8049, -83980, 91373, -77421, -38653, -33073, 6198, -6074, -29106, 14644, -86352, 53084, -44115, -83935, -39902, -64942, -30289, -51966, 33790, 87843, -28317, 34263, 45892, 43259, 22473, -5429, 50914, -11646, -48356, -88456, -54386, 90771, 97464, -51918, -63404, -58385, -10666, 27993, -44984, 97730, 95390, 68383, -2656, -5190, tail", "row-827": "head, 94813, -49692, -80503, 55795, 11242, 82003, -53273, -38970, 52967, -45737, -15680, 9860, 13089, 90763, -67292, -99217, 89419, -9163, 74050, 36825, -69073, 74214, 31349, 62355, -82657, 46484, -72869, 91248, -7353, -61004, -96582, -27437, 71315, 55009, -29098, -69770, -32872, -29362, 32712, -40535, 97352, -33101, -20131, -381, -94556, 22064, 7889, -29142, 57786, -9557, 46312, 81888, -5859, -10678, -29580, -76344, 31089, 86325, 84815, 4438, 45539, 54620, -24759, -64279, tail", "row-828": "head, -48343, -44865, 55285, -74401, 1955, -66920, -82307, -49659, 55653, -85743, 54943, -54234, -34522, -57913, -7601, 79463, 66651, -87531, -91240, -45334, 17553, 24708, -42227, 27288, -57836, 59615, -2580, -40419, 17514, 98215, 31305, -48079, 43991, -96141, 25148, 28792, -81654, -56423, 25265, 31114, -81091, -72693, 98676, 74907, 17725, -38605, 47162, -68980, -1922, 30453, -31195, -55298, -21117, 3849, 45218, -33594, -5935, -27103, 77596, 6216, -9983, 95490, 68914, -73894, tail", "row-829": "head, 33972, 64984, 70832, -53284, -86746, 35788, 89869, 3534, 56669, 73619, -24887, 88967, 68844, -71471, -21375, -48365, -93332, 81345, -59382, -52287, -195, -10645, 47455, 75561, 50512, 48121, 37219, -19780, -21258, -86411, 67461, 50092, 97824, 68873, -86639, -8141, 95349, -1601, -6634, 88580, 27199, -7057, -7636, 97301, -33019, 8138, 56595, 2977, 18128, -90976, 40138, -45138, 67187, -59685, 23587, -78461, 75384, -98652, 26327, -83383, 49758, 52365, 85573, -50884, tail", "row-830": "head, -79688, 38405, 36584, -57376, -58463, 18249, 41593, 6765, 52898, -74954, 15185, -9952, -92749, 93284, -7399, -43139, 44213, 72408, -3314, 81088, 50351, -90364, 73384, 73253, -95278, 72739, 12571, 90357, -88082, 86657, 173, 59788, 10234, -78353, -39245, -69674, -69158, -18894, -81922, -14936, -550, 87693, 83732, -50722, -91545, -23447, 68352, -2435, -913, 50834, 35804, 28472, 38724, 34778, -37029, -77479, 36099, 87369, -79313, -15113, 80437, 82600, 33246, 92820, tail", "row-831": "head, -94681, -48983, -81101, -56226, -46673, 91392, 81650, -47911, 22754, -5483, -22971, 49640, 96879, -56059, 97132, 23567, -20638, 89211, 93727, -11147, -93192, 24188, 32311, 63363, -33751, -26765, 88807, -7624, 91708, 92335, -72080, -62479, 21571, 88942, -89931, 20136, -98276, -33750, 24036, 65312, 81599, 78985, -82129, -22617, -76997, -31954, -2492, -87204, -32325, 57300, -489, 79279, 6658, 48460, -4185, -938, 13334, 18579, -90316, -56660, -13901, 23066, -51392, -67061, tail", "row-832": "head, -94754, -75609, 
-50900, 39483, -59475, 87611, -29402, -52168, 50804, 66985, 76629, -40536, -22832, -80480, -85366, 6979, -25574, 66015, 30177, -69667, -13573, -67417, -9080, 11072, -62209, -26009, -52689, -48591, -62519, -37449, 35954, -65700, 53864, 96878, -95370, -12428, 39448, -4726, 51609, 55864, 82770, -94615, 20546, 73202, -61244, 72222, -35874, -82740, 97563, 15056, -21155, -14067, 42704, 79409, 56319, 52929, -62409, -82733, 51853, 61817, 28783, -3782, -37799, 67183, tail", "row-833": "head, 45772, 46554, -90353, -82955, -26864, 64199, -18585, 93133, -99698, -60962, 90396, 24752, 76691, 64060, 32176, -90165, -46473, -19633, -28085, -74557, 58760, 2495, 48643, 31251, -7271, -54220, -85945, -11408, 65014, -96886, -74503, -49641, -78732, 49358, -13080, 57359, 59203, 94533, 71582, 82304, -87003, -24561, 89276, -40681, 54320, -1651, 72806, -47749, -76033, 36999, 71689, 83940, -60144, 2918, -42416, -44069, -2468, -15714, -12603, -41708, 38400, 66528, 65829, 76457, tail", "row-834": "head, -53125, -49274, 73206, -67688, -5026, 92108, -84062, -9499, -62924, -13535, -60843, -54476, -60969, 31381, -90080, -4451, -91303, 10585, -28018, 50806, 68524, -61270, -97951, 96506, 29190, 84801, -40377, 94959, -64317, -23994, 95578, -8343, 45096, -94713, 330, 90741, 98755, -72382, -68065, -28004, -21311, -48323, 78685, 6184, 2234, 78181, -10837, -16925, -81459, -13745, 84905, 29694, -38239, -21411, 37364, 31154, 60450, -72617, 34586, -17757, 34377, 74731, 77587, 44704, tail", "row-835": "head, -1849, -70264, 11528, -87816, -37861, 6060, 74572, -78312, -83824, -56089, -67208, -21625, 10117, -42556, 29773, 41540, -29654, 48242, 36542, 93038, 19804, -33414, 89732, -94959, -40046, -89703, 36877, 97192, 15655, -89364, -95741, 79643, 84451, 78588, 81941, 95410, -48265, -17016, 64692, -85400, -96115, 3912, 86726, 40926, -97370, 97053, -35231, 62032, -15036, 91708, 27198, -78616, 76507, -10094, 6128, -1259, -15053, -73900, -40412, -93657, 9054, 27021, 27248, -18600, tail", "row-836": "head, -58466, -37554, 54754, 71883, 57286, -96756, 75935, 38722, 31245, -75033, -8984, 44592, -26156, 22537, -64557, 4635, -38833, -28931, -95305, -36415, 37620, -97847, 30632, -26817, 4669, 82539, 96057, -70836, 45579, 22576, -90732, -69440, -4470, 45878, 92888, 86053, 80319, -63843, 18750, -70609, 4073, -93701, -94106, 73087, -17940, 7102, -95869, -28665, 91088, -89617, 70083, 74699, 20303, -15127, 26207, -43895, -42701, -23923, 89483, 79928, 52752, 20027, -78134, 1549, tail", "row-837": "head, 27245, 8822, -29656, 34788, -58147, 56399, 76484, -8335, 28431, 11043, -46244, -14215, -52444, 4361, 14922, -8689, 60200, 63129, 18661, 48331, -16005, -56880, 81963, -61585, -46834, 18185, 91144, -3551, 97988, -65596, 93759, 71877, 52265, 70806, -85606, 45672, 35233, 42053, -77236, -92551, -21113, -40138, 36293, -27834, -26529, -84380, -52217, 87, -75637, -14614, 43099, -73256, -98124, 38685, -77511, 62822, -21603, 59026, 85689, -97062, 7670, -8174, -90929, -72865, tail", "row-838": "head, 4584, 77350, 50801, -64892, -67156, 20938, 8989, -16841, 66318, -6133, -57248, 36594, 60606, -89642, -26023, 19085, -44937, -79086, 43910, 11481, -8362, -47403, 14852, 13573, -63316, 52598, 58184, -64112, 89788, -454, -69426, 84653, 13446, 96693, 88188, -59548, -1974, 65964, -69957, -70971, 76038, -34761, 50023, 14617, -89721, 26780, 19013, -68092, -46514, 50166, -83906, 49330, -58025, 61991, -63264, 58842, 39636, 53748, 60071, -55253, -39863, 87778, 15548, -88364, tail", "row-839": "head, -34133, 42174, 25106, -44732, -5930, -34402, -48138, -52374, 23847, -3344, 
63576, 34080, 83567, 35843, 30684, -28288, 69403, 47682, 91356, -97693, 37812, 8445, -85111, -35578, 121, -17220, 15021, 32812, -41240, -59446, -29428, 10896, 13325, -89362, -37904, 11054, 86577, -29760, -89041, 62913, 28295, -7280, -94054, -98932, -9086, 50863, 83603, -2921, -3410, 11526, 79207, 29700, 51615, 79904, 60891, -34407, 68143, 64404, 43991, 37083, 12736, 60523, -49547, -56296, tail", "row-840": "head, -12528, -87542, -4329, 95633, 61271, -99581, 92110, -73183, -39796, 41040, -2365, -98751, -42369, -17102, 47752, -15135, 51999, -28699, 93792, -45639, 23817, 92589, 49709, 10008, -73290, 98960, 97669, -11512, -20519, -96113, 9505, 42634, -96790, -90975, -61415, -64927, -37589, 49024, 64180, -83164, -12076, -12250, -80628, 48220, -27678, -54578, 37305, 37184, 18387, -5735, 16109, 552, 58980, -4014, 93199, 52993, -25537, 53794, -59538, -36825, -39102, -55668, 68843, -2638, tail", "row-841": "head, 27055, -94946, 12138, 17017, -76464, 59569, 21820, -50934, -91135, -23860, 32769, -47733, -7526, -6945, 92995, 53096, -16734, -86080, 18047, 55176, 25866, -41004, -11072, -60366, -16659, -16342, 74934, 22391, 49138, -32122, 63400, 32677, -4732, 63254, -88285, -99544, -52534, -73127, 29489, 13729, -46557, 4119, 54107, -95505, -73758, -81051, -24692, 10611, -85442, 81898, -73599, 66514, -10014, 60366, -88140, 83315, 64162, 61116, -4827, 90854, -92115, -20405, -25708, 61417, tail", "row-842": "head, -34379, 66697, -89027, 29597, -8112, -11786, 9651, 30504, -70998, -15357, -21347, -97054, -2220, -59908, -87640, 37264, 4009, 83175, -23191, 95031, 15302, -78133, 10951, 9127, -36902, 81634, -89356, -57003, -72168, -88038, 19887, 13240, 13607, -54065, 67075, -46132, -74542, -82677, 41666, -90211, 8889, -20670, 56698, 1821, -72500, -47288, -92339, 31835, -1649, -74872, -24918, 72841, 58699, 17743, -40933, 75802, 55439, -89338, -40764, 28427, -50815, -72985, 5307, -12851, tail", "row-843": "head, -4273, -26440, 98674, 89655, 50178, 19090, -58369, 41723, 29294, 27091, 44703, -77646, -23013, -43122, -98002, -96595, 69154, -68764, -41875, -19766, 15062, 51142, 33411, -56461, -98862, -89, 98490, -81936, 41574, -40246, -48842, 57619, -94877, -48839, -59905, 91458, 92876, 47825, 59056, 51027, 56974, -51169, -17886, 97560, 10784, 90439, 32320, -41144, -97439, -33938, -19996, 87130, 70148, 12207, 94639, 7727, -24167, 56832, -75287, -34696, 9747, -39068, -18692, 13582, tail", "row-844": "head, -31961, 58270, -21223, -3807, -69551, -2782, 71871, -5993, 92089, 66625, 13723, -28572, 106, 54931, 92129, 21838, -64328, -69357, -7321, 57745, -76701, 79763, 95144, -61133, 8317, -37375, -31642, 6538, -56500, -10793, 33440, 22493, -74803, 75389, 55857, -40726, -85122, -66754, 96556, -13249, -18929, 39202, 50478, -5823, -48334, -79034, -85501, -72467, -31969, 32439, -28827, 83188, -3020, 20504, 23579, -98936, -6140, -46168, 87882, -84972, 46268, 51261, -93599, 14807, tail", "row-845": "head, 97729, 20224, 41814, 20025, -94931, -8496, -26940, -53146, 3102, -5075, 54942, -26988, 47005, 17156, 85632, -98615, -58697, -87832, -79278, -41991, -66702, -94459, 22738, -19303, 17242, -48862, -71566, 82752, -90944, 36956, 80284, 54258, 1889, 68220, -1812, 24605, -94327, 24150, -75935, -28213, 42843, 66333, 32573, -23487, -37803, 11547, -27396, 34571, 83387, -61269, 71038, -6332, 79825, -19572, -11803, 59228, -17497, -75999, -46785, 24876, 32751, -23874, -72070, -36981, tail", "row-846": "head, -10491, -37117, 33710, 51260, -35188, -86661, -89485, 36810, -24195, -43098, 18867, -79933, -58730, -49834, -98707, -33855, -69403, 
-86929, -66215, 28727, 52783, -16255, -54039, 7717, 92388, -96278, -93508, 87507, 23710, 13174, -83054, -25630, 79550, 34213, -20517, 13770, -36940, 56470, -95642, 17878, -21191, -63254, -75162, 85777, 93348, -36663, -8334, 53571, 95941, 13832, -91774, -64165, 18346, 8089, 93033, -86061, -28474, 37633, 70737, -55238, -54296, 62446, 3231, 17119, tail", "row-847": "head, -50809, -96878, 26636, 93192, 90504, -41490, -98653, -36949, 76611, -79469, 47337, 21198, -42912, -6318, 8624, 62323, -45417, -95458, -19738, 79379, -43305, 51875, 18855, -16878, 98620, 78530, 89878, -49895, -48327, 95376, 65612, -72575, -89541, -20626, 86943, 4167, -64243, -22881, 9428, -76623, -94923, 49310, -56014, -77803, 37018, -44050, -52243, 88411, -78885, 8265, 48820, 34851, 20602, 56693, -43881, 94384, 56533, -36150, 34921, 92258, 13787, -65864, 92991, 77633, tail", "row-848": "head, -75918, -76300, -97872, 63107, -80163, 18107, 28168, 6859, 41146, 24762, 44514, -39923, -83908, 87625, -25711, 48888, 89213, 66646, 60095, -26237, -94489, -58129, 73577, -67400, 88212, 49383, -13400, 49999, 55039, -35952, 4402, 36527, -32988, -83550, 35461, -6174, 49499, 64172, 55655, 32223, 68443, -7068, -30680, 42274, 87525, -30818, 42767, -27158, -26454, -82804, 75825, 16501, -63864, -21435, 40814, -78227, 88658, 25194, 47270, 34669, 53562, 11292, 59092, -46843, tail", "row-849": "head, 57485, 54090, -75359, -86575, 60865, 92517, 10332, -23572, -5517, 78539, -31008, -44284, -15192, -59481, -26841, -84315, -5574, 99021, 81004, 17741, -74117, 17156, -43804, 87788, -79587, -73738, -81365, -95110, 2982, 60790, -77192, -55796, -93440, -16003, 74678, 12054, -50120, 42043, 37652, -37452, -88911, 14830, 54534, 37841, 64237, 38061, -14742, -41512, -82611, 5247, 25063, 78218, -89744, -46708, -67404, -3827, -3558, 86182, -19377, 15457, 15285, -4864, -69971, -20055, tail", "row-850": "head, 20384, -54525, 31256, -90069, 40551, -88478, -32117, -47445, -29883, 55271, 57340, 66212, 6015, 15378, 64267, -85578, 32027, 77202, -28241, -60491, 22864, 28006, 22029, 21366, -79487, 27010, 50453, -95448, -68634, -54813, 86372, 96767, -4132, -2495, -39058, 49145, 18399, 96747, -67999, -21732, -56041, -60607, -64695, 33492, 2481, 38142, 10856, 82332, -44950, 77877, 26457, 78671, -68379, 90978, -91049, 43699, -63350, -75277, -73339, -18364, 44349, -38044, 94730, -41689, tail", "row-851": "head, -76616, 44376, -99900, -60509, -32437, 28960, -45893, -26775, 97127, -43532, -71348, -33280, -37386, -87071, 32323, -10471, 45701, 39548, -57093, 93315, -34575, -68499, 60424, 60040, 93507, -96067, -97787, 47406, -95812, 32025, -10034, 6441, 89447, 87121, -41137, -90374, -46173, 70012, 66802, 76087, 25998, 86455, -77965, 19426, -38893, -86193, 60503, -12296, -12904, 6140, 21986, 84220, 81940, 37612, 38080, -81496, -5658, 58782, -38634, 11489, -67006, 28419, 88646, 35215, tail", "row-852": "head, -65076, -31026, -74846, -65601, -4638, -63618, 989, 35173, -52602, -44061, -42070, -68887, -55266, -53341, 89015, 57309, -29740, -57787, -25102, -14836, 7198, -75965, -91805, -50090, 8834, -55609, -92099, 53882, -31968, 17449, -35134, 26168, -27477, -6296, 79134, 1729, 57085, 58893, -61653, 10775, 93609, -19836, 59358, 32182, 69706, -17336, 14773, 75552, 52066, -18814, 84790, 46523, 82331, 6282, -80817, -12842, -2938, 47794, 60340, -12279, -62216, 4429, 70312, 75283, tail", "row-853": "head, -25390, -65645, -5764, 56540, 33070, -60043, 12005, -39181, -15419, 71165, 32372, 88362, 43298, -77155, 80403, 51180, -98487, -18416, 98082, 29199, 91884, 94611, 13712, -7649, -12139, 46248, 
48126, 60982, -39414, -29329, -5429, -37693, 73837, -18971, 10004, -828, 94460, 57991, 85840, -87905, -9429, 1207, -67516, 66406, -52167, 76939, -3591, 65702, 60578, -62189, -19155, -42626, -3823, -86818, -43796, 19885, 20600, -30531, 12648, -55445, -7507, -37628, -18058, 64810, tail", "row-854": "head, 12968, 69570, -91688, -36984, 85980, -95572, 66654, 69640, 45550, 34496, -65154, -19705, -54808, 53842, -89482, 76989, -80836, 15435, 78275, 1119, 10479, -69288, 97199, -26347, 36185, 27909, 79731, -57600, 24605, -76381, -39660, -90317, -5590, 94950, 39135, 34260, -34360, -25252, 97454, -74226, -39648, 53727, -71775, 96227, -35271, -81834, 78624, -37075, 96995, -10853, 64542, 31171, 20074, -23445, 6476, 33110, -7639, 87584, 41277, -23724, 3748, -92816, 43466, -21447, tail", "row-855": "head, 50790, 2918, -20385, -47448, 70380, -87670, -62393, 81376, 41190, -51613, -99155, -58958, 17768, -4949, -72997, -52082, -62852, 85475, -62471, -93804, 86810, 24381, 67000, -44886, -403, -64045, 53219, 846, -83123, -57473, 71800, -72141, -94914, -50947, 55404, 75855, 7725, -73050, 34201, -94483, 79403, -40743, -51389, 97549, -141, 54498, 90689, 53360, 63457, 35054, -37559, 99161, 58785, 96911, -48595, 34545, 91551, 38232, 2411, 38901, 33621, -11624, 58455, 39756, tail", "row-856": "head, 81191, -53484, -32355, -72218, -79593, -95844, 30981, 57311, 13341, -32472, -57546, 30419, 24041, 89521, 65646, 60304, 27539, -9481, 26079, -91074, 6568, -86492, -69050, 80527, 54964, -31963, 75532, -31198, -30046, 66045, -53022, 12319, -77838, 53374, -40877, -54916, -42014, 47093, -6168, 70061, 62263, 27002, 30325, -7696, -32866, -61963, 75753, -25500, 85148, -74772, -37761, 62894, -26549, -44624, 31686, -65910, -46388, 21550, 59082, 99717, 88028, 12811, 45450, -69990, tail", "row-857": "head, 54849, -8121, -50013, 26277, 13284, -53615, -30876, -12283, 73583, -46163, -22269, -6291, 77067, -68, -13812, -50179, 94483, 70695, 35856, -86569, 29256, 7032, -12802, -83497, -78634, 26293, 71411, 63592, 27749, -48960, -84938, -16221, -85945, -90040, 43191, 95799, -851, 21420, 93024, -41080, -79430, 2881, -3890, -66090, 72360, 54009, -32497, -10373, -49377, 33591, -69580, 48449, 82869, 77421, 10999, 62280, 78949, -62475, -39319, 90167, 29873, -37476, -73746, 25831, tail", "row-858": "head, -18300, 36054, 33299, -80777, -82758, 15310, -71817, 23904, -89081, 29557, 38642, -917, 40227, 81091, 6728, 40516, -80340, 36624, 42389, -2723, -32949, 72596, -59266, 55012, 81932, -31388, 93621, 60178, -38878, -25388, 77862, 49544, -24431, -1729, 14349, -88623, 39131, 77909, 24343, -32851, 39560, -93864, 14893, 5816, 49786, -78386, 8893, -61150, 84142, 97327, 69502, 27364, -76252, 35210, 33408, -9485, 76261, -21264, -47111, -98094, -58110, 58803, -64320, -95774, tail", "row-859": "head, 78183, -22501, 4767, 23929, 76171, 82086, -21662, -73704, -96381, 11914, -62518, 43728, -45423, -41847, 77569, -1630, 42920, 41491, -64966, -49453, 22464, -29376, 2629, -72936, -2803, 78082, -68880, 23986, 28996, 25724, -96136, 56901, -90622, 81765, 64894, -63871, -97950, -76419, -36019, -15824, 58981, 62905, -4719, -81849, 79278, -14154, 81442, 59243, 24274, 84805, -35542, -57527, -51627, 52148, 79125, 72812, 52399, 71695, -87993, 28103, 5793, -88611, -61241, -11366, tail", "row-860": "head, 19358, 23066, 73716, 71167, -13929, -13788, 29315, 61576, 27749, 33219, -29845, -44444, -9257, -84218, 15913, 99947, 86595, -17148, 73412, 18813, 93052, -16340, 92346, -22218, 53171, -24267, 2795, 94313, 43541, 8719, 72193, -1307, 13991, -81491, 77982, 32196, -60884, 
53889, -66347, -21190, -69747, -18630, -22330, -57058, 75049, 83475, -95877, -2092, 97841, -58023, 18418, -91563, 26587, 82240, 20503, 32375, -79396, -74514, 88343, -59812, -81047, -58175, -17111, -51808, tail", "row-861": "head, -34323, 32075, 68612, -20631, -57004, 86280, -18900, -86669, -83124, 30425, -52722, 26005, 89459, -31141, -68788, 56465, 80211, -11900, 67484, -54744, -11226, -31491, 76975, -48521, -31167, -72879, -99719, -10032, -23469, 34473, 7356, 11808, 22541, 1858, -17217, -14934, 51379, -43517, -35448, -67775, -71108, -2438, 4996, 24938, 21551, 82330, -20955, -16701, 69696, 96687, 4246, 92288, -48023, 65800, 42824, 80287, 26556, 30004, -4859, -45311, 82459, -80165, -13784, 71741, tail", "row-862": "head, 48006, -21098, 67055, 78407, -60272, -8850, 73058, -97121, 89254, 96020, -37136, 73208, -20485, 8298, -27375, 12038, -10977, -57047, 35728, -56574, 33212, -27628, -3923, 6232, -93747, 40104, -31309, 27321, 62557, 29364, -34960, 37559, 97174, 87204, -15348, 94925, 22167, 24619, -61413, -20286, -56536, 52411, 8370, -62649, 41819, 44551, -35974, -51228, -27391, 28423, -45546, -4815, -62731, -26846, 56486, -68298, 34579, -70855, 7146, 21405, -10656, 79647, 29595, -16636, tail", "row-863": "head, 1961, 66856, -83041, -98669, -4846, -36150, 48473, 25962, 47689, 75719, -20018, 63153, -41963, 35805, 3372, 79827, 70800, -61176, -98182, -74940, 6717, -3222, -53788, 65858, -9209, 63548, 8425, -62717, -2812, 43546, -35389, 50208, -61197, -2037, -84310, 27839, -64697, -65781, 79227, 38658, -77373, -94550, 33348, 6466, 21245, 44596, -20960, -25130, 11257, 82422, 91790, -5639, 98713, 78443, 40799, -27380, -4020, 94312, -72348, 24478, 57796, 46892, 82812, -62926, tail", "row-864": "head, -18194, 34094, -36933, -71017, 51830, -73227, -84630, -82686, -26277, 62648, 40070, -45259, -7001, 1504, 90134, 25833, -58731, 66218, 89280, 58034, 45529, 42587, -38916, 79659, 68079, -56103, -7016, -60754, 80148, 36020, 50775, -83996, 78920, 68156, 50338, 23329, 95633, 8989, -58830, -7623, 29546, -36149, -44289, 7176, 11560, -73296, -91920, 43506, 77457, -87545, 41320, -49184, 86601, -90759, -59125, 77949, -67318, -63493, -63693, 18056, 37573, 38742, 74166, 9245, tail", "row-865": "head, 76620, 51824, 33530, -61923, -65522, 35670, -7986, 21943, 60078, 25146, -81966, 77693, -52438, -14727, 39075, -81164, 11134, 77577, -75483, -35963, 84391, -67659, 2245, 5373, 97724, -10727, 30593, -60117, -3317, 8922, 534, 34299, 37007, 93022, -11669, -31751, -15386, 65895, 54441, 33853, -63136, -77359, 63425, 97117, -85760, 61949, -81909, 98893, -31757, 25339, 89263, 91120, 78427, -45832, -21685, -64672, 92779, -56834, -59338, 27138, 56112, -68434, 35689, 4042, tail", "row-866": "head, 74439, 5692, 13314, -98282, 94083, -78833, 41299, 88725, -70492, 85225, 67436, -84419, -98946, 38961, 24735, 41508, 2800, 82353, -4724, 63783, 57475, 28755, -1017, -10669, -36813, -80108, -71168, -33793, 6745, 26750, 40957, -77688, 80751, 64797, 44065, -45633, 88501, 49499, -11084, 15924, -90685, -87819, -84976, 45987, 8088, -96785, -44866, -47824, -4183, 99447, 44255, -55057, -88414, -15929, -84070, 44635, 26482, 37645, -1370, 76858, -2683, -73354, 16925, -50443, tail", "row-867": "head, -42306, -8519, 80294, -65614, -4639, 65381, 32099, 31382, -63995, -87972, -5363, 39450, 38376, 44805, 82112, -77581, 81089, -71463, 80013, 55902, 94420, -55483, 49759, 59026, 49499, -30056, -72658, -31624, 7396, -79001, 78683, 69684, 86038, -83046, -18399, 91508, 3917, -34197, 94160, -59991, -60195, 30405, 77087, -5068, 68435, 35966, 38576, -65515, 
-96502, 71692, -79079, -60631, 49091, 1914, -20801, -73500, 57065, -81408, -36245, -2403, 72767, -25878, 4066, -18047, tail", "row-868": "head, 75251, -3221, -38344, -94960, 34226, -31436, -85361, 10573, -17637, 81119, -64379, 76031, 50139, -74078, 55018, 815, 95932, 53875, 36329, -81957, -74347, 50431, 33672, -55204, -95822, -37907, 59534, 49931, -45949, 47969, -58409, 26845, -14574, -44359, 32241, -22405, 81579, -97502, 92056, 31436, -46570, -477, -36157, 67711, 8513, -71807, 21197, 50170, 23408, 85638, 17463, 4710, -5806, 21055, -18624, -45511, -35270, -35808, -60483, -33996, 99252, -86249, -71614, -32252, tail", "row-869": "head, 19473, -43522, -91011, 26059, -77238, -19903, 4281, -50469, 46228, -46411, 34853, -99349, -55186, 64326, 61768, 57848, -23449, -35987, -32767, -40335, -65287, -70796, -14765, 92028, -33078, 72610, 69259, -233, -23542, 52506, -5832, 8688, -35445, 65098, 45892, -89967, -95655, 52806, 18873, 95672, -30999, -76186, -36626, 47538, -14013, -51500, 79593, 99419, -64460, -53681, 64105, -44597, 58289, -31634, 11210, 75569, -23046, -94187, -21925, -10684, -39728, 70603, 54706, 37226, tail", "row-870": "head, 38009, 18399, -29679, 74036, -4507, -43755, 90432, -80509, -97361, -32878, -84140, 70933, -73047, -61860, 64629, -67142, 68308, 63716, 82798, -72218, -46645, 95209, -39172, 69620, 57515, -38869, -67481, -43366, 64169, 23728, -98781, -76197, -39156, 10902, -70252, 48719, -25530, -23654, 73875, 12632, 52131, -91968, -48040, -95356, -48288, 17353, 42787, -30751, 913, -9887, -88879, 44673, -57482, 59991, -30783, 27787, -27344, -58300, -87575, -90314, 81350, -12135, -17702, -857, tail", "row-871": "head, -29573, 24961, -89446, -64399, 30494, 38308, -54131, 60774, -5122, -94634, -27400, 50699, 89990, -7502, -81353, 63105, -72795, 1460, 78377, -15162, 52362, -61674, -85413, 80170, 11247, -6349, -61361, 60846, 14891, -43165, 71483, 80467, -43090, -26782, 2724, 17143, 13652, -54019, -39926, -99836, -80702, -80212, 94707, -23697, 58399, -84151, -92958, 22513, -40817, 23654, -62212, -80123, 15910, 8548, 14378, 11239, 54748, 37464, -53166, 45696, 62204, 12944, 29165, -61591, tail", "row-872": "head, 30922, -20721, -66532, 27206, 2584, -94579, 59326, 61845, 56982, -89659, 86613, 71150, 66574, -29186, 29096, 79982, -90388, 84950, 33797, -17241, 85117, 85802, 90960, -15054, 60725, 93324, -75222, -68809, -49318, 75346, 636, -35054, -32322, -76681, -90059, -71809, -79713, 14703, 89570, -96351, 8477, 59587, -3538, -68268, 71227, 91327, 17579, 38705, 95542, 82625, -17166, 26933, 98502, -94313, -60902, 69405, -48725, -1460, -1249, 18805, 37281, 2509, 33471, 44499, tail", "row-873": "head, -70401, -74949, 58454, -88523, 32210, -88293, 80983, -70547, 16387, 73950, -73528, -13187, 39522, 96383, -17909, -34532, -33897, -37395, -59101, -50055, -28913, 51368, 59237, -56809, 95934, 28091, 35238, -13934, -89061, -9260, 70119, -3411, -67515, 79969, 22334, 87028, 25583, 5673, -1374, 2520, -32095, -46973, 60397, -36539, 7716, 25015, 94982, -47134, 14960, -23750, 1580, -56838, 5103, 69113, 79462, 804, -31855, -82279, -27204, -80701, 76461, -99375, -62218, 86329, tail", "row-874": "head, 87229, -61889, 15798, 37609, -52495, -15363, 4868, 56185, -60536, -87615, -14538, -1372, 38874, -57560, -80269, -31329, 15398, 78555, 8090, 11417, -83858, -83238, 45187, 27376, 19712, -4896, 27618, -9596, -16889, -47478, 59855, -41514, 66757, -58817, -76587, -91439, 2583, 99958, -65965, 24022, 7684, 65627, 49444, -68763, -41156, 34518, 71068, 67180, 18559, -70751, 92094, 77621, -30483, 31476, -23088, -60923, 
-50353, 7992, -29565, -38079, -67057, -11182, 29897, -54246, tail", "row-875": "head, -84912, 25072, 86436, 74992, 16706, -9490, -11257, 34512, 50184, 74471, 67180, -8840, -82854, 89450, 17660, 97347, 13687, -71281, 19818, -2083, 96025, 55745, 56130, 71772, -27838, -54722, -51921, -22286, -35336, -45829, -73983, 92142, 30608, 83825, -59026, -36129, 84981, -50787, 67952, 14114, 46725, 31181, 21915, -44124, 58286, 22420, 21498, 41185, -44408, -21409, 78240, 49611, -38739, 16642, 28283, -79350, 60159, -78129, 45741, 76602, -65548, -74514, -6981, 99355, tail", "row-876": "head, -40897, -6193, -61852, 22928, -57759, -12174, 36286, 14044, 65113, 47404, 24198, -65553, 3131, 4447, -87100, 14348, 83819, 48568, -920, -55409, -16230, -36106, 93058, 41073, 59617, 65982, -72862, -83855, 16135, 86081, -7004, -64118, 76638, -48425, 92099, -76325, -88128, -57177, -17849, -31617, -64717, 36601, -64567, -43297, 63585, -35467, 76713, -58017, 40958, 3314, -40346, 38054, -9863, 12462, -30178, -99619, 40331, -62410, 31500, -37833, -59424, -2830, 42425, 31475, tail", "row-877": "head, 36922, -44760, 30801, 46586, -9722, -19542, 93786, 48681, -33863, -76916, 71272, -95570, 1177, 81858, -19126, -91203, -463, 17722, -30095, 8377, 45884, -2846, 75077, 90087, 23333, -52124, -93170, -68303, 74714, -39807, -11545, 13915, -51859, -36637, 13952, -90871, -69033, 23353, 24391, -45689, 4754, -78430, 76548, 37596, -16107, -56766, 5365, 89398, 27429, 31258, -61940, 39838, -10637, 1078, -25644, -27159, 22939, 20041, 9800, -22370, -70848, 49921, 56957, 34157, tail", "row-878": "head, -26966, -18006, 72652, 15018, -88476, -5804, 30622, 95508, 89984, -7125, -2453, 53987, -52735, -18344, 13963, 50091, 41006, 51199, -93468, 90441, -3058, -44057, 61759, -2027, 8685, 42447, -78955, 76828, -26016, 46038, -36719, 6735, 20835, -32015, 31965, -75638, -18600, 25780, -12877, -57078, -27985, -70420, -44763, 60265, 43299, -39368, 96760, 71329, -10363, 51390, -5692, -40762, 55870, 40154, 95196, -62938, -97246, 94563, -69765, 4053, -29679, -98219, -78799, -16694, tail", "row-879": "head, -83202, -64459, 17894, 7241, -27204, 62745, -34297, 59576, 10896, -50079, 13971, 85267, -79367, -11845, 48677, 45203, -46899, -7905, 54218, -32367, 20416, -47816, -41241, 50102, 95052, 23813, -14761, 10106, -62186, -10000, 43355, -2343, -62735, -35524, 52091, 44310, -94346, -64348, -18191, 65236, 40199, 72779, 18799, -15930, 21950, 35974, 73127, 11423, -36721, -36155, -93417, -32822, -68528, -79976, -61944, 98357, 58007, -72119, 45557, 53197, 56833, 12967, -48583, 97802, tail", "row-880": "head, -75623, -50076, 35790, 93481, -43151, 16713, -98522, 82448, 83080, 53855, -98994, 44166, 61873, -91718, -94869, -21035, 61685, 99269, -24296, -42581, -50428, -25773, 83726, 91897, 91201, -24212, 32443, 54050, -77880, -26855, -45228, 19115, -9943, -58536, 547, 91581, 91328, -49956, -98661, -96080, 34971, 55596, 44061, -28645, -80695, 18488, 4810, 64230, 38770, -82131, -11945, 7044, 61547, 72808, -94214, -39411, -5636, -29410, -51278, 43269, 14507, 25874, 15112, -63823, tail", "row-881": "head, -66971, -94799, 62701, -31976, -52529, 66099, -99500, 23971, -27414, -8215, 94142, -62644, -38640, 47875, 99423, -98741, 27030, -42199, -46909, -61685, 2891, 86955, -45122, 89898, -88748, 99610, 21802, 50508, 64140, -90303, 87951, -46622, 80698, 66911, 69593, -97019, 2514, 76972, -24146, 86066, 89618, -60629, -61863, -56206, 83817, 82167, 25190, 97433, -91513, -69690, 9188, 23343, -91702, 91635, 67805, -84369, 73190, -39419, 33624, -72901, -44568, -5321, 44286, -59454, tail", 
"row-882": "head, 27370, -53435, -57088, -62596, -78825, 35122, -51902, 15868, 96019, -76681, 29749, -48385, 72160, 69118, 7577, 63792, 26687, 81373, 8241, -44464, -81103, 12765, -29929, -77329, -180, 92189, -42490, 25445, 67044, -79076, -46143, 80403, 33451, 85362, 11029, -24204, -19683, 4039, -7287, -13484, -15134, 39054, -42850, 90674, -52452, -73451, -35290, 94198, 58744, 30989, 48018, -23123, -58676, -60524, -53914, -81329, -25183, 10409, -46468, 25983, -10931, 2829, 85615, 90353, tail", "row-883": "head, 38223, 81066, 78832, 27133, 28081, 43737, 73113, 92561, 61442, -96412, -73025, 27719, 16531, 90291, 34412, 71267, 99513, -50906, 48795, -46199, 30552, 38765, -53508, -57169, -35087, 20833, 41014, 1950, -21711, 14172, -30789, -27320, -40895, 88717, 35799, 15854, -69777, 79459, -94231, -40075, 33072, -52932, -64235, -82346, 99661, 89158, -20932, -87943, 25193, -22440, -24998, -75691, 74128, 1704, -73451, 41571, 66254, 70525, -95051, 60059, -60932, 54236, -71738, -31240, tail", "row-884": "head, 38167, 94262, -40355, -81359, -2641, 48196, 97784, 84661, 13227, 63256, -51610, -12499, -35267, -26133, -3876, 72313, -27799, 48382, 29788, -64902, 4971, 35373, -77607, 42668, -11168, 11765, -41215, 51235, 60041, 33600, 57156, -29871, 42782, 73379, -37424, 76785, 538, 99880, -34673, 9597, 53944, -11755, -33379, -67603, -39940, -84567, 82956, 97756, 78839, -61259, 80153, -15430, -27689, 21101, -72433, -14648, 77963, -72365, -3722, 30478, 26268, 84191, -15369, -76808, tail", "row-885": "head, 95766, 20540, -87435, -26081, -1392, 73702, -29664, 13465, -3613, 19434, -89291, 88455, 95523, -14975, -38152, 76041, -49612, -92414, -30756, 13115, -88995, 98790, 10460, 64442, 17474, -57179, 51420, -77764, -61231, 92761, 68392, -57328, -50189, -78936, -22676, -46760, -8567, 33719, 10224, 40859, -52106, -16283, -57582, 50419, -6032, 12498, 24976, 76677, 70590, -29883, 12011, -35031, 82105, 89536, 91121, 50760, -86524, -7625, -36444, 41947, -46145, 41325, 12393, 96624, tail", "row-886": "head, 49445, 30820, -51351, -37400, -3209, -48172, 94926, 63555, -63161, 32569, 38227, 7057, -25092, 20004, 12639, -50345, -4554, 25749, 78210, 76825, 66838, 47642, -42605, -94230, 21902, -35049, 78864, -31834, 31083, -33371, 74843, -40400, 41440, 65744, -15861, 4906, 57777, 17575, -86929, 62626, 2573, 57112, 3148, 46569, 33253, -70637, -74779, -298, 18464, -99103, 80312, 62833, -31537, -58373, -35230, -71062, 9880, 48273, -23384, -13712, -1147, 80312, 11786, 20671, tail", "row-887": "head, 66644, -1240, 20146, 21132, 52234, 38061, 15424, -82256, -34460, 46001, -21119, -91026, 75088, -92176, 40000, -36751, -8411, -53266, -5831, 14476, -93404, -92260, 10980, 99719, -39965, -81095, 44204, -37937, 11260, -15274, 48942, 29793, 55734, -84675, -39330, 49086, -32766, 63922, 54245, -34486, 50887, -93345, -93972, 42454, -91090, 77929, 22761, 63483, -31306, -8665, 66820, 46546, -22243, -22993, 76426, 59960, -60887, -55984, 93831, -98349, 55395, -76795, -14576, -37724, tail", "row-888": "head, -71663, -66845, 30648, 9900, -88630, -72664, -60410, 42251, -32590, -21548, 88896, -12479, -25552, 38536, -31664, -49787, 7247, 15590, 73287, 9688, 96968, 96128, -63007, 24936, 31268, -34316, -68098, 56397, -41828, -63941, -85123, -71768, 83285, 55791, 70464, -65588, 19814, -69087, 93371, 73467, -47314, -66178, 17910, -23169, -18934, 98829, 61808, 3891, -70148, -97323, 73225, 17644, -72899, -76961, -12253, -88779, 21552, -42116, -16666, 18567, 14636, -69698, -75220, 77292, tail", "row-889": "head, 62622, 21994, 52672, -30235, -82805, -47531, 
-71829, -28, -63195, -69394, -32623, 23394, 44275, 75052, -30630, -18542, 68259, -72009, -96963, 4333, -73522, 72664, -92656, 25583, -93125, 51074, 79554, -23254, 9265, -67955, -41383, 59337, 4366, 44822, 38942, 42869, 22476, -88785, -8681, 931, 5237, 85617, -82262, 47067, 5071, -84471, -48420, -84679, 77011, 21598, -46325, 72784, -24456, 27324, 29021, 45058, 57873, -31424, -98646, -22370, -14702, -90114, -57103, -1775, tail", "row-890": "head, 44935, -60991, -41394, 29331, 17913, -59015, 81396, -27419, -36005, -13042, 62182, -72596, 81200, -39675, 22849, 52032, -81566, -40966, 75641, 29927, 88533, -67829, -66404, -93293, -57512, 6930, 45617, -54, -78898, 99354, 42101, -24376, 77184, -93227, 22624, -39760, 99744, -74440, -43104, 2080, 74146, 4917, -20826, -98017, 18487, 38649, 34771, 92558, 63745, -23147, -32924, -91777, 6523, -11561, -23642, -40858, 77460, -83831, -56352, 21497, 28778, -69386, -31533, 19077, tail", "row-891": "head, -91469, -35406, -26328, 52213, 1494, -83304, 47493, 85466, 61867, -68178, 14867, 83330, -28017, 43431, 40001, 19699, -44103, -57372, 2894, 96573, -28998, -660, 51715, -42960, 65019, -59741, -82038, 86189, 6180, 95004, 57172, -86150, 30323, 32096, -75544, -53526, 64768, -61341, 35649, 73399, 97520, -63182, -36946, -12234, 60188, -96201, -41775, 61945, -71380, 86438, -24438, 27943, 23803, -18771, 34848, 16426, 94596, -3627, 47319, 70415, 14546, 31885, -79849, 26536, tail", "row-892": "head, -59533, 30981, -87305, -79265, -87793, 46696, 37468, 88642, -67854, -10686, 61802, -71172, -2941, 26881, -15458, -16166, 33639, 73769, -75613, 72101, 68270, 57257, -77540, 99941, -54857, -47124, 60249, -20592, 31975, 29578, -27185, -77929, 85829, 4030, 26579, 36127, -87496, -95793, -33142, -56340, -35081, -40687, -28400, 11041, 3322, -40888, -39576, 82201, 42122, 85492, 25818, 10868, 25699, -62274, -41010, -90102, 32186, -3802, 70303, -54258, -4899, -18711, 13384, -99724, tail", "row-893": "head, -29534, 42875, 69346, -78609, -7078, 59644, -23007, -83561, -99921, 75584, -12790, 81338, 86837, 49466, 93584, -65711, 97027, 95691, 7762, -14457, -16087, -15583, -22416, -86697, -85398, -85935, -69977, 1516, -38430, -42814, 87752, 92516, 24056, 99928, 35780, 50120, 9898, -87526, -40762, -51123, 94393, 82518, -91421, 95876, 4340, 93464, -40403, -42097, -27588, 13665, 11394, -3114, -5085, -71800, -84584, -16610, 75820, -5165, 44702, -18701, 81183, 79995, -32908, -26195, tail", "row-894": "head, -81323, -56631, 30858, -19597, 97933, -43106, -28624, -74864, -57260, 38896, -36508, 48256, -18068, -98631, 11699, 9399, 90290, -77863, -1781, -44721, -57125, 21245, 29806, -44484, -60935, 55004, 93942, -5765, 68648, 69704, -12204, 5580, -87387, -28023, -26109, -62827, 28741, 65933, -28406, -5300, 6015, -28377, -76776, 57225, -48473, 99870, -52536, -33805, 14669, -62119, 96496, -51117, -25022, 17375, 65685, 25688, -6319, 19720, -65578, -55195, -36931, -30645, 13596, -74145, tail", "row-895": "head, 12630, -6561, -50013, 98078, -54527, 71388, 47311, -88814, -24302, -6456, 47531, -82327, 7171, 78681, -27065, 76703, 69600, -62036, 28691, 59131, -7637, 14602, 69113, -73820, -14741, 53099, 29809, -1080, 10738, 35694, -2426, 30790, 55848, -83213, -20968, -98415, 86235, -56514, 39448, 98767, 25795, -8759, 20633, 60561, 21566, 9578, -16553, 59037, 50458, -49036, -6112, -85289, -6654, -74043, 98326, 79070, -96302, 48748, -98891, 95247, -31174, -98487, -71234, -8697, tail", "row-896": "head, -58722, -12034, 17210, -76706, 57870, 66239, -10829, 87960, -86014, 79814, -89655, 39617, 63814, 744, 47418, 
-35325, -2969, -44431, 81649, 97941, -73366, -19447, -62241, -28713, 57784, -29374, -90879, -15238, -28700, -48682, 11024, 19126, 80691, -43081, -6086, 60798, -11114, 61684, 38872, 11583, -92779, -47825, 8223, 14579, 64900, -15521, -33417, -60932, -38168, -24825, 33558, 2058, 33134, -97646, 20336, 25800, -46818, -47053, 31428, -37083, 63520, -65219, -99428, -34230, tail", "row-897": "head, -97085, -76768, 22798, -61190, -87062, -79984, 45035, 66004, 27972, 27497, -1763, 34779, -19360, 10881, -53439, -44942, 48621, 77395, -53763, 7489, -82207, -71827, 61575, 22005, -31458, -65774, 64560, -28211, 19915, 59925, -38519, 41389, -69806, -32220, -19066, -90063, -41225, 3493, 50733, 36232, -7343, 99989, -42109, -39008, 64168, -97621, -51590, 71101, -96446, 92684, -52735, 87963, -26010, 67016, 46556, -11382, 78350, 32367, 16598, -3672, 55851, -12361, -48955, -95203, tail", "row-898": "head, 45903, 94991, -50168, -52983, 97801, -99250, -51165, 82659, -70207, -69244, 56197, 29696, 11392, -84514, -76145, 20277, -30645, -61853, 61360, 6945, 31545, -59673, -29540, -76296, 11507, -40761, -679, -23440, 8801, 70235, 71476, -79848, -20780, -64579, -61786, -40545, -50044, -8181, -41845, -6918, -1674, -28815, -40830, 10218, -48539, 93927, 14478, 45825, -22936, 18379, 31931, 51554, -34033, -15647, 52261, -40806, -27691, -1916, 60573, -20291, 28908, 7305, 99632, 96360, tail", "row-899": "head, 28763, 54960, -38842, 74427, -84319, -89544, -42871, -17203, -96929, 38689, -93167, -24385, 64904, -5569, 21972, 81436, 60158, -14131, -45768, -91214, 77028, 65903, -26862, 10815, 51258, -31828, -59481, -47968, -88972, -43662, 635, 66943, 15215, -78541, 74781, 3414, -46899, -96479, 11727, 84135, 8264, -59545, 41077, -20148, 43829, -67558, 62227, 15595, 34400, 5726, 15578, 18560, -86023, -65274, 36309, 43068, 89295, 81977, -28424, 36508, 85751, -94837, 33957, 9677, tail", "row-900": "head, -19041, 75470, -92055, -37393, -14044, 25765, 18217, -72848, 88520, 88850, -96834, 28064, -98042, -97727, 85900, -45459, -1220, 87426, -20246, -99622, 70489, 80431, 15947, 61133, 22341, -33776, 55729, 42033, 30820, 16437, 92835, 18995, 88131, 5856, 67152, -15025, 76179, 89659, 99893, 39150, -41485, 30170, -9582, 20045, -54378, -86142, -63645, 14196, 31065, 83102, 83202, -67122, -63572, -95942, -87779, 6065, 83874, 13373, -48747, -68560, -45407, -42896, -99947, -45743, tail", "row-901": "head, 57356, 22418, -60082, -45767, 79945, 7009, 6195, 14040, -85761, -57977, -68389, -4747, 74723, 51977, 71248, -58418, 69154, -87231, 56178, 36887, 15574, 77215, -86653, -32502, -50405, 75843, 98691, 7714, 8917, 2198, 48553, -55592, -73963, -563, 50930, 41041, -13165, 11980, -50806, -94333, 45304, 93112, 72898, 92588, 88960, -738, -10205, -51297, 6383, 1913, 91611, 52144, 65480, -83400, 55759, -48659, -21208, 78743, -45952, 86342, -15090, -50046, -21932, 36939, tail", "row-902": "head, 45941, -37136, -44557, 37763, -70225, -79775, -72123, 47352, 45780, -47597, -50603, 94987, -84965, 69427, -96682, -58758, 18082, 27829, 95066, -72950, -97559, -45775, 4722, 78754, -4595, -61635, 80052, 66453, 4729, 17418, 17723, -88206, -48890, 16991, 47939, 83255, 70269, 7001, -27956, -6712, 95369, -54232, 34646, 37487, -30434, 90081, -61797, -37908, -55058, -99927, -39154, -50980, -57533, 15986, 48030, -75306, -394, -1530, 91901, -81724, -80965, 80772, 37249, -79414, tail", "row-903": "head, 24811, 5617, -41815, 22867, 77204, 33731, -24296, 55321, -35156, 17997, -55712, 90120, -69907, 31085, -25740, -86374, 1500, -2444, -74432, -96594, 58467, 14002, 32729, -96307, 
96657, 8532, 25689, -1528, -70841, -38085, -66390, 35596, -39949, 4400, -71622, -52611, -19896, 64199, -39107, -91325, 71689, -5972, -5609, 55614, -97662, 66479, -70436, 40233, -53708, -47877, 33733, -37836, -44764, -66120, -98552, 14926, 53299, 50016, -45294, 4556, 8099, -35270, 86971, 6096, tail", "row-904": "head, 35640, -20478, 40657, 13947, 44395, -36136, 46200, 25481, -3124, -57451, 57938, 56213, 69004, 30150, -4520, -41130, 34076, 34833, 4763, -16360, -12771, 29329, 2552, 56117, -2378, -65861, -38024, 55509, 48969, 65416, 15129, -3813, 61721, -22035, 82, -22768, 96310, -11675, 13462, 64141, 77338, 30478, 39518, -60548, 91992, -37134, 85214, -94327, -26991, -18189, -20569, 24116, -79317, -19010, 35094, 29025, -64000, 58713, 87831, 6870, -51496, 58064, 92125, -94893, tail", "row-905": "head, -46203, 54858, 80512, -82925, 56241, -55100, 67965, -95470, 61455, -20735, -28851, -97731, 23726, -44462, -30300, -88074, -67584, 69042, 86123, -2291, 50252, -10501, -97192, -64341, -64912, 78541, 2386, -92928, -11608, 64520, -48544, 13555, 89275, -16528, 43553, 89929, 83913, 81657, -84549, 42692, 16187, -24072, 38820, 42587, 41780, -86280, -64089, 76149, -46749, -21648, -61639, -60073, 57466, -1878, -21821, -75905, 34482, 40711, -62626, 67036, -54220, -21938, 15201, -51274, tail", "row-906": "head, 28476, 50830, 66268, -77214, -42219, -65750, -50695, -94018, 90094, 21401, -13267, 11223, 6219, 75559, -37047, -41944, 15609, -55411, -81251, 35527, 51707, 61720, 52274, -54329, -29225, -78432, 26825, -30781, 64956, 58870, -34591, -80521, 54968, -1774, 50204, -74302, 94553, -11467, -8590, -28370, 645, -19640, 71222, -56019, -5624, -11041, -4405, 26114, -78547, -89309, -78683, 60435, 58109, -32874, -65842, -61120, -3694, -62747, 68258, -41619, 51977, 51210, 52288, -66082, tail", "row-907": "head, -99651, -40824, -32773, -1549, 52599, -23070, -57907, -34571, 98317, -52305, -40271, 73550, -15972, 9286, 60655, 71044, 1890, 5483, 56413, -36607, -13428, -26019, 77889, 35710, -98770, -55437, -31528, 9813, -92828, 43701, 87221, 99513, 45240, 38523, 87837, -29533, 13582, 19869, 74758, -17357, 96714, 54515, 60923, 85750, 68398, 47358, 81014, -26456, -12594, -96286, 55973, -50106, -24684, 376, 99574, -46221, 39413, -19597, 75742, -44638, 91923, 27799, 99983, -15294, tail", "row-908": "head, -31595, -66409, -61052, 60843, -56267, -19492, -47235, -10666, 49226, 20714, 96585, -61375, -23725, -44578, -19827, 32867, -21204, -98092, -31792, -44391, 91140, -68981, 72257, -97031, -44686, 33417, 95957, 39973, -94286, -50257, 88377, 98945, 90040, 69209, 54919, -18211, -80803, -24252, -43727, 59001, 98825, -83030, -11403, -13641, 85826, -69916, 11145, 70670, -30010, -66727, 78551, 19572, -13228, -95597, 1212, -22731, -66111, 65448, -86828, -13778, -48184, -82943, 61225, 62336, tail", "row-909": "head, 65553, 23762, -39760, 27583, -34848, 57761, -32187, 11172, 66380, 55945, -90352, -33745, -60381, 1892, 67506, -85310, 69098, 95432, -13825, -17263, -16803, -65192, -99108, 18340, 93586, 14917, 71139, 20433, 81849, 44188, 80156, 29121, -33402, -50742, 31453, 44526, -33508, -68148, -11075, -39484, 85669, -1131, 63918, 28739, 53381, 98313, 57429, -97500, -35857, -84991, -63883, 27315, -36030, -20573, -84993, -66123, 67355, -87508, -48037, -38917, 41947, -12031, -58473, 4991, tail", "row-910": "head, -89449, -96676, 43937, -93333, -46527, -62244, -46019, 80377, -44440, 69634, -21361, -40128, 57256, 80732, -37874, 1675, -80235, -39437, 24384, -89343, -43629, 22390, 43846, 25207, 44553, -12563, 16844, -58351, -58087, 14707, -68474, 
62042, -53449, 39121, 33414, 54451, -54091, 34506, -99444, 88585, 55864, 13358, 93214, -29250, -2779, -45281, 37287, 37250, 36786, 33478, -73969, -22839, 36182, -78088, -26558, 91750, 7240, 14875, 23812, -39986, -26676, -80261, 53678, -61794, tail", "row-911": "head, -38990, -54086, 6298, -92384, -39918, -79890, -23873, 9238, 39551, 74742, -80547, 48142, -63170, 68202, -52369, 82929, -5447, 30170, -68829, -78522, -87862, 63106, 96307, -37311, -76579, 34931, -30823, 47879, -30949, -42699, -59586, -92960, 34321, -89163, 92000, 60275, -73070, -29152, -51026, -43088, 82920, 38864, -22391, -16325, -39121, -23392, -71240, -43186, 89575, 47193, -94587, -8136, 42510, -51164, 30371, -24787, -57179, -63798, -19626, -26336, 87704, 80921, -71270, 83721, tail", "row-912": "head, 10309, -69825, -34724, 54069, 78600, -15398, -51406, 91973, -17342, 45378, -44899, -13239, 70222, 5547, 31877, -70892, 89743, -55445, 10390, 87096, 61636, -20606, -17494, -79278, 68447, 25398, -22221, 14985, 60716, -27527, -92804, -28108, 60465, -46402, -79910, 79411, 75407, -54686, 92476, -40674, 39826, -13054, 64568, 12367, -46444, 80445, -39481, 69703, 80018, -22163, -10887, 98217, -1324, 58654, -26252, 93810, 81092, -38925, -8800, -74227, -58866, 26320, -62149, -71802, tail", "row-913": "head, -78147, -67745, -3543, 24453, 11137, -53550, 24891, 34352, 81296, -74050, 27716, -17793, -44437, 27542, 12908, -61119, -18661, 4706, -80354, -4328, -89585, 49880, -61137, 13411, 80558, -76902, 99411, -48417, 15209, -91407, -57063, -24642, 67493, 98813, -71787, 40658, -70346, -58714, -96973, 83877, 39714, -10215, -75780, 98591, -98475, -27995, 93544, 14806, 95644, 32914, 47398, 42775, -63822, -57115, -85659, 8716, 44259, -12165, 21348, -1242, 2894, -62438, 89909, 44383, tail", "row-914": "head, -86754, 52322, 86381, -79567, 57117, 85541, -37947, -5423, 25021, -10099, -20212, -21347, -84753, 48706, -97787, -24423, 52486, -39088, 98538, 66486, -15155, 52038, -15705, -88255, 22902, -76082, 52004, -57124, -86522, -28181, 96883, 66529, 40128, 78467, 93463, -7345, 94783, 37532, 17640, -13007, -59211, -32990, 58971, 50019, 65467, -70033, 71833, -90766, -1893, 65920, 41165, -15250, 86355, 94843, 54722, -54116, 99004, -35035, -81776, -64410, 99156, 5377, 37711, -98588, tail", "row-915": "head, -98774, -20156, -78778, -58771, -7243, -36985, 46989, 96256, -56360, -37489, -41892, 13563, -45396, 34384, -38345, 55040, 29631, 58236, -28184, 61315, -74967, -59337, 51566, 45711, 49290, -67979, 97272, -16659, 97585, -42560, 69693, 58070, -83373, 88494, 9214, -93949, -30547, 36579, -63966, -87487, -43168, -62636, 11893, -40121, 48484, 41979, 94276, -1677, -1292, 12080, -56576, 75366, -50957, 74585, 60201, -96545, -78490, 82861, -26330, -14896, -56672, 2943, -10637, -82042, tail", "row-916": "head, 63847, 54996, 35269, -98466, -99965, 82393, -23149, -49393, 75258, 10072, 50300, 56248, -92207, 48833, 64517, 81767, 1694, 8024, 1923, -72666, 32686, -27234, -73609, -24074, 59067, 64494, -71189, -40207, 30101, -89610, 35122, -3248, 38258, 2618, 42685, 45168, -10606, 79300, -7103, -96167, 94124, -17435, 20413, -1312, 97031, 96268, 60453, -35903, 38956, 28303, -83011, 60658, 95792, -25293, -28334, 25280, 76838, 15893, -52127, 89185, -30210, 9403, -41843, 88680, tail", "row-917": "head, -17911, 69670, -74056, 80000, 28592, -24454, -61176, -10643, 28158, 31349, 88450, 65768, 23291, 2989, -39964, -68696, -29271, 18389, -38616, -10940, -15222, 70402, -3194, -51546, 53678, 27379, -87548, 87357, -46342, -36883, -35384, 85847, -57706, -77143, -31886, 79886, 74608, 
-3958, 40177, 66284, 45298, 86142, 53908, -5706, -29181, -2906, 98494, 90993, 56539, 39255, 49601, -43826, -67010, 43799, -46990, 33182, 70589, 95116, -66435, -33829, 42794, 22200, 28229, -8322, tail", "row-918": "head, -57315, 96780, 88535, -51350, -40168, 82841, 57435, -3687, 61366, 61412, -72093, 40797, -21821, 51596, -29944, -39863, 81676, -317, -2998, -62703, -15369, -61113, 7106, 33652, 31516, -35152, 11335, -81704, 53381, 58136, -16331, -25142, 29253, -90724, 4324, -23873, 49774, 81604, -31846, -6914, 81392, -49818, 58812, -25078, -21055, -47295, -74182, 80751, -99309, -7676, 50320, 87209, 47431, 16389, -84457, 35660, 62525, 64245, -9933, -87921, 53147, -74017, 75842, -94662, tail", "row-919": "head, 91901, 47881, -35322, 17661, 89809, -6771, -88593, -94725, 89811, 53701, -56504, 86412, 94617, 6012, -91813, 11140, -292, -36978, -42134, -54681, 36985, -74733, 5449, 37812, -59064, -13430, -93726, 62653, -4768, 34111, 55832, -65651, -13872, -65950, -83641, -34362, 49149, -74221, -96180, -92726, 1061, -8807, -87452, 89550, -36115, 88297, -98346, 97442, -11723, 88026, -34499, 99044, 37395, -98110, -74208, -208, -27483, -43289, 42504, -83835, 9799, 57095, -11178, 63872, tail", "row-920": "head, 85706, -69162, -53335, -52284, -704, 57419, 56630, 55719, -32732, 13774, -92392, 25428, -93265, 34687, 66669, 21884, -59101, -97995, -61290, -25263, 29566, 56046, 32509, 29727, -95472, 83944, 26769, -2768, -61264, 67376, 11454, -48078, -89108, 30131, 1148, -5008, 62286, -17944, 88413, -88748, 89704, -55011, -42325, -59207, -7528, -35605, -24529, 29048, 38151, -39127, 94910, -86098, 75641, 97039, 83403, -21048, 64303, -2610, 18639, -27536, -62373, 33705, -77555, 32769, tail", "row-921": "head, -45271, 57356, 52254, 18330, 55768, 49192, 86110, -83839, -2735, -47018, 82372, 79177, 40324, -59957, 31829, 68456, 36480, -76020, -94127, -44076, 54508, -78001, 82265, -61036, -50242, 27647, 30428, -81144, -76271, -24043, -98145, 56351, -58703, -78947, -71193, -17791, 22917, -87421, -99153, 58512, 86022, -97215, 82938, 93527, 52660, -92287, -46946, 15750, 86750, -91269, 77707, 65927, -40136, 85446, 89328, 14404, -38522, -90369, -73984, 23276, -13093, -64789, -24984, 59264, tail", "row-922": "head, 36602, -82207, 61360, 40888, -6848, 73564, 32526, -69711, -57966, 77543, 78804, 60875, -80129, -72913, 32550, 28019, 52066, 93518, 16540, -43122, 88520, 34301, -81686, -53930, 18291, -5183, 6508, -71234, 79228, -79113, 27734, -78082, -31175, 33481, -46083, 72447, 25924, -87058, -12650, 10268, -7945, 65307, 87687, -88811, -98322, -1296, 78110, -49720, 34642, -88496, 9812, 20416, 12874, 122, 85643, 6575, -37097, -77165, 10290, -50553, 4353, -1727, 11210, -97837, tail", "row-923": "head, 65540, -18168, 99528, -86938, -80675, 10580, 86769, 32423, -61538, -70728, 61977, -24204, 15403, -16851, 39044, -37999, -25246, 64569, 65426, 21222, 26798, 73542, 4052, -36047, -58303, -352, 56031, 35368, 59993, 65230, 37646, -97792, 1514, 92865, 72510, -58315, -18002, 57515, 31959, -40254, -36490, 98227, -27832, -98378, -50156, 34803, 3236, -88024, -9629, 42984, -38220, -8304, -21617, -48345, -37559, -15432, -4749, 24799, -56687, 31663, 26874, 52970, -44540, -83918, tail", "row-924": "head, 60807, 99138, 68435, -2045, 61703, -40696, -2820, 42377, 35719, -63184, 60832, -6514, 64277, 37686, -8009, -88579, 90506, -16812, -90042, -46327, 32929, 77711, -73870, -92196, -97845, -36937, 44720, -968, 71576, 96393, 59510, -23479, -31132, 58727, -76710, -33479, 11690, -27185, -44803, -20247, 67492, 2385, -38857, -89186, 52155, 48952, 89984, 
74491, -34101, -53162, -37459, 81880, 21974, 30234, -51462, -43194, -999, 18017, -95444, 15785, 42721, -65485, 32682, -21367, tail", "row-925": "head, -18733, -67181, 72006, -35965, -21939, -4345, -38424, 69020, 53342, -20140, 41483, 91972, 86137, -13977, -39357, -7704, 49967, 75001, -89239, -51082, 2662, 2808, 83439, -44286, -69665, -18876, 57822, -48502, 15025, -28122, -95475, 83878, -11011, -70512, -98462, -25690, 38163, -51785, -64153, 25906, -65942, -27195, 8593, -91374, -63141, -84011, -57122, -50415, -59385, -54433, -26337, 55178, -16344, -16602, -41217, 12756, 92683, -49742, -78641, 37259, -70348, -40957, 93388, 77939, tail", "row-926": "head, -25222, 68954, 44767, -73653, -56113, -16251, -25931, -10837, 81375, -5932, 25219, 71338, 6368, 3956, 19175, -9825, 5301, 88352, 34735, -36179, 84072, 33718, 77260, -63389, 8348, -69679, 95862, 81216, 68457, -91227, -24002, 79404, 21612, -16016, 70204, -77839, 15233, -25068, -23289, -25274, 94048, -99980, 63655, -91931, 66820, 71642, 57617, -98636, 86810, -32830, 43629, -64660, 37741, 79793, 86024, -35842, 66778, -63153, -48400, -19056, -5455, 74259, -58566, 62949, tail", "row-927": "head, 71848, -3261, 35058, 3443, -14450, 17450, 24607, -9313, 65944, 88454, 96220, -16625, -3302, -23274, -61589, 22457, 66234, -10198, -24308, 45068, -71415, -39976, 49664, -82770, -9925, 41973, -7692, 67789, 17501, -32479, 37544, 24821, -7285, 14639, -31538, 31989, -89578, 37587, 53427, 98099, 78848, -43032, 79963, -81289, -85971, -23055, -99456, 7106, -82757, 24072, 22870, -28061, 66923, -96909, 59346, 14345, 66813, 10874, 33018, -51602, 83654, 26487, 91021, 27989, tail", "row-928": "head, -99212, 80086, 85070, -94243, -37845, -29792, 452, -58549, 82088, -80036, -57646, -16377, 38453, 45413, -95214, -86413, 72647, -30287, -80571, 67930, 28948, 24987, -9345, 64583, 71818, 15873, 38593, 74086, 40307, 60461, -40673, -93940, 97257, -21603, -18501, 48890, -79123, -82582, -14884, 89878, 63586, -21405, 11549, 44514, -86112, 50383, 82453, -80782, -84309, 14191, 67005, -30439, -11595, -81610, -96047, 89455, 90438, 74443, -77846, 28745, 30158, 25653, 84516, -68303, tail", "row-929": "head, 80433, -18440, -99500, 53112, -39982, 36946, 57469, -33308, 30547, 37569, -4986, 25324, 85047, 22260, -46477, 3580, -95421, -64844, -48788, -20102, -40013, 92026, -20978, -79339, 18192, 86856, 80938, 51559, 51338, -26461, -50954, -47927, -40402, 40635, 83388, -60323, -28493, -40330, -86209, 36497, 77407, 59066, 86957, 91317, 55989, 21677, -11464, -26671, 21302, 91482, -76693, 56518, 33618, 70238, -90045, -53383, -74267, -69236, 89890, -99855, -37460, -8677, -79558, -77240, tail", "row-930": "head, 18822, 10406, 7724, -26054, -34949, 96065, -84532, 27585, -25212, 82969, 95894, -42061, 69132, 18856, 42114, -12832, -14648, 97070, 88033, 84649, 36492, 67032, 66130, 84596, -86515, -32795, 93932, 21296, 85699, -37772, -99418, 93704, 60962, -78833, 64575, 28016, 1630, 16616, -34593, 36322, 29859, 38022, 16534, -29665, 42711, -50711, -5970, 99865, 79528, -91758, 41885, -9879, -2022, -91282, -8708, 83243, -30811, 59360, -74376, 86285, 82723, 62813, 84356, 52287, tail", "row-931": "head, 3173, 37945, -95484, -79601, -33897, 28017, -25242, 92272, 5062, -96344, 16615, -21494, -52767, -44857, 30712, -96241, 2327, 9118, 2809, -16115, -88139, -26112, -88092, -3714, 44027, -87590, -68711, 24588, -42043, -60979, 37280, 79644, -13894, 21533, 15409, 41788, -93915, -74858, -65422, 6139, -64170, 89080, -54559, -1600, 11550, -23165, 76581, -57757, -18832, -32354, -37284, -59341, 81398, 72763, -91489, 
-61836, 15512, -42524, -79812, 9203, 60077, 73881, 48946, 35016, tail", "row-932": "head, -46393, 64351, 23718, 70665, -83193, -51693, 97776, 5013, -88168, 95592, 10901, -2529, -95879, 43331, -66418, -30530, 28225, -97238, 18520, -86410, 18636, -30342, 16180, 18290, 27669, 97765, -94259, -28557, -65301, -92834, -5711, 56633, -3771, -56978, -33001, 23268, -81911, -21123, 21266, 62567, 33640, 40437, 47760, -28601, -13847, -61469, -70897, -15323, 61852, -1597, 12359, 58285, -97943, -28111, -47480, -94618, 73126, -87545, 59335, -68177, -19510, 46327, -58743, 66428, tail", "row-933": "head, -32665, 45027, -46888, -80295, -76701, -21332, 95934, 68772, -52902, -76878, -80273, -76632, -98113, -97360, -15601, -24977, 9454, -5870, -90649, 33119, 98343, 95115, 59453, 37504, 58926, -88573, 8585, -49109, -99804, -83533, 94711, 55051, 25827, 28237, -20888, 63904, -59697, 3679, -77268, -46830, -51439, 72943, -64922, -77032, -20687, -64723, -7333, 47707, 3221, -8476, 52081, -56535, -16063, -39113, -65067, -30033, 33239, -56063, -17024, 851, -74691, -75321, -74316, 64245, tail", "row-934": "head, 59036, 75604, -978, -66893, 16792, -72939, 23010, -88089, -59668, -49754, 76543, -83733, -25705, -61580, 62814, 59386, 45380, -27721, 45664, -51884, -51430, -64492, 2213, 83616, -65500, 72637, -15875, 86246, 54234, 65893, -20041, -8177, 37062, 72543, -68712, 59208, -97058, 41719, 78234, 60144, 95826, 17516, 67222, -65995, -66567, 47304, 27596, -11859, -93189, 62018, 77832, 65717, -26009, 11796, -161, 98090, 88973, 71926, 87856, -8871, 74703, 21225, -12017, 36328, tail", "row-935": "head, -70865, 94527, 64803, 24569, 95884, 55263, 91987, -30645, 5801, -90114, 64405, -60663, 87839, 83404, -28704, -85353, -18799, 69238, 59844, 27592, -2806, 70212, -77238, 20877, 97202, 61492, -69856, 93763, 20259, -22356, 5580, 32770, -27718, -93827, 21118, -43876, -55411, 82723, 11614, 29839, -43189, -5178, -48762, 84716, -90182, -83642, -15092, -3994, 82787, 32015, 58106, -33147, -34622, 13884, -66127, -93204, 19325, 78557, -62646, -71716, 68971, -32892, -61557, 98740, tail", "row-936": "head, -59422, -704, 54732, -7121, -42175, -71258, 61198, -42547, 41999, -63721, -96269, -59124, -34883, -21759, -68448, 56674, 89128, -82920, 69396, 89246, 32682, -26469, -80959, -33579, -38626, 54188, -70676, -47751, 42512, -4065, 17827, 17550, -93723, 84671, 41639, -27691, 27300, 74423, -37059, 70128, -6066, 6335, -24638, -86295, -20275, 9048, -79534, 83463, -34745, 42685, -44900, -59208, 84810, 10932, -14616, -68049, -63760, -17433, 30746, 37206, 21328, 17716, 27893, 95126, tail", "row-937": "head, -15260, 76213, -58253, -46061, 53453, -25178, 33077, 5322, 23483, 93492, 82120, -3325, 8654, 75852, 13622, 20776, -84147, 66769, 63425, -56718, -78194, -37962, 81352, -64431, 36164, -56252, -8604, 76566, -27844, 85677, 68240, -89765, -73854, -60954, -53147, -8633, -6727, 33075, -47376, -81986, -79256, 9592, -98047, -15051, -93039, 43884, 10065, -63480, -44015, 2578, 62005, 56189, -82271, 74644, -94751, 56757, -64472, -60786, 14810, -95839, 81138, 71663, -72959, -31040, tail", "row-938": "head, -14934, -31652, 2252, 70284, 88898, 23509, 73557, 57443, 49931, -96086, -46816, 59413, -26346, -8734, -305, -87906, -14738, 77082, 10382, -22608, -11194, -93611, 7477, -57447, 60680, -8676, -17543, 48088, 99024, -71320, -16584, 16260, 54461, -42979, -72827, 60224, -32154, 89856, -77062, -2070, -52997, -58277, 65816, 56232, -95601, -64987, 83033, 24919, 27581, -2080, 98087, 35413, -18604, -76816, 82484, -88143, 59487, -18321, 70411, 39718, -63100, 69346, -60488, 
-44565, tail", "row-939": "head, -80771, -69777, -79498, -86847, -44672, -70300, 10177, 13770, -91970, -57939, 50611, -3913, 19804, 39619, -84850, -54879, -55157, -35330, -41995, -2042, -73847, -11976, -41174, -24809, 79678, 90985, -69163, -44945, 15550, -892, 52814, -62507, 63568, -48277, -28504, 45614, 86495, 2797, -16546, 84256, -6801, -76605, 76579, 75215, 28208, 18740, -41618, -5360, 41249, 14235, 80584, 77789, -27331, -45222, 62589, 43590, -4748, 16878, -30882, -68125, 25796, -39513, -40398, -70624, tail", "row-940": "head, 86887, -92229, -56815, 64277, -68423, -69039, 5945, 88053, -22939, -18443, -53690, -4981, 21479, -80021, 80558, 18093, 98901, 97286, 13196, -71569, 629, -40725, 40773, -34583, 37815, -50551, 95109, -71586, -5669, 48413, -31052, 51969, 46726, 11032, 33609, -72410, 61594, -67401, -95314, -25760, 60897, 33705, 90413, -22304, 33402, -62389, 99270, 60686, 16488, 67530, -43054, -50726, 52981, -82787, 38948, -19618, 14788, -90613, -46882, 4630, 62763, -96928, -76332, -72414, tail", "row-941": "head, 72231, -50491, 84873, 57994, -74941, 32552, -74158, 27594, -81027, 44716, 19173, -41279, -5586, 20058, 46569, -19542, -69753, -38738, 26102, 17847, -67483, 82315, 26437, -81664, 45040, 45862, -55992, 55328, -55245, -42290, -87655, -1320, -93519, -39881, 2506, -68739, -31459, 68640, 32682, 40096, -780, 37414, -82573, -65785, 36100, 80930, -78310, 93822, 93327, -14403, -46791, 17632, 75712, -97336, -88692, -14681, 64684, 60419, -54173, -28942, 54447, 87423, -40995, 70683, tail", "row-942": "head, 91787, -84154, -71845, -20479, 99338, 94740, -94831, -28480, 21523, -36516, -95202, 60804, -97666, 68013, -55585, 21650, -79203, -49922, -48922, -63774, -75044, 93879, -95954, 96145, -25898, -46871, 18489, -93480, 58409, -80883, -12959, 52691, -92677, -59688, 58029, -76885, 3397, -30021, -16755, -97178, 81669, -92973, -46814, 86633, 63033, 51306, -61093, 92992, -98585, 89462, -83234, 21413, -77321, -47873, -54598, -51644, 98001, -45251, 28287, -70128, 53712, 33922, 65109, 596, tail", "row-943": "head, -19838, 39418, 48200, 91327, 61327, -48244, 44026, 13459, 64741, -7586, -39792, 62326, 73001, 29207, 3117, 57687, 7510, 2180, -34638, -39130, 32844, -34659, 88204, -17207, -73387, 54413, 81061, -82337, -77544, -81266, -89463, 92315, -31978, -41684, 36055, 78730, 41180, 50538, 78010, -75427, -1655, 82565, -98984, 70565, 20832, 90490, 74153, -44500, -54847, -63653, -21588, -81470, -5670, -57773, 40417, 25523, 10330, -63354, -59684, -74515, 53852, -93254, -46776, 57651, tail", "row-944": "head, 23287, -11297, 15670, 22312, -59595, 41541, 28352, 81859, -68721, -10388, 35909, -59817, -64844, 44021, -12033, 42137, -73605, -38773, 36112, 89678, 26570, 32515, 14099, 45762, -64906, -12327, -66727, 61896, 81997, 36745, 19176, -82879, 59317, 53403, 66802, -67751, -87227, 90458, 70833, -88820, 3443, -49327, 14754, 99472, -18792, -3527, -29785, 51811, -92440, 43398, 33788, -67579, 1259, -82972, 50724, 4287, 58078, -93475, -11470, 63892, -79484, 19027, -14229, 23874, tail", "row-945": "head, 91606, -75545, -24095, -59438, -59136, -86647, 24595, -24412, 75369, -5590, -1659, -89285, -41815, 87404, -33681, 61905, -59908, 75213, -19109, -23121, -13805, 47659, 1286, -26012, -59692, 80418, -10339, 77663, -26931, 53358, 20803, -97992, 48075, 71876, 80129, 90334, -49920, 48878, -1406, -2535, 21278, -56226, -26929, 94110, -53941, 58798, 71466, -65304, 68333, 99626, -43750, 47764, 45391, -68254, -92381, 31798, -93562, -62044, -4371, 76109, 23953, -42150, 74696, -59188, tail", "row-946": "head, -27869, 7824, 
-91407, -42602, -64453, 60248, -8927, -39979, -96596, -41759, 86704, -56500, 99560, 8962, -76001, -68376, -28744, 87077, 8525, -69487, -99608, 31368, 53492, -30488, -73140, 77344, -52478, -52639, 84191, -41975, 69159, 5205, -91327, -92154, -16217, 18983, 13337, -76042, -32342, -25998, 97050, -29985, 41208, 874, -325, -40204, -79419, 54852, 57250, 60559, 22743, -30114, -79515, -61490, 31615, -68618, -84826, 72679, 63118, -59468, -82809, 20967, 95393, -35044, tail", "row-947": "head, 32850, -89322, -17689, -17084, -48118, 13425, -73607, -79745, -54134, 52332, -66659, 64757, 14669, -52468, -39860, 59705, 17218, 38606, 23563, 82661, -31013, -22576, -79453, 60009, -49971, 82613, -41994, 94824, -57494, 84803, 52591, -45505, -27188, -50529, 757, -65622, 72247, -15555, 63165, -87543, 38721, 1296, 8933, -24499, -45244, 97166, -71778, -99644, -3394, -15864, -84402, 67932, 81772, -72869, 49675, -70849, 89552, 52283, -54272, 82256, -69810, 4870, -14343, -99251, tail", "row-948": "head, 36980, 93147, -30349, 59269, -16702, -21073, 32616, -78080, 68159, 79523, 91868, -78384, -13920, -33542, 91947, 76939, 34884, -69547, -81208, 42429, 84952, 2871, -93677, -56250, -9719, -36678, 91282, 27697, -28507, 62885, -83724, 92711, 46079, 6071, 27553, -32559, 98444, -17347, -70122, 77671, 51147, -13836, 81086, -24778, -27695, -13155, 14594, 86460, 41231, -76827, 36658, -5029, 47136, -91655, 62554, -89196, -76292, -72274, -39313, -2979, -82087, 8345, 60291, -40658, tail", "row-949": "head, -53678, -19243, -80516, -17462, -45448, -66571, 722, -85610, 99693, -87260, 3309, 16192, -51559, -39080, 10623, -7373, 14441, 42793, -88662, -8712, 37944, -87809, 73374, -92033, -38193, -1310, 34533, 83118, -25686, 12074, 10211, -99121, 33529, 31283, -41787, -23939, -69776, -12798, 29640, -38949, -20168, 11176, -99786, -19687, 99683, -14325, -60067, -79670, 5220, -32195, 54668, 27744, 68324, 39217, 64313, -77591, -84028, 50341, 56893, -76655, 98140, 32927, 35452, -81606, tail", "row-950": "head, 17110, 20688, 55729, -3737, -30144, -90850, 70710, 64545, 4911, -12648, -14446, -62251, -47245, 61998, -23402, -80179, -90132, 42470, -13873, 13255, 94157, -89702, 73835, 93350, 81713, 10301, -88100, -211, -95221, -39706, -73521, -72401, 24557, -61827, 60842, 68308, -5062, -69234, -182, -37720, -92338, -24929, 72055, 55028, 56023, 67830, -69641, -65162, -1425, -9760, -69939, 36581, 61810, -93835, -4226, 8674, 74530, 13150, 7287, -12692, -40965, 63548, 10367, 22498, tail", "row-951": "head, 37085, 37848, -67358, -75470, -67897, -76823, 37206, 26902, 24945, 50178, -54645, 89881, 19641, -61173, -26551, -66473, 16794, -39760, -8338, -33897, -73306, -24086, 65099, -80839, -47656, 89569, 82809, 19141, -29614, -67245, 28628, -39862, -16657, -44742, -9423, -83958, -96938, 47807, 4946, 70830, -77202, 73641, -68460, 6893, -42022, -67614, 33749, 56034, -22291, 35207, 24552, -35695, -57875, -90118, -90899, -24305, 62325, 35355, -85386, 90907, -41166, -70813, -42220, 98496, tail", "row-952": "head, 86760, 61504, 44227, -95613, 79267, 87108, -18772, -82140, -89620, 28533, -19101, -97630, -41690, -83515, 85019, 79272, 44669, 72823, 27469, 77643, 45697, 84157, -48857, -81000, -91604, -86533, -52997, -74168, 72708, 47907, 98488, 98286, 96040, 90924, -46902, 6703, 77419, -64210, -48542, -42106, 13370, 40047, -95355, 96829, -68839, -8443, 39628, -86699, -65814, 15094, 76001, -97437, -6870, -87302, 60936, 7221, -937, 52337, 8256, 99898, -12823, -6597, 54854, 21099, tail", "row-953": "head, 58711, -96986, 56073, -42796, -62418, 2447, 46978, -28813, -22282, 
60045, 6206, -77192, 21591, -13764, 55263, -21347, -10359, -23835, 50800, -63746, 45257, -83104, -86290, 28200, 19030, -58133, -7672, -44441, -94476, -72463, -83936, 89008, -27314, 84965, -98798, -28161, -70881, -13805, -807, 35965, -4795, 28327, 5297, -86115, -64539, 71074, -6538, 83345, -23403, 25875, 26817, 24310, -7718, -11343, -28764, 48023, -4783, -30839, -89216, -11145, -66880, -60525, -76684, -7848, tail", "row-954": "head, 53077, -2816, 69169, 47331, 89184, -64883, -98827, 5558, -5238, 36689, 59698, -20811, -22220, -8193, 57971, 5626, -63292, 96341, -70187, -49450, -56805, -94765, -7873, -50018, -66202, -66828, -20509, 46040, -31788, 78705, -20371, 54739, -11418, 6431, 92351, -46900, 92702, -4205, -84791, -11892, 33389, 46336, -54817, -71149, -24354, -28987, -23200, -88025, 89050, 61537, 15457, 76857, 96719, -88697, 32660, -92157, -68661, 40130, -94934, 49569, 40204, -27547, 85689, 25970, tail", "row-955": "head, 50218, 50350, 67497, -64403, 12440, -86322, -98153, 72747, -21200, 96240, -34758, 82703, -15579, 85219, 67416, 78388, 31013, -67346, 63549, 76061, 6644, 57229, 69820, 55280, -27676, -13025, 99521, 822, -62091, 65416, 47776, -68903, 41452, 70075, 70781, 94676, -63735, -21925, 2263, -84771, -79271, -95427, 59430, 10311, 58961, 67708, -25456, -38952, 60294, 62948, -94116, 81543, 69092, -18299, 54939, 25400, 41892, 53267, -524, -5544, -37100, -42538, 40135, 98317, tail", "row-956": "head, 29328, 70509, -73071, -61447, -57425, -71967, 19816, -42909, -87212, 84878, 5404, 58312, 93918, -57441, -2279, -53839, -67719, -753, 48956, -49913, 86968, -44854, -4836, 4623, -88878, 45614, -13140, 84132, -57338, -23245, 76558, 52815, 15153, -17419, 41906, 3842, -64944, -34797, 56633, 47006, -20859, 82694, 91244, 47224, 81485, -40137, -79090, -76886, -85660, -13427, -30640, 49798, -95190, -40185, -11899, 87586, 387, -57415, 60797, -45172, 95926, -52452, 16685, -79376, tail", "row-957": "head, 96881, 95515, -20463, 71283, 18502, -1045, -45234, -45129, -9271, 40713, 63923, 34218, -3284, -30556, 98859, 8928, 51868, 94526, -85393, -3831, 10096, -57523, -8527, 76776, 83190, 16611, -76214, 71095, -25767, 10006, 79367, -13417, 9460, 72899, 3239, 72720, -93942, 63441, -20522, -88078, -27317, 236, -96074, -20180, 76311, -75685, 80403, 26484, -62346, -18735, -25562, 555, -16164, 78102, -74453, 59970, -47210, -24308, -26489, -91754, -49735, 92474, 46409, 85939, tail", "row-958": "head, 70194, -41029, 90701, -63828, 185, 25528, -64898, 99422, -78981, -31097, 88965, 24609, 81823, -35379, 39259, -2074, 78100, -44523, -18437, -91755, 57497, 93001, -80400, 88428, 10031, -16555, -42063, 59520, 24071, -37725, -52441, 80828, 49065, 97328, 9285, -78020, -42506, -23109, 6832, -12713, 53412, -98419, -31011, 58710, 11501, 26808, 79471, -39365, 60054, 41405, 32590, 33625, -5256, -37569, 88713, -69665, 44982, 73061, 20650, 59496, -55274, -277, -8000, 16864, tail", "row-959": "head, -39360, 32464, -79299, -80856, 55962, -80900, 31875, 53309, -67970, -38160, 12596, 51389, 53795, -83506, 83711, 94070, 87897, -69505, -42251, -46385, 18798, 27513, 22841, -29028, 66705, 22107, -8551, -67540, -27096, -99527, 12370, 26793, 36859, -18156, -18192, 16748, -92128, -27564, -9513, -29470, -98950, -51550, -68505, -95905, -23730, 35252, -53305, 48776, 76927, -13695, 98173, -28347, 88031, -93459, -68678, 55416, -37323, -57619, 96929, -76529, -75678, 23743, -10336, 33312, tail", "row-960": "head, 39663, -28820, -90999, 25889, -91338, -46043, 96595, 71627, -262, 25771, 84007, -71010, 13696, -24658, 77249, -70074, -22392, -16214, 
-11857, -28074, -98197, 69750, 41026, -71877, -17192, 93763, 83359, 24284, -88444, 38641, 57270, -15417, 89941, -29171, 57648, -62537, -56467, 43035, 22637, 26364, -71363, -88025, -95597, 5181, -58661, 75699, -44520, 62864, -57108, -3801, 38583, -87202, -43174, -73891, -6442, 70368, 39080, -3338, -38948, -32435, 1067, -58538, 17514, 68651, tail", "row-961": "head, 82632, 10094, -67904, 86083, 26578, -11734, 91239, -38094, -85254, 66991, -70474, -45061, -75543, -54973, -41886, -45065, -68478, 11102, -23921, -76128, 55728, -8172, -1803, -55041, 85127, -65822, -28598, 56650, 16894, 29096, -37508, -62513, -93982, 30626, -15574, -74775, 72776, -74040, 99741, 33502, 51374, 6301, -61543, 20954, -62445, -77267, 72606, -50663, 99277, -81184, -40472, -10715, 32909, 23114, -85737, -75716, 1171, -80217, 11515, 51760, -26093, -41228, -51335, 75067, tail", "row-962": "head, -26435, -50201, -50821, -11044, 99745, 3362, 55898, -93941, 2969, 57423, 91556, 99264, -48297, -9658, 89805, 49174, 23328, -82804, 62510, -68196, -89368, 58904, 22224, 24566, -10933, -59869, 42643, 50013, -32836, -51647, -10702, 81075, 20328, -77682, 23565, 56584, -58142, -64107, 72448, -95980, 44120, -90410, -4686, 15737, 587, -83833, -95420, -55287, 56174, -43796, 62229, -45175, -88994, -27796, 12105, -52800, -38654, 27404, 52553, -26906, -96638, -49219, 23960, 79761, tail", "row-963": "head, -4400, -69988, 40802, -44184, 23544, 89957, 74947, -10317, 98303, -58113, -962, -26512, -83107, 77933, -63877, 6241, 58260, 31084, -39221, -3896, -61012, -77007, -56306, -67058, 67012, -17186, 37324, -81138, -74730, 95581, -46571, 92308, -52647, 9146, -48705, 14336, -4542, -66653, 75969, 53698, -14449, 1557, -63345, 36684, 9847, -51105, -93875, -57290, 23570, 76829, 66487, -35108, 74929, 50968, -88848, -6116, 7431, 52744, 97715, 9714, 88345, -97682, 78119, 46297, tail", "row-964": "head, -74968, -29973, 20016, 20749, -49443, -9385, 27639, 58231, -6731, 78057, 28242, 98050, -22939, -37074, 61171, 14043, -49590, 52865, -49345, 22807, 33138, 73085, 41803, 21286, 3522, 75914, 1779, 2569, 70596, 69319, -59012, -82860, 87587, 39335, -43789, 7444, -57712, -97614, -61439, 75539, -94196, -77427, -21951, 39681, 18753, 15242, -32746, 10182, -53388, -11783, 99165, -13510, -43174, -96497, 20956, -46561, 62270, -61895, 16197, 91994, 42589, 74091, -13687, 66176, tail", "row-965": "head, -75550, 34379, 50092, 14081, -45848, 86604, 143, -51874, 59638, 31663, 44569, 53559, -69213, 83398, 61799, 40833, -34344, -17432, 75793, -55924, -53328, -23560, -24037, -14905, -56737, -27638, 13100, -28175, -96949, 72863, -17700, 28046, -43349, -79230, -5613, 48272, 90559, -53535, -48364, -20740, -82419, -90651, -11670, 85755, -49085, 22449, 6889, -76262, -15585, 47431, 36799, -53660, -65869, 59064, 82507, -17172, -88659, -41557, 15924, 87097, -46753, 3175, 99922, 77473, tail", "row-966": "head, 38095, -71864, -36884, -11949, 43102, -77311, 22315, -36668, 95044, -78306, -77989, 83213, -56191, -23310, -25142, 71183, 96097, -21765, 21872, 83910, 75138, -71591, -78627, -750, 53725, -64110, 30886, -78060, -6624, 58422, -20654, -22702, -52849, 16809, -10625, -88321, 77545, 81445, -90536, -61253, -23541, 81580, 40000, 87024, 5581, 45442, 9945, -12115, -14267, 16713, -69377, 98910, 5081, 18923, 14171, -71001, -32330, -61497, 66083, -95574, -75727, 37662, 8771, -96337, tail", "row-967": "head, 50826, 70861, 45167, -84644, -5521, -41144, -33838, 1769, 43272, 4904, 49562, -89136, 13466, -25375, 28594, 30924, 16179, -23015, 21717, -73624, -48774, 93967, 40072, -45103, 12229, 99031, 
26008, -3915, 56034, -86232, -12329, 73346, 54415, 98659, -65779, 82336, 97694, 12780, -91075, -21089, 94730, 94439, 45590, -54248, -98409, -12331, -81889, 72954, -13395, 45730, 93359, -84560, 44531, -62601, 38337, -80268, -76482, -2801, -98662, 63916, -63930, -17883, -79092, -28673, tail", "row-968": "head, -55483, -43975, -36911, 69328, 19098, 20589, 14311, 33585, 3704, 1335, -51986, -66780, 21830, -14191, 38817, -575, 39049, 69839, 64099, -72543, -71953, 9213, 31693, -75820, 78826, 33599, -34669, -91574, -21647, -5017, 48933, -28915, -82233, 12143, 23489, -88379, -6367, -44222, 69167, 33063, -55334, -21065, 9768, -78833, -22513, -16900, 3259, 4358, -65009, -57801, -3454, -71527, -18843, 14786, 73176, 17343, 57654, -44149, 603, 31288, 59741, -69595, -26973, 27355, tail", "row-969": "head, -50640, 92349, 44235, -76250, -4631, -60570, 99462, -78041, -85254, -13423, -91456, -60631, 7924, 4309, -42212, -83704, -2919, -79130, -157, -86147, -96369, 68243, -84253, -22292, 68330, 87140, 45038, 1758, 97936, 5929, 69486, -72448, -5427, 87899, -8028, -15260, -42118, 8067, 56068, 3495, 35955, -86680, 10122, 78866, -28420, -6349, -84343, -16537, 72865, 36176, -29544, 43162, 71224, 93496, 64794, -22554, -65989, 73537, -62078, -89797, -52749, -34426, 1412, 56135, tail", "row-970": "head, -94210, -73405, -3309, -27865, 62342, -30903, 13213, -36689, -77346, -21159, -22300, 78175, 2194, 17382, -141, -1832, 50175, -28457, 70410, -55920, 16438, -16285, 59014, -63117, -52860, 45721, -99030, -30260, 73598, -45626, -39119, 75022, 78322, 34500, 24665, -8132, -74598, 4383, 87415, 67687, -62494, -88446, -90395, -17525, -54833, 97331, -47276, 3770, 61129, -83992, 73117, -79083, -72951, -34789, 65940, 68725, -87845, 1865, 39028, -9524, 50441, -51656, -71169, 29767, tail", "row-971": "head, 94105, -97160, -48241, 43120, -74644, 38460, -75246, 76844, -84711, -53316, 93017, -74743, -69079, -26858, 48339, 38199, -39128, 68114, -57860, 33700, 37050, -67482, 35466, 63323, 90942, -77729, 62468, 14485, -76907, -99160, -75391, 27623, 42092, 78903, -70378, -86831, -88915, -70550, 25015, -64099, 18593, 68166, 94188, -97839, -50342, 80011, -71531, 43689, 1819, 97315, -29377, 29756, -89771, 64478, -98335, 29460, -77034, -93194, -2937, 80026, 76088, 17574, -96025, -54008, tail", "row-972": "head, -78608, -59277, -55036, 95596, 78151, -84779, -74362, 18970, -2382, -3155, 67491, -77750, 46334, -49954, 69008, 9306, 28540, 67540, -60110, 54073, -29262, -6264, 49927, -70459, -20598, 58818, 53684, 80701, -25347, -97388, -22302, 41799, -29259, -54602, -86916, 5367, -49392, -36178, 74300, -84040, -32459, 45126, 44984, 76624, -92471, 70998, -23264, 60085, -33158, 31824, 40691, 14478, -58996, 9689, -49792, 55971, -77650, -38725, -8977, 74702, -87925, 47236, -42685, -21665, tail", "row-973": "head, 17738, 30921, 2403, 32207, -34541, 38904, -10640, -42148, -30429, 96136, 87260, -67184, -62815, 95444, -33731, -2374, -95801, -67203, -23935, -5408, 14106, 76892, -42998, 32946, 8494, 19617, 85347, 27644, 43845, 39505, -38476, -86190, -54263, -19866, 278, 78580, -57831, -52560, -48015, 61286, -56849, -21990, 26057, 59162, -14335, -71454, -28376, 73193, -15134, 21145, -68410, -58459, -44910, 87345, 86567, -92201, -82078, 59314, -9386, -89230, -63322, -44406, 74413, 19092, tail", "row-974": "head, 38073, 92160, 11312, -72608, -78962, 30550, 22865, 29944, -89921, -87189, -62510, 32688, 77074, 83008, 67579, 47176, -85638, 16885, 79483, -19060, 53879, 9697, -10013, 69050, 52712, -94903, -41475, -93528, -84363, 67144, -66831, -9942, -14848, -40723, 
91851, 55794, 22998, 43582, -89478, -72834, -79027, -89754, -36045, 98255, 10622, 38262, -98809, -22908, -81679, 50975, 65323, -77380, 15144, -27193, 84167, 43593, 58348, 71787, 87936, 47858, -13736, 77410, -57109, 53135, tail", "row-975": "head, 11146, -76750, -84943, 26709, -85561, 55853, 20680, 83546, -84932, 88309, -76739, 60439, -19331, 55047, 63357, 12087, 98064, -96441, -1176, 42895, -15112, 28433, 65816, -67702, -21744, -25886, 84526, 68644, -87381, 36253, 92010, 55891, 95415, -40082, -76611, -25518, -82556, 10515, 65931, 80313, -15794, 22129, -84035, -87303, 2213, -15607, 84969, -55694, 48621, -36723, -39743, -62418, 68696, -93712, 72238, -28364, -84396, 41832, 87801, -33488, -31730, 30815, 67942, 9707, tail", "row-976": "head, -72839, 28384, -93960, 92936, 17609, -18580, -77908, -48238, -61557, 5155, 39712, 38148, 32200, 96382, 5105, 77814, -36710, 77980, 47914, -44759, 28610, 83569, -45205, 86037, 17696, 90420, -72094, -82994, 39253, -3849, -59739, -43223, 23234, 2016, 30348, -48294, 56450, 49560, -46685, -27721, -81104, -15598, -15158, -43001, 23224, 67280, -66740, -77651, -29924, -3653, 57747, -58474, -78614, -38357, -80582, 49848, -22831, -82649, -61651, 49881, -77389, 4893, -96516, 87182, tail", "row-977": "head, 84998, 96409, 21582, 98357, 39360, 90795, 5052, -71045, -50661, 30548, -91672, 16256, -29809, -82254, 50274, -79100, 5779, 15553, -21190, 59870, 94730, -56094, -8073, 84832, 21536, 87062, -94989, 97596, -93168, -70011, -60161, 79016, -89974, -41334, 67433, 59631, -98404, -41150, -40733, -39751, -84029, -25238, -39526, -81189, 20008, -13296, -86108, 48801, -48787, 8051, 41892, 86385, 59027, 52756, -25598, -94575, -18776, -16665, -53282, -25750, -99168, 88009, 93483, 51143, tail", "row-978": "head, -54682, 69637, -54519, 25192, -20600, -91586, 21947, -40300, -2515, 45208, 4771, -7006, -88215, -21155, 69868, -47842, 51676, -33840, -58411, 95493, 62681, -8028, -99069, 53946, -9120, -66683, 24052, -73138, -36398, 75016, 86064, 92232, -30543, -90843, -64915, 83892, -7166, -7122, -13294, -22937, -91563, -39754, 86565, 15743, -98364, 64653, 88496, -42163, 58677, 33138, 188, 48800, 72814, -52582, 42971, 74347, -51000, -65719, 66673, 99846, 97653, -11426, 87930, 81271, tail", "row-979": "head, 66130, 10658, 47707, -10735, 54987, 72706, 29009, 10212, 28404, -16911, 4115, -68782, -67770, 69012, 68693, -37328, -93330, 54822, 90609, 34852, -53687, -21166, -95384, 34480, 11479, 96178, -57593, 5782, -36622, 68145, -30908, 21581, -89813, -92980, -32654, 1320, 80225, -34552, 36683, -39358, -68630, 81037, 28358, -61407, 63219, 70453, -75116, 11239, 82800, 73607, 77319, -36500, -20811, -37336, 17292, 6446, 92428, -16338, 17573, 1576, -64293, 71383, -34626, 74293, tail", "row-980": "head, -92258, -84273, 50902, -77428, 48160, -92606, 48297, -7971, -81636, -98447, 8494, -79509, 19614, -12619, 50510, 57922, -21743, 81714, 58511, -89321, -36863, -27315, 68652, 9089, 24938, -41156, 64207, -14093, -91080, 2196, -78176, -58967, 78681, -99937, -22091, -20669, 8877, 38134, 25161, 1545, -13826, -89348, 67231, 7409, 8010, -3813, -26350, 67726, 47446, -96359, -32656, 77178, 81275, 90159, -39380, -92180, -95073, 34527, 43116, 34898, -31507, 86113, -91860, 47751, tail", "row-981": "head, -43459, -3552, 21823, -4839, 72307, 83188, 53356, 25633, -15759, -71608, -1201, 80652, 81225, -10801, -48350, 79601, -84074, -68806, 71075, -38920, 45079, 3239, -75975, 76307, 86377, -6503, 18280, 42712, 41474, -10820, -76947, -35823, -40316, 65964, 23963, -99961, -54927, 58605, -26368, 88199, -88961, 36269, -12040, 
80209, 61161, 87665, 88140, 24939, 51831, 13701, 48453, 43211, 67799, 18418, -75168, 22207, 98389, -8298, -60055, 12571, 35100, -3181, 94148, -73864, tail", "row-982": "head, 91601, -18515, 42397, -30794, 86999, 18208, 15186, -36260, -64241, 73496, -81161, -75865, 62526, -56345, 36276, -43550, -9500, -69005, -31989, -90299, 59896, -69801, -58821, -59113, 50969, -44880, 71509, -22713, 52299, -34969, 40799, 10296, -81094, 1365, -62902, 55201, 9181, 78624, 47170, 33532, 32627, 23970, -2587, -85357, -47352, -1699, -97512, 67297, 36231, -8996, -42952, 63305, 23445, -37495, -52087, -90944, 14096, -40672, -81222, 91680, -70262, -32982, -54993, 39458, tail", "row-983": "head, -24781, 48623, 4315, 37493, 30065, 73875, -65583, 25399, 39367, -19278, -88496, 68472, -61199, 96741, -79891, 35106, 7491, -47839, 39874, -67889, -50196, 53202, -56494, -52685, 53412, -81594, -23502, 94434, -48171, -64161, -98675, 14331, -45745, -28723, -5938, -19787, -48544, 82798, 35739, 41962, -74416, -10117, -52313, -50289, -27768, 38848, 79747, 19677, -42419, 22771, -88920, 15261, -37291, -37002, 55477, 48694, -1736, -79169, 65704, 45308, 76811, 15417, -32356, -38171, tail", "row-984": "head, -52457, -22674, -19375, -66863, -39383, 63489, -36860, -75920, -8693, -90346, -25494, 50939, 37342, 65648, -40420, -87882, 21918, 93575, 81488, 69566, -10854, 23078, -79112, 78059, -50582, -42554, 47422, 58424, -97869, -10070, -93730, 90037, 55580, -8150, 72998, 73045, 18974, -96783, -68197, -22313, -88174, 1189, 65480, 83766, 35006, 28525, 61254, 8260, 61227, -44314, 30094, 94707, -30059, 62140, -2661, -78268, -20918, 20476, -98273, 3357, 3617, -20065, 50554, -42180, tail", "row-985": "head, -13531, -56803, -34049, -53564, 27263, -92363, -48838, -97510, 55233, 55021, 73838, 79869, 21519, -42041, 85279, 16040, -52926, -66155, -40258, -965, -17812, 98364, 65754, -73484, -82562, -78886, 94262, 35814, -98825, 5243, 67263, -20937, 10672, 54560, -9705, -35927, 59533, 14274, -55507, 32276, 42796, 97055, 38135, 2801, -64722, 21845, -61935, -52489, 47734, -96376, 30346, 80365, -20807, 27129, 19797, -5610, -29023, 78674, -69832, 75017, -10206, -14926, 94061, 33530, tail", "row-986": "head, -50983, 37342, 7850, -6852, 91155, 93743, -43180, 47778, 25576, 78588, -68951, -94300, -35337, -53597, 93635, 3669, -46101, -51477, 56198, -52524, -69175, 8790, -2168, -65942, -59228, -5935, -15850, -25565, 28269, 25143, -8032, 42894, 76043, 52337, 58053, -47346, -22336, 89034, -78651, 68912, 89527, 89308, -96754, -94831, 44009, 66796, 94964, -17119, 44581, 96628, 39560, 34889, -46155, -81458, 80326, -16567, 25560, -48565, 74982, -5963, -79380, 13, 19059, 84957, tail", "row-987": "head, -86307, 4897, 20104, 62564, -22583, -9445, 34578, -37109, -22675, -59155, 66295, -95352, 5615, -57679, 74327, -73284, 83524, 83442, -33484, -93681, 58409, -2995, 38957, 23817, -50601, -3477, 48318, 72885, 57679, -40127, -8018, 77637, 5968, 63085, 26425, -50012, 8562, 17608, 90894, 60354, -29974, -41833, -11898, -84214, 45030, 75124, 4119, 99934, 47959, -40338, -82715, -46392, -5346, 57633, -8944, 50345, -53796, -52560, -70000, -14148, 66941, 57846, 56839, 50230, tail", "row-988": "head, 10800, 76811, -12923, 21227, 51461, -51536, 15346, 91854, 17575, -92252, -9794, -44859, 81351, 90927, 34606, 19653, -96328, 3877, -60700, -98797, -38937, 85673, -15266, -16640, -7145, 74890, -35376, 29880, 52573, 62789, 72040, -35571, 17973, 2846, 58326, -28500, 70631, 68021, -2006, -98484, 96654, -51236, -49656, 39190, 26173, -6970, 92884, 39652, 31495, -47143, -93950, -39162, 92899, 81048, 
-74368, -57352, 28950, -1981, 4786, 19768, 68329, 85833, 87888, -60214, tail", "row-989": "head, 84187, -61098, -23539, -84120, -20302, 83079, -41550, 37498, -40367, 79204, -81289, 31559, -51757, -97528, -11108, 8704, 30218, -5119, -74567, 55509, 14644, 17866, 99678, -31947, 9104, 71365, -94610, 83407, 21619, 14067, -54821, 62088, -47087, -66943, 83328, 15842, -91417, 76549, -86981, -17598, 21206, 72297, -24333, -37709, -19566, -88726, 20184, 38562, 21504, 7581, -25529, 4385, -89565, 18825, -43692, -54914, -47896, 60119, -49341, 31599, -42818, -54528, 94767, -39726, tail", "row-990": "head, 72671, 62298, -94462, -52518, 34728, -83567, 77039, -98768, -32335, 18403, 66158, -57724, -54930, 36528, 86758, -89469, -10757, 67121, 92126, 26035, -66414, 49084, 54871, 43362, 28799, -69887, -26705, 91116, -9137, -87565, 87675, -97043, 17799, -89626, -80438, 68473, -90832, -87496, 43150, 25192, -22993, -3005, 93449, 97103, -74126, -12647, 54600, -85475, 34613, -44857, -14506, 54240, -54443, 73493, -22199, 53142, 75802, 40166, 13442, -43412, 10584, -8390, -37831, 82802, tail", "row-991": "head, -89299, -13598, -62964, 41639, -32782, -67643, -30271, -55142, 64884, 66967, -63051, -90760, -49641, 80775, -86521, 80503, 79690, 97723, 54689, -9766, 26777, -22423, -75303, -55358, -14434, 38722, -34217, 48600, -96581, 4613, 76130, 29538, 94914, -43548, -69348, 88308, 5925, -31757, 15477, -81224, 89569, -48817, 7958, 79521, 19536, 16194, -87464, -85039, 8568, -46664, -20074, 10956, 33626, 22048, 88113, 46260, -24414, -84551, -40823, -86694, -19000, -80803, -67527, -54292, tail", "row-992": "head, -63595, 16751, -64369, -51358, 50816, -58260, -9770, -52868, -10810, -31503, -32989, 55240, 77031, -33563, -36413, -58367, -28114, 12497, -94055, -99287, -39904, -79284, 49703, 13020, 36292, -51144, -60515, 47856, 21419, 16080, -34255, -89445, -76244, -17214, 84917, -2233, -1508, -6465, 67843, 1779, 46473, -39130, 34287, 51244, -90185, -27288, -50580, -76126, -5082, 99239, -79498, 88605, -33206, 23363, 15723, 97124, 46995, -50692, 29572, 64922, 34162, -67898, 41852, 26420, tail", "row-993": "head, 69043, 4036, 69264, 49347, -93837, 76930, 19987, -28664, -55498, 42357, -31891, -91200, -95033, 36913, -25730, 8024, 62347, 26565, 22956, -7285, 70392, -92344, 66532, 85087, -39487, 54944, 8913, -25297, 67252, 7984, -751, 74725, -23921, 19530, 81087, -52642, -2868, 63966, -48038, -62738, -51818, -20134, -9579, 65193, 26545, 86529, 41833, -29767, 67869, 57839, -40205, -54952, -96887, -87009, 342, 34368, 55012, 63348, -59624, -40496, 94716, 32495, 70276, 39105, tail", "row-994": "head, 32585, 53507, 57617, 25801, 55270, 63113, -40072, -11233, 85613, 87155, 93361, -21567, 74406, -23486, 8074, 1617, -16520, -95975, 7859, -27829, -25365, 20343, -46045, -41745, -76021, -92407, 50762, 21056, -67255, 7185, 11546, -20131, 71983, -87613, -42743, -1456, -89503, -58228, 58044, 85654, 21974, -88315, -62975, 19713, -94480, -61087, -16422, 15280, -47624, 25805, -70152, 42906, -17788, 94233, -80435, -25079, 45980, 58478, -42102, -5821, 47919, -28146, -11875, -6461, tail", "row-995": "head, -84924, -41638, -74326, -81976, 79072, 88747, 10980, -77102, 33407, 70991, 8760, 15020, 98014, -546, -94131, -10055, 72502, -16018, 94335, -93336, 33026, 42133, -42382, 38918, -66012, -19262, -75841, -72151, -57341, 35221, -22359, -49553, -48454, 25891, -15724, 54979, 83403, -8545, 4017, -75368, -51769, -9975, 76033, 85594, 48657, -89762, -49291, -75379, -84073, 49606, 62693, 5701, -36467, -46008, 32754, -40608, -1528, -69600, -67822, 77166, 44314, -78857, 
-53029, 764, tail", "row-996": "head, 80983, -75744, -63548, -80078, 7522, 141, 7963, 42729, -25731, 14893, -17907, -63922, 83125, 96439, -32108, 30930, -44127, -47807, -47163, 73907, -77139, 7401, 24331, 88911, -3631, -21082, 36899, 74284, -37562, 28360, -96704, -98537, 35799, 75420, 21306, 40827, -33123, 69137, 64182, -29928, 1849, -62566, -27918, 24258, 45720, 27967, -57940, -77254, -54998, -98103, -1565, 74045, 56459, -38179, 8487, -98723, -27360, 67578, 74151, -85558, -92029, -71196, 98967, -72440, tail", "row-997": "head, 86086, -24089, -56333, -90199, 54821, 60200, -13001, -79754, -34191, 49288, 35072, -57905, 73954, -27021, -380, 53726, -46445, -63866, 72673, 21569, 81643, 60774, -65039, 59247, 49186, 6516, -46906, 14128, -65784, -14801, -26050, -70142, 49248, -97675, 19561, -39367, -29824, -89665, -88923, -11925, -62880, -90042, -56928, -37832, 19744, -675, 75935, 4657, 45312, -94031, 33235, -50244, -8610, 67855, 34589, -40408, -7263, -84331, 36453, -14179, 65699, 82175, -46327, 618, tail", "row-998": "head, -16425, 58718, -38879, 10688, 78051, -60622, -47537, 58632, 22045, 4910, -78949, -1101, 41854, 83483, 28701, 72949, 45368, 53520, 86250, -53773, -10255, 95543, -35990, 7218, -31544, 47790, 13407, -3427, -63760, 85201, -35547, -66838, 86749, -64829, 17534, -87010, 53086, -46356, -7923, 94700, 13288, -78775, -8279, -71617, 6, 51404, -85381, -3500, 2195, 39010, 40587, 32396, 64481, 11578, 72475, -81118, 72911, 24048, -55706, -89180, -79169, 85267, 21092, 78014, tail", "row-999": "head, 50598, 92167, -80678, 2130, -98617, -37096, 86764, 25889, -36224, -22236, -51315, -46899, 4347, 96587, 61803, -93617, 12501, 99103, 57658, 88088, 4577, -27744, -85317, -77196, -28526, -10555, 68948, -592, 46000, -96479, -94069, -88135, 94796, -54487, -72342, 95692, -43541, -37171, 885, -27485, -61764, 83773, -48086, -9573, 81064, 48386, -70813, -15635, 78290, 47658, 7065, -97057, 56086, 12226, -33137, -99720, 82026, -80632, 54699, 80753, 63486, -91657, 7594, 37196, tail", "row-1000": "head, 89621, -22645, -98095, -3513, -53974, -82702, 64151, 86440, -94504, 87191, -65612, -59409, 47102, 77164, -36892, -612, -46218, -25017, 57898, 70586, 45482, 52241, -99164, -27350, -60365, 95412, -18456, 40647, 50932, -76722, 74228, 29684, -27094, 79407, -34243, 1767, -64098, -2565, -98983, -78829, 26500, -53332, 49504, -31151, 74438, 77911, -66954, -9383, -97810, 54229, 58327, 1779, 39729, 68611, -65263, -24951, 94301, -26318, -1726, 74419, -93585, -89111, 95357, 97695, tail", "row-1001": "head, 4943, -39667, 86489, -93980, 70568, 2765, 1808, -35987, -67750, -8651, 31715, -69629, 72973, -86660, 21122, 66659, -38736, 47571, 34728, 29446, 76287, 50373, 37642, 28362, 9125, -12599, -70792, -19297, -83551, 41379, -92441, 27730, 94899, 31151, -77745, -55593, -94421, -71806, 31047, 77729, 75707, -24915, -31690, -66175, -35338, 72856, 54409, -21750, 68252, -57445, -55846, 78986, -84281, 10082, 64505, 12003, -91193, 64881, -70514, -91303, 73035, 34742, 47981, -13382, tail", "row-1002": "head, -40725, 43363, 15340, 81120, 38568, -29680, -27034, 75805, -14529, 28289, -61742, -69971, -88633, 25324, -39660, 50706, 7864, -45754, 90232, -5180, -48543, -13233, -47198, 44236, -39590, 93588, 49791, 73840, -19204, 98540, -27678, -16451, -58544, 66103, 62142, -42518, -51279, -67907, 5391, 40458, -96032, 58433, 11692, 63949, 41378, 61320, -38246, 84766, 40794, 50298, 1294, -74578, 9659, 52823, -57811, 77124, -69673, -79789, 87486, -76170, 25406, -15874, -94340, -88730, tail", "row-1003": "head, 88913, -87756, 20887, -13604, -9618, 
-60986, 70904, 10956, 53558, -87701, 67171, 39134, -67379, 40244, 16275, 36547, 58925, 24249, 87025, 84502, -14075, 86835, -76725, 87346, 88293, 2018, 47340, -84214, 94233, -58632, -87398, 3150, -64775, -14929, -33004, 10001, 79290, -64145, -36769, 33476, 6746, 62539, 84823, 82609, -93970, 59255, 8870, -24397, -49606, 52613, -6998, -12420, -31893, -11434, 56358, -37751, -1310, -62631, 75661, -22866, 34345, -26030, -73870, 75711, tail", "row-1004": "head, -21646, 82898, 53155, 2661, -33752, -27014, 50928, 51257, -75337, 38424, 71444, 7604, 57516, -40800, 25827, 11000, -19284, 15665, -24944, 11992, -4511, -29109, -15598, 47176, -25536, 93852, 82389, 92461, -77885, -63943, 43595, 31879, 65394, 53854, -17292, 25318, -44612, -87272, 71400, -23427, 97572, 6533, -39879, -8483, -47243, -97705, 65303, -74328, 13022, -48528, -43409, 68558, 49781, 27669, 4906, 84718, -4205, 8968, 43798, -10975, -80457, -79647, -50230, 36669, tail", "row-1005": "head, 26699, 7624, -30806, 69114, 37977, -60624, -63057, 76170, -19426, 76541, 13752, 69600, -29222, 24327, -70495, 89850, 70506, 79638, -75519, -71133, 52294, 49663, -97144, -37852, 52250, 64035, -82871, 92192, 43050, -30622, 65445, 31148, -41598, -73148, 9189, -24494, -59053, 6382, 76664, -33694, 87101, 61050, 82531, -22783, -71151, 684, 47969, -38039, 4672, 21645, -60622, 27685, -6559, -67641, 62580, 44137, -20715, 44956, 54163, -6017, 23467, 42849, 22276, -80943, tail", "row-1006": "head, 34611, 68552, -95175, -78469, 49422, -33101, 74485, -26148, 83696, -59575, 55714, 6202, 40471, -11948, 76043, -59702, 75546, 32795, -85836, -37695, 29560, 64688, 82010, -52336, 40126, -69025, 32131, -42390, -19241, 82616, -70260, 18684, 21511, -52926, -43870, 59687, -53684, 94812, -37058, -1295, -84254, 71632, -32756, -52053, 22847, 44207, -8090, 80682, 82213, -9841, 20710, 90253, 52321, 71116, 83127, -41261, 73627, -45309, -14492, -79007, 76727, -42935, -69579, -14868, tail", "row-1007": "head, 87982, -65067, -82055, 84259, -52822, 60556, 40806, 32347, -1929, 55676, 99931, 66290, 66695, -33910, -24033, -35607, 69373, 88956, 20801, -8980, -29481, 80712, -32868, -82047, -91302, -74727, -95455, 34838, 33517, -18990, -85598, 54567, 67918, 69423, 27400, 48519, 9627, 6, 14873, -49765, -92150, -3181, -36252, 77356, -57438, 37089, 25971, 57170, -21323, 38997, -15756, -52084, 64501, 66236, -81982, -17980, -6864, -86569, 94201, -93819, -13211, 42409, 72636, -12475, tail", "row-1008": "head, 2902, -85392, -78289, 47380, 20637, 62856, -12002, 7781, -50151, 45081, 18826, 61412, 11164, 7265, -88319, 90453, 96100, 42235, 82436, 23015, -23181, 85563, -58642, -8095, 41783, -4712, 61048, 57932, -45975, -94608, 88996, -8180, -12721, -26277, 28685, -61028, 41989, -68946, 71460, -53651, 84783, 33821, -11785, 74894, 73473, 41980, -34463, 89184, 22621, -70077, 72080, 53175, -99695, -75959, -86214, -39085, -40457, -78618, 42128, 42213, -92140, 36595, 14337, -62816, tail", "row-1009": "head, 44538, -42090, -31844, 25473, -51737, -13853, 8749, 45106, 55704, 2018, -84670, 47155, -31892, 43175, -18414, -54183, -25880, 97775, 6262, 68306, 85090, -15029, 4848, 87946, -51829, 2180, -6473, 6223, -92822, -3390, 7053, 16811, -89378, 94506, -18496, 45354, -54379, -72323, 81318, 67543, 76531, 29585, -1543, 65122, 81777, 37622, 1396, 13773, 3051, 37763, 43268, -91782, -24144, -37580, -28550, -83273, 50706, -24239, -57826, 50310, 4709, -77761, 82058, -68516, tail", "row-1010": "head, -85719, 70403, 41119, 50982, -7362, 82218, 7838, -22004, 91833, 93339, -81711, -15777, -38614, -13145, 90984, 59163, -77417, 
-4175, 49728, 15501, -14254, 48574, -48484, 3415, -85530, 42278, -29822, 80018, -87661, -59034, -948, 95911, 34554, -15962, -83606, -34580, 3242, 73647, 95072, -86268, -5102, 723, -58281, 24191, 58736, -9761, 91418, -93939, -19418, -91094, 63203, 88189, -28597, 62163, -48916, 43201, 32554, -93637, -29253, 69731, 53834, -53719, 43291, -18822, tail", "row-1011": "head, 83604, 72149, -77962, 50127, -72678, -48732, -3510, 63509, -53823, 89907, -33563, -25097, -32785, -36920, 30419, -11555, 79635, 32179, 22794, 18182, -69682, 75600, -77576, -95761, 70935, 85612, -52611, -25247, 43443, -98095, 52556, -74687, 144, -70359, -30913, 81550, 58248, -87334, 9184, 22337, -82195, 35669, 1127, -17228, 74843, -17531, 42748, -35661, 66448, -50642, 3047, 55721, -66716, -7244, 47197, -48768, -39573, 88836, -20352, -8637, -92699, -65389, 73776, 10240, tail", "row-1012": "head, -31163, -46570, -60142, -78296, -3945, -69248, 24819, -72436, 17341, -47476, -13741, -47243, -97374, -345, 20303, -81405, -8712, -73141, 62344, 95893, -72223, 34659, 74849, -48963, 36549, -31992, -36739, 70935, 10302, -22339, 55080, 46815, -47908, 19781, -3503, 37145, 39919, -68826, 93193, -22067, 41830, 36832, 5353, -17565, -41414, -7713, -56336, 80264, 86817, -55311, 41052, 64835, -7021, -34037, -914, 36493, -52606, -63550, -97403, 59032, -47930, 66443, 7280, 87533, tail", "row-1013": "head, -63138, 70924, -81308, 36539, 61455, -54885, 42249, 84419, 33993, -45817, -87338, -26578, 45308, -24815, -62848, -50033, -694, -78006, 97121, -38189, 57735, -77051, -47573, -80001, 92272, -31088, -30191, 66352, 69539, -25586, -53488, -34411, -23326, 69151, -43224, 26879, 67330, 77931, -59675, -38474, -44311, 50299, 93016, 19021, -22713, 44094, 98797, 83232, 74244, -11159, -82799, 88584, 25295, 78929, -53199, 6115, 87694, -18034, -57646, 19939, 52746, -33548, 60404, 65373, tail", "row-1014": "head, 35685, -10040, -43062, 30686, 1741, 71165, -36771, 54383, -19915, 62115, 62577, 33274, 5128, -93175, 82421, -60540, -76826, -45398, -94526, 6865, -36157, 73014, 82180, -75825, -86826, 1646, -37541, 16069, -53149, -82595, 2502, -89192, 20660, 61937, 28171, 7215, 82732, -72098, -88639, 10614, 57289, -42896, 59795, -32365, -45714, -76786, 16009, -59756, 28866, 50347, -86551, 27583, 63495, 18634, 50187, 10707, -55928, -56027, -57364, 88198, -9488, 85439, -43210, -56464, tail", "row-1015": "head, 23286, 34240, -48867, -19605, 38197, -78357, 24933, 10131, 96676, 22609, -73538, -3961, 29823, -2520, -9211, -46783, -40993, -35628, 29462, -68921, 86835, -2126, 73221, 57223, 96760, -97032, 10360, 96248, 77277, 24404, -47984, -66306, -47157, -712, 61534, -66762, -48241, -70306, 38867, -67970, 31923, -88824, 91967, 68598, -31756, -45320, 48764, 32124, -25877, 42395, 36083, 9727, -74318, -24057, 23315, -51728, 35996, 10205, -14182, 62684, 29851, -60591, -11170, 92702, tail", "row-1016": "head, 10234, 67603, -65161, -67254, 48373, 54979, 25700, -21251, 7970, 85025, -1198, -43640, 97831, 6187, -62701, 49738, -69728, 60422, 43241, -36145, 39964, -59097, 22543, -58890, 84362, 17758, 35572, 33440, 2630, -22712, -66239, 73595, 37421, -64954, 64444, -53242, 70560, -86621, 82159, 25799, -24307, -83419, -88458, 27683, -23773, -36795, 67625, -66807, 26346, -5269, -37546, 60948, 65765, -85833, 24676, -94681, 35079, -34053, 87017, -70478, -90144, 12301, -86337, -22208, tail", "row-1017": "head, -60848, 2586, -93740, -49981, 62635, 32516, -15434, 56351, 32161, 2161, 27582, -67797, 87633, -76635, -47422, -96155, 57793, 68654, -10759, 48725, 25744, -70705, 36580, 36856, 21733, 59697, 
6411, -32519, 86080, -50734, -18770, 85963, 39284, -39816, -33400, 79560, 48780, 1439, 69002, -89891, -87020, -43957, -11652, 53986, -54290, 87878, -28025, -75914, 34815, 58095, 27267, 6853, -15272, -26418, -22272, -71458, -49149, -53310, -99128, -99429, -44686, -50561, 58901, 86608, tail", "row-1018": "head, 86008, -72483, 15656, 39810, 12021, -8594, 48841, 71124, 38263, -74758, -78950, -76686, 45993, 2105, -79864, 19097, -70494, -20472, 31909, -40196, 15766, -94999, 816, -48867, -53709, -47736, -7441, -44218, 8821, -68243, 7931, 82997, -71444, 49504, 44648, 7614, 60754, -47978, 5190, 79811, 893, 88311, -13926, -97053, 34326, 54848, 46740, 35499, -75471, 4173, -40789, -69704, 48019, 72364, 52800, -16166, -56844, -50627, 83939, -68348, -78060, -53118, 45878, -9113, tail", "row-1019": "head, 59219, 44896, 84676, 38012, 69860, -92612, 8879, 14439, -72187, -10053, -3674, 13074, -62273, 17936, 80236, -86086, -49118, 69387, -2720, -47608, 21631, -52961, -18474, 26886, 96102, 42665, -56769, -86375, 78319, -99035, 60643, -72786, -11403, 59914, 43575, -9615, -39891, -35206, 10749, 56526, 15358, -36839, -51851, -23111, 26337, 10677, 1872, 17792, 73734, 32665, 7879, -54061, 78553, -54634, -53442, -77705, -86883, 53368, -15923, -31039, 16870, -93605, -5114, 24760, tail", "row-1020": "head, 32887, 1308, -84976, 29567, 42583, -15780, -18642, 59458, 70049, 67741, 48923, -36170, 90359, 43933, -16781, 85145, -83952, 10973, -72274, 9603, 64736, -99744, 7265, -74166, 6969, 10953, -74121, 25925, -7003, 43203, -52731, 86491, 67048, 69261, 65446, 28816, -66289, 4249, 19621, -96296, -67459, 79997, -5241, 1430, -31220, 55422, -34829, 83911, -61669, 48195, 22167, 1732, -30908, -31513, -1990, -65729, 74460, 5983, 12804, -21381, 18695, -73044, -17757, 84270, tail", "row-1021": "head, -13973, -87669, 3319, -61138, 89588, 39794, 77856, 49999, -95067, -39705, 47191, -76304, 86251, -9533, -72954, 95837, -19758, 44702, -85555, 2849, 76154, 38841, -83422, 11396, -47356, -30162, 87958, 7867, 50889, 8129, -23650, -17643, 78240, 66663, -45137, 66341, -86363, -5268, -35342, 92622, 14070, 1680, 54882, 44864, -79028, 46574, 54974, 97794, 27677, -65335, -64058, 19451, -61414, -80266, -94743, -70962, -74873, 11756, 27150, 5106, -63054, 35311, 76474, 61777, tail", "row-1022": "head, 77373, -6357, -91628, 52486, -1380, -58971, -90602, 20416, -63279, 36949, 32833, 64785, -43878, 48132, -4207, -83010, -62864, -66344, 38836, -93273, 92313, -1023, 24703, 91716, -14472, -2868, -67846, 22842, 88063, -5030, 4842, 7635, 58099, -2072, 68474, 24577, 8856, 4344, 97917, 90545, -19855, -17238, 35025, 41831, 51828, 32955, 73054, 31283, -59907, -10292, -35814, 39715, -32015, -5455, -70947, 7547, -88685, 33540, -80678, -47640, 69696, 49505, -13103, 86276, tail", "row-1023": "head, 90012, 24922, 98169, -42712, 77193, -33242, 52459, -91182, -25551, 9882, -40262, -49141, -94218, -87130, -98382, 63123, 70279, 44374, 36457, 48854, -59285, -6574, -22254, -82555, -92239, 74145, -30540, -78020, -70904, -86692, 38158, -89963, 16043, 64112, -23883, -27500, 93162, -26306, 177, -24955, 17889, -56535, -5346, -72381, -79078, -16143, -12257, 80996, 94357, -86046, 3046, -38753, 67559, 89260, 33565, -43242, -10339, 26434, -38143, -9405, 62383, 99709, 59889, -49144, tail", "row-1024": "head, -26537, -22140, 30353, 41497, 373, -10272, 52423, -36031, -33733, -45768, -70693, -80536, -55795, -70189, -6278, 84562, -48523, 55289, 43303, 31309, 70128, -57329, 61511, -20207, 14305, 57254, 7786, -33737, 85973, -18389, 23567, -83251, -97027, -11488, 21755, 94490, -14253, 
38277, 55846, 8500, -62990, 46829, -5079, -49385, -46361, -70754, -93783, 6209, -3317, -46075, -14462, 85999, -61635, -53625, 3845, 99312, 21341, -62962, 22341, -82678, -25300, 58132, 67396, 79966, tail", "row-1025": "head, -80281, 49022, 88040, -36208, 32818, -97353, -41522, 63621, -36079, 61025, -79608, 20917, -32566, -26764, -48004, 83398, -99270, -91107, -30792, 83117, -3912, -33800, 58958, -13002, 37023, 89923, 58565, -4468, -2108, -97019, 8932, -99777, -43699, 72609, -71405, 4419, -77858, 44731, -18856, 7143, -76138, 13368, 73797, -30202, -40648, 57852, -93423, -26420, -98737, 75226, 64588, 31263, 87274, -73030, 5219, -95364, -56774, -49272, -61331, -19541, -37285, -63158, -43831, -34555, tail", "row-1026": "head, 30293, 74405, 61562, 47044, 15008, 75236, -85487, -95850, 88162, -83707, 80239, 83532, -13261, -99534, 87442, 86396, 19693, -91372, -99490, 19534, -12334, 23337, 23578, -82173, -96065, 69616, -75491, -92260, -45827, 42954, -33879, 50718, -14321, -27033, -14769, 76358, 26211, -38401, -77263, 78923, -79326, -17721, -26821, 31244, -31266, -40713, 63769, -91459, 11024, -42384, 11216, -45660, -7714, -94037, 36246, 16856, -41500, -78316, -55147, 56082, -4420, -29750, -11034, -98162, tail", "row-1027": "head, -1568, -99008, 59722, 21871, -65166, 83587, -73673, -88552, 68601, 1274, -22205, 60219, 47053, 93080, -94669, 63289, -72380, -71399, -55698, -66298, -41255, 38273, 24441, 55899, -23075, 7547, 19095, 71588, 48176, 77873, -99031, -46429, -30315, 6555, -97293, 30682, 38463, -78314, -11998, 63694, -37417, 83703, 63491, -10240, -36283, 77449, 39618, -11933, 81986, -95044, 92372, 26846, 90081, 68118, -21046, -88431, 12280, -37491, -23134, -95105, -47714, -95988, 63742, 5359, tail", "row-1028": "head, 41680, 38055, -51409, 32526, -84464, 93720, 57918, -65737, 92409, -4442, -38421, 11069, 7865, 59497, 61406, 74291, -82918, 67440, 9377, -23587, -94913, -44861, -70400, -21774, -62641, 82488, 6261, 19494, -29472, -16561, -33079, 64949, 65183, -19427, 18298, 42547, -44965, -20148, 44238, 44915, -16265, -74367, 78471, -49562, -1341, -63583, -17099, 75420, -25934, 90616, -55753, -38457, 12319, 94396, -20311, -84451, 1201, -5133, 78094, -97469, -19425, 24991, 79886, 53818, tail", "row-1029": "head, 40857, -98315, 59051, -57715, -83976, 99058, 32429, 84547, -39266, 46165, 88285, -45233, 40831, 11303, 85331, 36503, -26863, 69452, -11778, -68530, -79851, 28643, -83576, 99750, -25278, -28929, -11938, 44085, 33046, 76251, -87927, -73743, -61753, 71206, 7177, -95952, 53067, 5528, -55479, 10749, 61316, -27658, 57728, 67674, 84267, -47638, 81197, 16627, 77957, -38903, -51290, 73748, -96037, 93313, 62914, 6696, -14599, 62344, 38480, 52917, -24241, -7071, 24865, 34902, tail", "row-1030": "head, 77363, 26787, 67689, -28958, 90699, 74049, -59680, 8919, -30504, -23647, 89896, 44652, 57671, 23562, 60799, 26371, -2630, -2012, 85329, -32723, 28138, 88247, -32797, 17776, -21094, 94516, 67649, -48059, -55156, -81926, -69740, 75082, -49407, 64616, -1529, -13054, 13450, 46847, -33123, -83128, 27683, 17167, 52860, 29693, 7183, -87624, -223, 23266, 91173, 62980, 94925, -68554, 63788, 4452, 23910, 25300, 57995, -27763, 41687, 95546, 57853, 93545, 25162, 20947, tail", "row-1031": "head, 17776, 89098, 17547, 87663, -80504, 17265, 84064, -43591, -85269, -33861, 80798, 10188, 57661, 68859, 75138, -34204, -72155, 53690, -78103, 13383, -51234, 41036, -23, -63161, -40181, -1071, 12187, -59013, -60999, 39243, 71582, 6285, -27437, 70970, -92066, 68215, 18833, 35715, -72547, 8163, -91904, -89530, 93146, -47846, -15858, 
89460, 58191, 36075, 49648, 21787, -36083, -61697, -79252, -63546, 58496, -30289, -89140, -54547, 65600, 58235, 18693, -90317, -99868, 70311, tail", "row-1032": "head, -43828, 58954, 48132, 31695, 14669, -23904, -21718, -69280, -93717, 28990, -95131, 42385, -59332, 95598, -55962, -60457, 81833, 3818, -94611, -67899, 24507, -19433, 91245, 98593, 64823, -92219, -69120, 89044, 60987, -61948, 86587, 88406, 72724, 45976, 50849, -42888, -32111, -10257, -87518, 31109, -41267, 61549, 68766, 11473, 81102, 12561, 96704, -50633, -85161, -63205, 79722, 367, 53545, -59727, -67176, 28539, 43493, -45464, 58081, 60096, 95042, 58355, -74922, 78290, tail", "row-1033": "head, -21971, -65681, 42681, -7452, 41857, -28153, -52153, -89563, -55414, 12363, 31046, -23602, 56839, -13882, 42204, -29727, 12756, 79430, -44412, 14591, -86872, 59074, 77988, -51884, -77455, -42510, -84622, 71291, -18312, -56185, 77267, -16060, -83782, 59346, -52498, 81453, -51587, 73559, 11087, -53311, 48027, 73623, -80393, 33746, -53873, 72447, 60264, 42867, 63881, -58022, 99263, 45884, 38472, 88494, -69452, -27494, 28231, -59935, 69816, 83716, -17661, 98537, 40218, 68521, tail", "row-1034": "head, -32788, 98561, -10358, -88859, 97530, 44193, 49446, -9713, -96406, 1529, 86151, 19541, -45063, 98154, -18060, -90314, -66439, 31976, -29889, -30105, -39583, -8575, 62230, -26547, 28753, 92458, 97520, -47408, 21550, -28779, -60726, 91164, 48374, 45324, -51518, 38514, 52590, 64960, 14074, 40910, 2994, 14259, 27423, -29946, 56464, 50882, 9968, 21665, 95917, 81590, -49947, -62395, -19429, 15805, -28645, -24758, 99684, 52185, -75007, 23499, 35911, 49376, -45966, 75842, tail", "row-1035": "head, 18118, 76074, -55040, 78302, -27495, 91799, 27060, 27226, -49196, -52634, 51640, -39301, 66480, -82547, -83759, -81646, -34794, 43498, -92570, -12991, -53058, 85026, 45783, 11390, -82740, 71358, -15172, -20974, 42441, -80509, -20466, -67371, -69627, -7208, 38141, 34726, 25701, -32312, 26518, 11288, -74046, 95069, -62333, 78893, 49008, -97187, 34312, -82120, 60258, -50345, -41603, 93381, 19279, -14016, -80525, 57891, -96820, 55055, 99891, -74408, 54416, 92171, -32334, -12101, tail", "row-1036": "head, 22458, 58474, 59261, 12993, 97936, -44595, 78125, -96667, 24416, -18395, 87983, 34908, -12873, -48516, -83333, -14138, 72493, 46736, -80279, -72617, 94752, 38964, -53019, 70404, -33169, 8100, -10617, -77321, 30837, -89046, -39660, -17954, 45210, -58291, 37465, 28672, 28728, -13407, 59599, 24077, 7436, 18127, -56518, 94888, -99647, 11820, 57285, -56306, -79171, -7600, 37981, 30199, 34032, 74642, -44208, -75894, 31613, 68187, 72659, 88779, 77017, -28965, 10169, -64200, tail", "row-1037": "head, -54690, 41939, 81283, 29035, -83751, 34962, -55117, 1749, 1753, -12412, -93055, -71438, 76082, -20345, 29473, -28304, 53032, -93647, 57321, -42364, -89808, 31250, 72356, -77537, 95740, 46786, 8873, 8983, 58316, 27017, -2723, -95541, 21463, -61942, -82249, 25906, -60234, -47403, 32317, 25702, -22295, 11236, -87884, -50300, 49603, -4169, -86860, 56578, 19102, 44276, 71419, 63818, 36258, 72864, -61533, 10768, -87634, 94692, 54785, 1892, -61296, -41447, -7568, 59278, tail", "row-1038": "head, -93050, -61795, -78850, -64550, 23664, 25061, 90310, 98342, 64033, 51202, -60012, 86431, -21358, 3086, 35127, 29364, -52371, 12936, 62626, -30228, 31664, -85184, -1215, 76148, -5305, -81794, 3410, -19770, 93342, 57614, -85572, -23844, 79201, 23053, -57842, 59997, 48033, -44229, 49674, -31657, 73042, -24560, 87658, -3801, -62965, 60854, -70117, -98439, -26715, -29636, 30563, -96063, -62841, 
52871, -55378, 34080, -27479, -5705, 77567, 76988, 70622, 89465, 61623, -22008, tail", "row-1039": "head, 77059, 74804, 44610, 39070, 79962, 15286, -94699, 26936, 72779, 47571, 66537, 57097, -17286, -54386, -1456, 39042, -93659, 44672, 33462, 99685, 90236, 90693, 14186, -12340, 11947, -66370, 64236, -76100, 91880, 96338, 57007, -67738, -39469, 52290, 22861, -63786, 88687, -43655, 62634, 11300, 90736, -65345, 16025, 49076, 87109, -1644, 27342, -70965, -4021, -25519, 7937, -67610, 39563, -95503, 11322, -26432, 82564, -97042, 58328, -64135, -66993, 72617, -995, -53178, tail", "row-1040": "head, 66637, 87721, 31192, 92796, -18631, -34193, -38408, 57050, 45826, 32721, -59391, 4990, -4230, -43729, 63642, -40506, 71361, 98102, -51426, 23536, -36915, -65824, -48483, 43136, -7463, -25494, 61316, 45118, 27804, 12935, 31494, 81552, 19283, -33880, -96068, -14770, 43540, 37825, -68490, -19022, 41975, -66804, -53949, -8170, -61654, 40850, 78594, 5762, -24558, -51346, -80877, -60238, 94977, -41995, 38776, 26362, -7377, -46227, 69763, 96106, -50834, 63212, 4400, 71780, tail", "row-1041": "head, 39493, -68009, -23664, 66015, -70501, -85754, 31574, 26926, 70181, 35240, -11896, 10531, -82270, -92201, 62730, 76600, -45981, -95880, 3029, -28398, 47775, 67582, 78032, 40337, 8473, -65024, 52899, 15853, 58271, -33185, -32013, 61380, -50783, 36379, -74629, 32682, -68813, 96855, -40633, 81337, 67155, -66257, 33779, -25883, 82389, 94258, 3072, 15133, 31624, -56300, 15506, 16492, 59240, 77961, -8959, -25191, -62819, 76501, -92307, 42372, -99487, -54307, 52147, -24908, tail", "row-1042": "head, 42524, -93093, -87598, 19001, 62033, -24672, 6698, 9122, 53282, 74830, -63473, 52283, 55641, 11046, 21158, 46179, -1942, -30960, -84065, 21244, -98291, -93672, 11067, 47979, 64485, 30668, -6674, -40282, 53967, -92882, 12545, -47023, -26302, -83165, 222, -84198, -76250, -81625, 36227, -39917, 27654, -38621, -20440, -17868, -56804, -62373, 21737, -95782, 58917, 34091, 99635, -36969, 13249, 28761, -78102, -4956, -45906, 14592, -93106, -16025, 97458, -99523, 31972, 93074, tail", "row-1043": "head, -82301, 67692, 39337, 75521, -49491, -43809, -22866, 62528, -49286, -59612, 40014, -31217, 83110, 9987, -96338, 11439, 86295, -95576, -1231, -53354, 63919, 85405, -72914, -65958, -71690, 62728, 92785, -33488, 93359, 54545, 20504, -66711, -53712, 9778, -4608, 79341, 27573, 72504, -10728, -19989, 48939, -66641, 62361, -18914, -29035, -88839, 67773, -97921, -70908, 88257, -6188, 2597, 40170, -62446, -31210, 36709, -8703, 56550, 43652, -98272, 43563, 22799, -52848, 1273, tail", "row-1044": "head, 18499, 41799, 85595, -61345, 82874, 12145, 56602, 82558, 55643, 8303, 10814, -59846, 80053, -50306, 81608, 31020, 6617, -62619, -14185, -57916, -19721, -72773, -27271, 85056, -66876, 66976, -2660, -76054, 69126, 11268, 42170, -4000, 22175, 75166, -15561, 58142, -14267, -51168, 79303, 60618, -86805, -35682, 8739, 25431, 96986, 65875, -79370, -8563, 56689, 85388, -88496, 77219, 79566, 58899, 89292, 78918, -9484, 52385, -51233, 18187, 53858, -20083, 6301, -21215, tail", "row-1045": "head, -74427, 43043, -6285, 13060, -75262, -36216, -68412, -34742, -34607, -43618, 92418, -54358, -67270, -29590, 86323, -39681, -31503, 30020, -78286, 95103, -72702, 93334, -46659, -92319, 24781, -85189, -29072, -9580, -92427, -73804, -52450, 9641, -2671, 94623, 77963, 31735, -19460, 54069, -48717, -62540, -95277, 57596, 89184, -72440, -77452, -89261, 73071, -67055, -64709, -78340, 57842, -16504, -43592, 79429, -84236, 87660, 15605, 96401, -64200, 30311, 11100, 72076, 
79696, -5723, tail", "row-1046": "head, -17526, -30228, 46928, 13114, -24822, -73299, 11250, 17154, 13918, 18470, -95838, -75293, 32348, 34137, 56614, -92681, -13625, 75143, -38549, -85825, 55226, -96966, -30495, 71246, -86220, -85040, 71994, -78770, -27883, -20566, -16865, -71844, -86097, -86854, 42670, 87452, 43013, 96461, 93630, -27367, -71859, 29764, 43950, 19172, -55042, 94965, -38157, 96678, 21148, 80035, -70487, -79220, -53309, -66376, 10285, 39460, 34384, 71505, -19673, 6550, 14904, -23238, 81527, -69586, tail", "row-1047": "head, 93485, -61754, 29876, 50878, 93407, -67887, -82951, 19211, 67941, -31659, -80460, -8378, 90382, 12084, 67782, 46407, -40435, 57190, 41711, -45687, 76403, -63054, -58257, 7499, 71657, 15270, 41597, -81302, 48911, 53942, -52414, 99429, 79903, -27165, -10248, 51323, -34043, 36979, -91449, 36483, 24445, 52545, 23311, -65970, 9912, -614, 44759, 32550, 93797, 50878, 44296, 51684, 21236, -11101, -11697, -75413, -67904, 79993, 62151, -1267, 26826, 23008, -33931, 1574, tail", "row-1048": "head, 13691, 48061, 8424, -62213, 15993, 3714, -62252, 42634, -60161, 64247, 34735, -78341, -29761, 11954, -40628, -99343, 96945, 12307, 91419, 15037, 49969, -27282, -13312, 92552, -60500, 43515, 82260, -39, -23611, -5098, 85699, -23908, -20850, 60485, 64503, 34257, -67872, 13137, 9946, -36675, -66438, -52118, -56297, 18099, -45501, -38521, -70279, -61292, -57835, 13401, 77495, 85471, -74354, 18884, -78762, 75150, 29687, -27349, 30475, -42388, 1188, 10036, -71438, -98349, tail", "row-1049": "head, -99814, -52130, -42078, 533, 54181, 99297, 55562, 44465, -31506, -12036, -26823, -75985, 86128, 71757, -26089, -73881, -64258, 56087, -35197, 32544, -54401, -96698, -48390, -28700, 15482, 21179, -47999, 89184, -11845, -18691, 43775, 36956, -28205, -93386, -95694, 56826, -37638, -47716, 83028, -62243, 79451, -61225, 86623, 22946, 35727, -78845, 48022, -5389, -27203, -55339, -84393, 34622, -54198, -34323, 15626, -45504, 14275, -84598, -53936, -251, -28683, 55540, 26038, 22213, tail", "row-1050": "head, -91042, -24345, 8875, -39880, 94441, 82708, -26699, -37512, 63632, 73952, 27394, 8767, 77651, 8548, -55159, 63841, 1556, -48800, 56908, -97060, 42787, -47635, 40138, -76326, -22136, 13056, -41769, -47386, -24426, 21706, 66544, 26057, 40279, -70786, -2114, -26898, 66773, 48815, 16024, 76425, -77337, -73479, 35961, -69407, -84255, -15240, -3458, 70819, -67980, 4700, -23696, -55023, 53716, -18925, -76913, 92424, 70454, -38465, 70756, -97451, -75266, -82298, 55991, -23399, tail", "row-1051": "head, -22151, -19616, -74593, -98094, -40422, 79777, -13266, -18013, -95131, 52856, -16493, -65860, -88612, -73647, 59010, -1536, -1711, 92767, -4634, 29469, -17911, -71546, -90885, -27480, 52347, -14015, 33115, -57814, 34369, 3591, 4532, 1383, 58497, 51586, -83759, 68156, 94720, -39913, 7848, 45773, 35886, -7253, -42178, -76371, 78532, 65213, 65319, -11812, -47884, -7214, 35729, -42711, -54572, 26426, 48411, 91304, 49800, -68470, 93173, 27180, -43220, -96128, 22036, 52151, tail", "row-1052": "head, -10547, -47388, -22375, -62241, -54427, 19526, -8454, 5426, -44346, -79271, 14274, -96024, 37353, 72643, 91591, -20249, -37662, 64764, 50626, 46029, 54021, 86937, 10305, 94541, 3169, -89230, -82683, 45623, 69640, -19240, 55326, -56753, -96552, -29535, -91486, 48036, -8070, 43062, -96498, -34275, 44199, 29165, -74910, 71709, -72861, -33313, 50842, 92189, 66656, 40777, 98774, -64813, 78343, -90819, -71943, -33010, 46675, -91631, -23118, 53199, 32754, -39143, -31503, 41054, tail", "row-1053": "head, -18375, 49835, 
-93719, -11392, -95822, -87178, -28950, -31931, -77711, -57585, 90207, -33812, 59644, 98522, 76781, 52383, -37272, -93578, 22890, 48947, -70892, 2282, -1267, 45295, -93798, -1781, -1254, 54490, -93044, 22365, -3741, -31978, 65481, -75348, 72277, -82519, 40015, 14672, -86275, -39318, 81079, 12252, -29978, -68153, 40892, -55399, -99213, 25597, -69044, -35261, -79618, -26288, 56282, -8096, -30977, 24328, -36183, 97412, -39432, 43069, -65371, -90192, 22298, 8159, tail", "row-1054": "head, -43335, 21555, -8238, -20257, -74632, 70825, -41662, -39363, -74155, 90119, 93545, -96451, 52660, 28382, -26370, 2637, 25178, 26774, -59078, -44740, -99414, -17684, 73010, -93551, -90168, 62071, -68781, 65505, 61953, 85521, 48768, -42232, -90299, -81372, -45709, 50697, -27787, -33045, -21839, 65036, -38788, -47584, 28394, 13287, -78421, -9335, 17764, 4908, -40352, 25953, -64566, 68243, -2146, 80395, 37901, -44555, 48149, 87222, 33960, -65591, 16081, 30575, -98103, -23102, tail", "row-1055": "head, -38739, -54164, 78772, -31680, 93343, -41077, -60829, 74606, -37677, -4492, 15199, -90854, -74848, -38450, 71730, 99786, -34380, -51589, 44462, -28448, 53423, 39681, 14105, 38240, -24769, -4176, -9714, -87786, -55981, 68031, -51237, -2545, -11085, -46965, 52180, -23659, -53201, 72830, -95117, 21078, -49784, 24032, 44569, 37506, 11633, -30524, -7174, 13421, -12184, -78177, -54430, 4334, -73227, -32988, -21613, 59571, -58211, -91760, 324, -91236, 87268, 69086, -45457, -88533, tail", "row-1056": "head, -69580, -28618, 16941, -87210, -69280, 77745, -90461, 92350, 81358, 7779, 7367, -71179, 35868, 12701, -2345, -59145, -18757, -50082, 71012, -46872, 90315, 63300, 42493, 36478, -35758, 19851, -70386, -42568, -11785, -63217, -21567, 88675, 89659, -55257, 5414, -74349, 6876, 54139, -84288, -36580, 47047, 62859, 13524, 29337, 3632, -34078, 14696, -24262, -35581, -57347, -83310, 61067, -22697, -84058, 63, -74723, 84142, 61375, -27281, 17015, -20665, 3419, -46989, -6771, tail", "row-1057": "head, -93348, -39208, 846, 81612, -58216, 29636, 92643, 15056, -66615, -13421, 84675, 59049, -22148, -76675, 56333, -78356, 38390, -83864, -94581, -16595, 2013, -15839, 22686, -48914, 72758, -59766, -22489, 44582, -43726, -63362, 93280, -72869, -64888, -47958, -51426, 61396, -66533, -93318, 79840, 55504, -18470, -70834, -25997, -84007, 19022, 37060, -51763, 37603, -4946, -40201, 35477, 46288, -71542, 15646, -31844, 99759, -80398, -58055, 34948, 21109, -31593, -23513, 84364, -90319, tail", "row-1058": "head, 28381, -46481, 2999, 6235, 56774, -77664, -6446, -29715, -67571, 35977, 49884, -84358, 22944, 97353, -92600, 86286, -1298, 52992, -54778, -24668, 69500, -34079, -47997, -99310, 37969, 24873, -47957, -39820, 79440, 84108, 69336, 29508, -62752, 19590, 63330, 19638, -32869, 96924, 82918, -30348, -56949, 83680, 29730, -51887, -32470, -32964, -44884, 47391, 5090, -38056, 37946, -68812, -80735, 86730, 3623, 58798, -14950, 55863, -20811, 87003, -3669, -15231, 52272, -76343, tail", "row-1059": "head, -21307, -33357, 88215, -42380, 75121, 19252, -25516, 84092, -40688, 59192, 2438, -38037, -15432, -2759, 99698, -5076, -8331, 67039, 76340, 39487, 71639, -55996, 95703, 33658, -68519, 84651, 82687, 87182, -95194, -28840, -68849, 22300, -96481, 39494, 27687, -71153, 53033, 79511, -90415, -66716, 83765, -991, 62220, 26124, -23354, -34636, 9427, 25200, -4878, -19137, 18887, 44134, -35837, 12715, 16766, 54450, -8712, -79836, 60992, 44655, -40502, 62371, 27289, -19673, tail", "row-1060": "head, -5436, -60430, -2130, 38596, 27919, -68510, -63050, 52623, 
-45332, -77182, 33544, -78471, 66109, 35830, 92588, 30125, -73990, -63759, -73588, 94775, 9408, 74857, 71842, 90396, -85991, -74906, 44620, 78633, 84494, -8823, -58461, 91389, 79856, 18156, 42362, 81047, -92711, -60372, 92135, -33636, -40686, 34790, -48568, -75811, 98667, -43285, -42428, -673, 95774, -31239, -63137, 2697, -66100, -21907, 5631, -74694, 23912, 91343, 97141, 43673, 22148, 28400, 23542, -18215, tail", "row-1061": "head, 24193, 17771, 84768, 73180, -21637, 92701, 27115, 77473, -50843, 27211, -33581, -75962, 93192, -9995, 91354, -81709, -51635, 60945, -50702, 92158, -47763, -23703, 1371, -78995, -85071, 16573, -60925, -74717, 16632, -27958, -46142, -3411, 66018, -6667, -49792, 24197, -89708, -51123, -53027, 43855, 2161, -4762, -17791, -43536, 31275, -42790, 79057, 70143, -11904, -71805, -84595, -90381, -65116, -90322, -91170, -59998, -98303, -59809, 4359, -70341, 63559, -29945, -45503, -31622, tail", "row-1062": "head, -82018, -978, 3722, -4609, -56645, -55787, -59703, 21603, 80750, -31445, -848, 45120, 18930, -65431, -63808, 24131, 76465, 49371, -33285, 46559, 10642, -67949, -7250, -73860, -27885, -16777, -14659, 58598, 71572, -38176, -27916, -51971, -10588, 97925, -44232, -16419, -57856, 16816, 54891, -13360, 27554, 92764, -43976, 81530, -1478, 14224, 22254, 65381, -92459, 89574, 98233, -89815, 7229, -42885, -10705, -68516, -68265, -13751, 68841, -48322, -84504, 36509, 60255, 19243, tail", "row-1063": "head, -9666, -23857, 58222, 16675, 5284, 19301, -21656, 92467, 33260, 18299, 82215, 40169, -20493, 4833, -72542, -70828, -46043, -61938, -86666, -90015, -27430, 56850, -1079, -61265, 75341, 34613, 830, 35412, 9251, -99325, 75156, 54900, 82458, 54929, 73656, 42553, 30930, -37061, 60582, -85557, -29461, 25414, 88628, -79252, 2623, -4425, -99491, 54032, 55815, 71182, -38850, 18699, 91819, -53439, -75925, -12428, -64782, 25578, -40943, -73373, -78944, 66168, 95377, -20776, tail", "row-1064": "head, 68120, 34317, -36791, -68699, -78015, 8993, -60738, -4993, 25665, -23043, -43379, -5264, 83843, 45749, 46954, -79570, -34353, -84463, 59898, -58445, -86767, 16773, -827, -14220, 56521, -22528, -1039, -78241, -40715, -52312, 83625, 74621, -93170, -23298, 40783, 11813, -88977, -44560, 46733, -56870, 15270, 33166, 75677, 14952, 47063, -89442, -30436, 30935, 90670, 8572, 82509, 82470, 51195, 47144, -5634, -3890, 29599, -38884, 14817, 59682, -85362, 58698, -97039, -63173, tail", "row-1065": "head, -98774, -54985, -25582, -78148, 28647, -29693, 67131, -34725, -76667, 13676, 57621, -14397, 58448, -41333, -92943, 70975, -87371, -7479, -88393, 81975, -78165, -21267, 7821, -42120, 79474, 87239, 87778, 2987, -2978, -10714, -39056, 94419, 29635, 48022, -4198, -91211, 45348, 17075, 21316, -15741, -24134, 70204, -10760, -63761, -78705, 10166, -12216, 92571, -4055, -40813, -58120, 16620, 4544, -65444, 8586, -24918, 92720, 81849, -2123, -64700, -8536, 171, -72344, 75685, tail", "row-1066": "head, -49808, 85803, 45033, 5472, 20968, 52012, 95689, 45832, -92096, 86874, 4574, 59774, 75351, -8028, 7749, 13857, -12714, 67149, 10408, 16573, -8886, 85897, 8598, -94269, 50506, -34255, 80320, -77472, -84476, -44662, -77303, -90867, 29387, 96031, -55050, 20936, -24505, 83056, 11101, 2444, 57561, 62144, -74200, 7158, -75160, -82274, 86834, 86242, 35142, 22755, 59391, 69382, 28141, 61898, 48123, -31442, -61533, 94533, 81760, 63015, 13445, 79553, -72715, 22530, tail", "row-1067": "head, -81827, 13334, 41832, -62295, -34491, 60976, 88483, 29425, 17218, -94753, 35212, -89527, -94910, -67320, -50411, -81149, -40142, 
69599, -48327, -7352, 39225, 86057, -59237, -27078, 97116, 89117, -90526, 17305, 85451, 51826, 65954, -83451, -11667, 15172, 41981, -84489, -90446, -91940, -26948, -84011, 46116, -16122, 67219, -49318, -58835, -89805, 54728, 48451, 70674, 85303, -55935, -11457, 35839, -34634, 50255, 77323, -85092, 30254, 89223, 32590, -53926, -35800, 43716, 30811, tail", "row-1068": "head, -33854, 91729, -7856, 33498, -31833, 90726, -4885, 62392, 50400, -1830, 39336, 39503, -45976, -83195, -52208, 5323, -28107, -20627, -37225, 92035, -86876, -95952, -94978, 8369, -88848, -71535, -21817, 83199, -9947, 24754, 72027, 75814, -26404, 49174, 29658, 57856, -34092, -4799, -32977, -6880, 12214, 36528, 68582, 63615, -70332, -20190, 11569, -64566, -97137, -87085, 97224, 37591, -45603, -43472, -21284, -28642, 92904, 86063, 27899, 98244, 64116, -33665, -44599, 13624, tail", "row-1069": "head, 72057, -77379, 72630, -99119, -38816, -10875, 24884, 26883, 45033, 93057, 4645, -21851, -65056, 51272, 63826, -23236, -30269, -68702, -33922, 12749, 89153, 51789, 35107, 67624, -6332, 89585, 85693, 63416, 98140, 93460, 33786, 99106, 53797, -97817, 88613, 53487, -99794, 81516, 75356, 18023, 26355, -50770, -51341, 11199, -15637, 44815, 8701, 96519, 22395, 60951, 35215, 14102, 95054, 93274, 11062, -41458, 40097, -61801, 74219, 35521, -47767, -70250, 66265, 95900, tail", "row-1070": "head, 7373, 25403, 12916, -33990, -57964, -82270, 82669, -43411, -49993, -60030, 33641, -3652, -79567, -99044, 3672, -34700, 91517, -40623, 27571, 13377, -96167, -35355, -12245, 16429, 87639, -17075, 9390, -33365, -65680, 26541, 3172, 46538, 99282, -38904, 37248, 8516, 9788, -59537, -33380, -61467, -77395, -15444, 11185, 50970, -8262, 59065, -79046, 6488, -61311, 17878, -8166, 50797, 70736, 60709, -54658, -89797, 2312, 96831, -75430, -4672, -68886, 83380, -86483, 35059, tail", "row-1071": "head, -62055, -49525, 23872, 62957, 71010, 86141, -88780, -66149, 89538, -13960, -53773, -73588, -9295, 23393, 75043, 55671, -36468, 56152, 17550, -37104, -45354, 91430, 97345, -41499, 11726, 59996, 74041, -66955, -39213, 35787, -82129, -14526, 25768, 60150, -64160, -14879, 52256, -83506, 20439, -12891, -73727, 12066, 9769, -8035, 74023, -12880, -10208, -4719, 33559, 78725, 18658, 24248, -46601, -31481, -23736, 10287, 2746, 12241, -43344, -89254, 87218, 68993, 61213, 87299, tail", "row-1072": "head, 4033, 61697, 35620, 78101, -19642, 37815, -72194, -63208, -15752, -23363, -92064, 55454, 63153, 84625, 74096, 96769, 54019, 38631, -13433, -24359, 86619, -1022, -95991, 93, 13385, -9812, -22955, -51358, 87597, 94015, 83279, 39656, 63640, -5424, -23388, -32221, 29023, -82134, -46568, -8109, -26699, -25334, 81282, 96868, -14767, -4917, -79196, -56863, 61774, 79554, -70543, 67947, 65112, 58133, -91627, -94608, -12488, 77736, 74853, 38741, 76593, 36557, -48205, -87318, tail", "row-1073": "head, 12921, -23166, 83516, 90185, -70883, 83803, 98888, -91716, -75162, 13875, 47315, -61750, -80293, -62442, -76609, 83134, 59142, -58022, -49434, 64142, 76886, -78367, -56054, -73305, -23429, -21738, -58839, 65646, -83079, -67482, -44396, 29125, -83029, 1784, -96249, 96257, 739, -667, 36802, -80319, 93749, 48426, 36638, -76000, -91635, 43195, 16187, 16503, -13038, -91089, 55144, 40093, 90564, 71764, -90498, 46423, 74111, 47589, -83892, 36913, 712, -91829, 71652, -96296, tail", "row-1074": "head, 86785, -54159, -85769, 36042, 15223, -8152, -49358, -48334, 34759, 76040, 32300, -40173, 84696, 11567, -2564, 56862, 63002, 23537, -61980, 70792, 99178, 1456, -80018, 41872, 36554, -50647, 44011, 
-4158, 40146, 80718, 85498, 23259, -54486, -33067, -12870, 98462, -57420, -15544, -7913, 71871, 27145, -94804, -27080, -48548, 56439, 99607, -90826, 7064, 31506, 8391, 83794, 1046, -56187, -21568, -21198, 71100, -4230, -91094, 15657, -44785, 73146, -51524, -7270, 38268, tail", "row-1075": "head, -26451, -71905, -60797, -4695, 93458, 90770, -71019, 99577, -30160, 77124, -3714, 69826, 27817, -856, 45931, -66802, 33704, -46894, 78310, 16929, 6596, -90357, -12244, 89463, -73375, -63961, -1604, 93818, 87624, -8150, 70006, -74346, -42297, -2896, -6688, 16299, -9040, 45610, 57720, 48709, -20831, 21171, 20166, -47877, -85618, -99799, -45881, -13987, -20910, -86177, -82343, -7166, 88124, -41645, -57658, -4651, 47031, 89576, 2387, -7930, 56699, -19583, -81115, -47601, tail", "row-1076": "head, -57582, -80747, 56068, 45175, -16416, -17224, -9732, 20088, -43830, 56073, -15814, -96643, -59864, 30806, 89027, -30922, 3153, 94869, 15080, -29464, -17915, -60616, 23408, 27478, 89374, -64738, 43661, -25417, -78992, 21013, 36369, -78662, 43025, 92262, 37009, -92065, -58713, 35093, -84719, 10434, 31800, 67203, -80875, 60694, 37266, 27663, 47893, -53199, -2902, -5718, 2994, -66813, -11786, -15142, 73740, 27479, -6381, 30859, 71711, 50945, 6302, -9050, -51653, -42571, tail", "row-1077": "head, 32196, 55405, -79816, -56205, 63391, 32190, 56535, 64873, -4258, 98387, 78757, 41911, -52433, 44204, 7669, 53945, -50271, -33883, 41136, 84564, -40893, -91508, 75408, -63451, -17887, 25931, 95018, -8956, 5413, 60522, -26718, -58445, 91986, -93505, 51455, 97290, 65337, -27475, 35650, -14049, -29562, -54180, 77923, -47576, 38033, 79605, -65128, 70692, -74053, -67316, -9746, 62084, 30699, -1353, -31729, -17203, 71342, -92138, -55643, 1424, 36540, 45775, 94594, 48285, tail", "row-1078": "head, -74713, 53890, 45489, -36567, 55720, -85944, -40732, 24046, 70504, 61246, -20687, 15639, 55353, -98791, -81728, -62869, -20181, 44010, -77116, 9464, 4728, 24864, -51203, 40365, 90449, -56913, -30771, -56067, 71578, -24787, -63299, -55850, 35102, -72486, -59465, 17153, 50117, 60015, -64649, 59435, 63842, -74726, -8322, -73965, 30110, 99061, -72508, 89090, -38586, 82496, -12770, -74904, -254, 40487, 12203, 82353, 69135, 87474, -91177, -61419, 53523, -17448, 87949, 85421, tail", "row-1079": "head, 44191, -56612, 61864, -45016, -92770, -447, 79142, 27294, 27998, 2069, -57630, 18027, -46459, 86796, 13292, 74224, -9638, 26345, -68652, -52982, 11459, -45054, -30463, -27348, 36780, 77469, 94368, -30751, 27219, -62720, -50934, -56107, -51233, 38495, 23220, -62085, -5834, 42389, -41976, 37834, 85848, 17593, -69126, 12226, 79418, 78183, -74289, 1659, -39629, -84172, -95610, -21, 95318, 93410, -89221, -11180, -31651, 50170, 29999, 77033, 63074, 61562, -83375, 71517, tail", "row-1080": "head, -60409, 76334, 89237, 2211, 45995, -49093, -88441, -71752, 86467, -2501, -31572, -25935, 35591, -10295, 8885, 25840, -80734, -48489, 19189, -54677, 57583, 437, -89198, -54895, -67671, 59677, -54815, -26265, 56715, 83402, -28660, -73789, -77793, 43007, -82754, 91110, -89680, 49144, 73603, -26545, 5525, 60165, -23443, -84262, -99596, -19499, 30537, -7567, 54400, -34579, 18359, -22765, 25993, -85602, -67770, -13581, -13857, 95266, 53707, 83219, -64063, 23362, -4128, 92399, tail", "row-1081": "head, 82352, 75291, 28423, -47925, 57556, 40667, 92548, -89194, -60268, 87670, -68547, -45691, 47904, 48162, -53616, 96860, -52341, 7304, -4774, 40941, 29246, 78414, -83896, 5033, -82581, 45014, -78769, 18512, 81962, -29760, 89549, -2386, -73762, -16194, 12387, 74953, -11656, 
-88014, -58028, 72664, -41017, -47005, -15539, -32701, 33247, -25188, 73604, 7984, -3982, -96290, -20112, -95179, 40808, 7190, 53008, 48008, -74482, 17268, -48720, -1632, -75717, 31945, 78683, -66273, tail", "row-1082": "head, 33708, 55448, -41858, -88140, -74944, 46119, 67141, -79397, -91477, -66241, 12003, -5802, 8099, -65260, -83107, -62854, 82000, 30590, -63335, -75034, 54074, -9425, -28630, 88323, 28086, 75225, -85375, -38244, 57244, 74901, 22215, 75688, -98580, -38591, -46001, 75427, -99205, -70517, 5105, 59879, 63285, -438, -72217, 88770, -66939, -97627, 76504, -83804, 66547, 24566, -81201, -37807, 17105, -19192, -16012, 32234, 19300, -61143, -33817, 5395, -67935, -65417, -5678, 3736, tail", "row-1083": "head, -11519, -31547, -30514, -82017, -16742, -74373, 23052, -38049, -47582, -85810, 5376, -92146, 24941, -77289, -88012, -68640, -10943, 63029, 8022, -91935, 58440, 28239, -90377, 85413, -9607, -46642, -90856, 87155, -37011, -91218, 94426, -50380, -37949, 65267, 1948, -30727, 60436, 37415, 33398, 6391, 155, -10227, -69390, -82961, -4195, -20292, -9358, 13440, 5316, 7255, -74562, 38267, 29295, 75531, 914, -7874, -99791, 38217, 22935, 19352, -68311, 87885, -88146, -93792, tail", "row-1084": "head, 4578, 38843, 68373, 22130, 84788, -99962, 57377, -36089, 64898, 85240, 67024, 58286, 1028, -5091, 13055, -52427, 25179, -89370, -99945, 56538, 10273, -29942, -17426, -65823, -92391, 59122, -47761, 85289, -72922, -6764, 85085, -26421, -47647, -17855, 34340, -19189, 54616, -98138, -90861, 9152, -2345, 87359, 84366, -67741, 39461, -7575, -1868, -44429, 624, 47572, 60117, 85269, -65338, -60184, -11538, -85460, -1425, 42578, 75688, -44383, -6140, 87147, 41952, -38938, tail", "row-1085": "head, 27864, -2035, -76276, 47634, 647, 42959, 88777, 76330, 51090, 96223, -38634, -95478, -4133, -89969, -19887, -6935, -6109, -63971, -91770, -80493, 97233, 26337, -64784, -12862, 17866, 29553, -14337, -36093, -24515, -30673, -53548, 50512, -41700, -58374, 74192, 19039, 36976, -91812, -8028, -95834, -73093, 2542, -50106, 69237, 78020, 13337, -92784, 30300, 61030, 7729, -58230, -67706, 98717, 15262, 4865, 68549, 11720, -87465, -78613, -73756, -37048, 84226, 95596, 25631, tail", "row-1086": "head, 46811, 30468, 64695, 6080, -14641, 8364, -48238, 48199, 437, 61451, -47421, -51551, 13624, 61033, -67122, 69240, 83840, -77, 81279, 4988, 28188, -17790, 22050, -46530, -26590, 10167, 43668, -76092, -44070, -73059, 80459, 4533, 51097, -56454, -11249, -942, 67165, -39068, -45669, -7895, 19098, 46085, -84198, 5214, -27788, -65331, 49667, -61847, 68359, 15571, -72897, 73497, 33505, 46844, 77355, -12259, 77560, 24065, 68393, -9638, 77558, -2567, 81178, -31597, tail", "row-1087": "head, -98584, -18111, -44069, -72252, -54110, -24535, 21594, 3267, 20936, -97519, -72798, 44768, 71764, -55764, 66778, 9398, -13038, 9535, -44503, -72492, 8717, 85828, 67097, -75944, 54110, 61158, -5364, -60753, -59852, 12830, -95989, 8066, -1348, -35873, 18785, 62065, -57284, 24410, -55520, 71729, 43668, 40808, 81406, 80209, -7937, -69814, -77526, 27519, -84731, -92278, -88553, -33759, -18011, -3790, 16652, -39465, 8490, -39880, -40063, 12177, 18263, -86941, -57014, -9504, tail", "row-1088": "head, 81305, -94073, 2122, -5486, -72451, 99253, -28493, -80483, -11182, 21662, -38872, -48330, -43745, 59359, 82038, -15070, -22234, 24379, -41495, -62307, 42956, 5657, -12576, 73867, 83307, -82575, -63922, 680, -54579, -55899, -32720, 95472, -29404, 73483, 34836, 72883, 95390, 86946, 38287, 57812, -71530, -55288, -58290, -98646, -66539, -87241, -84552, 
-77983, -13602, 98918, 25948, -69076, 15686, -67651, 67190, -96224, 62446, -92304, 93366, 95555, -12047, -54649, 89114, 24329, tail", "row-1089": "head, -63412, -32495, -83834, 85000, -77549, 38359, -60551, 53645, 99430, -12981, -64423, 22602, -68247, 74644, -78256, -53407, 75885, -88802, 62600, -29311, -75026, -66678, -81527, 42416, -15817, -764, 59476, 64620, -13479, 23978, -98875, -91442, -61448, 83792, -94583, -38216, 36369, -68244, 82954, 63243, -30006, 54909, 34649, 10386, -27115, -37018, -71401, -75721, -15775, -23953, -71076, -58570, 34740, 17136, 18884, -83424, -83517, 35641, -84648, 96453, 72282, -68137, 22937, -61641, tail", "row-1090": "head, 93390, 4028, -11977, -16676, -54867, -6856, -51248, -57622, 35809, 81085, -60434, -91464, 14537, -19266, -42656, 40949, 48712, -45965, 9263, 84418, 48837, 62111, 97469, -92767, -13679, 45808, 76331, -61940, 98458, 89266, 1990, -61727, 22600, -33122, 79169, 46105, 72848, -36825, -93128, -45385, 37358, -20726, -41995, 36253, -86168, 3076, 32975, 83750, -35232, -97331, 74411, 74962, 17630, -60236, -74253, -88180, -33371, 39192, -17757, -40502, -66734, -84901, -36009, -26113, tail", "row-1091": "head, 9928, -79659, 41033, -94817, 82998, -93187, 36707, 24857, -92241, 91073, -16111, -31446, 66269, 24177, 58600, -34794, 21941, -99421, -9167, 57690, 4597, -64619, -70499, 28266, -87894, 2536, -4472, -97526, -59665, -66301, 99415, 75165, 35653, 55971, -57659, -46301, -93457, -46313, -23007, 26300, -32841, 62821, -89888, 11988, 56247, -44840, 90340, -39825, 50796, 54153, -29224, -76917, 46376, -35391, -45267, -21491, -94277, 92785, 99279, 27005, 93456, 442, 63709, 56900, tail", "row-1092": "head, 14933, 97503, 37612, -62127, 49286, 36894, -77509, -42684, -37280, 97911, -45127, 7673, 42083, -96363, -60677, -11228, -18739, -12831, -89944, -77526, -7040, 43942, -68272, -15925, -95639, 83072, 61992, -44540, -60561, -36623, 18531, 60098, -57201, 59594, -3990, -51082, -80580, 71014, -55529, -39631, -26045, -4548, 98284, -50055, -56860, -91256, -98407, 89283, -32625, 78375, -2763, 19023, 18069, 66114, 93753, -60058, 73791, 60169, 70111, -77407, -63705, -95929, 52719, 40058, tail", "row-1093": "head, 68441, 9877, -12400, 65416, -66464, -10383, 68589, 85194, 47544, 46550, -48880, -99081, -37003, -11733, 37303, 62599, 28374, -39402, -85342, -27761, 84253, -16609, 41964, -22804, 41029, -96215, -36494, 3080, 63798, -44627, 32329, 97151, -38533, 274, -8344, 4318, -14206, -86342, -52523, 88273, 45142, -98837, -69033, -64565, 14635, -2170, -43679, 39666, -22919, 66809, -77765, -61477, 6219, -3327, 49111, -52155, -62316, -95381, -33966, 27724, -9742, -99505, -74644, -48724, tail", "row-1094": "head, 62859, -15253, 49727, -32445, -47362, 78346, -68901, -63225, 9606, 30512, 41670, -37417, -50519, 46875, 19568, 32752, 66899, 84663, 31011, -15450, -25016, 33626, 88041, -30659, 31841, 70538, -7940, -21024, -23952, -98130, 50821, -3346, -95610, 35991, -64534, -10574, 13607, 50468, 5662, 82489, -9497, 84458, -50467, -97657, 37215, -87110, -91955, 23661, 76762, -18507, 82935, -55371, 91692, 72829, -35152, -87129, -74815, -6652, -32142, -21170, -32222, 23730, -92971, -36285, tail", "row-1095": "head, -54879, -11111, 19547, 3207, -46155, -88268, 60042, -38603, 91063, -77880, 96437, -64575, -98734, -63397, 72154, 68185, -55975, 92673, 7085, -62543, -91051, -23961, -53968, 46696, 98646, -37278, -40680, -34136, 18649, 10483, 33174, 31497, 85736, -65675, -85780, 48778, -54661, -96282, 66849, -74235, -6261, -28343, 12704, -67060, -84368, 14081, -27162, 32127, -57555, -66306, 
-37028, 58131, 92437, 83908, 31872, 33457, -15400, 64455, 10363, -87204, 92198, 93733, -7003, 55891, tail", "row-1096": "head, 76499, -99274, 28680, 95621, 59545, 24088, 57341, -84917, -87822, 10962, -93308, -15054, -34709, -87030, -9379, 87672, 91180, 92117, 94155, 8478, -13960, 44570, 74261, 12437, -6884, -40058, -83528, 39679, -36752, 73517, 75329, -78226, 82272, -19364, -39457, 20892, -14005, 79910, 97194, -90541, -73377, 69264, -96644, 30318, -93497, 29504, -32905, -56040, -58990, -85394, 57263, 31532, -5156, 12449, -73558, -27175, 42226, -83860, 923, 6023, -32704, -88338, 4459, -22176, tail", "row-1097": "head, 54739, -89747, -20930, 94811, -67638, 3228, -16718, 15522, 65687, 33997, 43645, -20811, 18937, -80654, -47385, 57207, 50033, -47925, 99440, 96549, -51752, -74252, 49385, 62887, -3155, -17813, -78992, -90645, 15379, 41905, -68644, 84765, 50402, 89987, 24598, 26369, 43419, 37892, 63929, 37875, 12357, -50163, -97172, 72504, 24889, -278, 69405, -73492, 55556, -54703, 43513, 77650, -33588, 53853, -25706, -89219, 8246, -65725, 59516, 75783, -35424, -53113, -41862, -71787, tail", "row-1098": "head, 85902, 70192, 25427, 90271, -30717, -83674, 92510, 15982, -38724, 38812, -9966, 65994, -33973, 82898, -67858, 63800, -29679, -79951, 91603, 98300, -42062, 39799, 24047, -55401, -97082, 33549, 17587, 98963, -55780, -19925, -65429, -95544, -42956, 48670, 10333, -58568, -84, 25252, 75840, -36718, -88263, -85101, 53276, -25757, -61392, 14953, 97596, -13792, -81913, 85608, 36529, -44996, 33268, 15275, 96636, 98173, 73486, -28537, 3605, 72834, 94063, 55594, -58760, 62611, tail", "row-1099": "head, -41968, 59255, 94397, -21859, -92546, 66681, 64877, 56249, -18130, 82574, -89907, 86237, 36165, 4582, -60851, -42295, 95281, 44612, 37814, -88665, 51804, 89650, -41010, 54076, -63198, 85624, -47185, 16352, -13784, 37504, 82376, -75878, 34556, 636, -84406, 31488, 28173, 61801, 7227, 96890, 15108, 42402, -70575, 74426, 63284, 45801, -9539, -8362, 31105, -94255, 47830, -95381, 9453, 67670, 94290, -46419, 36355, 79348, -43465, -54450, -32761, -87112, -71248, 86531, tail", "row-1100": "head, 19716, 66192, 87516, 47155, -99539, -83711, -90468, 47967, 77148, -83934, 27549, -42290, 95942, 73670, -30633, 92398, 81667, -44340, 63449, 97110, -31769, 8652, -63512, -55615, 96042, 40546, -76940, 23388, 10869, 30821, -97056, -65500, 19329, 51308, 10097, 87958, 65587, 84454, 4552, -58494, 50299, -29941, 44126, -26384, -5284, 4395, -2953, 45956, -85925, -31348, -46423, 30566, -45215, 77589, -14377, 62950, -52594, -93430, 93395, -29907, -72091, 92857, -67312, 14789, tail", "row-1101": "head, -16009, -84624, 44504, 62504, -83407, -59136, -24057, 62225, -28066, 52755, -8045, -44017, -21807, 63970, 20995, -20469, 81865, -98839, 65299, -19424, -9635, -8332, 72746, 72699, -16160, 42317, 67576, 47682, -58942, 75892, 23431, 62130, 65875, 57949, -97731, 95843, 76233, -34023, 35104, 86630, 60337, -53838, -7099, 36602, -78870, 5834, -42283, 2731, 63028, -24575, 99295, -67959, 59675, -36469, 95871, 30250, 74129, 99830, 114, 76997, 71017, -50430, -13078, -20627, tail", "row-1102": "head, -46138, 52474, 70805, 44437, -60941, 16011, 24996, 61112, 2255, -64964, -2726, 76283, -73068, 82350, 21256, 96475, 5171, 27299, 88246, 30629, -39100, -51274, 78003, -66026, 66712, -59912, 77152, 44970, 93110, -11034, -88017, 86124, -12890, -38011, 76960, -74152, 89986, 15498, 55716, 17716, 92450, 45534, 63742, -35253, 81364, -13594, -96628, -53772, -14381, -67945, 82102, 54723, -62125, 55240, 40092, 8984, 10441, 56686, 74319, 91457, -9168, -48865, 
15196, 59301, tail", "row-1103": "head, 59959, 40837, 26722, -15082, 58844, -33362, -98981, -17239, -25740, -16295, -95400, -14695, -91046, 49565, 96494, -45329, -45299, -52641, 43079, -6071, -89528, -63418, -63355, 49028, 13402, 75486, -30781, -22365, 10840, 7150, -98662, 5287, 36360, -63545, -9592, -50279, 88817, 37226, -90576, -43663, -33208, -85152, -73560, -10168, -17572, -93607, 81386, 54401, -61993, 44285, -11361, 16837, 3609, -21737, 6854, -13712, 48736, 62228, 51991, 56663, -5806, -41215, 55069, -79761, tail", "row-1104": "head, -57646, -19230, 8666, -41256, 44493, -9415, 5666, -89175, 84127, 3872, -44254, 34981, 42883, 77920, -94703, -81360, -35301, -59241, 26199, 67706, 28850, 50300, -42202, 726, -42588, 41652, -62434, -60979, 49679, -34338, -51516, -67316, 29397, -45189, -20111, 87040, 18018, 94661, 52825, -44781, -98445, 46087, 28466, -48931, -77173, -64839, 7764, -16293, 13274, 4763, -17466, -36934, -78646, -27800, 73015, -11373, 35045, -74299, -77313, -87868, -2511, -77219, 49279, -96163, tail", "row-1105": "head, -97960, 11927, -67667, -23752, 85854, -94505, 47464, 53801, -61973, 79439, 93610, -80795, -44128, -89137, -55122, -34635, 84211, -41191, 76897, 15625, 73947, -96984, -6489, -59825, 42557, -87039, -64491, -27876, -70804, 17277, -8173, 93725, 76508, 11010, -31608, 43217, 9379, -51336, -26269, 8316, -13517, 34138, -96481, -82020, -85987, 86586, 8295, -51128, 13919, -84840, 36916, -75249, 60790, 12265, 6313, 29453, 92390, -81537, -88389, 14871, -2447, -4628, 99952, 26081, tail", "row-1106": "head, -51396, 46965, -60096, 81309, 76931, 13883, -47019, -92928, -90350, -94064, 26662, -31619, -85049, 372, -38650, -38931, -568, -80503, 78892, -49263, -40897, 77383, -28186, 92938, -76579, -58499, -89422, 96521, -59088, -36686, 75269, -97032, 58563, 71415, 77182, 26941, -24813, -91929, -52046, 86573, -56554, 59438, 63224, -68940, 88072, 10060, 61000, 82545, -48427, -16762, 51684, -88736, -48146, -77277, 12255, -43500, -38620, 54590, 12310, -5083, -35970, 32356, 22629, -15092, tail", "row-1107": "head, -60902, 82101, 6148, -92581, -4101, 31427, 98615, 20767, 49983, 63300, -56391, -5870, -88955, -86174, -20998, -19521, 16477, 93281, -51043, 97474, -74568, 67454, 493, 7774, -1569, 14984, -49317, -43700, 96645, 5934, -28453, 54298, 52632, 95656, 14620, -85311, 65283, -6743, -15193, 51032, 51493, -72604, -13667, 1741, 85103, 65933, -2815, 80311, 44199, -12878, 78400, -87546, -6427, -5723, -6627, -41030, 83792, -95040, 71062, 88158, 34340, 16622, 20617, 45206, tail", "row-1108": "head, -97669, 1300, 81154, -19099, 11663, -72000, 21150, -13214, 88544, 39956, 19011, 10812, -12113, -18029, 42169, -98579, 55590, 62342, -50902, -79319, -65048, -45627, -57887, 99531, 59295, 41069, -31440, -19321, 3233, 93388, -17085, -91315, -41338, 60565, -91167, 3542, 92250, 19935, 24803, 44908, 59178, -21583, -52944, -99704, -86312, 696, -60255, 49445, -15411, -23157, 13226, 62339, -44190, 13230, 92037, 30145, -78024, 26071, -75218, 46498, 88901, 6229, -40177, -66006, tail", "row-1109": "head, -17991, 15531, 28201, 49950, -53492, 79864, -46875, 69056, -87803, -70688, 69521, 84819, -45557, 91434, 39000, -18915, 84810, -1230, -59912, -74062, 83372, 96642, 27693, 54612, 49356, -54543, -22772, -74271, -20793, 67200, 97216, 32337, -36674, -30697, -55569, -71870, -67133, -38251, 94442, 41285, -3604, -23997, -56728, 19733, -72045, 4745, 4207, -37357, 54096, 24279, 73830, -30711, 27893, 54473, 50224, 86535, -76497, -66254, -59445, 5719, -97273, -9697, 27492, -84327, tail", "row-1110": "head, 56978, -45841, 
8352, 85224, -92314, -20058, -77407, -23479, 22594, -80556, -2696, -75472, 54616, 33279, 6454, -20948, -49767, 79841, -87934, 10536, -5048, 75749, -66084, -40631, 902, 4855, -44238, -28321, 32554, -85884, 44032, -6371, 78901, 87688, 22105, 50149, 45467, -81094, 69335, 84977, 39899, 39143, 87291, 15995, 79407, 26115, 85223, -43393, -87099, -53279, 40071, 14716, 68572, 86360, -62427, -30998, -31395, 72032, 9425, 16387, 8942, 92469, 57582, 51086, tail", "row-1111": "head, -69593, -91973, 20140, 82477, 29335, -21182, -55715, 80982, -82546, -31724, -44320, 92852, -25568, -74135, -79895, 50237, -10264, -87660, -16085, -94197, -37063, 69321, 28789, 49455, -96909, -930, 73490, -6080, 5707, 53380, 16028, 23462, 20564, 89218, -6858, 58951, -29776, 80681, -85538, -29624, -79312, -73965, -52161, 52963, -72511, -46896, -78682, 44289, -38257, -76725, 37110, -54002, -45296, -52722, -24151, -81494, 56318, 58710, 26556, 7269, -1276, -62669, 34883, 26421, tail", "row-1112": "head, -2075, 91660, -86965, -84727, -40226, 13545, -62575, 81664, -28174, 75281, 45106, -76207, 48504, 31703, -82137, -68827, 52855, 98527, -8975, 15693, 37987, 42578, 16924, -3646, 67798, -51356, 51666, 35052, -47056, 89625, 56480, -91045, -24768, 20675, -25971, 61875, -4951, -18857, 65975, -41214, -64103, 22203, 71713, 51194, -3347, -90466, 50925, -54130, -40508, 39340, 57119, 95598, -23597, -32181, -21298, 32572, 12860, 73669, -5558, 51342, 25744, -96150, -95877, -55908, tail", "row-1113": "head, -10087, 26812, 50106, 53984, 76250, -7615, -76073, -36207, -29631, -64242, 80229, 82012, 20152, 38938, -75328, -93481, -5954, -49025, -27123, 80315, -34287, -62828, -61836, 53319, 40706, 25749, 87483, 57184, 54784, 24419, 58980, -14643, 88775, -51256, 9563, -8027, 38563, 3830, 83099, -48995, 40761, -79787, 11444, -32662, -38800, 21594, 14184, -67450, -48938, -23681, -37647, -74007, -18153, 11493, 17318, -85684, 23932, 1389, -72194, 47378, 97949, 60966, -12052, 99848, tail", "row-1114": "head, 95005, -71345, -63252, 29731, 88963, -11848, -85371, -70828, 96693, -51177, 91327, -28895, 58733, 7629, 5386, -21538, 52781, 59710, -48571, -59689, 4589, -80651, 27999, 1395, -61020, 76242, -67676, -68350, -18095, -75226, -16707, 84247, -72040, -95422, -46212, 80291, -986, 77075, -89878, 37763, 42222, 69895, -52856, -1116, 50370, -77453, 51951, -55544, -84711, 88796, -25406, 48247, -26037, -19505, 32522, -86022, 16927, -96235, -87018, -56034, 55076, -97339, 17472, 51636, tail", "row-1115": "head, 51747, 10484, 12105, 52942, -42822, 37305, 20916, 86334, 61783, -85948, 18352, 69959, 50816, -30905, 35492, 63244, 85737, -22664, -93077, -76432, -7492, -79650, -49684, -50224, 72176, -4148, -33139, 30656, -91278, 66561, 50006, 40032, -76571, 51971, -44941, -4231, 21574, 25785, 57313, -31306, 79217, 93952, 30442, -59362, 44904, 98333, -82615, -11933, -47849, -76916, 57778, -77577, -3254, -60668, 74081, -34466, 87031, -95570, -762, -58382, -71154, -91039, -53709, 85280, tail", "row-1116": "head, -16588, -56382, -55260, -49037, -11868, 48988, -87852, -35605, -89213, 85024, 54098, 98125, -22324, 96753, 13193, -10179, -23503, -67159, 83738, 97134, 31351, -6482, -60744, 58648, 44137, 73352, -1786, -28474, -97936, -75913, 52631, -57613, -93144, 59373, 97914, 33056, -77775, -32799, 49194, -90891, -47792, 43382, 21700, -56934, 91889, -78543, -76390, -70592, 10542, 10596, -11816, -83195, -2367, 30511, 32970, -74403, -6184, -22996, -62165, -1573, 56791, -27458, -63102, 61355, tail", "row-1117": "head, -51378, -65587, 21495, -88253, -48001, -2222, 80120, -69357, 36621, 
-14501, 12728, 66112, 70275, -4888, -81543, -12098, -8120, -35768, -64078, 50518, -8456, -16408, -96266, -49255, -84613, -61374, -45690, -6527, 17104, -90802, -73735, 49967, -42781, -75988, 67117, 53675, 34446, 46406, -18583, -41064, 62203, 83517, -36651, -56391, -58988, 16543, 65963, 31020, -21285, -92267, -48977, 40049, -13657, -23908, 22734, 71147, -41105, -79004, 56470, 17856, -57090, -63128, 55517, -88663, tail", "row-1118": "head, 69139, 45025, 66666, -72686, 85186, 21573, 77686, 69612, -66838, -52937, -6549, -4681, -98132, 9135, -88558, -23023, -30452, -2248, 93496, 21864, 45523, 67005, -52672, -87659, 19172, 69845, 39825, -83398, -2442, -26852, -63175, 16100, -14588, -29708, 90439, -18435, 49381, -53039, 84934, 26167, 92861, 73935, 8609, -49152, -93743, -63337, -24764, 95669, -7507, 60782, -98229, 92095, -98837, -12632, -43351, -88768, -95655, 40203, 70301, 18615, 71299, 13282, 76172, -39130, tail", "row-1119": "head, 2729, -26822, 60093, 24335, 12498, 5440, 63243, 54526, 43780, -92676, -74818, 72046, 71270, -40003, 57522, -8598, 38042, -96902, 52215, 90709, 50946, 36761, -59135, 93846, -67464, -17877, -87886, -43891, -51100, -59619, 79169, -20503, -75642, -34498, 76434, 10215, 79269, 41185, -63863, 23050, -8076, -15038, 78303, 43134, 1077, -90948, 73877, 72430, 81067, -20438, -78030, -10791, 61572, -74941, 33162, -73349, 66669, 10823, -10097, 70226, -64441, -27321, -67979, -63668, tail", "row-1120": "head, 71929, 6876, -32654, 4138, 48578, -29368, 73901, 5436, 91971, 18610, -26385, 17546, 41211, -55015, -83742, -45467, -93438, -29518, 41025, 4580, 44761, 5586, -57987, 13815, 87865, 21147, 38510, -11856, -19492, -70001, -90059, 78342, 46549, -32412, 89686, 90746, -48031, 36612, 54877, 89152, 53197, 47675, -32912, -11166, -12586, 57348, -96447, -86305, -99675, -52784, 93944, -43187, -64091, 30038, -67216, -70140, 79022, -83805, 10370, -57783, 84481, 97358, -93734, -95511, tail", "row-1121": "head, 70754, -94132, 32671, -70348, -86319, -76194, 2541, 20180, -98537, -14178, -19127, 66278, -81361, -69174, -55488, 82399, 33578, -23754, -12865, 95404, 73125, 90524, 56684, -9385, -62875, -30068, 87268, 34995, -13426, 79706, 21608, -9470, -93302, -82484, -79638, -90256, 27196, -21591, 87122, -15656, 70459, -39, -6310, -82683, 25991, 69407, 25219, 6891, -98339, 4163, -16333, -97605, 21685, -66107, -94807, 87951, 49995, -98182, 46389, 15802, 57866, 80374, -41365, -18661, tail", "row-1122": "head, 57539, 44655, 75599, -89054, -74902, -56291, -69509, -75524, -62752, -51945, 80351, 33839, 29147, -93355, 92079, -51590, -7841, 2899, -41593, -39119, 91195, -90443, -95564, -37658, -32567, -43803, -57404, -7352, 78039, 37462, -71999, -29808, 57514, 26558, -6354, -80196, -47256, -86977, 52268, -64428, -50730, 78283, 90273, 44945, 2154, -48030, 43007, -54176, -25385, 36513, 49573, -18259, -5368, 58593, 23745, -90476, 56310, -49853, 60366, 52858, 74905, -51680, -42869, 30056, tail", "row-1123": "head, -21234, 54332, -49592, 63201, -31883, 50751, -13774, 18783, 20819, -54362, -72206, 63657, 77496, -85840, 26456, -39397, 9576, 83123, 94121, 46344, 51476, 85763, -49421, 91540, 82487, 54578, -81996, -96400, -68417, 74696, -81276, 8301, 8784, -17817, 70150, 1692, 36044, 57706, 66544, -78808, -96089, -10308, 72487, 77773, 33153, 83353, 68621, -22753, 7189, -7516, 60436, -61667, -67752, 5280, -40063, -29800, 25988, 3511, -49317, 87540, 74205, 82443, 69234, -42107, tail", "row-1124": "head, 44520, 88331, 83964, 57437, 37293, -25627, 77030, 84657, -66789, 84747, 63654, -14002, 34601, 74917, 61333, 92301, 86258, 
63416, -48596, 13968, -74656, 85264, -8360, 61133, -33867, -64710, -52703, 85523, 18322, -65115, -9570, 439, 36661, 57986, 85292, 59041, 98007, -58938, -19160, 98558, -1945, 28980, 95631, 53162, 86977, 57766, 64783, 42848, 47208, -51584, -95808, -75965, -40668, 82300, 60844, 38709, -74021, -89086, 79543, -81320, -15168, 39604, -89028, 67032, tail", "row-1125": "head, -69331, -55924, -31211, -79421, -27842, -34203, 97389, 61526, 65894, -76526, 55898, -51316, 19058, -3908, 76296, 649, -21257, -81274, -62054, -67853, -47168, -39925, 89717, -68025, 69208, -78815, 72513, 31406, 58817, -47522, -38793, 44943, 14456, 67937, 68194, -9329, 54078, 62009, 81853, 78716, -91171, 70385, 33233, 10637, 89510, -29413, -45037, 80946, 2839, 5140, 32548, -90601, 61665, -2909, -94442, 16475, 22421, -51159, 76249, 51327, 12886, 73648, 31378, -16079, tail", "row-1126": "head, 35978, 38846, -79399, -69505, -12400, -19134, 86001, -19531, 9568, -17078, 23077, 31133, 2850, 70038, -32260, -9981, 44478, -55515, 80095, 97354, 6805, 79884, -75595, 31565, -40799, -17008, 75892, -4061, -3666, 2658, -23766, 43262, 16043, -34579, -59961, 21240, 64266, -11208, 37744, -56501, 56287, 6150, 18268, -92367, 51984, -35000, 91554, -3988, 27402, -95563, 47656, -99583, -65311, 52209, 89117, -843, 35167, 46881, 17853, -37132, 90656, -69827, -61367, 76440, tail", "row-1127": "head, 32689, 25754, -60343, 70176, 52611, 61269, -28576, 36768, -44740, -60667, 72441, -2413, 63782, 60794, 31319, -19502, -27272, 58386, -41981, 24570, -82835, -2568, 39287, -78532, -6349, 29158, -76929, -4809, 1057, 36336, -35723, 10486, -12028, -30635, 4453, 51490, -60233, -28212, -8110, -58471, -80888, 92657, 58350, -2134, 47596, -41195, -47792, -34809, -21893, 81791, -96620, 84871, 57700, -88423, -20310, -81832, -53258, 4569, -62286, -22493, 17013, -12432, -52553, 93755, tail", "row-1128": "head, -46956, 89853, -92850, 97398, -78999, -45308, 5006, 36593, -46325, -91695, 8644, 50246, -49900, 81234, -47666, -92738, 21959, -76569, -84039, 80390, 72657, -72936, -72574, 9041, -98618, 57351, -70957, 1570, 10615, 88574, -87906, 4426, 32449, 74543, -23523, 28369, -99268, 88460, 25813, 19914, -73978, 34097, 59734, 46471, 78327, 17601, -50398, -69193, -14903, 76046, 11711, -31703, 99944, -29491, -68430, 70816, -93295, -46315, -56206, 50052, 70492, -605, -56390, -5989, tail", "row-1129": "head, 35257, 72694, -22929, 45003, -52600, -50977, -6107, 93925, 32244, 34313, 88491, -60662, -15764, -56443, -51179, -73099, 55607, 32260, 70265, 80524, -51423, -41176, 71991, 79329, 28412, 6589, 68112, 22033, -92994, -81257, 11175, 34581, -62742, -27958, -27482, 54595, -56242, 87053, -88952, 2580, -40278, 90616, -63116, -4800, 51407, -39259, 57432, -88587, 27523, 38076, -86156, 62518, 61866, 15553, 40156, -58375, 21660, 36991, 42031, -73051, 16836, 50205, -86757, 92341, tail", "row-1130": "head, 34407, 68957, -7184, -99940, -80994, 12270, 80279, 32964, -53879, -89121, -92745, 22435, -68819, -4931, 86521, 86479, -78643, -44074, -69621, -62497, 75939, -41067, 74488, -75825, -13209, -59893, -94492, -84255, 94664, 54620, 78345, -96901, 91775, -57894, 19014, 50123, 71660, -28023, -53013, 5075, -51707, 48249, 92357, 40101, 37864, -3230, -39602, -39160, -57053, 23808, -51655, 44507, -80883, 35000, -48015, 66188, -98408, 49948, 47613, -9334, -70254, -96423, 59357, 45286, tail", "row-1131": "head, -59775, 33374, -9719, 82798, -41719, -60072, -54724, -75430, -70147, -15475, -24870, -40660, -8938, 9415, -72773, -45467, 1201, -95152, -85141, -11367, -39192, 77827, -95234, 31503, 27616, 45720, 
70703, 39500, -23266, -92648, 72367, 38532, 85428, -20271, 76948, 73799, -9541, 37605, 55581, -24725, -1506, 12838, 32357, -51661, -20490, 69264, 86081, 23976, -81608, -34806, -1554, 55739, 13970, -698, -49295, -77184, -96476, 74961, -91564, 67900, 66104, 23397, 27492, 24600, tail", "row-1132": "head, 17273, -94515, 72492, 10491, 47031, 98352, 90088, 30921, 96519, -9837, -70028, -47097, 78017, 33658, 49253, 5007, 59769, 86570, -8100, 11496, -97238, 37340, 91097, -39879, -21860, 72326, -52440, -73319, 86568, 15125, 28509, 59307, 60827, 25165, 6921, -48734, 43838, 82616, 38427, -57685, -20245, -18859, 39911, -78200, -34571, -7673, -29732, -9215, 2094, -36677, 40239, 83134, -36666, -48292, -83435, -39859, 85655, 89184, 41685, -39430, -71988, -71771, 92840, 94103, tail", "row-1133": "head, 4800, -12505, -96088, -40826, 82953, 75223, 39713, -76510, -55979, -61452, 34580, 64512, -7126, 14702, 7571, 20756, -13650, -14521, 80153, 34011, 97332, -91437, -88686, -18060, -74316, -70445, -32476, -13203, 52145, -77777, -11142, -81709, 21729, -39515, 32796, 43358, -97246, -10387, 77194, -81432, 66107, -41098, 4317, 19783, -96226, -21876, 71511, 27332, -9198, 38803, 73251, -3597, -16203, 90042, 31576, 79081, -86049, -50764, 55621, -90407, 97924, -86477, -62710, 63599, tail", "row-1134": "head, -10509, 13456, -28254, 10568, 49601, 50682, 91169, 51486, -44980, -38519, 3026, 91251, -76068, 43019, 21332, -88079, 81421, 8835, -51398, -15418, -49083, -19364, -91005, -67461, 77048, -10174, -33810, 82398, 55729, -40977, -13198, 19603, -44555, 78654, 41072, 89126, 97030, -25921, -88825, -62219, 59192, 16554, 28683, 45966, -233, 8759, -7334, -49050, 41548, -52298, 61280, 69622, 5257, 71954, 62492, 7054, 24350, 51869, 42955, 7606, 17974, -61923, -65624, -86744, tail", "row-1135": "head, -3541, -79651, -14211, 68861, -77352, 87363, 83815, 9118, 255, -5503, 84550, -25163, -75668, -29801, -8812, -44822, 87670, -45744, 19686, 57482, 52796, 88153, 5720, 19595, 16828, 36874, 79514, -14939, -89007, -20908, 2974, -60748, -5615, 76392, -69938, 98241, -75888, -9003, 43295, 27927, -74275, 83357, -77723, -20640, 8715, 84281, -88569, -15267, -2755, 78962, 36582, -21386, -19803, -61720, 6765, 39054, -56890, -28788, -97880, -10938, 26056, 33403, -72596, -77972, tail", "row-1136": "head, -73091, 72585, -58155, 1448, -60972, -73952, -8498, 12582, 61401, -42151, -99949, 6363, -29668, -15941, 53385, -72101, -17825, -28631, -87161, -82102, 61951, -81380, 7163, 70523, 26005, -52849, -9243, 64010, -27871, -80725, -76233, 37568, 30416, 89693, 19918, -98839, -14921, 65312, -54779, 16326, -20640, 64733, -94107, -68198, 63389, 89486, -72412, 60888, -79611, -80873, -730, 81649, 36421, -59967, -96006, 25810, -68682, -70040, -25724, -14254, 81896, -93321, 57468, -33258, tail", "row-1137": "head, 33608, 19788, 2533, -53218, -85723, 91943, 72674, -46151, 95242, -16353, 84400, 23227, -35968, -44268, -475, -94041, -16663, -81012, 42409, 59725, -27218, 26953, -22659, -61813, -30938, -70083, 56776, -83734, 26375, -53461, 24259, -40615, 10496, -10153, -39631, -32599, -75342, -93398, 35982, -80882, 69329, -34523, 31669, -36975, -23815, -69515, 66652, -54950, 45576, -35516, -36024, 88498, -85717, -86298, -18345, 92342, -75033, -44221, -44443, -81027, 75527, -47316, 257, -18647, tail", "row-1138": "head, -65727, -82631, -39296, -56291, 47571, -45910, 37942, 4936, -92440, -61195, 55736, 60252, -58742, 67087, 21189, -53056, 88740, 29416, 80354, -58723, -35639, 42130, -47939, 50836, -62813, -93042, 7022, 62493, -98572, -31588, -56975, 36400, -97079, -29600, 
97041, 38913, 7038, 1378, -41167, -36704, -94093, -3408, -46224, 36475, -99196, 8145, 54964, -48675, 17046, -93564, -55354, -66372, 87557, 85088, 71810, 46185, -8340, -52567, -8073, -80263, -68108, 10812, 94102, -94884, tail", "row-1139": "head, 72996, -23251, 80518, -73651, 41721, 95613, 55926, -33224, 43594, -79178, -79940, 20604, -19424, -94907, 35407, -76731, -86191, -1901, -33089, 60957, 33025, 19809, -8359, -44294, -68829, -23930, 61163, 35933, 72822, -17757, 92216, -85698, -59770, -46280, 75855, 49859, -16211, 99075, 10692, 47602, 36892, 10545, -39323, -43971, 57046, 69256, -62469, -49000, -35929, 50662, -24048, -32752, -90169, 75402, 67200, -73476, 55632, 26746, -39561, -30762, 80121, 27209, 73219, -41046, tail", "row-1140": "head, 2029, -98267, 74719, 43480, -5704, -78277, -93377, 50959, 74814, -83891, 90298, 32901, 96709, 77306, -28085, 90356, 70109, -7765, -85011, 40827, -72736, -16980, -15085, -12163, 9598, -58117, -9538, -96740, 37050, -76429, -85652, 95995, 23752, -84973, -90132, -34464, -2554, -33192, 99683, -39577, -15315, -79830, -46011, 25932, -16589, -2069, 83108, -37529, -43379, 56230, 67618, -85617, 30832, 6775, 13658, 17477, 58611, -83034, -26024, -38078, -86781, 5786, -33988, -90644, tail", "row-1141": "head, 7836, -24691, -76714, 80792, -20749, -7021, 66806, 2567, 57623, 77320, 68485, 25668, -93532, 14840, 38915, 9761, -31804, -27909, 8928, 25643, 40772, 39708, 86133, 39988, -12455, -88006, 21799, 12451, 4381, 27544, -64810, 81239, -33557, -69488, 69645, 71741, 13666, -48565, 8669, 39038, 71814, 27630, -51278, 40641, -60141, 27553, -19797, -42629, -81976, -77159, 32211, -35097, -4704, 39911, -44614, -4052, -75569, -69889, 34252, -84383, -82235, 29159, -91601, -63498, tail", "row-1142": "head, -57325, -57447, -22297, -61945, -45748, -64560, -75268, -89459, -19282, 90598, -31492, -64125, 23436, 19612, 45019, 16111, 67253, -64596, -16796, 52176, 21745, -42220, -61323, 63716, -66748, -56811, 45089, 74762, 96880, -62431, 69265, -91238, -63972, -82274, 85025, -17933, -52123, 24489, 74484, -29000, -47229, 54785, -34053, -51331, 52139, -30286, -63677, -69863, 41537, 55462, 37724, -83997, -33342, 88524, 67461, -75052, 75129, -93339, 28957, 7282, 39142, -93710, 2609, 13275, tail", "row-1143": "head, -1005, -36857, 50244, 27498, -13065, 42156, 86395, -14545, 60160, 8412, -45111, -72503, 23432, -69472, -15086, 13776, -30773, -1700, -88245, 62072, 51364, 23263, -24093, -31698, 74831, -40245, -2885, -7451, 77855, 33569, -77076, 67785, -7925, -76559, 22549, -39170, -34515, 46563, -50385, -18562, 78212, -35649, -29934, 45639, 9078, -11856, 16310, 34752, -16199, 56213, 87103, -62223, -70260, -42405, 13625, 72510, 12702, 46662, 17260, -18956, 35894, -79591, -82941, -17493, tail", "row-1144": "head, 2473, -61409, -9749, -92674, 18747, 77476, -17988, 72003, 73269, 83636, -16213, -938, -39836, -33692, -42847, -33959, 7714, 47098, -11347, 26262, 74313, -14803, 80077, 93230, -81040, -13658, 87633, 6121, 94411, 29509, 28599, 31187, -68879, 91183, 78519, -42694, -13969, -37859, 76184, -55153, -97469, 87459, -76334, 75269, 24721, -47220, 47455, 96074, -27777, -94024, -20918, -6782, 88111, 70890, -45868, -64445, 27453, -90751, 60382, 22161, 9507, -56412, -34324, 31570, tail", "row-1145": "head, 66621, -83497, -3927, -27068, 76985, -3437, -82874, 78665, 91427, 90162, 70816, 31481, 63302, -63006, -19910, 9334, -49984, 33312, -19079, 76073, -85276, 4759, -79128, 13817, -56041, -56416, 85659, -34709, 46942, 53544, 80020, 49481, -52270, 20843, 74517, -91493, 68914, -76400, -58404, 18529, 15544, 
42516, 24530, -92377, 54789, 49296, 74821, -2794, 74741, 77842, -35590, -42133, 74378, 86750, 4063, 34543, 73113, -1835, -12929, 23801, 5000, 23068, 41965, 9563, tail", "row-1146": "head, -21879, 88172, 40582, -57771, 84259, 64563, 59523, -3547, 49497, -97293, -73136, -41199, -65765, -51919, -53005, -5139, -80148, 73663, -34502, 39679, -59141, 30613, 81015, -34750, 44010, 56695, 3160, 30435, 98940, 67714, 67166, 29126, -70795, 61915, 38818, -22493, 43926, -26303, 54435, -78375, 40383, -20098, 89835, -53952, 72854, 84737, -82720, -38418, -88200, -73621, 75740, -92343, 3550, 71663, -42518, 36509, -63575, -84817, 13378, 31881, 49491, 6680, 71998, -67437, tail", "row-1147": "head, 37469, 8689, 37857, 29157, 44603, 68883, 20921, -53095, -76289, 83015, -71907, -47322, 15812, -81218, -35115, -60488, -1014, 98210, -24616, 51379, -86986, 87542, 17342, -54554, -36976, -53008, -15598, -7067, -21222, 87073, -30670, 24847, 87379, -6656, -72228, 78676, -6822, -96463, 6928, 15766, -76918, 48957, -63944, -23849, -6271, -867, -45739, 72269, 77205, 20369, 10834, -84840, -67801, -12570, 63560, -34042, -44214, -72998, 52360, 64078, -66995, -15223, 25231, 6094, tail", "row-1148": "head, -67301, -62209, 32114, -1055, -34911, -37797, -11559, 81122, 18290, 18652, -3285, -5335, -52552, 30475, 35573, 5081, 76741, -41686, 41238, 66553, -58568, -6831, 97767, -98522, -80317, 82951, 44341, 10151, 25228, -6563, 84014, 82600, -39178, -62148, 52315, 39591, 99053, 89031, -23719, 43163, -27373, 82442, -25529, 85069, -18713, 17086, 6574, 40903, -99789, 13871, 58889, -50043, 49687, 6689, -70255, -50130, 21165, 78264, -63037, 11506, 39000, -84299, 33033, -82179, tail", "row-1149": "head, -20463, 85792, 1142, -35866, 98781, 23906, 99247, 70165, 70774, 17931, 25928, -83560, -69928, -645, 24145, -65008, -81301, 27557, 13478, -64372, -19508, 74099, -84645, -49267, -86142, 69545, 86517, 60102, -34830, 94385, -42889, -43298, 48705, 68740, 33197, -29691, -61145, -83287, 28897, 89083, 96896, 53246, 87893, -47415, 64712, -20238, -17544, 73313, 53266, -91788, -58057, -46496, 24891, 55073, 75618, 61083, -18279, -70391, -82148, 65880, -65980, -9597, -7043, 91916, tail", "row-1150": "head, 31375, 98670, -50542, -24424, -85697, 7870, 57035, -93177, 60530, -75384, -73403, 57632, 92438, -60599, -11118, 66637, -84689, 53817, 40373, 47231, -39764, -91993, -54636, -91217, 23968, 86734, -94578, 46945, -3239, 40528, -88856, -86921, -56818, -62070, -71050, -75649, 91731, 72451, -55882, 2126, 94584, 38968, 75663, 84273, -35547, -31303, -1860, -3391, -24866, -83281, 27464, 34496, 94960, -29970, 71060, 15184, 22499, 28284, 88877, 60404, 72489, -99857, 40762, 45332, tail", "row-1151": "head, -57319, -97986, -33348, 19309, -5947, -20887, 55335, 47285, -14080, 46028, -60432, -99685, -5616, 12078, -73278, 39336, -90511, -68490, -82348, 89644, 29645, -39005, 81538, -55389, -77099, 80364, 29460, 52849, 48778, -17855, -83457, -67828, 61178, -12050, 78787, -34545, 10027, -28452, 65915, -27319, 41107, -26821, 78000, -17851, -45716, 92507, -55246, 58425, 77814, 90212, -90633, 47081, -16413, 51266, -32093, -63689, 87214, 23833, 77037, 16857, 6373, 53073, 92516, 54726, tail", "row-1152": "head, -66861, 63661, 44668, 78431, 76501, 3948, -47293, -65728, 81571, 65500, -63868, -23942, -13737, 34086, 87719, -35536, -26358, 67743, 74738, -31777, 18438, -97124, 32724, 65335, -39896, -6072, -80862, -45517, -7133, -42694, 20996, -17451, -67836, 86181, 63908, 41786, -6089, -65223, -58216, 40658, 93241, 52855, -48511, -95540, -83588, 19740, -29813, -1207, -99076, 69015, 
68943, 31056, 31723, -30192, -58372, -68256, 83974, -72129, 47807, -14206, -93763, 12295, 57099, 89508, tail", "row-1153": "head, -39465, -82666, -10265, 18937, 42284, 42602, -17908, -2721, -44084, 55522, -57494, 62600, -80678, 90687, -93454, -33602, -25701, -40901, 89792, 26828, 33436, -73618, -11055, -58122, -58222, 13854, 86162, -24116, 13436, 71657, -19472, -22046, 70660, -29401, 22601, 15998, 12088, -81851, -6430, -89903, -83167, -79927, -13729, -53391, 7421, 34489, -6327, -65850, 13691, -64544, 64472, -40348, -64412, 30156, 39343, -95608, 21781, 61825, -44677, -49456, 39952, 84834, 33664, 62122, tail", "row-1154": "head, 74480, -58917, -23703, -70999, -39142, -3524, 65741, -74611, 61437, 76208, 67826, 80591, 24426, -82218, -30179, -1828, 24293, 92226, 71934, -77566, 29057, 89509, -97895, -21835, -31908, 23444, 95936, 99295, -79907, 14169, 86502, -80862, -30738, -8412, 10355, 75621, 84911, -98343, -16080, -61865, 14685, 83361, -33713, -38207, -39575, -92871, 23675, -92767, -616, 15361, 88794, -39272, -43445, 49602, -58391, 76692, 58190, -37170, 19315, -55135, 32365, -15824, -28734, -26149, tail", "row-1155": "head, -14520, -74346, -33633, -79488, -41875, 87092, -67235, -56405, -23660, 6480, 89749, 95715, -46347, -32528, 12798, 60391, -44370, 19324, -33009, 93433, 72059, 70682, -4752, 7600, -4526, -22698, -85386, 41549, 63942, 68804, -21029, -54740, -98350, -31055, 45729, -93805, -62975, -64676, 24544, -78224, -33097, 95001, -24421, -1612, 79208, -95606, 81476, -22045, -2315, -81089, -53522, -63963, 80222, 31181, -87507, 19872, -83574, 69299, 83102, -38839, -4318, -36784, -14796, 91691, tail", "row-1156": "head, 25126, 63230, 54275, 68300, -98670, 30938, -48842, -87607, 1248, -47656, -28592, -75614, -29447, 98843, -49032, -6907, -95724, -84360, 84321, -4273, 46560, -75307, -56088, 35631, -20710, 96598, -66940, 38612, 24890, 56085, -902, -909, 61735, -9540, 27054, -45520, 59580, -60061, -89061, -47888, -38727, 10979, 57815, -52588, 58679, -36812, 90502, -87159, 92089, -56401, 24016, -50613, 72970, -78586, -34552, -8542, -34872, -22909, -80485, -65433, -72820, -76233, -69348, -62571, tail", "row-1157": "head, 39334, 73563, -947, 76882, 70020, 43841, -24744, 66123, -60578, -48131, 96037, 16617, 97375, -60626, -43716, -75871, -86870, -96148, -37887, -24054, -75125, -69470, -47207, 23853, 55636, 10803, 15560, -46857, 83842, -30596, 70727, -19025, -8724, 78234, 16606, -4919, 99483, 1361, 51862, 52942, -56918, 56120, -56745, 56693, 81526, 36615, 23613, -18359, -56083, -45867, -17079, -19755, -23039, -74786, 62754, -94018, -78619, -95001, 26025, -18439, 56135, -35661, 89217, -83237, tail", "row-1158": "head, -572, -14055, -12187, -72519, 69713, -45387, -73175, 12243, -67327, -3587, 37506, 93895, 43886, -29777, 82946, -7296, 59198, 29679, -34968, 24568, -60246, -47962, -74291, 78997, -80295, -74946, -83264, -19042, -85418, 96079, 50733, -60665, -93514, 32942, 47837, -69211, 39731, 21775, 65747, -71170, 4899, -27769, 12456, -79400, 35116, 42303, -77675, 627, 45328, 52855, -79146, -45705, -94191, -43066, 82314, -46082, 59292, -23859, 86690, -60492, 3417, -181, -49721, 84441, tail", "row-1159": "head, 85858, -25966, 58688, -32696, -24105, -15566, -36722, 11221, 5527, -61741, 68290, 82241, -72366, -65849, 98384, -73502, 31976, -89300, 97633, -13280, -37619, 42790, -89, 63564, -60248, 21248, 65307, -59994, 49329, 21651, -95115, 11785, 69411, 74178, -30148, 48178, -79455, -50009, 41871, -72413, 84920, 35823, 43849, -40356, -11481, 13476, -23432, 92043, -13313, -25907, -40749, -10504, 96551, -91541, 
76190, 59766, 72129, 6670, -65958, 96579, -99989, -76205, 52967, 78001, tail", "row-1160": "head, -53653, 36097, 66032, -59499, -39102, -30773, 45898, 15605, 63816, 69488, 56435, -25609, -5024, 9418, 33548, 43367, 87870, -71471, 91562, -33407, -99014, 71447, 62760, 63531, 97392, -28626, -38121, -11274, 89129, -60930, 59492, 70854, -57426, 46446, 45018, -79659, 21733, -34572, -4746, -6161, 58456, 62598, -3032, -75909, 65338, -85821, 20547, -41599, 11079, 82531, -79503, -43533, -57061, 53830, 43962, -54981, 36810, 56777, -10856, 11121, -72664, -44632, 49022, -77253, tail", "row-1161": "head, 50534, 46915, -54874, -79327, 54843, 10463, -8036, 5308, -97973, -55589, -41292, 935, -62726, 93788, -2264, -3023, -36870, 50489, 57001, 77946, -14871, 62917, 16285, 3154, 39372, 15982, 24137, -6160, 17273, 62506, 69577, 19746, 1546, 41981, 40584, -81743, 48868, -52299, 32893, -32012, 82341, -28009, -42931, -5038, -88471, -42643, -54408, -34176, 80576, 62122, 33333, 92703, -24630, -27483, 87354, 17400, -7658, -19655, 55307, 69662, 48990, -89592, 67714, -39647, tail", "row-1162": "head, 90081, -15962, 31011, 38068, 51826, 14400, -73885, -67055, -24977, 8053, 95265, 72824, 95953, -25091, -68373, -60125, 62203, 91337, 73949, -36862, -19258, -85488, -49769, -12731, 96182, 55510, 71436, 24265, -7965, 79852, -14639, -45022, -62640, 48111, -45719, 49763, -97567, -92585, 22846, -88727, -82543, -76991, 94252, -62569, 55510, 893, 39539, 99503, -45366, 27423, -45888, 15463, 93328, 90185, -77195, -34122, 35989, 96272, -22847, -30395, -80454, 9691, 31666, -8129, tail", "row-1163": "head, 35950, 92295, 8781, 38786, 99489, -6130, -92145, 28563, 63649, -7028, -72511, 15496, 56366, 22076, 90771, -94438, -87057, -58924, 36087, -40693, -66503, 18938, -60379, 80394, 52574, 56867, 22567, -93656, 24246, -68092, -61531, 53374, -872, 48960, -77902, 46625, 32637, 85634, -48510, -4008, 93681, 12985, 36799, -67726, -33076, 16950, 64683, 59988, -9719, -54194, -28203, -60577, 8311, -52067, -76819, -5831, -17599, -40613, -40762, -85721, -30926, 53132, -96569, -93192, tail", "row-1164": "head, 56025, -45603, 45192, -2329, -96591, 5035, -63326, 99519, -60330, 81551, 93670, 49489, 15931, -15630, 80794, -86086, -8793, 5249, -2898, -29861, 47416, 83950, 95893, 77554, -23131, 31979, 17096, -18069, 65238, -55208, -40523, 774, 24111, -82850, -757, 4074, -9535, -36274, 61209, 72014, -85465, -20763, -10600, 99232, 67298, -65242, 36224, 75772, 94747, 14541, 77736, -81177, 95197, -5083, 19726, -95520, -98383, 76076, -4477, -33384, -10719, 27711, 4136, -12774, tail", "row-1165": "head, -12092, 67991, 73405, 20917, -55369, -59300, 6484, -7796, -66062, -30226, -39159, 41251, 90945, 23215, -54518, -74708, -15014, 55257, -87606, 46465, -38275, -35187, -44359, -98664, -41584, 7376, 93575, 67154, -46814, 69287, 81697, 64316, 86530, 84618, -3042, 85252, 14411, 652, -86207, 2953, 24025, -93026, -22027, 84578, 93519, -76052, -2554, 52015, 16835, 86318, 63196, 93655, 36318, -32045, 43332, 89210, 25761, 76859, 26889, -39340, 29037, 45731, 3487, 44789, tail", "row-1166": "head, -51580, 95803, -32118, -28348, 6940, 27697, -60500, 25080, 5851, -13321, 19709, -92010, -56344, -56989, 28852, -49313, 66622, -49710, -9442, -21757, 84264, 14397, -74793, -51924, -98240, 2153, -94107, 13120, -41216, -39658, 59644, -45042, 62758, 61329, 84023, -76872, 70511, -73198, 23848, -51100, -97991, -86330, 73961, -58556, -59849, -29691, 53756, 37389, 97892, -52492, -30896, 49425, -4153, 80277, 75957, -37316, 85870, -2751, -45341, -7994, -9875, 7170, -58969, -88131, tail", 
"row-1167": "head, -71855, -43186, -64805, -67471, 65543, 81645, -73079, 51722, -90278, -97136, 57700, 35955, -75529, -74828, 83158, -97338, 79532, 98238, 23595, 22638, -35552, -18494, 47550, 55324, -9118, 26050, -9667, 31905, 30321, -4845, 87059, -857, 38005, 35698, 12791, -70504, -61133, 8769, -96865, -2973, 69949, 98541, 41267, -30638, -72175, -54780, -93676, -95135, -69466, -44308, -70895, -40286, -86328, 21962, -37365, 89446, -8578, 10833, -17480, -26812, -16602, -44839, -66639, -26243, tail", "row-1168": "head, -2982, -98948, -89672, 45352, 21839, 63464, -78425, -93303, 57840, -19095, 12302, -1778, -57741, -20874, -58266, 68380, 54373, 60044, -49807, -31986, 48910, -53706, 54089, -48674, -48110, 97617, 49765, -40486, -86569, 42988, 6184, 20723, 25314, 4322, -91295, -51715, 42831, 1781, -58936, 10610, 55920, 60861, -33220, 74350, 37474, -45365, 48265, 39004, 5752, -53780, -61154, -86866, -84830, -51102, -59382, 24734, -53044, 15542, -938, -62886, 89205, 7872, 80691, -54750, tail", "row-1169": "head, 40863, -67912, 51135, -76454, -63586, 65446, 24388, 83803, 72257, -1981, 4405, -76349, 41308, -78560, 53208, -98732, -20357, -53860, 15089, 76632, -48233, 68341, -46211, 59259, -67113, -32403, 79485, -56112, 59167, -95441, -35039, -36159, -62547, 17394, 51628, -21347, 42701, -43820, -58310, -40400, -26429, 63245, -90084, 7868, -21564, -76383, -5220, 2275, -86192, -49517, -11118, 60844, -2464, 93855, 86940, -40343, -39771, -54194, 65934, 34616, -79397, -11187, -57989, -19831, tail", "row-1170": "head, 64170, -88117, 84676, -61970, 65934, -65443, 91309, -3130, 33764, 15298, 77287, -28952, 11262, 58751, 61332, -11005, -16268, 55278, -44950, 65977, 57711, 29754, 57752, -76030, -18991, 11635, 27966, 36768, -25446, -77084, 29805, 75133, 10743, 35435, 44604, -94187, -31240, -60913, -70255, -93846, -60522, -6417, -4122, 13602, 94137, 4978, -74820, 93140, 32285, 785, 87812, -59902, -11446, 12125, 96964, -40251, 90728, -19693, 92674, -92909, -16960, -43035, 37197, 16661, tail", "row-1171": "head, -30168, 62973, 99297, -37118, 82782, -36474, -51450, -85171, 57655, -94845, 70531, 46600, -15378, 56413, -36623, -99037, -57335, -73038, -30620, 8456, -37883, -36769, -90138, -65393, -42382, 84110, -1222, 85090, -72159, 12304, -49524, -5020, -71708, 2394, 86602, -21795, -28866, -66698, 25714, -95197, 85934, 55556, 46349, -42993, 13449, 30477, -59833, -23745, 69692, -90969, 33054, 95042, -47973, -24723, -68256, 17330, 29473, 86077, -71912, 51568, 88956, 61291, -89983, 24512, tail", "row-1172": "head, -71510, 33923, -9656, 87881, 9413, -84496, -71772, -85366, 75105, -3513, 83509, -34809, 13642, 70344, 75418, 67698, -34214, -94285, -52544, -86853, -58646, 60392, 3281, -14118, 92733, -97349, 21328, 49723, 84738, 29818, 60928, -68071, -28231, -14867, 15850, -88699, 90143, 4785, -44990, 71523, -33226, 83442, -7825, 53719, -84138, -14942, 7417, -62214, 9192, -80369, -23674, 94562, 18963, 37134, 17232, 37755, 3630, -51522, -89168, -52271, -3875, -3222, 74919, 96484, tail", "row-1173": "head, -92185, 27865, -99063, -70550, 72335, 26791, 63644, 77515, -11374, -73643, 9598, -17596, -52708, -39682, 73425, -33174, 81557, -52717, 12230, 72388, -48910, -1885, 62489, -99959, -69550, -45701, -34848, 90476, 24627, -80862, 17784, -86147, -29310, 94629, -26634, -67734, 83280, 70405, -15696, 48110, -43625, 10141, 7169, -16984, -85069, -72736, -1591, 81074, -14391, 56977, -13076, 94814, -60813, -91364, 12550, 86769, -21948, 24669, -81236, 67302, -16789, -15457, -4160, 56683, tail", "row-1174": "head, -27734, 51418, -54173, 
-83721, 28815, 93024, -61102, -51119, -87898, 80617, -86791, 61118, -73020, 72163, 68850, -92296, -37981, 66907, -51494, -21109, 73380, 41181, -66179, -81584, -49971, 90398, -60616, -55744, -51906, 14799, 11044, -25682, 29370, 52649, 40776, 21166, 99156, 75416, -32308, 15920, 5524, 54923, 39941, -77795, 50674, -61668, -32, 21891, 38266, -21760, -83103, 31380, 61294, -80708, -88184, -21095, 10147, -86224, -58385, -55272, -46281, -2132, -65084, 16378, tail", "row-1175": "head, -96738, -82470, -83659, 9706, -5322, 98051, 93817, 36062, -35160, 69164, -36406, 49225, 27415, -29093, 81604, 57328, -49195, 46323, -13935, 79654, -3435, -98401, 84837, -55118, -2633, -16996, -39487, -30352, 48828, -9797, 56378, 23223, -15516, 39181, 96170, -57522, 68027, 86240, 10277, 66073, -65589, 56627, -25153, -57455, -37499, 34610, 5147, 8353, 33560, -18844, 91870, 49303, 13742, 9530, -5702, -6888, -4123, 28549, -93276, 19567, 35813, 67209, 57142, 85608, tail", "row-1176": "head, 174, -27193, 44634, -96695, 13859, 87857, 73739, -99064, 66982, -83159, 18361, 12690, 59501, 38372, 60034, 28726, 32215, 81306, 30655, -66809, 14197, -36993, -7929, -19870, -77882, 95239, 45203, 59766, 10998, 41102, -2247, 3492, -48343, -30022, 5626, 41446, 87337, -73439, -52010, 15753, 13266, -3403, 41840, 18683, -29713, -67885, -80575, -56320, -19595, -86426, 63137, 91407, 26155, -16093, -32598, -71694, -28134, 95840, -3012, -75306, 1030, 44821, -76124, 4285, tail", "row-1177": "head, 68642, 16908, 92147, 16755, 55096, -86053, 82842, -6759, 85580, 64411, -45537, -51674, -84243, 48944, -20851, -8598, 66621, -90679, -4607, -23959, 85491, 91466, 52092, -61466, 12534, -76499, -54470, -49268, 91889, -91614, -18797, 31476, -75610, 16997, -35616, 87457, -90851, -90156, -98764, -49709, -47938, 92514, -19665, 54956, -79554, 19578, 97160, 46066, 26732, -80419, -1556, -35507, 99739, 5054, 16538, 56971, 86753, 44853, -39052, 49646, -8347, 73601, 79768, -1610, tail", "row-1178": "head, -56147, 62960, -23183, 45179, 69500, 43562, -54820, 95823, -52794, -76749, -98158, 38839, 92786, 97708, 71365, 91265, 4370, 12782, 78373, 98852, 2549, 81500, -95507, 77235, 19286, 84339, -59484, -8222, 60406, -88618, 2256, -12794, -68405, -1551, -25105, -96883, 79112, -95440, 92152, -34187, 20931, 66700, -27807, 42802, -35701, 77510, 47121, 58886, 88650, 8939, 54222, -16460, 73938, 36846, -9393, -65874, -45496, -77739, -38999, 42450, -46974, 68758, -69183, -4254, tail", "row-1179": "head, -1293, 59686, -6626, -44392, 15951, 45303, -45693, 84337, -47000, 13784, -69464, 9170, 44124, -28538, -53680, 55904, 21576, 52332, -40238, 99250, -61507, 70276, -87630, -20487, -52396, 89562, 13212, 73151, -9321, 48238, -53842, -44464, -73170, 39164, -39831, -25520, 71405, -56824, -26691, -72695, 11588, -84646, 74567, -49195, -65298, 30558, 69307, 13268, 21010, -77785, -12397, -7789, 18363, -85476, -1924, 47461, 93093, 63876, -16022, -27859, 24821, 88265, -10008, 45900, tail", "row-1180": "head, -41493, -41911, 69147, 50634, -78480, 49595, 36750, -7887, 46161, 98282, -21672, 5042, -672, -4139, -6678, 79337, 53647, -11566, 51003, 94489, -29544, -23844, -15690, 25465, -87419, -60319, -41986, -571, 14725, 2460, 97139, 6956, -57054, 71652, -49555, -31535, 10935, 61415, -76899, -57376, 10760, 75408, -20114, 40346, -77036, 85639, -49857, 87489, -83375, 66453, 26448, -34054, 20135, 2712, -78641, -15904, 2129, 17439, -85936, -68102, -54103, -54900, 77469, -28736, tail", "row-1181": "head, -60756, -31622, -60276, -43755, 90084, -48984, 66395, 84957, 93739, 71628, 41835, 10389, -42753, 
-19209, -74661, -41909, 68714, 82182, -89289, 48657, -79998, 41999, 47831, -13561, -20996, 21093, 23940, -70435, 9013, -91786, -28830, 60912, 98213, 76767, -41617, 34583, -72105, -25502, tail", "row-1324": "head, -15476, -2219, 64126, -39614, 58334, -3811, 4912, 94437, 43985, -25327, 48037, -39181, -1524, -62209, -25550, 26801, 91148, 40485, -6232, 64515, 21503, 33181, -82987, -47540, 80700, -17975, -33663, 59465, 81607, 70698, 97654, 99228, 90840, -99975, -72964, 56827, -39062, -45680, 2290, 38668, -38558, -92228, 56897, -79359, -90803, -66676, 8264, -75816, -96962, -46456, 45990, 49935, 27088, -94761, 94066, 21999, -59730, -94857, -46669, 82772, 29361, 59190, 85783, 87722, tail", "row-1325": "head, 75954, -52278, 97554, 72368, 70244, -7649, -45213, 2862, 27004, 10946, 64126, -37533, -66374, -10703, 48579, 99898, -58137, -29818, 33535, -15680, -9824, 97988, -22123, -95700, 59484, -87736, 53897, 34428, -49125, -57560, 31753, -55605, -12698, -47760, 82533, -59512, -18457, -78277, -56287, 91495, 73801, -3678, 76050, 18930, 35298, 13894, -28167, 69056, 7703, 57466, -58062, -67276, 31334, -60682, -62544, -60261, -8093, 79349, -64152, 54819, 11957, -37908, -4331, -72644, tail", "row-1326": "head, 51136, -76812, 88789, 821, -13991, -7904, -72635, -37435, 5609, -36364, 98909, -80047, 71178, 73129, 73599, -84320, -31807, 91598, -28800, 11714, -96446, 14515, 36277, 63739, 12004, -18026, 86752, 39501, -32255, 75769, 99371, -31745, 15272, 37686, -3862, -19293, -64819, 19465, 57866, 74216, -8739, -47333, -5088, 21724, 66006, 88307, 45055, -51570, 42065, 89818, 9160, -65368, -42687, -23568, 84861, -33341, -68905, -54131, 42234, 58699, 22857, 61436, 47749, -65524, tail", "row-1327": "head, 24233, 54399, -57549, 83940, -4804, 25988, 63632, 58906, -87841, -23362, 2060, 98153, 12845, -29742, -81426, -20375, 18258, -79300, 70610, -67253, -13401, 6234, -86778, 6138, 42213, 23225, -59461, -93077, -27880, 53074, 74401, 47758, 89810, -10459, 16048, 77168, -55218, -15091, -68380, -21052, 36671, -78691, 90997, -25087, 66612, 49427, 18676, 52921, 17798, 50917, 71455, 44552, -61694, 91644, -27562, 70894, -46614, -48192, 67241, -95444, 46612, 2272, 16804, 28409, tail", "row-1328": "head, 40203, -86870, 68040, 95206, -92381, -824, 98709, -83509, 27706, -71725, -57604, -6164, 12805, -71988, 22928, 70693, 23525, 51425, -236, 71641, -31864, -27731, -71480, -89182, -90833, -14693, 81182, 88693, -64189, -80302, 62541, -39158, -79655, 99144, -90362, 3342, -44752, 44780, 85837, -37564, 17007, 94075, 94188, -42749, 69301, 84190, 48932, -32499, 67855, -46465, 68898, -80715, -22953, 74048, 80085, -76501, 9692, 78136, -41875, 7196, 79930, -40526, -94288, -97524, tail", "row-1329": "head, -29639, -43793, 61362, -70375, 75291, -95399, -66682, -10310, 76423, 99457, -13113, -2454, 51803, 26352, -55065, 68363, 41505, -2137, 11947, 84905, -53957, -22441, -37116, -96230, -82828, -78969, -71875, 30404, -89328, -76433, -14658, 30486, 92036, 56006, 13274, -40608, 54573, 71946, 17904, 29476, 59392, 35853, -67715, 27986, 69577, 93069, 20969, -46763, 4895, 28542, -74871, -20187, -13617, 26741, -96020, -13343, -33616, -36993, -46200, -74592, -4917, -65525, -53113, -77156, tail", "row-1330": "head, -44586, 51764, -44272, -67783, -12966, 55058, 50307, 3077, 94491, -71760, -15328, 51404, -21290, 88881, -68352, -63136, -16470, 70317, -13721, -57068, 15759, -88423, -63246, 95270, -94845, -50229, -42342, -4063, -76490, 62248, -96713, -17112, 35293, 48688, -81253, -83226, -31480, 69474, 75434, 41421, -2097, 56920, -54996, -54200, -19889, -53944, 
69371, 67216, -73962, -54434, -63894, -2388, -46058, 36741, 40688, 13710, -16916, 67396, 32621, 9455, 98401, 3138, 95354, 77906, tail", "row-1331": "head, -52179, 39570, -60580, -48343, -23344, 96118, -50469, 62245, -78202, -8201, 77589, -41646, -45969, -60043, -99545, -22044, -8063, -18152, -53877, 20253, -20556, 69478, -4645, -82492, -2974, -40308, 40105, -93698, -48634, 35443, 79289, -78114, 58863, 55084, -86222, 38693, 23331, -70886, 65933, 74499, 99989, 17541, -17008, 62408, -64881, -19217, -28241, 59479, -58730, 75709, -44071, 24379, 20079, -91573, 4058, -98730, 69815, 31624, 77120, -42574, -25533, -29184, 56363, 9878, tail", "row-1332": "head, 22046, -66798, 38825, -76674, -79868, -14187, -71651, 14229, -78930, 70672, -22285, 18379, -13180, -99680, 84171, -41342, 1380, -91620, -44383, 71373, -90990, -52537, -16905, 41149, 21605, -37515, -12390, 70247, 98393, -78212, 83911, 56200, -37290, -97020, 92090, -92270, -1097, 43804, 81947, -79360, -64502, -32419, -34972, 95059, 60221, 26556, 56759, -52378, -27582, 22289, 49018, 26708, -73798, 74889, -53353, 23493, 19972, -64360, -73293, 71579, -11674, -65441, -27254, -97524, tail", "row-1333": "head, 31866, -15267, -7663, -88065, -93048, 17860, -72621, 2925, 30155, -97345, -38423, 72938, 91635, 35915, -24451, 21607, -44652, -45783, -10069, 3771, -8812, -45415, 57490, -30853, -14604, 71579, -61847, -24377, 70708, -72125, -33832, -50194, 34333, 36005, -19572, -31698, 87389, -63316, -76768, -74302, 67027, -48716, -1515, -42990, -87852, 35816, 59933, -56853, -17825, 42260, -29636, 61066, -30936, -76755, -2336, -36316, -57323, 84227, -60900, 6355, -58922, -9435, 74561, 20915, tail", "row-1334": "head, -82654, 86396, -57628, 35812, -35889, -6861, 8426, -35547, -60971, -625, 59801, -60060, 2192, -87492, -77701, 64096, 14575, 99580, 78795, -52971, -2482, -81490, 14319, 86008, 45958, -90660, 21346, 5166, 47668, 10672, 62053, 90305, 13963, -1900, -22387, 85413, -35495, 94653, 89831, -91727, 39088, -4602, 60802, 54536, -52291, -15988, 35310, -77579, 58116, 64550, 77240, -31397, 68330, -46951, -51332, -7806, -37864, 78826, -98699, 83211, -26696, 67876, 5478, -89872, tail", "row-1335": "head, -55487, -71581, -37419, -74599, -90670, -80883, 36171, -24658, -19886, -74938, 27860, 99641, 21002, 11914, 1978, -44562, -72512, 87651, -85619, 92752, 62707, -46886, -1735, -57409, 34729, 61717, 69817, -92715, -15522, -3121, -70064, 8131, 29162, 65126, -64413, -50819, 43356, 20769, -75991, -42837, 95427, -97789, 16134, -27032, 81951, -34041, 11344, 48503, -68, -33030, -88790, 53228, -23110, 63958, 22373, -73127, 72656, -37045, -58857, 80112, 11323, -9714, 95859, 80506, tail", "row-1336": "head, -18488, 28629, -37101, 81907, -73379, 2172, 60066, -14716, 80788, -42413, 10992, -62917, 33934, -37436, 27625, 9623, -53149, 36949, 70723, -81875, 41685, -11490, 60353, 61642, -78671, -50069, -56572, 80288, -80227, -32154, 94081, 37770, 59162, 26068, -58615, -63988, 50861, 23473, 99164, -91706, -59557, 58898, 95139, 55627, 32280, 13749, 95517, 1617, -35375, -21988, 46749, 26963, -76807, 70596, 52406, 80412, 89026, 38088, -19578, 43889, 53540, -15067, 8600, 99927, tail", "row-1337": "head, -60588, -83713, -79731, 20437, 69065, -63141, 78533, 12649, -71441, -73861, -27969, -34921, 44286, -34866, 68354, -83341, 79246, -51720, 50406, -94743, 15114, -64366, 50730, -8076, -21188, 48482, 9427, 55363, -23433, 44151, -87394, -67791, 72090, -92239, 53994, 49115, -56733, -24433, -52468, -82003, -41840, 79809, 36892, -70929, -3403, 36757, 93204, -95484, 71209, 71008, -17699, 2552, 
-83120, -54135, -48133, 4116, -82109, 39523, 68694, -48614, -76233, 81400, 45936, -49414, tail", "row-1338": "head, -7007, -21179, -55775, -55132, 67367, 23087, -75518, 15685, 86833, 27850, -67937, 72969, 39589, -31647, -36218, -93298, 98590, -55177, -72953, -5648, 55947, -97701, 30252, -9790, 6824, -90364, -15369, -66226, -66742, -97905, -18307, 89825, -73149, -53676, 27862, -90098, -72617, 74574, -73325, -95581, -78492, 59013, 21722, -83468, -35652, 5317, -78613, -65406, -73348, -40123, -63545, 63582, -92430, 57354, 46679, -66485, 41672, 48497, -52669, 51332, 81058, 74171, 64951, -4886, tail", "row-1339": "head, 45209, 94964, -54053, 11402, -4524, -77548, 12031, 57376, 72341, -98922, -16324, -33816, 52069, -53684, -59500, 37894, 10524, -32569, 43329, 60590, -10985, 8481, -90709, 22307, -57983, 36767, -84118, -20306, 98592, 53170, -14089, 56678, 91039, -23251, 51751, -32067, -87138, 15326, 23936, 47137, -17954, 37112, 82089, -18683, 85014, -17249, 79715, -34330, -25845, -89973, -65141, 42710, -60545, -58069, -51655, -88034, 14413, -92700, 71339, 91626, -27325, 73850, 9237, -3404, tail", "row-1340": "head, -73975, 13357, -25667, 94598, -1950, -15266, -70701, 12991, -8481, -86425, -24701, -96798, -19831, -64464, -73126, -32294, -58441, -31957, 23456, -35014, 52839, -69290, 18158, -87961, -38713, -40475, 37004, 60508, 71339, -13055, 94373, 88951, -76936, 45609, 4087, 44041, -10529, -41969, 80639, -37590, -71601, -88811, 26853, 35802, 72466, -1728, 58831, -37804, -96770, -5653, 48968, -15510, 74392, 77430, -32839, -10978, 93443, 68835, -94379, -95006, 12494, 9009, 85259, -2621, tail", "row-1341": "head, -65282, -51504, -7008, -97106, 7405, 35239, -6851, -27160, 33755, 67571, 41730, 18523, 83618, -7641, 62661, 23364, 59894, -60228, -31738, 5369, -3672, 93635, 57628, -90834, 63295, -93730, 10937, 2593, -54258, 65588, 29977, -99703, -22402, -65401, -14072, 56619, 96253, -10998, -32061, 84796, -99915, -13638, -93649, -27775, -83489, -5455, -45561, -71052, -82648, -45749, 36970, -13494, 56381, 88267, 22082, 64320, -70885, 28502, 28042, 10017, -34428, -8265, 55923, 80535, tail", "row-1342": "head, 98620, -56960, 71606, 1247, -34300, -72710, -79447, 86902, 76324, 37202, 88286, -69126, 21308, 81167, -91871, 79732, 48819, 78726, 24648, -5428, 39236, -31763, -38702, -8211, 53823, 8801, 14009, 47737, -52662, 77283, -49204, -4243, 2340, 96062, 87051, -65028, 31804, -17828, 94952, 32645, -95461, -4172, -26186, 28466, 8532, -19220, -40414, -41009, 97090, 16085, 48947, 84845, 72461, 33111, -96573, -4120, 2289, -21575, -36048, 6294, 36935, -22155, -63790, 56707, tail", "row-1343": "head, -42670, -77736, 10278, -21288, 62499, -48192, 18404, -9144, 42830, 25970, -16289, 836, 22168, 6110, -55826, 49833, -2400, -58349, 51797, -13747, 38288, 26057, 49452, -98419, -7906, 76523, 93625, 69618, 9948, 95811, -57682, 91626, 94030, -52670, 5079, -31058, -60101, -17922, -74270, -69712, -2811, -64021, -26175, -61051, -99815, 85193, -62169, 72748, 74183, 35477, 25434, -42640, 96012, 23330, 25085, 19817, -2390, -88483, -3172, 39168, 33105, -6930, -74392, 60516, tail", "row-1344": "head, 45983, -41185, -2046, 61611, 46976, -44440, -78207, 59247, -72291, -17377, 55834, 75139, -25517, 29120, 40978, -88162, -73021, 5567, -80574, -49224, 22634, 82333, 35013, -83542, -4933, 14953, -25963, 85259, 11587, 4185, -13188, 40750, -28760, 62169, 800, -42057, 65990, 74303, -41118, -42483, -36955, 86135, -47162, -16282, -651, 9912, -21982, 19245, -99869, -87036, -26623, -23525, 69308, -48589, -66993, -67022, 46893, -25779, 13516, -16068, 
69536, -65312, 59984, 880, tail", "row-1345": "head, -4607, -99819, -57791, 71315, -16510, 83249, 72330, 84165, 92634, 7711, 16823, 5027, -34659, -19091, -83606, 76610, 72432, -73564, -44299, -83155, 50806, -39002, -67246, -60636, -77714, 17619, -47014, 19904, 21949, -37390, -30930, -13232, -95978, 57545, 54892, 44606, 30142, -29773, 70870, -64248, 37294, 52165, -69829, 93450, -44152, 89124, -46907, -67934, -15937, 79099, 70571, 23335, -16395, 16737, -15167, 33363, 65213, 66945, -93228, -19380, -4611, -72254, -5664, 63223, tail", "row-1346": "head, -41375, -84679, -60214, -31322, 22479, 441, -51931, 42364, 53177, -74134, -18275, 72418, -1163, 31507, -95471, 78361, -57724, 33899, -56803, 91005, 30608, 71899, 57176, 14103, 16526, -29082, -28781, -46777, -74302, 25227, -57854, -15309, 59407, -61810, 73835, 37573, -75023, 65811, 36121, 95258, -47723, 1882, 96975, -70850, 98549, 97436, -51207, 75667, -13875, 43330, 72230, -96480, 78434, -79307, 62007, -29352, -97873, 38552, -39752, -49281, 13163, -22162, 94831, 35423, tail", "row-1347": "head, -47365, 81675, -1588, -20679, 64700, -83257, -75555, 3322, 22816, -30399, 88012, 53325, -49946, 54192, -15747, 53988, 47885, -11018, 39879, -53736, 85816, 21643, 91286, 74943, 67752, -917, -11618, -67439, 81088, 41857, -85908, 82178, 87801, 26607, 47067, 39156, -15711, -64849, -72109, -62785, -31536, -69321, 32234, 72568, -29673, 93908, -48731, 49547, -44266, -91681, -78262, 88710, 7480, -73475, 50231, 68221, -25615, -19805, 50462, -58986, -91552, -17741, 40528, 78956, tail", "row-1348": "head, -49544, 51056, -16164, 73635, 68122, -66664, 95538, -3702, -43617, 94842, -37701, -66805, 43565, 25950, 61349, -36253, 67444, 5572, 4429, -34784, -73934, -85251, -13076, -27167, 42391, 14879, 7180, 64758, -89566, -81557, -10459, -6813, 392, -52392, -91826, -69141, 45978, 6140, -52525, 25508, 62238, 53013, -42404, -56086, 4476, -23343, 82769, 9666, 75495, -30887, -77667, 51837, 34471, 3792, 99124, 45790, -97797, -52853, 69472, 23127, -12273, -3106, -35310, -53544, tail", "row-1349": "head, -29163, -90460, 27571, 80709, -6877, -30744, 61849, -27078, -61866, 31222, 77053, 30612, -4507, 68356, 3547, 19330, -90735, 13758, 79924, 61686, -98807, 81213, 52008, -18511, 36495, -84714, 38279, 68838, -16316, -94625, -61533, -39425, -64454, -43364, -66867, -5672, -91756, -9412, -92618, 29266, -42706, 37127, -14559, -11644, -87258, 93929, -55754, 66946, -66735, -18679, -83427, 62925, 66868, -8452, 73509, -49467, -2499, 47931, 46005, -7280, -26658, 22893, -91345, -79700, tail", "row-1350": "head, 21404, -23329, 595, 87616, -15107, 31359, -16849, 83846, 51093, -69836, 48691, -63443, 62825, 28484, 56877, 80145, 19178, 97594, 53906, 19198, -53133, 83555, 93162, 93391, 77780, -53163, 53270, -14043, 3662, 62664, -35643, -98403, 49900, -21764, -53565, 53358, 96393, 82945, 64354, -36596, 46148, 68753, -38521, -66378, -87736, -29693, -96709, 2128, 14214, 79528, 51208, -44067, 11541, 29275, 19765, 67281, 22752, 13892, -52567, -23434, -67741, 13705, 56777, 71058, tail", "row-1351": "head, -4942, -18006, -16843, -34716, 42436, -73785, 50874, -89327, -9340, 93446, -26084, 55625, -73682, 90068, -14669, -73002, -92377, 80585, -37552, -36820, -71022, -94661, -59528, -46023, 84492, 26640, 58596, -74792, -44138, -99906, 75381, 34689, 49792, 74293, 49301, 85777, -62324, 25903, -410, 76385, 43056, -78115, -81096, 48752, -72815, -76787, -9871, -9666, -8365, 60640, 30852, 34397, 99804, 2174, 92448, -6084, 33771, -53671, 92894, -22208, 1985, -31752, 20046, -73195, tail", "row-1352": "head, -67502, 
83225, -94537, 76965, -94357, -85154, -67289, 5020, -57199, 34396, 99098, 79493, 63187, 43063, 48977, -36159, -70133, -42582, 36688, 17178, -94049, 48838, -36107, -75805, 3672, 47369, 80932, -96529, 46854, -1464, 6033, -8422, 27421, 11380, 23437, 26121, -43777, -87800, 52115, 36905, 54625, 15604, 46136, 85531, 33209, 90340, -53359, 52469, -74383, -58982, -87813, 45352, 42486, -15025, -77496, 47347, -17004, -14473, 29354, 80094, 47992, 10354, -80807, -89100, tail", "row-1353": "head, -59725, 96118, 20056, -26154, -40528, -5943, -5498, -45889, 95401, -23198, -52524, -23331, 33993, 92568, -15653, 19986, 35622, -33359, 67270, 36444, 36057, -91734, -82127, -38386, -28878, -93415, -38605, -34213, 76068, 48491, 52643, 59446, -61507, 89011, 13679, -3893, -68084, 49133, 41422, -52943, 39872, -77000, 9519, 28049, -92291, 76088, -9133, 88755, -90913, 18876, -98570, 49473, 7869, 87832, 18813, -96781, 74440, -42548, 45757, -44624, -80132, -28905, -39989, 18384, tail", "row-1354": "head, -80699, 14547, 89318, -94201, -12262, 55903, 90497, -5891, -46817, 25585, -38678, 1533, 22894, 55041, 38719, -75059, 16159, -83615, -25192, -1301, 72635, 3747, 35333, 6346, 88973, -7023, -65844, 11350, -69467, -64902, -92525, -42273, 21768, 97410, -64875, 45200, 54620, 28679, 94287, -88287, 90344, 14562, -71706, -94222, -88999, -92281, 42726, -70044, 94219, -21630, -19508, 67782, 2682, -62071, -23026, -12337, -23428, 78594, 48340, -20142, -9248, -80749, 45819, 84569, tail", "row-1355": "head, 34691, 77033, -807, 15661, -17192, 79407, 57863, -6067, -50262, 37265, -29024, -91191, -13973, 60964, -22183, -83964, 21210, 37847, 4729, -82349, 79032, 88881, 84835, 18988, -90241, 15008, -82083, -49817, -3691, 66960, -9046, 90761, -61517, -70311, 95642, 98059, -28035, 84695, -80863, -8082, -40705, 57904, 67642, 77020, -87847, -1235, 77150, -1589, -61990, 32106, -20380, 59127, -95609, -22361, -89426, -68842, -97435, 98321, -45958, -3979, -71879, 73973, 66824, -81418, tail", "row-1356": "head, 47036, -75189, -21567, -44096, 13991, -4519, -68346, 81663, 26154, 26534, 64006, -41842, 89673, 46360, -30241, 72171, 86760, -98118, 92896, -18896, -2839, 63572, -32129, -42081, -69499, 34247, -18389, 31187, 1879, -35882, -79830, -38286, 27216, 29964, -66606, 94631, 51666, 72481, 6074, -18805, -23743, -84826, 66859, -70512, -43975, 5919, -40721, 43015, 54742, -53346, 45452, -68742, 24670, 18132, 66030, -81514, -48333, -60186, 26400, -38515, -19623, -44845, 92969, -28733, tail", "row-1357": "head, -91503, 37300, -90323, -89821, 58539, 45317, 94875, 4059, 63672, 22354, -55967, -69385, -34926, -50838, -79577, -97152, 33372, 11421, 28485, -59760, 25863, 79202, 3291, 91396, -47233, -62091, -96835, 99706, -66166, 4886, -38466, 73117, 74103, -90245, 23109, 10322, 15899, 51751, 86595, 45417, -85922, -71096, 27002, 83763, -58611, -12038, -22488, 91249, -41991, 57583, -44756, -27300, -7044, -60141, 41949, 59451, -82045, 77692, -1909, 91019, 16514, -324, -75239, -46438, tail", "row-1358": "head, 50021, -2740, -30211, -3502, -62795, 56504, -84097, 8093, 47924, 66625, 98267, -610, -31798, 29308, -52127, 94503, -2063, -44689, -21248, -37194, 3023, 66724, -6762, 23019, -84574, -92169, -29984, 69048, 53588, 59389, -99908, 26214, -40526, 93451, 92928, -87965, -37318, -70752, 13091, 56383, 73155, 50808, 79261, -68053, -43111, -76802, 24893, -33679, -37200, 36051, 60047, -62548, 32937, 68209, -96049, 7312, 25789, -78301, -36119, -95382, -91770, -33238, -84919, -9344, tail", "row-1359": "head, -30881, -43633, 20921, 24776, -29594, -71569, 84373, 36594, 32183, 
86020, -79585, 94865, 19772, 17687, 25524, 36221, -60316, -94207, -44779, 78975, 47181, -86315, -84959, 2283, 6552, -31152, 97074, -12510, 93895, 63121, 88600, -99091, 15476, -24762, -49481, -11794, 72937, 47200, 62284, 68443, 87704, 20735, -4974, 24774, 18708, 55608, 70146, -69743, -92644, 44438, 37540, -9130, 91591, -34729, -80503, -73481, -61096, -29885, 30752, -77919, 89351, 36549, -90506, 7943, tail", "row-1360": "head, -23968, -86351, -22330, 62961, 45985, 74074, 26843, -29986, 72160, -31080, 87669, -39798, 49207, 86004, 13227, -65242, -31347, 2153, -22484, -49898, -95584, -91815, -99963, -10448, -23722, -38645, 19484, 4999, -37571, -86160, -13683, -64206, -60114, -57515, -994, -63272, -23446, 11442, -82067, 22460, 43774, -54184, 50480, -58809, 67298, 81091, -89221, -28066, 69528, -23871, -29610, -63311, 5115, 54921, 42223, 24282, 55688, -42589, 72913, -39993, -34193, -16332, 4804, -72232, tail", "row-1361": "head, -23639, -98929, 76125, -98504, 49592, 40673, 87315, 48336, 92890, 53146, -36113, 3486, -55838, 73171, 8698, 3636, -4361, 37349, 90107, 96897, 6392, -88444, 70164, 74930, -32592, 7920, -15333, 27156, 93073, -19896, 23765, -89985, -72509, 67170, 40395, -66908, 23192, 76312, 24796, 70535, 60433, -49234, 44101, -23196, -79764, -19169, -26344, 73806, 41959, -21126, 63488, -66640, -62944, 53502, 30305, 51763, -92260, 58200, -51919, -83079, -28052, 45615, -24058, -54766, tail", "row-1362": "head, -62309, -5901, -76911, -11271, -34585, -57817, -91525, 39970, -61557, 50455, -15159, 72845, 24560, -61738, 47787, -77606, 74724, 3697, 24053, 65734, 26163, 319, -10054, 44797, 92380, 45812, 22431, 61501, -97076, -21503, -17866, -90856, -94095, -49021, -34335, 28706, -30500, 8614, -56069, -97986, 15094, -35688, 44928, -12505, -52462, 78341, -74618, 95303, -70897, -7717, 43468, 86847, -17203, -58218, -8252, 28902, 27211, 40424, 54055, -50799, -37839, -17877, 35804, 79212, tail", "row-1363": "head, 29668, -28443, -78610, 74814, 42333, -95396, 67473, -28711, 4270, 54466, 53001, 98665, 19119, 88022, 17095, -83722, 91891, -2925, -689, 47157, 59769, 87714, 74172, -90680, -71262, 10381, -69039, -72797, 17251, 16341, 15673, -13216, 1512, 72689, -70709, -94277, 7446, -50348, 92332, -83382, -55026, -49655, -8143, 48837, -27283, -57517, -60755, 50867, 65370, 61020, 76480, -92824, 95532, -808, -69702, 25424, -43673, -21117, 30842, -54330, 80121, -28861, 91156, 12778, tail", "row-1364": "head, -71072, -5493, -89764, -72418, 66045, -9375, 32631, 98948, 31843, -13068, 92019, 66770, -25754, -59214, 17617, -85241, 60784, -81898, 62891, 15277, -14998, -96668, 71121, -83728, 26954, 63018, 72953, 194, 43332, 49796, 71173, 35244, -75582, 54697, 87116, 66949, 33397, 92640, -63149, -25010, 74894, 95767, -85020, -36503, -79209, 41964, 34026, -50563, -84549, -77579, -61052, -5848, -35727, 47327, 52950, 58920, -5496, 80081, 93020, 98986, 539, -93486, 16339, 60941, tail", "row-1365": "head, -32490, 27380, 69782, -1064, -97166, -57075, -34469, -49498, 68748, 51683, -76915, -41069, -30558, -14193, -90033, -4109, -68690, -93617, -98054, 27776, -1887, -38872, 26330, -7979, -51805, 512, 93672, -43468, -22384, -8830, 40449, -446, -6235, 36618, 94694, -27417, 25889, 61547, 60881, 79395, 63212, -11570, -91430, -59269, 71528, 97570, 52875, -45645, 29111, 25530, -47022, -95416, 88355, 43107, 31953, -1539, -76075, -76773, 99250, -54694, -35278, -24708, 45109, 980, tail", "row-1366": "head, 76849, -12879, 17211, -39602, 83600, -30162, -32419, 72056, -44163, -82261, -89428, 74779, 34420, 13696, 30363, 18789, -27773, 80639, 
59966, -7107, -99373, -75293, -40139, -90685, -57460, -37318, -83012, 94915, -67092, -71627, -85840, -47193, -66224, -58037, 34181, -40625, 63717, 82932, -97477, -94411, 52923, -91958, 36511, -5944, -80958, 2128, -24114, 77497, -79814, 4257, 98527, -67744, -65049, 55462, 19143, -23230, 36895, -51660, -67801, -84537, -98426, 79974, -98091, -58464, tail", "row-1367": "head, 34710, -97406, 74870, 30475, -13488, -19236, 58305, 36028, 98056, -52421, 81104, 50223, -55277, -76512, 83439, 29657, 8935, 21252, 688, 2930, -24006, 49785, 77161, 35951, -83817, 72952, 83755, 42397, 75676, 28620, -34619, -42989, 962, 5812, -1737, 13848, 11998, -41476, 78511, -29281, -6209, 84492, 7159, -26258, -51372, 30463, -22091, 73064, -49825, -75021, -94325, -97915, -98084, 98826, -39132, -13420, -99177, 42037, 46796, -33276, -11229, 10214, -1153, 43333, tail", "row-1368": "head, -82079, -7903, -65270, 83537, 95381, 27351, 12531, 53938, -57979, 68450, 42660, -29682, 53091, -39602, -22088, 27382, -76487, -21082, -23523, -98728, -44326, -79570, -19658, 71400, 34718, 81937, 34135, -29969, -74255, 40376, 72408, -23787, 32689, 56833, -80854, -48748, 31572, 16976, 78259, -63414, -57224, -10655, -36352, -38197, 84904, -18133, 97631, -33765, 32252, 77767, 74346, -56406, -79130, 38902, -17492, -54554, -53631, -38935, -69791, -82086, -82959, 76007, -57167, 31554, tail", "row-1369": "head, 77415, -96869, 55347, 93116, -92255, 23502, -12116, -16630, 5436, 38367, -45404, -95963, -89588, -79129, 91584, -40831, -42153, -66408, -2105, -19646, 30139, -35103, -76570, -22349, 21710, -22314, -48411, 38800, -33502, -96486, -65080, 66239, -79443, 59803, 89524, -74525, -15195, 16325, 19484, 92758, 85447, 9006, 99485, 46296, 56371, -1930, -21011, -39453, 37674, -79680, 81441, -4921, 99969, -10520, 62849, -37227, -97896, 99088, -45201, 56237, 16944, -13796, -81719, 8781, tail", "row-1370": "head, -20994, 71041, -58779, 41638, 62161, -41023, -71416, 85839, -579, 61858, -27583, -28963, 84692, 1497, -21810, 84171, 28778, 47417, 23145, -39470, 40906, -33341, 12804, -8594, -38885, -23246, 78973, -69545, -67276, 86948, -90788, -15094, -76606, 99620, -89493, -22802, -98630, -33804, -18840, -96812, -74989, -68698, -25919, 46008, 79272, 16589, 6071, 51270, 37588, 37898, -16836, -33292, -29991, -43563, 78040, 95838, -37115, -8133, 88606, -90968, 64579, 75562, 19354, 67630, tail", "row-1371": "head, 3200, 16053, -34484, 75253, -45655, 44902, -54158, -27568, -10767, 81072, 98209, -19460, -66611, 58934, -50757, 97767, 21436, -21295, 76018, -53851, 97519, 36082, -84980, -21781, -32903, -91758, -73496, -31418, -11853, -98325, 33401, -18949, 66995, -63919, 41729, -13444, 36815, -4353, -30486, -45738, -98025, 45812, 61319, -65646, -61942, -60144, -62631, 72617, 24493, -53763, -64775, -75851, 52364, 11319, -67454, 57832, 83506, -49632, -24999, 35981, 64489, 84010, -83309, -83524, tail", "row-1372": "head, -44576, 98557, 87732, 92395, -96396, 68527, 24004, 3756, -64860, -49381, -63391, 30454, 24037, -85536, -64497, 37177, 31142, -15964, 85128, -3831, 67796, -13697, -72731, 14710, -26007, -12915, -84154, -49641, 11386, 24372, 47827, -25352, 18397, -92295, 48164, 94650, 89769, -44627, -5407, -27392, -22069, 57442, 17236, 59426, 17988, 42135, 9374, 25513, 91774, 84747, 6200, 95117, 55361, 40076, -43305, 68141, -94599, -34891, 60734, 29293, -21601, 57937, 65293, -73669, tail", "row-1373": "head, 32729, 75017, -77560, 255, 433, -93907, -1274, -65752, 91275, -91154, 54630, 89706, -71623, 73381, -78167, 96186, -34816, -36983, 84179, -40020, 52787, 54969, 51246, 
-5852, -51112, 60356, -15907, -78164, -95034, 81896, 91398, 84863, -40258, 47471, -78313, 92478, 94164, -13390, 42452, -4478, 35619, 3302, 80848, -90686, 30171, -91904, 82657, 86757, -34680, -9087, -99189, 41848, 50823, 13697, 41862, 48147, 13980, 89528, 75239, -3127, 27705, -92067, -49791, 27432, tail", "row-1374": "head, 72705, -40446, -54519, -56349, 51461, 21768, -90256, -71353, 37337, 30399, 69645, -5166, -10483, 68179, -59684, -85529, 56669, -42167, -9583, -48403, -1301, -18259, -53352, -82227, 93404, 35521, 88216, 45761, 23520, -96404, 96747, -20369, 68645, 14097, 18398, 62105, 47936, 3600, 96569, 38360, 80091, -79288, 43992, -86826, -1512, -70723, -29068, 68501, -68933, -32637, -96980, -70709, 91052, 61822, 55431, -53832, 91720, 47545, 57587, 16393, -81981, 61398, 68366, 60764, tail", "row-1375": "head, -42580, -1494, -41849, 42659, 6674, 4762, 50449, -78488, 21327, 2759, 88763, 87150, 88474, -17680, -99105, 9821, -56773, -18353, -63182, 6495, 69682, 48401, -51130, 63135, 16608, -65574, 71909, 76920, 31204, -29473, -30609, -43804, 3163, -2968, 25219, -97904, -2066, 69404, -84464, 9046, -6277, 55973, -38672, -67772, -17071, -84149, 4044, -11875, 9020, 90997, 32956, 68290, 38305, -74755, -82930, 66572, -52831, 55033, -88799, 76114, 83696, 91145, -22221, 34236, tail", "row-1376": "head, 48129, 29910, 2945, 22268, 700, 262, -33389, 17833, -80299, 60619, -13747, -50169, -51057, -94079, 95390, -9317, -5436, 21278, -94072, 30326, 93165, 78055, -48133, 57653, -43883, 47866, -5658, -45787, -25779, 16735, -79863, -61972, -25681, -67529, 65485, 34659, -57517, 7662, 6277, 4725, 38126, -72859, 83940, 52857, -57956, -48837, 45141, 26736, 80680, -32679, -18457, 69778, 27514, 25380, 87649, 42720, -52431, -45983, -84106, 39037, -41666, -76065, -97932, -99302, tail", "row-1377": "head, 95364, -98590, 13483, 79435, 11624, 1802, -48472, 8191, -83667, -18375, 72333, -39602, 86931, 48370, -87961, -64138, -65323, -47999, 34338, -3680, 19668, 45257, 57143, -89931, -7977, 75971, 21415, 41628, -65042, -61979, 10339, 49201, -71720, -21174, -5736, -29660, 89786, 15048, -69636, 74286, -58469, 53366, -6502, 81016, -60326, 6785, -93907, 2032, 98580, -64563, 61434, 61209, -98990, 82119, -75390, -72623, 63705, -17532, -97900, -95366, -11507, -18949, 37343, 25329, tail", "row-1378": "head, -28286, 92286, 1544, -50138, 91449, -94734, 18783, -51571, 49949, -50710, 22236, 28661, -40215, -96090, 2924, 3996, -10398, 92513, 7979, -90925, 88576, -29243, 38799, 99367, -84738, 6137, -23564, 25491, -40217, 44105, -37793, -34834, 26921, 6917, 78914, -94310, -86227, -21696, -27403, -81619, 8471, -83547, 97029, 54710, -39711, -76781, 16314, -63545, 90528, -64260, 13105, 64896, -29542, -13928, -55430, 40906, -78192, -92033, -96557, -70466, 79758, 72571, 26193, 73086, tail", "row-1379": "head, 72040, -33939, -56788, 39309, -22409, 31151, -78159, -55264, -30781, 26708, -39643, 37038, -71516, 88145, 70090, -59269, 92369, -45083, -7534, 84528, -45929, -17792, 28027, -15809, -70773, 88308, 69656, 28378, 90627, -18731, 29115, 15610, 81116, -58747, -67404, -67532, -32739, -64357, 41921, 47327, 66945, 7398, -25840, -6422, 90823, -81125, -68240, 85048, -94235, 98534, -41176, 58455, -89516, -86280, -55671, 66867, 14388, -47699, 28779, -64175, -89282, -23763, 25895, -3445, tail", "row-1380": "head, 50724, -20707, 83707, -32919, 70235, -45685, 7941, -73068, -27169, 47499, -74856, -5868, -45644, 73786, 85823, -58334, -33895, -50702, 64954, 50859, -21902, 98993, 37406, -67390, -50808, -82944, -42317, 93459, -14192, 70140, 8890, -661, -23357, 
-32053, -23070, 78492, 72218, 92322, -84727, -69189, 26934, -12440, 85528, 489, 81768, -23396, 24888, 46086, 35561, 43322, 38697, 19632, 32517, 43380, 23889, 13506, -32169, 66659, 21556, 7646, 20597, -14583, 86107, 16805, tail", "row-1381": "head, 36970, -10245, -25446, 58850, -33047, 52234, 81435, -46174, 38141, 88581, -91249, -39533, 14805, -47813, 75962, -12869, 88426, -37369, 28141, 23535, 79369, 70977, -84328, -45403, -99744, -13348, -56594, 77015, 385, 20164, -77977, 35224, 12678, -82644, 47852, -62283, 642, -38216, -494, -66088, 82064, 8344, -68851, -87952, 86926, -37566, -40091, 88436, -68376, -64267, 51267, -50768, 36573, -30442, -85592, 82667, 3603, 98264, 23311, 85422, -57713, -72519, 89167, -99153, tail", "row-1382": "head, 40262, 76020, -32406, -68542, -77307, 99186, -25774, -90453, 12310, 19032, -52883, -80653, -34239, -97176, 19089, -22595, 17391, 56298, -4526, 55409, -92518, -23172, 49821, 73162, -58928, -20746, -9885, 53313, 46929, -82250, 53268, -43391, -1634, -42690, 25721, 39811, 95572, -89312, -18256, -63462, 40469, -53484, 82367, -15817, -35729, 9911, 4961, -45713, -71092, -66248, -70725, -48715, -55844, 71039, 63139, 34768, -18070, -24078, 52857, -23341, 1763, 22807, -40241, -12145, tail", "row-1383": "head, -60932, 78258, -36569, -44680, -55633, 69672, -43801, 64554, 40235, 54169, -46865, 28836, 65532, -77380, 61095, 19013, 76490, -91185, -5894, -38144, 68274, -85121, 62281, -48163, 78557, 20557, 94171, -27124, -92842, 9706, -56113, -51994, -88590, 74589, 72094, 41689, -18610, -63114, -99843, -26669, -12912, -50329, -50944, 85621, -21281, 39768, -11082, -94825, 46467, -99307, 44575, -92640, 70604, -99577, -85305, 14007, -10326, -9584, 15538, -32502, -91334, -29598, 74447, 85228, tail", "row-1384": "head, -22086, 76051, -32375, -1110, -69446, 16851, -88890, 9488, -50460, -54901, 20381, 55624, -82673, 67676, 2898, -24181, 95991, -97246, 30316, -84062, 63018, -25000, -36506, 38540, 19346, 25943, -75417, -43775, -29493, -95107, -62255, -50613, 18152, 95098, 27693, 55691, 85868, 59114, -2693, 79426, -42293, -4732, -63072, 22864, 70520, 45367, 75064, 52066, 6911, -31592, -58830, -31838, 47396, 16618, -26563, -9007, 31840, -75866, 82719, 14529, -74571, 86926, -41327, 62551, tail", "row-1385": "head, 66327, 30056, -96076, -1076, -58797, -43821, 39686, 61614, 95180, -20358, -16188, -14318, -37053, -18787, -60464, -29407, -10705, 35324, -94236, -15066, -79965, 95173, -19191, -33857, 6647, -31436, -49211, -81963, 95307, 60642, 42773, 14106, 12410, -30156, 96246, 1159, -45330, 2970, -84399, 71047, 94826, 11246, 215, 74768, 13989, -22495, -34677, 45021, -89998, 47226, 89369, -46491, 98767, 47409, 60086, 77498, -56721, 82993, 24922, 97061, -6143, 48737, -17969, 15354, tail", "row-1386": "head, 86312, 15321, 51923, 482, -95185, 22123, -25211, 88699, 14175, -2403, -82230, -32553, -73243, -11942, -56693, 7844, 51218, -44283, 56396, 75294, -93473, -86969, -94932, 45719, -79186, 79114, 83754, 75554, 22099, 1198, 6570, -90723, 66697, 51719, 99034, 46491, -24019, 89751, -48277, -87751, 63813, -95905, 54498, -89795, -67231, 24910, 46827, -56138, -19975, -53923, 18553, -28699, -64789, 47922, -75136, 29081, -96282, -86169, -33398, 52866, 56507, -54344, 20205, -35385, tail", "row-1387": "head, 72390, -59225, -73718, -84479, -87744, -86948, 60230, -15864, -27499, 77244, -50198, -16878, 27954, -67249, 34817, 18309, -29110, -2396, 19415, -9978, -56573, 11531, -82945, 67423, 93962, -87871, 56101, -8235, -90832, 5255, 75132, 738, -11366, -78559, 5656, 46895, 45721, 69422, -90903, -94540, 8162, 
3628, -18180, -77056, 67739, -72934, 36636, -468, 25987, -70305, 25361, 27987, 51985, -35471, 68019, -47697, -37426, 2624, -64658, -16315, 51691, 90642, 30088, -38330, tail", "row-1388": "head, -82558, -73310, 99145, 41940, 45032, 32157, -96919, 94048, -5489, -50226, 71609, -28197, -8490, -85452, 14032, 87847, -63724, 11385, 11411, -91946, -42240, 29835, 50714, -18811, -18732, 86179, 96347, 78175, 23020, -81627, 7900, 30023, 82350, 41954, 20980, 60859, 24275, 91656, -79278, -1094, 80226, 23131, 47196, 14229, 32866, -12753, -43706, 39393, 39173, -71045, -41508, 81528, 87357, 91647, 66034, -73228, -95356, 33444, -91819, -51321, 20128, 2483, 88538, -43381, tail", "row-1389": "head, -55538, 95650, 36418, 8427, -71112, -59545, 38826, -98033, 12040, 55161, -14550, 39225, 76800, -27430, 79730, 87628, -64052, 22292, -15089, 17927, -84715, -32173, -70285, 24861, -80279, -20528, 62087, 34205, 11782, -45894, -98967, 33801, -95882, -25327, 56046, -94384, 97292, -57750, 88972, -3823, -92012, 1613, -16220, 42395, 54214, -79659, 50697, 51753, 83276, 3311, -97692, 77947, 64640, 43912, -98026, 14091, -62343, -97561, 44297, 68553, 47596, 39908, -93517, -86094, tail", "row-1390": "head, -51771, -41428, 69432, -44852, -65963, -6041, -29187, -74799, -22454, -42631, -51036, -41180, -73348, 21958, -54673, -76937, -73595, -70684, 10111, 69276, -99881, -1053, -84274, -63866, 81910, 65290, 6846, 91951, -82385, -49930, 74225, 13740, 33533, -60342, -36774, -54882, -29231, -13109, 27972, -52806, -53968, 71355, 1186, 51384, -81107, 88574, -72151, -9714, 54162, -85264, 32738, -37789, -27245, 49997, -49058, 3737, -66907, -64064, 28828, 91516, -53118, 15067, -62936, 5956, tail", "row-1391": "head, 99197, -70198, -64442, 47458, 56275, 75672, 71061, -65733, -65165, 29355, 24883, -36782, -60782, -87390, -84895, -30268, -80965, -95897, -28002, -26987, -27476, 51574, 49738, -93354, -92209, -3986, 56398, -88287, -44993, -83421, 3021, -37109, -79426, -63708, 18104, 15459, 22225, -30312, -34339, -45837, -45198, -30780, 26590, -5682, 17803, -92679, -28225, -48037, 98270, 62996, -95632, -35545, -98437, 89902, -79132, 97067, -69363, 12929, -69882, -84192, -34130, 76505, -47643, -43467, tail", "row-1392": "head, 98200, -6839, -90990, -20776, 88525, -3919, 60444, 85772, -12652, -63802, 11105, 90815, 51679, -89883, -22805, -65498, -60122, 10485, 46071, -1257, -16387, 25217, -47014, -12218, -43770, 32150, 29977, 80157, 49232, 1932, -53208, 34312, -43407, -17063, 85673, -85781, 74289, -29220, -34996, -53033, -28972, 20977, -86348, -4081, -90873, -84922, 80274, -19568, 20601, -71232, 66300, -81756, 98852, 72528, -55704, 52843, 41375, -81881, 86007, -4221, -27882, -75416, 39729, 85473, tail", "row-1393": "head, -41961, -54817, -71043, -24925, -75222, -50963, -80767, -62621, -75580, -35524, -34112, 15900, -37491, -27577, -44992, -62964, 3134, 98813, -85710, 17468, -82296, 24835, 52886, 62841, -34565, -49788, 29204, 3548, -59849, -37396, 81615, 18565, -52549, 61493, 6164, -75832, -90442, -79584, 59666, -13850, 93584, -88254, -18114, -31738, 79329, 71401, -76272, -27927, 12655, -99141, 77003, 66476, -72401, 73150, 32921, -82043, 82988, 80586, 46014, 49358, 93190, -55710, 97552, 62658, tail", "row-1394": "head, 65177, 47683, 71824, -79850, 27644, 52011, 88118, 34688, 96438, 23224, -50819, -90400, 14051, 54285, -55727, 32363, 44738, -35707, 74269, 11254, -43944, 762, 29538, -95740, -6235, 90561, 21494, 70723, -63062, -96160, 95502, 97613, -70512, -37627, 64124, -6356, 49638, 86891, -94386, 23662, -49497, -53001, 43867, 56619, -15435, -15604, 
9609, 64554, 6517, 94681, -56598, -36656, -92776, -59044, -89404, 29292, 74580, 44398, 32505, 89436, 4115, 3322, 93697, -34178, tail", "row-1395": "head, -73665, 75304, -78660, 16157, -30309, -95275, 7048, -59293, -70733, -7237, 99158, -9595, 35719, 47165, -72474, -7373, -86855, -4213, 52698, 55481, 37742, 98217, 28747, 41603, 77779, 73967, 63453, -88677, 16195, 71043, 30006, 64487, -79555, -95037, 84902, 73357, -89601, 33113, -65114, -23902, 43381, -86696, -59640, 13893, -39942, 41178, 70844, -38560, 98426, 60185, -18605, -18066, -20850, -7089, -37438, 66828, 28401, 80896, -55320, -23941, -86519, -45304, 71490, 699, tail", "row-1396": "head, -26951, 42268, -73409, 41120, -48746, 26404, -30751, -77121, -31223, -26239, 56687, -53222, -52493, 76959, 58043, 34435, -42886, -48320, -65053, -29782, -6273, -99998, -24218, 82915, -52410, 33816, 31128, -99619, -47592, 86181, 4818, -7349, 36309, 99450, 58221, 96872, 45517, -83800, -39131, 96739, -43873, -58866, -61968, 93011, -95483, -88069, 62834, 35728, -95445, 35155, 8039, 35590, 14509, 51889, -77214, 472, 56480, 48255, 12713, 10110, -55521, 35325, -46077, -97388, tail", "row-1397": "head, -55212, -94395, -3995, 71742, 27992, 85923, 89246, -56481, 95458, -43249, -80567, 94216, -1663, 89091, -82062, 35114, 36220, 35317, 23271, -40837, -76554, 78153, 72549, -63456, -16014, 51881, -99117, 36241, 86738, -42686, 8599, 79862, -43316, -77810, -44771, 39355, 42891, -13433, 37804, -25940, -26383, -85980, 68597, -91084, -2882, -14660, 59627, -91739, 85734, 92663, -18057, -97297, -77541, 38223, -86791, -63692, 52540, -54946, 47199, 98211, 24432, 5327, 85127, -54373, tail", "row-1398": "head, -891, 34087, -81347, -44797, 58517, -29922, 17932, -31530, 60692, 76273, -85599, -56400, 34002, -5561, 44969, 36608, 71756, 81909, 65497, 92320, 24314, -36818, -96351, -14857, 91203, 93999, -99300, 77175, 8184, 2782, -81443, 3951, -15922, -54196, -5568, 51296, 20641, 40160, 18373, 66901, 79120, 4139, 83751, 81986, -9088, -72900, 92160, 29787, 55966, 23544, -18366, -3534, 35914, 73665, -60144, 80477, 56980, -73936, -41028, -89670, 18740, 32466, 97804, 54128, tail", "row-1399": "head, -71260, -92936, -32219, 52284, 54332, 86868, 87637, -48673, -43435, -96950, -26635, -32271, 98463, 18682, 8921, -26505, -80669, 76133, 69806, -3101, 7700, -66148, -24679, 20877, 95122, 44347, 2940, -34965, 58286, 77980, 11618, 97811, -8952, -93349, -75964, 21464, -41445, -75258, -81482, 3854, -43006, 67582, -62807, 26140, -61952, 50620, 85399, -38407, 4227, -96807, -7958, 81268, 3338, 49602, 39088, -89929, -38189, 14649, 85828, -63850, 43047, 8745, 11500, 50978, tail", "row-1400": "head, 11642, -69127, 31797, -42828, -1398, -31844, -50330, -78173, -3742, 49055, -55091, -6041, -81811, 13195, 74647, 59148, 91986, -67519, 46604, -47298, -22841, 27925, -54989, -29881, -52746, 84753, -94013, 12461, -93324, -33060, -31148, -4587, 49342, 78982, 33467, 3040, -58679, -99803, 10180, -2045, -25334, -97236, 16111, 75131, -39545, -11048, -49796, 67751, -28857, 64484, 51546, 47705, -35321, 53724, -59765, 21276, 83976, 11656, -7168, -10686, -24641, -76123, -20770, 24145, tail", "row-1401": "head, 9621, -94767, -16726, -90125, 45859, -40272, -10702, 11931, 39986, -84952, -33362, -1887, -83607, 82062, -6088, 42070, -90726, -26132, 59575, -35684, -57226, -56039, -34155, 55389, 90071, -45386, 53269, -44650, -93908, -90407, 75658, -20486, 70996, 84267, 98927, 93489, -12310, -86374, -17361, 24336, -45674, 51119, -31322, -84324, 37076, -15117, -35001, 11695, 72246, -57684, 86129, 99151, -35583, -28568, -77775, 
49124, -94050, -32514, -22065, -75751, 70463, -55480, 32056, 63060, tail", "row-1402": "head, -63434, -51556, 20157, 43314, -74658, -34803, -95644, 10026, 48128, 2098, 12223, -70286, -72124, 26761, 86498, -81642, -77703, -78328, -95330, 38717, -46132, -20617, 4045, 12237, 38026, 99542, -73234, -82574, -61603, -97843, -54321, -28622, 21916, 21298, -30479, 89145, -15258, -19609, -89917, 38014, -96777, -12168, 98933, 7279, 4106, 423, 6823, -80108, -29417, 16083, 48783, 2910, 33482, 28827, -39107, -20210, -71351, -45964, 43102, 87942, -77528, -40269, 75976, -96998, tail", "row-1403": "head, -76802, -29631, -76610, 15209, -80438, 37245, 40239, -57559, 69914, 94698, 26789, -61268, -44765, 4245, -74776, -49989, 71545, -31625, 46245, 79984, -5647, -53260, -14690, 41120, -6527, -96564, 94390, 3945, 74824, -2944, -37521, 12766, -71975, 15364, 95277, 71989, -44251, 30431, 49652, 83491, -54148, 73409, -61564, 92125, -62245, 73921, -37816, -9890, 19279, 96433, 76068, 6981, 45781, -49228, -99415, -60247, 62867, 40121, -31423, 9985, -33894, -40882, -54117, 25059, tail", "row-1404": "head, 31010, 8536, -32580, 59713, -87074, -48775, 90214, 70390, 8154, 74644, -55404, 23265, 15955, 20035, 24041, 97596, 20271, -12040, -72798, 61808, -31726, -12412, -89417, 84342, 82512, -15480, -86795, -64412, 11829, -58507, -65044, -50330, -36640, 36892, 49375, 46172, 8899, 68186, -77484, -24585, -47953, -16233, 7082, 16600, 39433, 75582, 36080, 61351, -84034, -24924, -97895, -51276, 32869, -17345, 46316, -10999, 78531, 81332, -20926, 68907, -29002, -76750, 45393, -82702, tail", "row-1405": "head, 67548, -18719, 3263, -84599, -92013, 27575, 61227, -82727, 41536, 53375, 11198, -85167, 97652, -82846, -29339, 75590, -50447, -39234, 51496, 85969, -39324, -97121, 1559, -29401, -4503, -48013, -72263, 22059, -15792, 25052, -85062, -1771, -920, -16883, -26332, -82702, -13300, -17255, -47424, -81111, -14248, -52141, -55397, -95013, -20970, 74198, 80976, 83807, -43885, 79927, -68064, -56935, -14396, -25604, 24993, 92817, 19435, -91338, -35437, -66326, -87060, -46369, -87794, 92007, tail", "row-1406": "head, -42376, 12457, 22370, -85435, 88325, -61814, -62209, 97029, -95951, 53186, -34836, -9128, 18084, 3829, -70168, -19518, 99177, 88285, 55689, 32004, -33634, 67991, -92112, -58801, -91538, -89066, 15099, 58734, -33824, -38207, -60068, -68450, -59518, 34691, 39593, -14, -85873, -40399, 94767, 33093, 89846, -4893, 80767, 22874, 16208, 28640, -96525, -15047, -22509, -88083, -34674, -65230, 77279, 35722, 96603, -80901, -60252, -18998, 49195, 32956, 12498, -42060, -26409, -2413, tail", "row-1407": "head, -92509, 68161, -8295, 72343, 41384, 18430, 12785, -87190, 85677, -40956, 78493, 21019, -30820, 39749, 57514, 81278, 1656, -40241, -54497, -92379, -52551, -220, -20316, 30590, 47871, 74630, 18419, -27037, -89504, -4447, -28067, -36119, -56453, 98970, -54807, -2081, 31319, 86043, -13546, -42918, 33742, 77243, -61924, 13919, 48372, 68571, -83325, -38514, 97969, 41311, -56428, -52364, 63625, 22279, -19213, 42352, -82115, -52104, -99396, -43224, 8033, -62030, -12083, 32972, tail", "row-1408": "head, 49528, -48844, -78769, -34942, 64414, 3619, -47162, 74959, -19724, -66576, -38593, 10359, 54956, 299, -7369, 14277, -4903, -58484, 41890, -42500, 4450, -40514, 29365, -6930, 46042, -13476, 16782, 56513, 28783, -34128, -9485, 93292, 91991, -49721, 93803, 39154, 10811, 50631, -68118, 26699, -95931, -84184, 71163, 43994, -85687, -9520, 36132, 92583, -38486, 36448, -51485, -32151, -51509, 33084, -62754, -27082, -38658, -61266, 35716, -63919, 66610, 
-10549, -45670, 91492, tail", "row-1409": "head, 16309, 58797, 21735, 41898, 83146, 10391, 11409, -1031, -41032, -48571, -46389, -2277, 89896, 44405, -45226, 99511, -66964, -66269, 5400, 53618, 57751, 75670, 33717, -1285, 29288, 59727, 8721, -99769, 14136, 43770, -39710, -33372, -83334, 73763, 29650, -81526, 2923, -28308, 56380, 64101, -73115, -4603, 21166, 2775, 45336, 84916, -91191, -56681, 54637, 88247, 55167, -1829, -47540, -42870, -54900, -35264, -34606, -34434, 43869, 61178, 26661, 72218, 29276, 38578, tail", "row-1410": "head, 97999, 52599, -63986, -28249, -33, -77178, -44538, 74487, -67582, -70985, -15731, -30296, -4559, -72488, -99398, 14215, 90654, 64510, 239, 12100, -58742, -896, 42572, -19285, -54048, -78185, 35947, -60442, 28744, 66315, 35218, -77445, 24413, -23919, 45747, -65534, -27391, 85541, 3448, 14249, -65281, -37223, -82665, -88828, -52674, -12160, -74701, -57256, 77663, 76529, 76073, 10959, -78691, 93555, 880, 94000, -54773, 64321, -20928, 47620, 62928, 65893, 20321, 50589, tail", "row-1411": "head, -59521, -6824, 59589, 56779, -18309, 91445, -59609, -81630, -81640, 40733, 88289, 85246, -1767, -62322, -51475, -41255, 3464, -98324, -29732, 70768, 97210, 97912, 72139, -91720, 97306, 64851, 26867, 59411, -41289, -29921, -80268, 26708, 93077, -53438, -20730, 31560, 40365, -80637, 1468, -33214, -30799, 53809, -3865, -22819, -46519, -20292, -64526, 31651, 81837, -94383, 68097, 59448, -54532, 84295, 25097, 93580, 21778, 60587, -56184, 13871, 98162, -54947, -33731, 52744, tail", "row-1412": "head, -5626, 92457, -97367, -49427, 63954, 39268, 80139, -37158, -21924, -6989, -94095, -79336, -42734, 40106, -22070, 49790, -74708, -20725, 94643, -21142, -35236, -66965, 30145, 33424, -2686, 82529, 99336, 15191, 89479, -19013, -46337, 44373, 33972, 64758, -3594, 36414, -22067, 82480, -80353, 44914, -55375, -70735, -59638, -20950, -13920, -60432, -16270, 86593, -48597, 72877, -77124, 7775, 86883, 35439, 59418, -77061, 8239, 9396, -43486, 9829, -27679, 65846, 76646, -95808, tail", "row-1413": "head, -99960, 31739, -21698, -56880, -1666, -74355, -70366, -27721, 75292, -73447, -91977, 4509, 58164, 53929, 86690, 43139, -7174, -78303, 58313, -23196, -31551, -50509, -23463, -20255, -4744, 56349, 89572, 83436, -8715, -88198, 88283, 1887, 35294, 58944, -6861, -12386, 92986, -20700, 96833, -96440, -37011, 39043, 26125, -91492, 73729, 72625, 30290, -91537, 76098, -24707, 50178, -44358, 46359, -15579, -30570, 63154, 45647, -51950, -10278, 62765, -32746, -66680, -24645, -60241, tail", "row-1414": "head, 19078, -6325, 75380, 44794, -8140, 3870, 12765, 84341, 63644, -74323, -19867, 98200, -72772, -14649, -87435, 3421, 45253, 66939, 53322, 70354, 85525, 59081, -9804, 61639, -58765, -92368, 48736, 25362, -11357, 17041, 92371, -13468, 47599, -86710, -93466, 59797, 29905, 4563, -87829, -67193, 52866, 18354, -71876, -99417, -89434, -23758, -38560, 78318, 45790, -7154, 67964, 97838, 25553, 47830, -70195, -4840, -47379, 53419, 13340, -48334, -26560, -27356, -29562, -80724, tail", "row-1415": "head, 97350, -34776, 56946, -40235, -54804, 95006, -75756, 47988, 17540, 21824, 86452, -13409, -17060, 86864, -17678, -40350, -57451, -89525, 85318, 51183, 84710, -38724, -47193, -11783, 58364, 67642, 79277, -66661, 41149, 50951, 7882, -77348, 90118, 24465, -52724, 59218, 14491, 42011, 12337, 39195, 7421, 6462, -95978, 201, 17353, 8692, -74067, -49251, -98073, 50552, 83439, -52106, -32598, -35357, 30598, 23126, 40246, 40022, 41352, 75213, 65441, -14953, -45466, 92781, tail", "row-1416": "head, 80467, -91523, 79284, 
(generated test-fixture data, rows "row-1417" through "row-1558"; each row has the form "head, <64 signed integers>, tail")
-1600, -68545, -76681, 58203, -28022, 40839, 24070, -10929, -15352, 74101, -88161, -40948, -66433, 59916, 57056, -58781, 64660, 67974, -82029, -32190, 28954, -16376, -40495, -89639, -94791, -63901, 47569, 88600, -33965, -52891, -89238, 80691, 90659, tail", "row-1559": "head, -53760, 49403, 82676, -68220, 23555, 28367, 28469, 81021, -6821, -6843, 11227, -20909, -38971, -8888, -42433, -7847, 98452, -40106, 50404, -18445, 39612, 3529, -16024, 99672, -1833, -8086, -46924, 44571, -44882, 35882, -26047, 67849, 50285, -85808, -84083, -73301, 13780, 67146, -197, -30798, -22324, 57090, 75701, -86219, 55046, 78357, 88972, 73963, -70040, 22830, -48152, 79736, -97250, -52352, -39927, -28504, 62037, -75362, 56096, 63475, -46660, 37629, -52638, -95008, tail", "row-1560": "head, -83778, 43016, 10268, -87395, 2385, -66364, 76453, -27352, 99916, -48164, -23343, 76194, 63001, -61089, -82649, -48552, -60365, 85476, -1423, -50980, -23622, 57364, 29627, -91671, 39306, -4395, -72042, -72983, 62701, -62864, -38098, 42933, 68015, 80974, 23248, -69949, 46040, -78910, -9917, -73126, 23672, 58193, 88602, 6588, -85788, 1091, 30491, 30579, 17997, -90193, -50188, 40199, 71128, -78143, 23992, -46933, -24736, 68406, 43465, -63643, 15685, -88491, 71738, -90739, tail", "row-1561": "head, 5319, 50994, 56392, 11165, -73780, 65646, -9580, -10187, -23687, 91178, -48521, -34864, -33808, -94301, 33562, -33331, 29122, 56087, 20828, 13399, -48335, 91709, -41839, 93357, -67253, 77254, 14589, -44293, -92344, -50181, -84198, -72766, -86706, -80480, -86255, 81535, -9557, -6634, 18871, 19723, 85134, 6843, -95530, -7231, 72213, -91857, 28398, -45358, -18255, 97141, -44863, 57796, -46920, -58401, -3216, -70182, -75314, 58300, -22116, -84808, 84655, -65946, 69890, -19618, tail", "row-1562": "head, 5092, -65506, -15339, -33522, -97483, -15399, 69109, -78601, 33649, -69183, 55990, 55911, 19988, -38846, -7124, -95949, 99110, 25150, -73380, -41181, -55177, 63255, 88387, -6476, 96936, -32505, -10805, 67008, 8916, -9130, 1469, -87461, 81625, 26431, -85031, -57253, 90190, -71179, -96792, -15069, -65020, 41199, -12211, 14890, 60957, -5571, -64879, -78821, 98727, -25701, -99210, -34176, -19132, 39560, -88073, -63433, -31583, 30942, -4709, -48094, -21882, -235, 88802, 25354, tail", "row-1563": "head, -75945, -4170, 49916, -55448, -22747, 32951, -48062, 55181, -55085, -27553, 63240, 94379, -2432, 79367, 19272, 42350, 86708, 26179, 92763, 56408, 71947, 11730, -71043, 93791, -59169, 30420, 27059, 95518, 1098, -45394, 40381, 77306, -46919, 82892, 48857, 87905, -59569, -53491, 15526, -24512, -86260, 20887, 64024, 34648, -74780, -31713, 9757, -61297, 38207, 70441, -42892, 15139, -46038, -97226, 66790, -77946, -32588, 84888, 66044, -83161, 45189, 30229, 93651, -56775, tail", "row-1564": "head, 87744, 41206, -53225, -36675, -77831, 37979, -34562, 80248, -67783, 49511, -79979, -33757, 38517, 39262, 18349, -19804, -30036, -69371, 10677, -16575, -20969, -73428, -21106, -34313, 33550, 86723, -95887, 17872, 82758, -22484, 91813, -70308, -1760, -6498, -13599, -73978, 64748, 39706, 49792, 52008, 63428, -40510, -72810, 16184, -60667, 14658, -32957, -20283, -4055, -64683, 45730, -21676, 14261, 87989, 50919, -80431, 36834, -52184, -3212, -20009, 75964, -86995, 50209, 61534, tail", "row-1565": "head, -72029, -93433, -64271, -28985, -61879, 11623, 74970, -62798, -91644, 97425, -16373, -88684, 26892, 80341, -7169, -35645, 43504, 75886, -96471, 59284, 91268, -94295, 37811, 15855, -24762, -49531, 12315, 70565, 88549, 38017, 58341, -96578, 68366, 54737, -68539, -59165, 
94216, -96169, 57771, 2446, -99770, -99072, 69946, 14274, 40751, -61317, 75105, -69324, 15708, 41020, -70357, -11552, -40353, 96237, -26346, 35035, -79528, -40977, 33551, -67888, -35546, -63211, 77595, -54055, tail", "row-1566": "head, 67845, 94453, 76300, 65822, 15425, 16065, 90024, -64230, -43321, 65508, 23296, -84189, -5061, -30244, -35153, 50717, 20154, -52639, 47088, 97641, -69234, -70922, 76823, -44932, -67963, 21409, -51228, -77233, -36939, -58184, 42793, -23991, -21334, 73834, 69598, 18986, -67436, -1908, -18298, -2464, 93776, 55515, -71677, -58798, 26205, -4371, 31851, 88097, -17457, -22893, -11596, -84314, 1879, 95281, -77688, 59611, 4877, -81967, -45376, -60519, 2020, -85187, 97169, 21422, tail", "row-1567": "head, 23529, 99331, 59207, 7648, -36713, -3437, 53478, 20332, 50494, -32141, 30204, -14054, 40107, 39246, -76516, -14895, -89589, 77508, 29156, 96446, 39532, 64307, 98251, 54373, -13835, 77757, 3571, 31287, -73714, -6176, 39098, -87536, -91122, -27079, 85655, 69795, -93329, -52986, -26753, -82470, -24171, -21975, 50007, 4814, 50946, 20021, 93414, 24019, 81553, -3614, -83266, -16594, -51117, 52385, 36823, -10521, 28170, -18828, -94444, -37919, -29211, -37513, -29734, -44179, tail", "row-1568": "head, 24363, 4463, -56402, -86734, -17307, -49174, -2649, -54552, 55392, -88936, 68424, 31212, 99506, -7987, -2263, 84415, -9098, 78943, -18337, 83338, 33719, -1364, -29561, 39621, 97370, 68760, -14565, 93270, -20761, 79356, 942, 85596, -49815, -79268, -72514, 73921, -13932, -40787, 65186, -21037, 75318, 22195, -4050, -63653, -87407, -15024, 86326, 33142, 24154, -64711, 14937, -22811, -14178, 868, 79690, -2347, -15154, -61710, 20049, 15891, -11800, 29140, 82594, 24290, tail", "row-1569": "head, -2864, 94596, -88289, 55231, 93130, -72324, -29267, 89203, 85888, -26074, 41565, -92930, 67433, 42337, 49815, 62255, -72553, -40555, 33911, -95437, -88771, -3622, 98657, 27880, -76154, 92327, -94879, 26765, 7469, -46206, 38395, -61236, -89423, -37845, -45764, 16940, 92713, -33242, 98999, 71715, -24188, 94720, -52309, -26568, -85007, 61980, 96798, -75957, -37599, -32617, 72094, -15339, 15152, -12694, -83801, 17353, -67995, -68096, 40598, -77602, 96230, 77763, 6073, -94828, tail", "row-1570": "head, 93492, -37100, 65188, -11215, -48188, 73929, -52711, -70166, -69161, -47378, 43044, 91954, -7961, -83909, -83023, 52207, -21981, -80079, -70887, 78429, 8678, 53000, -98452, -59680, 65576, 69493, -22058, 37794, -8886, -2558, -22988, 84046, 50338, 38599, 26210, 44937, 88653, 90015, -75012, -19772, 5176, 8767, -97685, 72480, -64759, 66788, 81775, 261, 75853, 91017, 38126, -92683, 97543, -43949, 17903, -85390, 75630, -85035, 54912, -32835, 80087, 9770, -34200, 9669, tail", "row-1571": "head, 60483, 39480, 93715, 69570, -74012, -54224, -75636, 19398, -95161, 87384, 41722, -81065, 22854, -10498, 62793, 95927, -29222, -96529, -63693, -86041, 31438, 97809, -95573, -42267, 3177, -68109, -92277, -62436, 38715, -45857, -27155, -79223, -13489, 92341, -38567, -8352, -81365, 91521, -11849, -4824, -20508, 51197, 76634, 32809, 95543, -46687, 66263, -52869, 31788, 14978, 33403, -56479, 68483, -86016, 78929, -7327, 63117, -66755, -24314, -90150, -76849, 87110, 19432, -26031, tail", "row-1572": "head, 3025, -31888, 65575, -49792, 31519, -86430, 28311, -11421, -57493, 10722, -39660, -29060, 86649, 87432, -5779, -76908, 54377, 25209, 57623, -25807, -54968, 48157, 73218, 74398, -32867, 25766, 92749, -3174, 59789, -46885, -29619, 74933, 60355, 63990, -11625, 67135, 63773, -58384, -9313, -42831, -8434, -8363, 60012, 82491, 
86257, -72129, 4300, -11323, -93371, 65754, -17677, -72193, 12773, -58058, 52712, -42512, -38941, -11216, -982, 71970, -55797, -23672, 55693, 71440, tail", "row-1573": "head, -20341, -41611, -23871, -43436, 92515, -76535, -73493, -71366, 65844, 67860, 75537, -50226, 84407, -85207, -48708, 35885, -17946, 2323, 2096, 11797, -35458, -70627, -11466, -5040, 25044, -85273, 75082, -24298, -66834, 67340, -80609, -79361, 32789, 23303, 30944, 49935, -41182, 54339, 66393, 22194, -42487, -29222, 76134, -67249, -61073, 63127, 237, 77906, 57179, -91694, -78839, -52707, 71975, 40906, -8999, -38365, 89289, -28588, -6170, 7062, 22636, -61734, -99149, -75809, tail", "row-1574": "head, -24756, -67723, 39305, 24993, -65583, -92600, 14671, -69789, -61927, -27597, -94634, -24780, 74998, 1416, -45555, 53934, -1639, 84058, 71172, 11449, 57184, 91986, 71273, -99234, -7971, -87081, -47116, 21491, -28927, 77950, -89026, -37232, -72298, -78983, -13436, -18150, -13658, 28292, -6568, 528, -52889, -54809, 78980, 48353, -92795, 85840, -52628, 54995, 73196, -92161, 41401, 26892, 51472, -34242, 80297, -44618, -32270, 5199, -49842, 52686, 15591, 50322, 16091, 25473, tail", "row-1575": "head, 43133, 58688, 5473, -27222, 35972, -34081, -75028, -7028, -71248, -37589, -93565, 91556, 77169, -22538, -26044, 97384, -30080, -52987, 91869, 23378, 82471, 39435, -14953, 93868, -87487, 83709, -82851, -15737, -86110, 23109, -79099, 70987, 53206, -1996, -13932, -32071, 48989, 50242, -99999, -73361, 34060, -3147, -60997, 18010, -21252, 89230, -45360, -37997, -74040, -98201, 83603, -3475, -94171, -76050, 71062, -94305, -45097, -93332, -9802, 32427, 35639, -28458, -31977, -38764, tail", "row-1576": "head, -13367, -87382, 72842, 16872, -89351, 9709, 17446, -96844, -66419, -38774, 47039, 83563, -5232, -96860, 87482, 12065, 85229, -15164, 4974, 13694, 21226, 87395, -52473, -60556, -89475, -50634, 86356, -65288, -29029, -81247, 72025, -81388, 72148, -48460, -6434, 77033, -81246, 66310, 56733, 23106, -14470, -15593, 48576, 21024, 58369, 11712, -65000, -52031, 38125, -25537, -53260, -6066, 54915, 70575, 41059, 6406, 19706, 38429, 8572, 75285, 6, 22975, 3766, -47278, tail", "row-1577": "head, 75367, -33485, 54249, 71203, 52269, -52310, 58151, 9416, 34907, 47453, 15597, -88894, -93193, -58634, -48183, 26568, 60223, -39415, -61543, 71161, -59987, 68334, -169, 30493, 88065, 17703, 41554, -23049, -50989, 66685, -41747, -93986, 95775, 99444, -46150, 62553, 77025, -82421, -75982, 27169, -70946, 1327, -25301, -81053, -61009, -47345, 7811, -65944, 475, -11460, -62463, 50247, 13739, 24648, 16362, -84364, -9654, 32917, 99146, -12799, 2835, -9879, 44574, 67138, tail", "row-1578": "head, -63932, 19425, -97658, 97820, 37783, 15089, 84898, 8085, -21920, -99605, -23821, 36556, 40822, 69268, -97419, -15116, -12322, 36617, 15900, 46486, 19124, 34841, -15669, -86871, -54738, -12391, 88593, 38872, -43689, 66543, 12499, -48685, 75368, 25398, -38720, -94243, -74314, -73597, 93378, 69174, -93354, -91966, -53148, 26664, -46844, -96382, 58600, 26808, 40415, -53273, -202, -38646, 41588, -36419, -20115, 59949, 33913, -23870, -45011, -36913, 21365, -66245, -60049, 98202, tail", "row-1579": "head, 33277, -34077, 59392, -88487, -58920, -62616, 7069, -42523, -78703, 17273, -57767, -10493, 41637, -63572, -28105, 70838, -32093, -78594, 93624, 55035, -89185, -17746, -45076, -75875, 48976, 3687, -44684, -57897, 24693, 41501, 78999, 10383, -20441, -60683, -17082, -20468, -89185, 26380, 52015, 75530, 29611, 38283, -22241, 64838, 68116, 44671, -58064, 67365, 88486, -94946, 94917, 
37759, -18446, -82228, 26553, 92343, 49736, -96976, -55209, -10494, -2186, -29474, -87391, -97896, tail", "row-1580": "head, 30662, -56320, -10384, 64567, 30763, -23897, 10554, 31790, 63491, 14982, 35032, -99487, -2101, 68225, -82562, 68614, 94360, 13455, -44515, 34419, -18579, 64170, 35138, 68542, -70268, 6709, -15769, -6798, -19998, -47869, 1755, -11412, 16993, 61179, -45388, -58785, -11751, 35761, 72667, 4068, 2939, -87774, 62967, 5646, 68890, -39078, 576, -54804, 84217, -46556, 41147, 81911, 43366, -88714, 32305, -5526, 17497, -14214, 82658, 42573, 5426, -93320, -48397, 78957, tail", "row-1581": "head, 75963, -30909, -76426, -2639, -75254, 79131, -12454, 44101, -24961, -29859, -39578, -75401, 79989, 26674, 75631, -92213, 85922, -59675, -18505, -57633, -86580, 35934, -55978, -58483, 30502, 9797, -62963, 93889, -2376, -12388, 15418, -11139, 60268, -43682, -51336, -28167, -54660, 65224, 63859, -16105, 37808, -90219, 2131, 28143, -92673, -53216, -22685, -17030, -16551, 62082, -40042, -80268, -10870, -80079, -95809, -88437, 72542, -43852, 63138, -33461, 17249, 71276, -4626, 10977, tail", "row-1582": "head, -82741, -42023, -87794, -88732, -31816, -46910, -74824, 57550, 10836, 57295, -56283, -22269, 62085, -26850, 98762, -69025, 11502, -29144, 79122, 31059, -49312, -15716, -21045, 68177, 91687, -49782, -46498, -60866, -15571, -64987, -44806, -73052, -73760, 81257, -29215, 36073, 57388, -7341, -33575, -14776, 93069, -30377, -55447, 94948, -656, -74592, -21311, -97965, 5929, 31595, -48119, -36629, 72680, 77664, -63295, -32772, 8994, -46716, -44895, 21263, -18140, -75404, -80179, -17633, tail", "row-1583": "head, -40817, -67011, 19112, -36873, 61583, 54228, -880, 85373, 90725, -93791, -31736, 89383, 32686, 25845, 49646, 13614, -5729, 21749, -85352, 45149, 47173, -38782, 50772, 2033, 11496, 10271, 70200, -30772, 78676, 30074, 48665, -44625, 77127, -3213, 17615, -83546, 61730, 32840, -86942, 88364, 3785, -95942, -71764, 89060, -65192, -15370, 80039, 49646, 71341, -45500, 40475, 33829, 20326, -25559, -38607, -54824, -29144, 34317, 59605, -53682, -68851, -75876, -23970, -84260, tail", "row-1584": "head, -65721, -75827, -48555, -43673, -45234, 82684, -62612, 77208, -88305, 9413, 57280, 53450, -14093, -46339, 79498, 96944, 99056, -78325, -51245, 94947, 65938, 19364, 16261, 37409, -42779, 40213, 15210, -312, -74117, -44336, -91193, -7503, 65261, -73131, 16899, 33489, 58282, 44004, 54415, 85175, -98844, -27270, -64901, -68102, -63029, 51451, -46581, -73307, 746, -8722, 34601, -2975, -64947, -61642, 62292, 26247, 15203, -45474, -24208, -33982, -2845, -32411, 52542, 32723, tail", "row-1585": "head, -74234, 28537, -68052, 62964, -49161, 74860, 33302, 34074, 72011, 89018, -90512, 50456, -9590, 60842, 38391, 61105, -31574, 57058, -71538, 49194, 51046, -88000, 99387, 18284, 73567, -26658, -25520, 68498, 28024, -15784, -14392, -408, 78477, -53188, 11409, 4998, -98165, 99464, -14595, 49790, 36902, 18821, -12511, 49985, 95463, -42125, 58723, -17858, 56328, 49949, 47780, -75847, 12891, 15772, -77238, 64944, 31101, -78064, -36147, -45175, -79187, -31104, 44357, 28946, tail", "row-1586": "head, 22559, 75485, 32770, 23543, 68712, -55526, -35944, 4443, 12830, 59957, -63967, -72846, 65504, 95283, 65408, 47066, -5170, -71020, -71538, -5894, -68006, 68382, -17230, 21940, -60916, -29162, -33198, 79575, 94697, -21081, -10197, 95132, 92969, 61942, 19388, 71360, 38388, -51932, 4541, 15949, -30679, 716, -12659, -65584, -26226, -24124, -63817, -82568, 31638, 94865, -11813, 64199, 74216, 49322, 78850, 94130, -93378, -86361, 
-656, -43817, 61804, 55862, 77062, -57335, tail", "row-1587": "head, 68106, -99961, 29124, -87796, -86071, -79241, 57499, 78529, 50865, 35130, -44723, 33218, -69506, -54682, 54043, -19083, 24077, 92458, -30829, -80499, -30914, 94892, -35429, 39592, -11675, 78091, -27601, -79654, -46822, -12525, -61007, -89695, 88978, -43936, -35897, -10291, 47576, -33844, 39325, -2538, 12213, -89200, -41920, 77296, 5463, 66391, 31106, -53435, 1847, 75914, -5813, -211, 51753, -41338, 55547, 87745, -32853, 84483, -50092, 21568, -4630, -42676, 51131, 10632, tail", "row-1588": "head, -2557, -2226, -12826, -13334, 89795, 10198, 84551, -51757, 32882, 52132, 48094, -60891, 82099, -6871, -31129, 64690, 15065, -93772, -38550, -10031, -94960, -14985, -29518, 38139, 42292, 76876, 78501, -39698, 29662, 66526, -98454, -11273, 56867, -95676, -88091, -59967, 35752, -76110, -45607, -7951, -68670, 9341, -79929, 371, 50236, 77339, 12453, -89360, -22316, 99660, 51900, 72489, 40890, -84752, -46642, -36791, 47277, 59544, -30226, 2112, -53878, 2193, -14062, -65137, tail", "row-1589": "head, -51143, 29144, 4511, 19519, 70710, -24289, -84993, -14205, 93781, 35174, 89193, 48878, -33792, 55898, 80490, -58955, 63752, -68376, 14817, 57705, -65755, 5598, -81966, -65157, -35539, -57988, 92963, 3879, 4840, -20074, -84136, 67026, 54397, -18173, 86579, -31678, -66388, -60949, -74337, -65256, 74443, 27315, 62459, 51775, 63473, 39804, -92466, -84715, 96106, -14191, -26992, -24843, -98324, 48363, -68474, 41238, 95291, 41247, -69950, 72782, 74724, 14534, -60846, 35815, tail", "row-1590": "head, 51974, 77082, -73007, -49053, 89101, -54505, -74708, 69568, 32181, -11174, -46110, 63960, -67213, 55372, -28923, 65477, 10216, -84136, -62831, 59677, 97611, 51148, -30127, -6840, -94081, 77954, 30140, 31459, 75284, 99340, -72096, -22791, 42696, 50866, -35504, 42147, 90367, 62992, -30550, 64557, -79960, 92646, 15399, 49131, -43550, 27083, -32155, 85711, -39912, 23620, -53950, 23758, 79847, -36491, -64123, 500, -94243, 80213, 39746, 66056, 33151, 76824, -47691, 77428, tail", "row-1591": "head, 35678, 66631, -92103, -40689, 97416, 63243, 66500, 2260, -46945, 68523, 29140, 74127, 32679, -60918, -30662, 75406, 47449, -69114, 80120, 12416, -13519, -75694, -62151, -59280, 62817, -42741, -55628, 8499, 88263, -63187, -54586, -47059, 47389, 93275, 66218, 24865, 76476, 86268, 30785, 33402, 96306, -82758, -21605, 88984, -75111, 5734, -5920, -30795, 15828, -28215, 85666, 82948, -89519, -11972, -65944, -83160, 29390, -62848, 9090, -35630, 45239, 53118, 5468, -49577, tail", "row-1592": "head, 77348, -97098, -52147, -76753, 39123, -17977, -20370, 95545, -33704, -22851, 6058, -52206, 43759, -65317, 81573, 56509, -44320, 30679, 16723, 78013, -49131, 26051, -58898, 11797, 83409, -55650, 35783, -83309, -13397, 60699, 41095, 98787, -17343, 75789, -34138, -19889, 20813, -93465, -70355, 50015, 98795, -31460, 44767, 32107, 86476, 71154, -23127, 98488, -5921, -14092, -5105, 10487, -90441, 71047, -63285, -38521, -76461, 70246, -97323, -96035, 38228, -90991, 41233, 25121, tail", "row-1593": "head, 37471, 86226, -53951, -84172, 29694, -84810, -25370, -64883, -7716, 85058, 81776, -44108, 96529, -53303, -10205, -20536, -35063, 51410, -93804, -57769, -85779, -51152, 55822, -5473, -89150, -26720, 34773, -42476, 58538, -61937, -43354, -51829, 45711, -67061, -28774, 52286, 37424, 44351, -29380, -3973, -94595, -96229, 10374, 13946, -19074, -29572, -32974, 99275, 90390, 6143, -75364, -98887, 50708, -53406, 36805, 54870, 63939, -12444, -16027, 35510, 41063, 11754, -15617, 5778, tail", 
"row-1594": "head, -35584, 14191, 16662, -24606, 68581, -49658, -25322, 91978, 3056, -17804, 28975, 45658, 24089, -42001, 25758, 79108, -43717, -335, -55635, 8982, 39164, -77799, -78968, -71007, 67477, -99835, 66657, 60601, -88577, -56102, -17382, 14324, 27013, 13269, 65731, 12167, 82950, -66185, 63546, -80540, -24549, -5727, -16700, 38504, -72332, -59494, 79017, 7779, 86030, 93497, 14464, -40528, 70390, 23825, 83000, -57073, 98821, 30393, -99472, 57999, 86441, -3927, -92625, -88785, tail", "row-1595": "head, 90164, 72511, -25230, 12010, -69244, 15134, 47375, -24618, -28097, 5192, 46686, 6514, 53465, 51061, -69863, 31161, -94338, 17715, -24568, -58250, -7565, -55732, -75381, -27913, -28436, 49162, -55680, -37871, -50171, -8074, 50643, -67748, 79171, -93763, -22202, -3262, -66638, 99284, 12946, -29193, 85960, -11293, -65915, 58161, -16402, 45946, 57721, 54645, 1309, -83394, 13155, 86064, 52529, 97108, 83385, -58365, 79713, -2698, 92316, -50496, 47359, 28679, -85378, 49859, tail", "row-1596": "head, 63724, 18001, -13167, 86849, -88685, -53095, 99412, 36718, -186, -60326, 42430, -10769, 27273, -42758, -81278, -23699, 73462, -12066, -79689, -60480, 4130, 89408, 73939, -24588, -85745, -66388, 72864, 95612, 96741, -90034, 10984, -47426, 4995, 16849, -12022, 46853, 15095, 74629, -63967, 27471, 7730, -61131, -20630, -85370, -90961, 71441, -95754, -78802, 78179, -9932, -66995, 22380, -5637, 66842, -85881, 83154, -54408, 16541, -15712, -83016, -30922, 17871, -18502, -23692, tail", "row-1597": "head, -67692, 37504, -15658, 44459, 58562, -51733, -44470, 86414, -20864, -12889, -70469, 41426, -66134, 98083, -21199, -60608, 69786, -34325, -85392, -1518, -56309, -67543, -87940, 31245, -36720, -83349, -94002, -39051, 47565, 38724, -17286, 80225, -44108, 21248, -91736, 9570, 35789, 8985, 95391, 34089, 15892, 50167, -65360, -90452, -47776, 24696, -55605, 88024, 37156, -85747, 40530, 40503, 6788, 50400, 87115, 54815, -61129, -54124, -21158, 94303, -11423, 37464, 16857, 66183, tail", "row-1598": "head, 70882, 40579, -65559, -20500, 55824, 48288, 48211, -78393, 64810, -70158, -74101, -48160, -79050, 49747, 71795, 14230, -34204, -89635, 8249, 39573, -57022, 36620, 77849, -74014, -56913, -49812, 79411, -2845, 24848, -40760, -89626, -22961, 35312, 78765, -6928, -6013, 59878, -86462, -32191, 93669, -53351, -28719, 55319, -39295, -94623, 67103, 23420, -60156, -35872, 10448, -24156, -73311, -10150, 1021, -82303, 14887, -69528, -10273, -76674, 31887, -99739, -95694, 64998, -3995, tail", "row-1599": "head, 11061, 11, -56460, -32529, 1255, -11891, 6129, 83316, 77801, -37922, -90944, 44050, -87525, -83397, -43361, 2228, -46835, 47335, -38185, -32274, -79223, 1752, -59366, -57226, 46131, 32472, 3453, 34652, -37307, -64285, -50837, 39470, 94108, 10065, 71862, 23198, -1445, 91498, 55939, -40733, 64255, -73546, 20231, 27503, 89168, -3153, 46900, -7438, -69040, -29340, 36330, 58695, -26932, -49263, 99504, -25397, 78160, -9821, -82461, 82560, 99462, -73934, -51448, 89276, tail", "row-1600": "head, 24969, -59198, 73730, -77342, -69263, 66087, 46402, 27439, -96264, -24871, 37713, -55094, -10443, 87289, -52045, 22543, 63649, -51972, -53370, -56924, -37148, 65289, 54510, 49377, -34659, -13875, -52580, 22464, 90995, -27226, 42851, 79459, -89999, -95822, 7566, 85642, 33299, -7831, 52599, 63536, -99346, -65290, -83390, 20695, 52450, 11051, 82056, -25557, -34800, 30323, -70961, 52035, 15813, -71195, 90967, 67717, 69396, -78344, 62840, -27788, 54559, -23620, 1197, -58714, tail", "row-1601": "head, -46433, 52566, -36639, 16380, 86611, 
6006, -98446, 25916, -18191, -57480, -71577, 22522, -17808, 6527, 77346, 29216, 85312, 63903, 66096, 56369, 97381, -65418, 10950, 24310, -54987, 27576, -60221, 10315, 79154, -10924, -9039, 33576, -87702, 89983, -61442, 40016, -98352, 11886, 54312, -95100, -47344, 5431, 1448, 93408, -35754, 86671, -10028, -71876, 17717, 32149, 3741, 99389, -41094, -3770, -35066, 9736, -88418, 38004, -94275, 21024, -88311, -16017, -34986, 87736, tail", "row-1602": "head, -12607, -76249, 51275, -49101, -80043, -57037, -90709, 15984, 96411, 89436, 18465, 30425, 76849, 3755, -1796, -59140, -34801, 42045, 67734, -15961, 27747, 32495, 12378, -56905, -31387, 41826, 66597, -97129, 88897, -21793, -48014, -61469, 7316, -10873, 37114, 441, 93825, 28312, -56094, 34968, -44361, 56299, 66133, -59209, -96200, 9210, 75965, -21703, 26563, -97370, -26722, 951, -8730, 67689, -51715, 37895, 47605, 37152, -79343, 2318, 28763, 1956, -57391, 27345, tail", "row-1603": "head, -47058, -14512, 43057, 10860, -28328, 30476, -99570, 91561, -87785, 98765, 82728, -27488, -42464, -65809, -72509, -34937, 22203, -4887, 59136, 59793, -13858, 74643, -29317, 14534, 4191, 51312, 86252, -63549, -42530, 14713, 20096, 80249, 67049, 45250, -93630, 37433, -2893, 67660, 77832, 19727, 88077, -85697, 91932, -3279, -75873, 1177, -95264, 11995, 7372, 5645, -95474, 81605, -57659, 14562, 58019, 70511, -95690, 97888, 709, -14499, -61527, 51166, -97691, 19420, tail", "row-1604": "head, -95779, 93644, -39636, 53465, -55748, -80110, -2903, -65421, 26321, -87570, 30935, 38766, -58206, -50996, -14316, -3489, -19681, 99861, 29590, 78785, -85397, 41647, -55964, -53358, 65649, -76445, 13511, -58365, -71377, -44426, -52997, 20849, 8779, -36986, 50763, 58548, -17498, -50818, 93692, 66699, -60207, -6209, -44279, 59040, 96837, -57073, -71886, 78504, -12972, -59788, 69937, 73478, 16724, -37474, -33919, 36242, -88723, 62348, -64869, 48518, 5588, -81301, -44819, -23809, tail", "row-1605": "head, 38347, 69281, -44449, 52219, 27885, -69496, -44802, 59023, -58505, -97199, -19788, 4327, -97299, 99295, 70690, 32065, -68002, 5246, -26222, -90351, -57287, 97018, -3707, -96602, -57833, 37680, -25093, 83960, 75634, -67088, -96154, 29299, -53506, 24194, 60099, -86109, 43038, 62679, 89322, -82795, 31580, 31803, 56385, 65842, 59505, 85858, 74692, 6529, 71833, 40804, 84465, 30543, 91478, 78099, -92974, -53209, 83789, -97274, -39500, -20689, -36257, -56808, 75460, -97777, tail", "row-1606": "head, -54215, 19403, -27239, -52320, 50472, 28548, -84530, -58154, 75847, -69497, 83948, -51173, -87089, -32314, -91604, -43662, 22724, -93240, -57920, -82236, -11864, 7477, 16067, -91030, -46946, 98259, 64728, -39615, 67550, -51748, 70686, 85928, -42379, 67578, 66674, 82648, -37856, 65675, 7279, 99115, -62285, 56298, -84304, -51032, -77420, -80248, -23732, -87644, -54147, -89290, 80005, 88111, 58515, -58400, 21964, 19369, 65213, 34094, -28669, -6912, 15165, 70555, -25277, 68482, tail", "row-1607": "head, 85910, -31223, 65209, 43289, -54713, -31203, -12203, 23172, -60438, 98898, 33903, 37844, 85658, -8261, 84241, -53991, -58431, 77391, -67184, -76971, 36480, -99537, 88772, 90025, -68016, 445, -57031, 15420, -26682, 7419, 21224, 21668, 32932, -88000, 66671, 22322, -50216, -53758, -11656, 11300, 86062, 81187, 30760, -41931, 9641, -15047, -48322, -42842, 81775, -30198, -99138, -44116, 18513, -32843, -24342, 73646, 25817, -59856, -85616, -63303, 77315, 57061, -43802, -95025, tail", "row-1608": "head, -80925, 4159, 15673, 83185, 40306, 71479, -72781, 89292, -31401, 58901, -4325, -95507, -83502, -30504, 
-80481, -86805, 48001, 18957, 74647, -20739, 31145, -90024, 8091, -97922, -86049, -68514, 59262, 4507, 28621, -31008, -49096, 23262, -85125, -85729, 38991, -91291, -31873, -13213, 21132, 85837, 30093, 67477, -19326, -18961, 7825, -17126, -91592, 10503, 40009, -58328, -57488, 32982, 34812, 27845, 23000, -46318, 47616, 83793, 56835, 16840, 25312, 22234, 7049, 14203, tail", "row-1609": "head, 56518, -62885, 8911, 7790, 68723, 55901, 43611, -93181, 77805, -40734, 69580, -16052, 35428, -13554, 47528, 66417, 11208, 5240, -55890, 77120, -69543, 45561, -49028, -8348, 66724, 34527, 38132, 17080, 695, 91654, 14043, -6737, 67287, -81472, 26300, -7658, 92589, -31517, 70783, -42476, -72849, 94066, 77800, -54321, -47994, -71146, -43163, 55618, -81495, 85454, 68074, -61331, -59862, -68186, 52194, -52567, 17555, -88044, -54029, 54715, 24132, -13256, 82734, 49844, tail", "row-1610": "head, 94506, -49970, -78375, 83671, -72362, 80617, -81968, -79472, -48826, 9847, -91776, 65391, -70085, -63795, 77059, 89709, -10804, 52959, 19987, 13030, -81952, -1621, -83068, -92666, 32479, 29977, 46040, -78671, 3406, 69637, -69570, 26604, -21505, 88115, -44204, -76964, 87737, 68342, 49683, -38050, -93000, 58069, -81295, 42900, 50424, -84465, -55379, 8163, -72530, -87148, -42649, 64048, -37418, 53035, 60271, 25796, -79670, 68156, -82164, -42576, -82429, 35681, -28588, -30237, tail", "row-1611": "head, -18067, 44481, -82788, 78139, -21844, -42850, 35283, -64818, 62435, 42787, -80260, 22695, 3154, -13022, 56358, 73013, -99051, 87222, -67343, -41550, 13488, -40750, 45923, -96176, -79744, 12570, 13345, -29446, 32892, 63764, 27167, -68135, 6272, -79724, 61028, -81482, 13813, 89381, 85464, 7085, 1695, 62011, -18602, 3166, 49711, -66514, -73268, 84301, 79063, 30258, 79436, -78875, -35264, 9881, -26715, -58186, -77615, -79487, -72, -51210, -30741, 72289, -99428, -70706, tail", "row-1612": "head, -36994, -67701, -94157, 2760, -75649, -55077, -65930, -72673, 27374, -66465, -41241, -22019, 75466, -57795, -43782, 6138, -75893, 92472, 76097, -34128, -38709, 20751, 1481, -13574, 37968, -19562, 84168, 48700, -28347, -71123, 52213, 49823, -3575, 74402, -82822, 33073, -17481, -26729, 36781, -38729, -90161, 92622, 64772, 18573, -75641, 10678, -52972, -26668, -82866, -65670, -96170, -6240, 71107, -82982, 16503, 30763, 74058, 20326, -79558, -26458, 30470, 52073, -87809, 54626, tail", "row-1613": "head, 8009, -68912, 2695, -57488, -42373, -1945, -43588, 6201, -65065, -14834, -38117, -13597, 18063, -21861, 41814, -46942, -28232, -81141, 49259, -60660, 19193, 9796, -29071, -56117, 75867, 54454, -60695, -94540, 10550, 95482, 69080, 96506, 92160, 99099, -55070, 37356, 42756, -94233, -70621, 55219, -97145, -4199, 94655, 28336, 9145, -80413, -66224, -15678, 98283, -72227, -57556, -88142, -41827, 43907, 42274, -14390, 52934, 190, -98743, -45008, -83959, -38482, -11707, -18400, tail", "row-1614": "head, -79522, 32249, -66978, -98140, 12965, 50977, -52400, 97854, -97232, -62618, -22075, 55961, 54790, -78025, 2901, 62699, 52433, 20997, -43314, 12093, 5423, 65336, -968, 63947, 19495, -25627, -61118, -62805, 44947, -48910, 21298, 46509, -79374, 98565, -52699, -53800, -49421, -76101, -5955, 65650, -29206, 49108, 97661, -88200, 58626, 14954, 74853, -24321, -78192, 41017, 80006, 50371, -94108, -59843, 68485, 56578, -74328, -22071, -73254, -18946, -59575, 57767, 59355, -43272, tail", "row-1615": "head, 54023, 12278, -53230, -88298, 3852, -90403, 61652, 22986, 87680, 13224, 77765, -67763, -23993, -4211, 82969, 73098, -80917, -95481, -91883, 45315, -94401, 
-51607, -87904, 75304, 43186, -24440, 90785, -49742, 19990, -58852, -80142, -18388, -43207, -64333, 5630, -53872, 49189, 72549, 97593, 21766, -11346, 29869, -30305, 14809, 77404, -9617, -92549, -95544, -9434, -69596, 40637, -55115, 36523, 90042, -24418, 98776, 36559, -67339, 20518, -82742, 94428, -64962, 13470, -65286, tail", "row-1616": "head, -99052, -56324, 86668, 67518, -3792, 72059, 26178, -18736, -57541, -89603, -49669, 40433, -21738, -49176, 85273, 18737, 49946, -52483, -49702, -51409, 74812, -33881, -75872, 82481, -49770, 17466, 17158, 92821, 61920, -13480, 95112, -77802, -74056, -71267, 88065, 14991, 57112, 52451, -75646, -26777, 64948, -91131, 9323, 38581, 4737, -21908, -60487, -37547, -44076, -58958, 76738, -14182, 73487, 30052, 98351, -87058, -87915, 83584, 48282, 189, -76970, -73916, -15766, 10661, tail", "row-1617": "head, -53536, 82301, -15082, 14192, 54897, 94133, -20744, -77782, -74253, 82294, -9436, -99534, -90148, 27448, -34684, -85142, 89053, 96979, -96503, -74443, -32027, -51016, -78454, -21926, -33491, -68287, 95501, 17749, -25018, -94999, -52715, -5914, 60796, -37488, -22692, -18446, 39442, -6871, -37627, 43868, -62010, -30062, -24513, 48551, -77634, 73957, -79474, -93080, -73270, 26376, -7683, 92799, -51478, 93527, -75134, -6513, 38680, -80280, -7072, 14160, -99527, 61192, 11563, 59045, tail", "row-1618": "head, 77976, -26822, 13154, -13695, 36399, -24863, 44970, -94729, -99976, 94815, 88732, 20493, 85891, 55969, -22930, 67453, -53939, -92699, -22922, 8029, 75132, 55825, 74482, 10899, -45334, -16092, 87608, -22135, 18317, 49940, 94967, -75786, 99018, -46553, -53599, 37720, -31007, 54698, 96578, -59851, 12763, -17824, -19475, -5972, 24596, 80232, -49646, -41557, 62544, -12728, -20436, -27032, 1963, 56179, -24591, 92914, 89736, -33989, 50104, 20022, 83937, 20530, -41013, 4025, tail", "row-1619": "head, 42376, -54481, 36592, 40626, 38385, 94046, -48955, 50187, 44211, 35545, -6344, 30315, -63107, 39479, 1163, 93139, -77520, -48145, 45914, -72092, -29919, 11345, -43555, -91669, -96731, -30559, 69444, -34289, 5560, -91097, 68771, 77331, 21695, 59217, 56534, 8327, 33837, 41557, 71454, 6162, -70004, 37873, 68993, 8154, -88315, 70028, 88453, 887, 72966, 73542, 65099, 71072, 45641, -78077, 1004, -24346, -35613, -48369, -62137, -79703, 82067, 74368, 5305, 63550, tail", "row-1620": "head, -55269, -31715, 71256, 53724, -24999, -80466, -65524, -9199, 78996, -10277, -52491, 62075, 2375, -78401, 5726, 71571, 23258, 47387, 93882, 37180, -68374, -37145, -56716, 57852, 17160, -51047, -50260, 83530, 69586, -10976, -39501, -5673, -75121, 15417, 82325, -17205, -43033, -19612, -90645, -10151, -56842, -83623, 81987, -52355, -81733, -56900, 2967, -46797, 18432, -45906, -70315, -11462, -57808, 27635, -69318, 6813, -23731, -88826, -83816, -53761, -2261, -74027, -3826, -94483, tail", "row-1621": "head, 51942, -89157, -6121, 76058, -87778, 78575, 97055, -70570, 27695, 94388, -84377, -9435, 62400, -3289, 11182, 39495, -65328, -45192, 3901, -54712, -44161, -84269, -91841, -88626, -89369, -84962, 96976, 17312, -15245, 7165, -748, -77491, -97001, -82166, -81164, -91138, 30584, 76136, -44439, 92742, -95293, 94325, 65446, 31727, -60388, -58166, -36716, 2106, 33413, 82877, -5502, -89275, -38738, -93407, -60879, -77872, 81046, 56945, 76791, -87205, 13289, -37015, 17743, 99381, tail", "row-1622": "head, 69600, 18288, 73357, 82456, -23136, 906, 75745, -17365, -67150, 68534, -1928, -28001, -16670, -13595, -35730, 13754, -77926, 52354, 3837, 38373, -48225, 86994, 43222, 91599, 44675, -59764, -69999, 91313, 
-82846, 12779, -11610, 17602, 47834, -1732, -62114, 80816, -82545, -11480, -19220, -98479, -78646, -53888, 81329, -76710, 11299, -1576, 27338, -56443, 60963, -1798, 51261, -76559, -24940, 45413, -67943, 52345, 26930, 77939, 31177, 33548, -18615, -1564, 63546, 22762, tail", "row-1623": "head, 7407, 7937, 10489, -29317, 75189, -23179, -525, 23729, -57194, -80320, -50430, 83850, 30404, 2936, -9682, 30035, 40224, 1552, -84310, 39655, 2468, 24990, -9824, -41374, -87370, -36927, -56754, -81779, 66749, 46546, -59728, -97188, 3664, 51655, 23564, 10887, 69469, 68316, 62230, -32811, -49242, 73759, 88630, 28027, 28570, 77044, -38642, -29717, -22467, 19959, 3741, 41093, 96490, 12853, 63069, -51705, -53095, -72929, 16691, -71121, -99060, 5118, -87183, -8581, tail", "row-1624": "head, 90654, 73174, -67960, -63625, -45778, 96051, 5890, 97854, 80885, -21836, 79919, 44052, -91652, 39260, -70158, 54928, -94185, 99963, -32835, 75013, 63624, -93934, 26492, -49141, -82714, -29560, -67987, -1025, -76169, -32333, -55307, -27546, 42809, -37759, -34938, 38985, -61575, 33197, 60270, -697, -75083, 70237, 88721, -51076, 36517, 10929, 26785, 29076, -43075, 65305, 32811, -24834, -48786, -3149, -97370, -96464, 23836, -47196, 36760, -20526, 38289, -16266, -1537, 23831, tail", "row-1625": "head, 61061, 99528, 38279, -24260, -67683, 31818, 93699, -26753, 68198, 40757, -16398, 14527, 65857, -84591, -48850, 90602, -13249, -40124, -22875, -90115, 66760, 90299, 59841, 37327, -37907, -76070, 60718, -77569, -8895, 38661, 98918, -63342, -52668, 92208, 87247, 50487, -10268, 70542, 64263, 13471, 88396, -96335, 95466, 31552, 51731, -60450, -20326, 71896, -47151, 35021, -99764, 11098, 94043, -2480, -87866, 61010, 20285, 18902, 9089, 46788, 69316, -49641, -59133, 47995, tail", "row-1626": "head, -67387, 25757, 34296, -88528, 31248, -56773, 85274, 64878, 67710, -42969, 21101, -18370, 53527, 99660, -55690, -30299, 57590, 54805, -4180, -91296, -52352, -84364, -39738, -30197, 6115, 10, -97161, -40371, -25631, 25640, 18804, -52543, 86523, 17435, 11537, -37663, -56852, -4058, -58341, -72602, -91022, 88829, 92202, 97432, 60493, 28572, 51050, -15998, 74397, 51237, 74341, -14099, 36156, 52257, 39739, -23697, 47985, -21202, -76998, 98256, 64573, 79879, 68519, -86966, tail", "row-1627": "head, 39212, 56825, 30258, -62473, -48337, -24191, 53039, -98572, -63256, 93397, 12761, 29501, 66272, 27904, -71748, -55056, -97224, -86972, -84723, -99740, 91868, 48952, -78769, -59368, 93856, -61060, -75194, 5481, 60596, -6937, -66778, 97681, 54756, -95436, -4711, -8726, 24784, -66084, -98554, -85875, -62949, 41344, 92869, -73816, -20177, -45309, 42074, 69727, 71345, 1396, -14105, -78098, -32764, 3263, -88336, -4901, 79001, 82222, 18068, 93330, -36590, -22961, 35475, -95738, tail", "row-1628": "head, -28329, -41160, -72813, -83522, -65522, 66242, 60608, -62745, 40326, -58423, 53987, -89038, -10452, -33040, 91211, 20891, 95445, 72903, 50259, 53008, 51615, 45849, -24881, -61638, 46843, -15816, -97015, 9784, -78896, 10482, -79548, 84425, 72502, 48297, -52239, -56253, -77870, 40888, -15539, -61987, -33999, -43236, 49220, 84908, 37669, -5708, -65874, 21970, 36824, -13423, -45853, 26476, -81887, -4919, 82514, -76424, -59942, -5714, -12915, -74905, -56547, 27873, -65572, 75721, tail", "row-1629": "head, 82628, -50284, -65025, 39626, -11136, 58147, 84358, -66204, 68540, 3225, -29589, 43174, 58280, 14051, 13436, 49076, 83811, -16410, -82888, -26716, -20091, 66052, -25430, -93430, -66627, -70821, -26191, 30571, -6148, -80283, -79799, -20795, -9631, -60160, -52258, 
-27099, 96604, 39891, -90866, -32864, -43633, -56511, -49993, -46455, -2357, -35639, -37783, 84918, 91991, -23333, 44144, 26159, 34090, -44018, -22420, -71871, 62745, 58603, 67323, -599, -30287, 5066, 92751, -93499, tail", "row-1630": "head, -15107, 51083, -45951, 74951, -18119, 19567, 15725, 69385, 74363, 95319, 77710, 69792, -27199, 98162, 65391, -42241, 5527, -75337, 91731, -79452, -51078, 93315, -17831, 43118, 88439, -53801, -11282, -65278, -59333, -93901, -53477, -65659, -61120, 3434, -53296, 68198, -89113, 39632, -4043, -92097, 48094, 30023, 32154, -87008, 4179, 87752, -55866, -80962, 56405, 58606, -69263, 90930, -10701, 67955, 9487, -71628, -4031, -20027, -39596, -85437, 34397, 27089, 43124, 86341, tail", "row-1631": "head, -94587, 57797, -2071, -30336, 40403, -69928, 93345, 51668, 26521, -87526, 71978, 81547, 20886, -5757, 9263, -25398, 6571, -34265, -72651, -91069, -68882, -68256, 44089, 21497, -36527, -39381, 41951, 15384, -18126, -57968, 46600, -25812, -2833, -7871, -56367, -4190, -74496, 87290, 40422, 87158, 18992, 10678, 36701, 85693, -6232, 92724, -91870, -45341, -89594, -90798, -34490, 74612, -29492, 71965, -95702, -73719, -22083, 94322, 17107, 4858, 63916, 70316, -87101, -76896, tail", "row-1632": "head, 72462, 1417, -25340, -98554, -88919, -78621, 95324, 79406, 31934, -2879, -31384, -2324, 25210, -35015, 14838, 72677, -18850, 24185, -71971, 54278, 54101, -21895, -91405, 98666, -58924, -91541, 80824, -82742, -21845, -30656, 29139, 44780, 60592, 42675, -83695, -88813, 82425, -55020, -48219, -75421, -96773, 56100, 42840, 6460, -367, 97703, -27487, 88730, -48985, -19026, -32742, 50313, -36945, 22548, 4753, 26666, -30938, -25047, -84957, -71464, 26303, 27609, 23221, -94598, tail", "row-1633": "head, 53261, 79808, 10756, 44661, 83372, -20814, 27989, -29685, -96310, 26905, -84561, -45492, 44626, -4436, 90966, -63297, 17700, 90986, 4288, -62440, -28319, -34734, 79049, 86344, -98050, -76144, -56427, 9626, -86831, 92179, -55667, -70566, 74714, 23681, 32164, -11229, -55140, -97328, -46717, -60390, -18136, 96908, 70797, -57679, -44936, -1253, -59094, 15409, -61035, 2519, -28088, -75155, 58877, -30543, 13237, -29963, -78315, -89966, -62028, 80356, -58298, 35738, -76951, 46099, tail", "row-1634": "head, -49913, -21838, 11756, 24820, -62598, -46725, 37811, -45877, -38413, 76050, -5522, -17105, -781, -74446, -45842, 55637, 75719, -6295, -11446, 86861, -87101, 39679, 80734, -58210, -74063, 4399, 61056, 29685, 59270, 58606, 81359, -63924, 78220, -47723, -23706, 85600, 529, -24440, -73322, 46755, 10646, 14938, 78153, 12804, 29614, 98591, -14463, -15599, -88281, 61213, -10062, -33406, 71251, -18103, -45424, -38806, 63336, -77334, -32802, -88231, 24272, 80682, -64933, 76259, tail", "row-1635": "head, -69112, -56101, 51764, -4863, 21870, -51349, -87159, 4957, -47349, 29279, 13470, -45767, 70343, -44286, 31412, 57731, 55518, 44065, 20970, -39884, 33951, -65259, 2757, 32890, 34245, 81049, -55672, 96656, -86634, 65609, -67337, -16110, 27238, -76520, 74861, -4492, 19663, 80950, 99225, -26832, -90748, -15570, 14595, -33347, 47320, 99286, -33904, -27897, -39608, -78743, -66936, 39582, -61592, 60003, -22629, -96128, 80734, -77617, 96171, 32737, 83805, -7626, 96884, -21901, tail", "row-1636": "head, -70487, 94077, -83485, -72142, 12163, -7895, 66604, 16406, -42189, 75053, -68443, -75193, -76217, 4126, -72270, -32667, -20047, 75806, -87192, -1074, -24702, 89934, -91828, -64909, 5286, -26771, 64043, 85179, 72858, -22347, -6036, -43733, -33098, -2498, 4313, -89234, 7452, -23157, -26669, 42198, 51983, 
82918, -98070, 92641, -35915, 15187, -66561, -91846, 30770, -54890, -99514, 13199, -6983, 33075, -39239, -227, 39988, -90825, -30901, 20141, -28272, 92794, -31826, -93589, tail", "row-1637": "head, 34549, 41529, 92846, -80193, -23528, 30378, 7908, -45621, 8162, 11456, -62171, 73187, 37918, 89945, -88751, -38630, 69690, -27984, -73824, -4468, 90129, 61867, -13749, -47021, -11882, 77184, 12141, 36284, 43032, -18871, 5297, -26350, -97509, -90252, 69432, 2473, -46502, 12586, 57725, 518, 65040, -13989, -36754, 86604, -32442, 55944, -9433, -78379, -80654, 15054, -15610, -25155, 3108, 84189, -42489, 67261, 10204, 39374, -75804, 77887, 85043, -73327, -96235, 93039, tail", "row-1638": "head, 45355, 66128, 78806, -37331, 57226, 47457, -77445, 6802, 58395, 84754, -25877, 23403, 62832, 64847, -78770, -5219, 96746, -45868, -89691, 24620, 29223, -61954, -82985, -27134, -62686, -10023, -67591, -66014, 54971, 51802, -37029, -32542, -70659, -13772, 89923, 33704, 51588, -55903, -2078, 26103, -60920, 48300, 51274, -51031, -48077, 10687, -31115, 49802, -2194, -88026, 22350, -43769, 30316, 10848, -70361, -8058, -94803, -35041, -71349, -8254, 13172, 63020, 90852, 23948, tail", "row-1639": "head, 92590, 8152, -16704, 5564, -42565, -38932, -48450, -74457, -87017, -43334, 82239, 2182, 46464, 28764, 60694, 40387, -5786, -77494, 40259, 91763, -96010, -87586, -48996, -63890, 44622, -809, -64238, 7718, -13662, -11992, -10386, 28253, -37348, 6949, -59178, 50724, -3587, -52976, -91071, 16921, 62816, 66465, 10637, -59812, -21896, 25603, -2385, -28020, -57022, 19961, -76033, 26410, 97952, 82996, 77689, -41480, -68777, -9340, 76291, -59096, 41688, 67259, 11518, -97394, tail", "row-1640": "head, -54490, 54659, -66875, -88363, -84295, -3764, 54784, -18438, 72639, 35913, -54803, -32908, 61158, -9813, 45429, -20635, -32584, 19273, -90402, 28675, 39374, 37119, -31835, -78467, 9958, -19789, 39647, -45975, -50977, -63620, 44072, 5985, -89898, -7881, 39115, -35284, -98645, -2383, 48232, 62098, 45257, 77895, 45026, 25884, -24757, -87620, -57810, 69411, -77128, 80855, 77895, -11200, -95385, 55341, -63448, 9123, 24143, -89830, -13664, 41670, -1057, -19408, -10131, 96974, tail", "row-1641": "head, 23683, 12072, 82476, -45933, 36323, -83973, 26540, -65534, 20703, 29634, -64310, 47033, -48429, 52441, 29599, 36962, 38855, 83164, -83552, 56841, 69298, 55705, -39213, 85033, -84856, -40436, 97487, -93871, 63273, -35709, -36620, 76495, 92128, -45410, -54413, 41385, -97296, -67546, -72722, -9226, 94055, 76813, 55372, 32730, 3967, 30842, 31660, -47480, -66135, 97987, 42787, -46434, 16028, 22895, -17895, -16358, 43299, 55963, 94455, -42800, 92994, -50489, 27990, -35285, tail", "row-1642": "head, 61785, 13132, -62974, 22988, -28858, -83229, -29923, -96784, -80691, -21640, -46812, -50998, 25472, 59976, -29048, 16862, -5042, -26624, -16759, -45951, 37268, 94626, 40787, 39585, -7470, 45884, 52790, 32984, 79753, -85941, -47978, -66475, -46411, -60940, 88942, 31358, -10455, 12201, 39586, -9210, -54196, 98500, 56377, -27471, -43223, -82581, 95830, -49416, -25529, 57168, -73919, -5570, 34691, 54749, -35600, -16712, 69996, 80573, 83662, -64863, -83764, -65975, -46127, -61008, tail", "row-1643": "head, -1175, -66009, -53007, 64245, -860, -43312, 76816, -15150, -39839, 42208, -58873, -26423, -99109, -70689, -41861, 23294, 88555, -24462, 70738, 74676, -53872, 7706, 86564, -22627, -8417, 19600, 58995, -3594, 3600, 61720, -70007, -63265, 24595, -71115, 99670, 3320, 11645, -11783, 60932, 66577, 71184, 16208, 97045, 49461, -93996, -50372, -55461, 45315, 
-11705, -99132, -12862, 17103, 87074, -63898, -58135, 16285, -48741, -18216, -79823, -83050, -55081, -61774, -4976, -62387, tail", "row-1644": "head, 752, -92419, 73004, 16620, 72973, -319, -86791, 84918, -51901, -5846, -15370, 86339, 89894, 65688, -8300, -50112, -73166, -22569, 52708, -10768, -51424, 30734, -54649, 36355, 35671, -38748, 21318, 75, 34105, -52861, 99536, -82121, 69724, 20406, 47827, 97704, -95376, 40569, 43981, 46823, -572, 34671, -22668, 15677, -44602, -34154, -73425, 10164, -11052, -90785, 36825, -77984, -2990, -37852, 26502, -66635, 61024, 11012, -40318, 6735, 85054, 32023, 57702, 36522, tail", "row-1645": "head, -24960, 66130, 6146, -2769, -94451, 90049, -90977, -65614, 42301, 10329, 68071, 22118, 69747, 85601, -36718, 42705, 41162, -47495, -25591, -64118, 89192, -52230, -20864, 42159, 21905, -84604, -22854, -3367, 36483, -67367, -12498, -30666, 77509, 8507, -2757, 67580, -67793, 96315, -62533, -39719, -24992, 79384, -25770, 51064, 37715, 3037, -77148, -9832, -68163, 39498, 5162, 52584, -12640, 88315, 80872, 25057, -50119, 71034, 77858, 47886, -76692, 12707, 76648, -46705, tail", "row-1646": "head, -2344, -45268, 85354, 40984, 44547, -72315, -72201, -37348, -63224, -99611, -54233, -66383, 91575, 29894, -56306, -37051, -25547, -47263, 22674, 90591, -68563, 15936, 1248, -21977, -57584, 97728, 70858, -45433, 22281, 69817, 81089, 2102, 26192, -32667, -98370, 41208, -14650, -22669, -21177, -51353, 41836, 9190, 62969, -35283, 62631, 44449, 24571, 88953, 70496, -28364, -41166, 83044, -40011, -29430, 81822, 73668, 4125, -38181, -73842, 14453, -60019, 62810, 45590, 3258, tail", "row-1647": "head, -87975, -52556, 10313, 1366, -2247, -66098, 51388, 59392, 66192, 75549, 90919, 65678, -22916, -53886, 69121, -67721, 99221, 35482, 47145, -41795, 29425, 65075, 96978, 78527, -70797, -7059, -61063, -67467, 2472, 95771, -90868, -30840, 54883, 5783, -91210, -48753, 20459, -51487, -82773, -20752, -32093, 83392, 11232, -9219, 39267, 17455, -85462, -72107, 97004, 78487, -54653, 45343, -88389, -10060, 62959, -25389, -7548, -66718, -67561, -86756, -18672, 1040, -6663, 41411, tail", "row-1648": "head, 48607, -50522, -56016, -76984, 96304, 63888, -17682, -83922, 35223, -23462, 21583, 75756, -32262, 38377, 14996, 4128, -38984, 9424, -88768, 7019, -25225, -63279, -47344, 43868, 7764, -91160, 12501, -58180, -64999, 80554, -6388, 89490, 784, -59703, 10427, 14688, -4004, -41443, -99183, -67987, 49850, -55424, -57885, -46, -54289, -44865, 89450, 42089, 88558, 83238, 10652, 96276, -54429, 67809, 1024, -22231, 88417, 47879, 71677, -21069, -1895, -74787, -51181, 97803, tail", "row-1649": "head, 28244, 49655, -32245, 34338, 81879, -23098, -53364, 91716, 46198, -62503, 21508, -84307, 55226, 67123, -39347, -40822, 78854, 50764, -11336, 32026, 56217, 18944, 19257, -67857, 35346, -50642, 14592, 78718, 31788, 18230, -81147, 90856, -45044, 35690, -9729, -39860, 39240, 63836, 45425, 62001, -50761, 79597, -39945, 20113, -49866, 48890, -4462, 29139, -3787, 3476, 44899, 14737, 4546, 17031, -8686, 21306, 12695, 22117, 18823, 28258, 81427, -35398, 70483, -14906, tail", "row-1650": "head, -97274, 32608, 81253, 10804, -59384, 158, 34136, -12360, -57292, -22037, 44946, 70219, 30741, -6148, -83677, 31216, 42648, 87439, -13366, 13778, 60114, 31967, 27819, -63216, -26315, -6627, -93004, -22639, 96789, -68846, -50666, 77665, -66583, -21745, 4881, 45650, 39348, -59813, 74023, -77738, 76586, -70125, -56651, 70244, -11736, 57283, 74704, 95688, 92010, -21597, 56105, -49939, 48193, -45055, 18989, 66057, -22105, 9616, 61275, 
-10787, -37344, 26385, 42718, -34943, tail", "row-1651": "head, -21940, -95082, -18404, -95781, -87941, 49854, 37296, 21858, -49858, -12496, 88820, -13865, 91139, 99809, 88151, -24750, -15083, -89531, -69367, -57877, 83078, 27420, 9935, -90532, 50654, 45845, -19071, 1395, -53708, -33655, 30097, -56290, -24493, -8489, -57873, -17618, -76984, -86678, 61724, -48598, 93149, 740, -13262, 6291, -98345, -68378, -21371, 35432, -2630, 1690, 42630, 22430, 15068, -22483, 87726, -82481, 78607, -53201, -94833, 54983, -50070, -51051, -73775, -36456, tail", "row-1652": "head, -56551, -3022, 12519, -53060, 52079, 34396, 89400, -26875, 70946, -29664, -89135, -93935, 87636, 77252, 18242, -31564, 57949, 18688, 89273, 19237, 54141, -69758, 36874, 93440, -72006, 94888, 8884, -66321, 37981, 18344, -53548, -14643, -35272, -90433, -91558, 92154, -30636, -91369, -22447, 41020, -22215, -70960, -78892, 3926, -8935, 3844, 98874, -96751, -31419, -10405, -86090, -96213, 83354, -29655, -67474, -40301, 21249, -50375, 34164, -51957, 57597, 43229, 21598, 46054, tail", "row-1653": "head, 49879, 75525, -92971, 68192, 18505, 64890, 92169, -68252, -34367, -63174, -26774, 4299, -56311, 829, -36608, -7802, -11973, 88504, 44741, -37240, 70601, -58906, -26992, 65432, 94178, -67682, -39253, 37119, 40671, 86387, -76504, 83719, 38013, -39393, -67539, 55674, 6510, -31680, 4204, 67543, -15508, 79550, 90496, 3188, 43783, -37434, -30367, -4210, 81786, -38226, -43464, -46035, 61382, -54198, -97342, -77179, 5449, -2049, 37716, 5392, 43837, 40335, 57981, -95372, tail", "row-1654": "head, -2576, -12821, 67900, 31924, 18417, 11792, 66003, -78815, -20731, -93769, 704, 41243, 18271, 75860, -36950, -57343, 620, 52933, 27447, 29592, 72877, -10862, -94294, -60001, -81760, 49754, 33669, 91011, 55069, -13143, 83987, 10431, 30333, 94477, 88638, -43340, 20129, 37458, -94441, -87210, 22375, 25779, -30700, 82977, 11406, -68426, -97301, -71105, 38102, -717, 27631, -22526, -74427, -53679, -12880, -322, -59641, 13655, -58061, -16030, -7941, -25892, -73948, -18317, tail", "row-1655": "head, 91392, 14571, 16451, -15651, 56661, 33461, 64481, 80412, 80203, 37706, 59322, 23695, 38818, -77571, -10767, 36496, 61246, -30291, 99794, 2323, 595, -86941, 73177, -63109, -83655, 68332, -41480, 52654, 41019, 63329, -60937, -45551, -62856, -2466, 90626, -3912, 84595, 81264, -46329, -74179, 31981, 42542, 14038, -28733, -12283, 24531, 8702, 47993, 36162, -41023, 51817, -19960, -65321, 31846, -49658, 29017, -73156, 78565, -4413, -52799, -20930, -33578, -58785, -86709, tail", "row-1656": "head, 98888, -44710, 62844, -80576, 68388, -3862, -98158, -25801, 22862, 43233, 39605, 28054, 68751, -67645, 42428, -36505, -58326, 255, 6810, -43767, -38966, 29279, 43612, 65148, 45229, 74079, 88973, -98568, 1859, -57343, -97318, 2946, -70293, -34857, -19616, -7361, 14994, -33553, 32860, 81858, 64583, 83329, -50132, -23549, 8623, -43826, -39465, 96962, -14101, 60453, -21716, 2266, -13731, 31222, -84004, -50304, 11332, 58892, -57010, -74524, 6601, 39158, -89535, 43075, tail", "row-1657": "head, -38357, 64983, 34561, -69532, -2631, 46153, 94246, 33946, -12603, 43718, 18138, 38923, -42156, -17667, -25563, 3707, -51208, -64589, -67176, 7758, -88857, -44896, -71334, 7854, -79806, -38420, -42441, 35012, 56701, 41030, 49372, -79444, 6455, 13156, -13095, 40794, -7256, -46897, 52241, 60740, 36050, -1446, -91037, 95295, -8599, -82492, -54416, -70539, 14971, -45196, -18803, -83140, 44384, 7884, 67165, 12301, 79950, -88289, 41117, -48861, 12513, 82454, -43525, 5065, tail", "row-1658": "head, -36107, 
[auto-generated test fixture data: JSON entries "row-1659" through "row-1800", each mapping to a string of the form "head, <64 signed integers>, tail"; numeric payload elided]
86872, 67366, 52471, -44113, 14793, -77806, -7662, -53122, -84196, 59868, 40192, -66362, 43552, -41102, 12217, -4951, -78801, -9220, -42079, -38967, -27210, -26165, 74309, 23046, 52237, -19763, -81612, 55913, -37508, -73777, 74093, -50340, 14142, 58184, 10501, -81758, 16170, 37437, 77582, 14183, -25248, 43267, -65766, -53851, 82418, 33511, 34558, tail", "row-1801": "head, -66649, 14586, 34040, -11175, -51452, -96833, -45515, -33130, -56438, 30045, -68893, 44077, 76800, 35910, 58842, 85259, -76108, -57447, 93011, -83107, 84310, 53645, -72199, -63228, -72371, -1520, -5597, 72532, -48568, 99024, -54024, -88753, 40287, 26509, -40221, 43850, 57331, 34506, -14579, 80337, -27791, 45524, -73418, 33810, 22199, -92218, -70697, 6457, -47436, -30621, 85740, -55981, 87841, 14179, -11785, 18789, 39211, 11475, -13557, 82060, -30768, -30085, -44542, 8443, tail", "row-1802": "head, 95017, 20266, -61845, -99694, -14749, 28138, -26291, -66863, 19134, -87180, -24265, -1713, -2203, 99538, 12981, 94684, 23171, -97881, 19538, -71169, 95191, 50967, -27273, 28158, -77476, -47666, -87754, 26921, 35495, 9104, 37640, -16218, -23578, 87379, 86514, -47305, 98416, 76338, -22146, 55894, -46177, -78670, -16529, 95098, 40760, -5372, 52428, -70707, 92644, 64988, 54242, 86880, 15052, -61034, 57090, 71010, 80668, 6029, 56800, 68740, 62740, -93585, 55254, -40965, tail", "row-1803": "head, -47397, -73743, -3244, -49584, -18532, 51132, -51460, -67905, 64525, 96890, 52452, -99357, 70714, -28564, 21222, -83582, 25141, 52849, -39861, -24030, 36574, -81071, 18831, -82503, 33354, -35799, -4479, 72640, -89381, 16328, -26387, -73246, 41428, 56064, -41036, -83690, -15303, -11916, 26452, 70830, -54436, 35790, -6425, -14171, 77224, -45135, -14749, -11941, -8341, 73175, 63097, 39117, -58706, -99084, 77731, -32622, -24251, -46901, -874, -86801, -89834, -74659, -81662, -70282, tail", "row-1804": "head, 61823, -14790, 41776, -35294, 84879, -53802, 80572, 75516, -78835, -12323, 60900, 79838, 96943, -11980, 62902, 3092, 75372, 18230, -3877, -348, -20451, 37478, -64316, -45254, 99654, 14780, 61921, 58542, 66043, -87620, -11550, 68571, -32123, 96942, 45342, -24278, -28246, -58317, -42922, 62635, -71989, 90651, 71793, 7431, -42005, 94813, -49615, -62896, -76868, -10477, 78076, -29199, 86539, -81116, 96103, -20692, -90643, -71400, -33018, -55751, 61151, -27631, -84773, 16747, tail", "row-1805": "head, 87896, 15467, -78644, -14827, 27243, 38964, -84162, 49580, -36562, -42617, 73773, 56077, 12023, -38441, 4339, 39843, -42734, -70177, 23365, 44774, -43823, 3119, -3344, -97267, 42531, -77255, 32707, 70791, 10090, -99591, -18000, 8108, 81521, 36790, -47618, -83494, -99097, -54224, 13644, 54274, -11412, 77848, 80015, -43686, -97912, 52996, -49824, -53786, 93050, 94599, 51176, -41830, -17189, 24529, -16412, 47531, -56069, -35547, 51908, 47070, -11983, -40529, 67254, -81184, tail", "row-1806": "head, -14782, -95414, 36675, 34162, -19221, -1628, -3792, -17726, -42928, 16031, -458, -91850, 31182, 83404, -1466, -4233, 79929, 91246, -79775, 60782, 85377, -99091, 46133, -94479, 78748, -18048, 19810, -57332, 36233, 85933, -24938, 78562, 3379, -51362, -55766, -16323, 36398, 38117, 66471, 31712, -15406, 52195, 46927, 65038, 56693, -69843, -96983, -72739, 77311, -79135, -95867, 48981, 52805, -77947, 85389, 35062, -82679, 46796, -74807, -50100, 36148, 90994, 92036, 16079, tail", "row-1807": "head, 59269, -62952, -43955, 93508, 93360, 87427, -82361, 11182, 61763, -19608, 64939, 16695, -9732, 65477, 96247, -3535, -98001, -44555, 46030, -58186, 28075, 41943, -61479, 
-33136, -10217, 90633, 9096, -92416, 4309, 16772, -78871, 16377, -91327, -90106, 35674, 60207, -62128, -20187, -78059, 37499, -75390, 41397, -76787, 19688, 29822, 44688, -47273, -46687, -50507, -78722, -7307, 78235, 66504, 24478, 66610, -7513, -11956, 651, -64471, -94249, -9021, -51360, -16620, 80195, tail", "row-1808": "head, -30903, 88460, -24662, 40930, 50628, -39628, -5711, 66515, 15303, 16037, 23101, -8584, 32750, -42068, -89923, 80719, 84871, 82501, 28579, 24895, -2360, -5883, -92607, -97913, 41800, 17143, -96324, 59448, 38612, -16604, -43552, 71630, 55597, 7387, -91447, -98298, -46014, 41920, -55054, 12700, 61390, 99528, -81722, -22453, 79389, -51365, 80404, 34928, 73442, 94068, -96802, -44144, 66120, 28505, 72955, 54452, -20800, -26631, -7987, 63982, 511, 69175, -88247, -17005, tail", "row-1809": "head, 93141, -64814, -73901, 21626, -68781, 56383, -85177, 9613, -10249, -20097, -84603, -19136, 51592, -3801, -13375, 22965, -71175, 75468, 64322, -51741, 31882, -66985, -491, -79589, 88812, -81668, 21228, -48646, -9196, 22904, -87680, -34081, -58890, 9101, -35046, 58688, 93865, 88024, 74738, -39120, 75147, 80757, 85396, -42379, 41905, 18936, -25161, -2658, -2043, 27893, -97880, 79025, 66717, 72126, -60546, -51535, -24695, -70283, -39245, -55380, 63152, 91972, -55960, 15534, tail", "row-1810": "head, 81940, 38386, -15916, -58244, 99581, 57965, -92236, 14256, -37003, 90047, 68254, -41826, 94874, 65683, 39489, 88599, -61461, -62047, 69823, -7496, -8668, 54375, 18864, 32068, -84082, 89352, 37070, 18796, -98106, -90247, -86502, 75419, 76859, -63647, -23485, -80387, -88030, 72745, 34141, -60274, -40933, -99206, -45423, 34293, -86356, 82524, -40983, -27568, -82917, 48516, 42744, 4922, -79997, 86827, 47198, -93625, -13025, 20677, 93630, 48030, -91292, -4882, 96806, 40854, tail", "row-1811": "head, -8973, 70893, 38162, -26594, -90370, 78242, 89257, -23281, -71650, -86645, -23706, -84998, -93328, -56930, 73887, 40041, 41220, 74670, 16481, 46192, 63970, -5858, 14448, 87133, -59864, -12380, -43980, -63155, -84165, 42435, -12191, 6260, -76544, -15226, 66951, -77997, -96471, 59394, 94189, 98303, -55663, 16408, 63441, 65054, -89719, -64834, -45706, -96795, 48118, 65495, -70826, 18717, 37138, 12589, 35461, -66784, -91657, -41467, -69886, 49476, -56398, 66360, -92751, -39219, tail", "row-1812": "head, -41471, -62673, -72915, -17860, 95649, 75558, -6372, 59511, 92523, 47046, 79735, -79319, 3209, -66428, 80203, 31756, -76417, 47644, -33817, 76589, -6728, 52552, -72497, -59486, 60574, -26770, 42296, -40260, -87637, -38688, -69902, -18111, -81996, -83258, 2546, 38641, -21483, -3538, 21390, -31663, 11540, 92164, -80032, 34486, -57303, -99194, -12153, -97609, 74161, -75747, -97868, -44704, -23415, 63498, -31170, 26791, -73588, -91726, 90367, -35649, 16645, -22983, -17855, -76591, tail", "row-1813": "head, -44684, 60695, 96895, 52429, -3693, 12020, 27131, -99134, 45139, -70589, 33159, -37866, 41930, 33665, 43011, 14561, 74827, 16065, 72337, -87166, -4966, 5345, -17351, -94546, 97930, -42487, 17594, -31663, -81045, -1306, -68557, -69359, 34058, -27, 69423, -22157, 71828, 17447, 61475, -22540, 47412, 75499, -35407, -72749, 46539, -54129, -82189, -33239, 48719, 3440, 75774, 10902, -1839, 88998, -13734, -98960, 87375, 75977, 63042, -3168, -63766, 89694, -34856, -36715, tail", "row-1814": "head, -78276, -71486, -49348, 64882, 78965, -25980, 96673, -15965, 58403, -31796, -8469, -62113, 88058, -5619, -18716, 63251, -62484, -17041, -37302, -86590, 9956, 6974, 97468, -34266, -95775, 88754, 62840, 78404, -46311, 34222, 
59603, 16166, 90472, 68353, -22380, 29921, -85964, 93792, 87628, -11988, -39210, 38997, 14482, -7961, 66256, -71073, -99856, -66482, 79686, -96571, 23826, -84983, 47031, 57669, -23561, -15338, -1252, -68074, 11368, 51741, -24434, 37539, -21423, -79793, tail", "row-1815": "head, 74270, -36679, -39792, 38460, 6568, -87189, -79293, -83515, 61937, -18840, 79940, 28808, 89055, -78447, -70092, 87051, -54720, 39471, 89793, -95163, 23304, 54790, 4619, 47367, -50378, -21771, -29851, 19019, 52834, -23625, 45955, 8840, -76463, 27590, -19203, 50332, 22177, -21312, -96274, 30990, 40450, 8333, -11653, 11312, -44807, 40858, -89612, 71032, -52145, -16517, 16828, -57394, 73084, 87292, -24043, -71094, 26429, -1506, -99397, 74222, 46470, 28837, -67389, -31549, tail", "row-1816": "head, 8587, 38159, 7050, -41693, -52818, 84362, -3555, -56310, 2681, 78093, 30527, 51059, -86381, -44463, -12743, 10844, -1102, -5440, -56689, -18242, 75951, 80146, -99515, -83099, -57297, 25705, 64993, -86319, -94311, -83453, -77018, -64822, -50101, -77492, -71114, 1181, -86839, 15315, -78706, 23321, 82088, 66894, -79075, 95041, 13917, 18349, 3687, -64588, -53775, 15286, -21376, -23354, 39616, 63126, 1504, 48961, 27281, 96165, -1688, 47959, -91639, -64490, 16267, 5313, tail", "row-1817": "head, 97861, 84302, -57291, -40834, -60649, 59270, 99767, 65722, 22988, 47561, -95074, 74567, 58469, -17618, 96593, 25309, -84628, -35822, 45256, -1225, -82942, -61463, -37610, 90564, -95781, 85284, 95212, 98672, 34563, -98871, 15299, 57098, 1466, -84357, -9889, -82070, 35908, -18693, 18525, -6383, 96241, 22255, -42159, 3697, -51486, -80507, 51913, -82376, 55006, -44994, -18747, 82898, 16825, -94112, 99694, 30734, 58610, -20498, 70024, -51726, 85131, 48546, -16267, -54665, tail", "row-1818": "head, 20122, -37161, -72470, -67554, -80874, -4281, 13733, -68471, 26441, -70784, -80266, -9023, -55478, -55842, 23803, -34263, -66671, -14029, 78518, 5853, 58609, 77061, -48802, -71486, 43500, 37567, 1845, 20177, 84634, -69609, 61719, 43008, -68165, -25636, 39953, -9266, -2953, -96185, -43702, 82970, -67391, 58560, 58830, 78818, 98305, 66391, 12334, 91323, -42865, 47036, -16433, -65695, 70262, -72688, -80710, 46819, 36549, 39576, -66080, 1129, 19491, -91347, -18140, 78147, tail", "row-1819": "head, -55383, -53688, -8410, 37204, -33832, -3793, 86138, -81372, -14492, -69512, 31600, -47186, -64810, -71466, -20592, 72756, -14700, -32978, -48461, 73157, 79874, 64054, 1248, 16319, -71166, 56739, 95909, 94295, -84372, 39408, 22333, -22837, 12534, -43195, -42880, -37370, 57556, -29548, -24419, -46865, -59946, -60409, -49405, 25755, -47442, 67237, 26803, 19334, -89262, -55853, -85010, 69982, 3723, 43204, -14179, -71031, -40357, 31308, 63877, -84333, 12681, 37841, 97604, 73064, tail", "row-1820": "head, -28353, -88589, -15908, 26089, 98444, 93319, -81349, 92208, -42601, -16485, 33960, -57300, -13766, 93180, 10048, 95958, 37822, 78586, 67885, -51010, 51084, -59498, -6872, -73215, 3261, 26403, 48959, -1433, -85536, 9239, -54887, -99170, -74384, 99944, -24944, 90337, -60378, 8109, 13068, -55089, -74271, -27949, -80318, -28909, -64338, -44564, 85488, -23737, 45246, -50991, 73289, -15087, -58109, 10610, -28705, 37533, -55219, 78282, 69360, -89478, 53527, -25641, 5532, -22349, tail", "row-1821": "head, 56317, -67571, -24872, -30231, 3140, 59375, -16900, 16168, 25417, 82919, 84793, -16877, -18582, -31615, 45468, 7795, 50943, -34641, -81161, -19227, 30482, -25369, 43928, -32731, -49643, -11950, -52937, 5866, -47995, 86472, -46045, 66779, -69772, 40954, 91647, -40415, 35253, 
-57311, -40629, 5849, -67294, 68294, -42889, -59056, 4031, -65963, -76875, -65206, 23812, 8206, 94904, -98254, -1993, 73937, 96512, 40421, -18615, 20849, -87090, -76603, -49087, 38004, -26889, -76375, tail", "row-1822": "head, -73179, -13901, -71405, -79826, 51387, -12280, 53283, 60795, 53887, 77053, 57246, -69016, -66562, -19688, -93959, -90232, -4265, -51752, -43659, 69224, -61452, 55508, -29950, 58579, 45361, -11057, 23544, 14660, -25519, 77675, -59411, 55473, 78830, -1765, 84628, 39746, -93357, 68487, -18486, 16807, -48561, -8864, -6619, 29099, -30297, -31857, 68133, -45483, 95916, 56786, -68012, -44331, 77568, -10588, 46912, 47554, -6820, -16739, 27039, 77440, 84112, -20932, 74957, 86575, tail", "row-1823": "head, 73173, 95645, 55740, 65164, 92319, -63175, -6770, 20319, -64726, -21758, -9890, -8105, -8830, -51012, -26204, 64121, -20047, -34185, -31438, 92402, -42301, -99352, 21457, -28849, 39787, 17361, -63059, 26382, 80169, -38427, 19326, -89577, -93837, -5523, 98466, 56295, -7179, -90516, -378, -69651, -27761, 8925, 91477, 23815, 94969, -24540, 89826, -3240, 16378, 43794, -38069, 62120, -6921, -11153, 27155, -28338, 83894, -5100, -6771, -57080, 73779, -59095, -95036, 94959, tail", "row-1824": "head, 96590, -73021, 99649, -74568, 78633, -25825, 50440, 85787, -44423, 43, -75286, -49625, -31059, -46746, -36707, 96425, 1293, -77647, -10508, 70823, 14796, -61251, 7263, 59093, -23640, 83410, -85039, 97799, 30675, 48203, -93334, 88916, -74562, -5949, 91792, 50278, -61210, -5672, 51877, 59796, -68571, -99234, 20380, 14556, -9947, -99818, 92275, -69301, -57781, 99594, -2144, -30223, 299, -26057, 81373, 40722, 14010, 1582, 83729, -8423, -18963, -54967, 69162, -23332, tail", "row-1825": "head, -73975, -80916, 20913, -55299, 28624, 88383, 1926, 36901, -79328, -47662, -90200, -48517, 2227, 52071, -57012, -19, -64234, 23872, -35397, 99288, -14836, 91683, -7868, 84073, 25717, -80501, 63887, -54017, 14298, -16047, -4877, -86368, 16384, -70081, 1604, 72697, 12309, 58486, 97760, 49629, -60464, -72223, -64465, 84102, -17147, -10536, -73280, -86459, 591, -85605, 27199, -96649, 88775, -83129, -81695, 40576, 80898, -15646, -39757, -26367, -20579, -27500, 95402, -51038, tail", "row-1826": "head, 68488, 48605, -73812, 1864, 62879, -59121, 92081, 6398, 63205, -90834, -56533, -97378, -25635, -26140, 95197, -24693, 1590, 75384, -56052, -26946, 6545, 67912, -74196, -10914, -60366, -49391, 30243, -99594, -71738, 53445, 78142, 6455, 56950, 34353, 49768, 63103, 11472, 83221, 3185, 35959, 77821, -65310, -62553, -33189, 93791, 12914, -99030, 32746, 40712, -52199, 70066, 8646, -43873, -32339, 48550, -58877, 18538, -43762, -79535, 59738, 9671, -76385, -73187, -64586, tail", "row-1827": "head, 39355, 68152, -20510, 57808, 84924, -15360, 75650, -90582, -8704, 93810, 55107, 98927, 87976, 45205, 3852, 57962, 87298, -98051, -57158, 74690, -71706, 8811, 4487, -72626, 99531, -65380, -10993, 30024, 20824, -32582, 14826, 94006, -60911, 23393, 7035, 86184, -1331, 64087, -39430, 84164, 11815, 56770, -39186, -3125, 73036, -57337, 32414, -9588, -27508, -5894, 55813, 79404, -73710, -55331, -17466, -62060, -27300, -10131, -83237, -50615, -69055, -29765, 16197, -48245, tail", "row-1828": "head, -80297, -95846, -87916, -51968, 63137, -42385, 26756, -31863, 82713, -69481, 23339, -84424, -86557, -80930, 59211, 92028, -7736, 87400, 83145, -72625, 4016, -66933, -87313, -69811, -40256, 74409, 74305, 92140, -21894, 35564, -28087, 37876, -61712, -18051, -1314, -14106, 25238, 72874, -98198, 55732, 95601, -38983, 15753, -20611, -49911, 
-22493, 24916, 60253, -16172, 9911, 61879, 84214, -7380, 9131, -8950, -49039, 72391, -35535, -51874, -97905, -32053, -98092, 39234, 24642, tail", "row-1829": "head, 14597, 18206, -56553, -73968, 24888, 25089, -75033, -48346, -27456, -51793, 49643, -42649, 93967, -26049, -50904, -12793, -80940, -37087, 34689, -68828, 35621, 29158, -46718, 28948, -25501, -30669, 85631, 46426, 95519, -35499, 46982, -14514, -73962, -656, 14410, -6647, -66802, 50985, -14750, -86377, -85236, -44340, 52325, -16395, -78035, -34451, 8660, -32714, 1136, -90692, -21937, -13584, 6666, 86415, 69458, 14598, 33423, 67210, -69676, -85964, -9169, 10971, 37210, -63293, tail", "row-1830": "head, 23215, 67139, -6385, -2072, 71011, -24981, 19857, -68867, -86864, 65658, 76334, 51320, 42818, 88162, 63259, 77765, -99331, 85945, 53774, -67914, -86883, 74395, -99482, 29864, -18499, -64304, -53759, 62641, -27496, -69528, -94540, -11285, 30238, -65325, -26246, -52769, 87029, 22187, -61115, -86890, 62472, 60422, -14650, -92258, 91064, 70285, -97997, -79103, 5952, -8887, -6911, 12476, -81453, 74127, 15831, -69984, 73932, 42178, 68544, 70088, -35279, -58569, -72088, 88339, tail", "row-1831": "head, 46144, 52732, -14304, -31407, 33158, -40749, 77732, -76173, -42904, 41544, 27293, -29992, 15820, 76281, 87757, 77051, -70902, -98471, 24020, -55494, -93445, 97223, 34565, 35943, 10844, 20095, -75352, 73323, -46263, -48792, -42608, 8277, 62213, 98265, 53423, 94471, 8517, 18533, 26832, 35736, 34954, -28129, 7063, 44406, 86709, 33042, -16638, 98859, -84651, 1873, 91007, 22538, 8363, -80190, 27573, 18386, 65349, -19250, 30740, 78012, 74862, 23238, -87725, 95090, tail", "row-1832": "head, 85573, 44027, -57972, 22187, 78428, 60117, -38066, 4805, 63850, 2427, 25555, 14935, 16939, 27718, -93329, 52716, 22088, -4179, 84999, -9203, -65734, 55528, -16030, -24655, -55750, -52104, -81080, -36309, 93856, -21830, 36092, -12096, 90352, -47607, 18395, -89808, 45383, -19902, 35220, 75553, -66679, 88432, -11081, -14798, -78675, -47957, 67889, 38685, 86979, 17499, -94446, -52633, 61181, -62268, 62035, 29794, -6035, -54022, 37359, 78916, 14486, 4727, 59073, -77397, tail", "row-1833": "head, -43911, 41761, -34624, 19441, 65992, -93382, 85908, 61480, 78276, -36944, -66037, -97318, 2723, 21091, 72933, -67235, 48735, -79180, 74651, -78689, -56116, 46970, -35449, -93535, 28108, -7196, 1414, 46056, -25041, 86592, -55630, 78150, 51779, -7489, -10072, 59576, 76054, 83447, 67198, 8444, -71092, -62092, -66318, 51293, 9425, -52150, 97136, -62724, 62784, -50408, 89056, 86942, -72336, -78215, 30787, -80474, -25837, 83350, -60560, -40338, -52964, 37731, -25415, 34098, tail", "row-1834": "head, 78850, 85873, 23177, -73304, 6811, -96128, -54861, 69024, -10473, 6975, -34562, -81518, 96838, 71166, -23025, 62602, 1922, -73821, -44423, 45991, 29522, 18982, 5322, 5143, -79277, 39964, -70227, -49701, -91634, -38024, -83667, 34247, -44794, 69961, -49193, 74411, 96296, 44210, 96201, -86240, -64363, 64984, 62947, 38146, 19857, -47663, 24115, 30631, 17175, 78792, 76052, 88279, -67639, 12064, 84348, 31293, 90706, 15870, 28429, -93548, -45593, 62387, 87854, 19099, tail", "row-1835": "head, -52830, 73931, 370, 17574, -78313, 36808, -74152, -32280, 463, -92677, -6824, -43666, 86400, 97109, 54692, -61407, 86133, -28357, 37444, -97418, -36591, -57238, -47248, 84005, -75151, -19166, -2288, 36124, 41538, -60440, -67858, -8924, -87633, -80077, 20661, -12098, -17805, 42029, 6647, -47405, -40078, 42746, 61182, 21825, -84684, -72279, 49773, -41475, -81231, 93527, -77785, -49446, -27159, 8263, 13662, 
42816, -54084, 79155, 94668, 31975, 39392, -90494, 83223, -36446, tail", "row-1836": "head, 51583, 75736, 54338, -45303, -10975, -35162, -36289, -99102, 18181, 79189, -58931, -36287, 93864, -26283, -39657, 19168, 8887, 71931, 62296, 19459, -77403, 53827, 4983, 29068, 3249, -52019, 36642, -62609, 45156, 14362, 54339, -85700, -40674, 53089, -35703, 94616, -79727, -89340, -58397, 55137, -34372, -62056, 36101, 85995, -93945, 31206, 49576, -92239, 87605, 16476, 29794, 95166, 5088, -47094, -60352, 2619, -56396, -71334, -67368, 90043, 53440, 43438, 80149, 92368, tail", "row-1837": "head, 78785, -28344, -47984, -70246, 40261, -41911, -58380, 70248, 45379, -32784, 35652, -10302, 2186, 37471, -61907, 92333, 42515, 88241, -61849, 62076, -63348, 66528, -59954, -2490, -22183, 79122, 20228, -40711, -38535, 48636, 12929, -72613, -86414, -328, -99494, 6735, -73885, 22108, -98827, -15824, 76331, 86756, -80549, 447, -29111, 41796, 38435, -71724, -59592, 80312, -22293, 33835, -58500, 52760, -38869, -91885, 11906, -45338, 2096, 42620, 48715, -97653, 71720, 97376, tail", "row-1838": "head, 19315, 47073, 2170, -28092, -69103, -10041, -45735, -28740, -62504, 50432, 60320, -74727, -30734, 37760, 29835, -14141, 52729, -1447, -11738, -45118, 80205, 35658, 40687, 75712, -23762, 95113, -87837, 59534, 15707, -43592, 89435, 22675, 21113, -1025, -90143, -58570, 27481, -27916, -65803, -14453, 82714, 4952, 92043, -21012, -83699, 83634, -49562, 36910, 52580, 51971, -15991, 68465, -36875, 79556, -17589, -49296, -60275, -56616, 47942, 62896, 85339, 28763, 30641, 94927, tail", "row-1839": "head, 23203, -21599, 25149, -35736, -85474, -96251, 81096, -93695, -49394, 37926, -13457, 62459, -37203, -80463, 85518, -63649, 26288, 78523, -91454, 62627, -45001, 42099, -82359, 13796, -72818, -51317, 58167, -53468, -54918, 51795, 45361, 2331, -50086, -67181, -46328, 66619, 45645, -40814, 90338, 54962, 92498, -34514, 46495, -11282, -7175, -25535, 35424, 49893, -77119, -57428, -80128, 31904, 84202, 20792, -73238, -30375, 75500, -92592, 73199, 55636, -64104, -41938, -57398, 51527, tail", "row-1840": "head, -61411, -96181, 29429, -95995, 15135, -71501, -23794, -9492, -67087, 81584, -60774, 68170, 60529, -39722, -90424, 18072, -71300, 98253, -90789, -5814, 79113, -20137, -18037, 62248, 85147, -8486, -62480, -54125, -56038, 89638, 64051, 53356, 38754, -9298, -76445, -19107, 31852, 55774, 62203, -8493, -67799, -11044, 5803, -36326, 41387, -44112, 4601, 29084, -88994, -62094, 71998, -20088, 93978, -15123, 96250, 78047, -65797, 29676, 78948, -61574, -11882, 3942, -47680, -82945, tail", "row-1841": "head, -74746, 33579, -41747, 47000, -84893, 98601, -80343, -31163, -95462, 6935, 4275, 92539, 34636, 75368, 95107, 18980, 93661, -33364, 91471, 81195, -71584, 90925, -79314, -22647, 31678, -88341, 7317, -64945, 23467, 15130, -12386, -25754, -69064, -2068, 57356, 79733, -39388, 82766, 90270, -26391, -62979, -30429, -97373, -14441, 96625, 64529, 26420, -40563, -17973, -28673, -54522, -34416, -58646, -3021, -92540, 30732, 66722, -32287, -82985, 65600, -26569, -11945, 40666, -20377, tail", "row-1842": "head, -58670, -19909, -8743, -85493, -31091, -96951, -14630, -58813, -80470, 31703, 48947, -61108, -28775, 36645, -13775, -72720, 88176, 43212, -36597, 63093, 53833, 23334, -39075, 1411, 42298, 86212, 34793, 4716, -53768, 28078, 71800, 83488, 21008, 11187, 89618, 62864, 66549, -35307, -16850, 94281, 95626, 35736, 80196, -38746, -53523, -10706, -64751, -65187, 70833, -56757, 17304, 88606, 90569, 50083, -63422, -84058, -41582, 67345, -35948, -98705, -74059, 
47101, 41798, 93921, tail", "row-1843": "head, 80507, -40785, 65410, -34856, -60717, 24711, 70747, -40760, -75755, 71995, -50344, -88831, 17233, 36930, 8977, 54236, -53614, -43789, -1597, 95945, -12727, -40009, -92199, 31112, 57568, -19845, -74305, 23103, 86624, -76315, -66352, 32087, -11801, -45338, 50616, -51163, -93965, 72785, -68810, -79713, -79596, 98627, -75574, -91178, -94286, -95418, -1644, -46717, -97642, 79361, 50065, -26612, 55752, 59659, 38467, 58665, 33304, -81616, 57186, -83743, 92293, 987, 96879, 8465, tail", "row-1844": "head, -56874, -48139, 92172, -55760, -427, -99554, 64716, 13629, 41498, 8035, 19259, 31959, 76254, 52458, 25085, -95457, -22644, 24034, 43055, -899, -84197, 96823, 52821, -91725, 31275, -96977, -60430, 57483, -63445, -7685, 83698, -66587, -64678, 94613, -57900, 99641, 19022, 88952, -62382, -27759, -72477, 49425, 47407, 91644, -78892, -19311, -12814, -7816, -78889, -32689, -59127, 83592, 289, 22866, -73837, -15434, 18778, 52079, -15040, -36284, 17416, -8700, 84574, -33922, tail", "row-1845": "head, -17285, 75394, 86197, -77266, -52028, -62473, -51227, 83596, -22395, 63059, -59365, 28675, 22496, -14388, -60199, 29206, 66705, -42547, -27054, -46103, 12004, 22803, -47676, -73399, 90168, 32639, 54026, 84409, 60178, -50838, -39790, -72963, -45016, 50216, -60139, 82709, 11869, 38377, -62841, 77850, -11457, 34149, -62314, 15907, -97661, 52703, -81400, 9104, 75065, -3114, -68235, -9451, -16645, 83907, 97916, -79015, -86413, 67046, -1076, -4160, -44225, 84144, -72790, -39356, tail", "row-1846": "head, -58070, 63543, 18754, 14631, -23581, -39913, -42259, 3018, -73787, -16003, -68462, 67721, 59046, 93006, -69689, -96465, -27392, -29127, 21363, -6525, -12712, -14373, -61476, -16031, -69164, -54865, 90585, -95663, -48136, 2551, -85184, -21607, 44835, 23727, 24348, 73445, 73511, 73254, -92761, 64980, -88488, 34869, -3568, -34379, -13077, 71302, 27622, -97181, -89076, -45334, 43587, -46343, 15829, -77847, -48692, 17892, 62794, -61267, 80727, -93919, 33602, -17681, 43320, -41008, tail", "row-1847": "head, 94334, 86035, 11376, 59794, -93964, -24761, 55458, 55591, 94440, 49760, -89785, -72760, -66046, 48252, -38742, -34732, -36751, -52080, -58822, 83622, 71050, 1863, -89533, 54610, -7457, -8606, -64825, -97652, -32515, -96666, -88335, -61779, 9965, 87983, -86731, -77628, 87377, 50201, -99057, -83797, 65291, 62391, -17472, -23274, -26812, 88261, 23228, -11166, 57760, -92018, -93044, -66121, -86663, 90162, -39760, -67157, -63954, -39505, 20855, 85815, 18656, -88910, 15772, 65881, tail", "row-1848": "head, -130, -85752, 95208, -33475, 11334, -93770, 59902, -72403, 4696, -72839, 42730, -42385, -32400, -16492, 95216, -41512, 99218, 49838, -5895, 89241, 19767, 53483, -51802, 82172, -25156, -34474, -36997, 91618, -91415, -98222, -81367, 38940, 4411, 94769, 54047, 90808, 70425, 28384, -70561, -67173, 66875, -27014, 39381, 42048, 22314, 38551, 26105, -97634, -1393, 31402, -28807, -46373, -20360, 85367, -92, -59525, -50859, -68564, -66365, 75167, 39916, 87547, 76572, 53649, tail", "row-1849": "head, 91039, 69056, 46585, -7871, 29188, -89454, 50061, 65135, -4186, -80347, 95376, -71907, 84744, -11974, 86351, -27974, 65653, -53058, -95370, 95042, -50727, -18737, -1943, 81803, 43077, -84876, -38244, 70308, 29758, -39318, 12241, -12732, 97705, 74813, -2957, 90602, -5185, 46394, 609, -28043, 94099, 69405, 65456, -3224, 49510, 8788, -52289, -69743, 83138, 88687, 61601, -10242, -85642, 60385, 15209, -22846, -28095, -4281, -76567, -40850, 60777, 30028, -40485, -59834, tail", "row-1850": "head, 
28336, -16165, -56122, 73379, 96084, 3248, -75393, 20832, 53015, -29319, -79720, -99589, 44932, 39518, 37396, 7447, 72042, -99411, 33891, 55203, 28437, 13850, -74913, 70103, -77972, 72335, -63373, -38469, 55199, -87286, 40450, -49569, 75243, 22462, 27948, 32115, 36970, -375, 13607, 97475, 44041, 55941, 17144, -86906, -91505, -78093, -42835, 81905, -38610, 15813, 98258, -96208, -49154, 85697, -82177, 9731, 47678, 42314, 15922, -21865, -48863, -11251, 29358, -44072, tail", "row-1851": "head, 43336, 60810, 96199, -62677, -88227, 24147, -73893, 91796, -28454, 35594, -81061, -74149, 919, 42420, 79151, -32782, -53377, 20171, -38087, 49693, -36402, -35351, 96385, -57142, 77782, -77985, 44593, 63018, -92605, -32633, 80960, 4999, 38979, 12558, 72655, 26603, 4587, 72953, 83990, -1230, -5468, -78878, 57554, 31150, 97280, 27864, 72055, 80977, 29237, 85027, 35803, 55808, 34881, 65897, -83082, 34312, -96516, -62904, -51603, 66486, -73338, -40994, 43020, 74481, tail", "row-1852": "head, -30457, 57082, 35219, -54892, 73121, -53716, -57031, 80989, -92373, 72818, 99756, -62145, 26812, -71210, -79183, 50230, -42161, 24436, 14726, 16403, 25020, -51979, 66590, 19855, -65312, 6786, -32048, 40368, -56269, 48525, -29584, -50750, 10784, -68585, -12, 45954, -55935, 13540, 83435, 9262, 51077, 45673, -52486, -56679, 79442, -25788, 15719, -24117, -76407, -10492, -4508, -61784, 91920, 24229, -11442, -96128, 39514, 4998, 71066, -92482, -3284, -73971, -16503, 30402, tail", "row-1853": "head, -94323, 85739, -41921, 62932, -70102, 98824, 82141, 42223, -39809, 78315, 59194, 92701, -67467, -14294, 90688, 52488, 85443, -15693, -79060, -13003, -57737, -50964, 60055, 88175, 81658, -19100, 92703, -29750, -74834, 30715, -53233, 99304, -60222, 35908, 48431, 71444, 93161, -46088, -67235, -73570, -81277, -9194, -78866, -65069, -10785, -11610, 3886, 72841, 32654, -38488, -11135, -6658, -64710, -2309, -83254, -20871, 91439, 13782, -50146, 88521, 79522, -61495, -62356, -76330, tail", "row-1854": "head, 23876, 14506, -31920, -53442, -45652, 92543, -49391, -25389, -56296, 61037, -90802, 25951, 15082, 37182, -5137, -94801, -60529, 71013, 56593, -25106, 98968, -90254, 3987, 23116, 7045, 34210, 86677, -49408, 90769, -43695, 76978, 6318, 88998, 78174, -60275, 21577, -74421, -58969, -24972, 18480, -25427, 44256, 58479, 60872, -35948, -62292, -6477, -14528, -14285, -69940, -39245, 56623, -47510, -88332, -73124, 52542, -75518, 12791, -14368, 81550, -4036, -94367, -99056, -26788, tail", "row-1855": "head, 38351, 66835, 35039, 85056, 83849, 53627, 94316, -16158, -83117, 74367, 89664, -74959, 40796, -58008, -51048, -76414, -54200, -11084, -55238, -94723, 15271, -35642, -52183, 75186, 20478, -62213, -92975, -38566, -16704, -51844, 63597, -67742, -88662, 65175, 54097, 28395, 62830, -58135, -17957, 12127, -31687, -84191, 70932, -56671, -54454, 31356, -71792, 12625, 63551, 94143, -68426, 87317, -94811, -61559, -98603, 13223, -40706, -48248, 25563, -53949, -81531, 94200, 95897, 19084, tail", "row-1856": "head, 38244, 35690, -85391, -9458, -65813, 77477, -64497, -30000, 44209, 64826, 49405, -54546, 6375, -80319, -57341, -3209, 3326, 88053, -25257, -84166, -24941, -16049, -84378, 15885, 49645, 37310, -45575, 43215, 43177, 86438, -67619, 95784, -70242, -90196, -43375, 20440, 9215, 80920, 70683, -68674, 96229, -44071, -14500, -7345, 52537, -87907, -83187, -61911, 2221, -36742, -36347, -79645, -53703, -86995, 61567, -1508, -31397, 44598, 27788, -66623, 41418, 48151, 38803, -61162, tail", "row-1857": "head, 54272, 59659, -18155, -33336, -33861, -18200, 
-24623, -13133, -24391, 82453, -77438, 15697, 27484, 85134, -86365, 70728, 56813, 64146, 60654, -880, 15724, -5726, -51038, 28587, 92531, -62230, 14345, -94797, 32332, 93401, 62361, -39408, 118, -38559, 42021, 54066, -29015, -21533, 92940, 93940, -60494, 23518, -10473, 1366, -19269, 26597, 71539, 84434, 26434, 9141, -54535, -12934, -67732, 90083, 96047, -90469, 11308, -21771, 65040, 9855, -43526, -80330, 57425, -59765, tail", "row-1858": "head, 24547, 61601, -15534, 46805, 80431, 62222, -83804, -45454, -65773, -89863, -75203, -69956, -91165, -2705, 91627, 53989, 49433, 9751, -59673, -17676, -50087, 42627, -58109, 38113, -98800, 88801, 95909, -37959, -15787, -86926, 6612, -30079, 741, -53265, -43054, 79153, 4917, 618, 88118, 22859, 6093, 29943, 26652, -46036, 48378, -83278, 93356, 69591, 15525, 8122, -8480, 65157, 44950, 54208, -68120, -94967, -96241, -54863, -10912, 51422, 96111, 71162, 22199, 67556, tail", "row-1859": "head, -6400, -93568, 33725, -39282, 31847, 8751, -62519, 23354, -58017, 51295, -33046, 90522, -79489, -59002, -32572, -37888, 69175, -58861, 1091, 87244, -23720, -66751, -28599, -81596, 76013, -67610, -66956, 51007, -21689, 23461, 72613, -210, 98940, -22717, 19563, -22612, -72337, -33031, 71625, -25879, 73223, -59431, -47319, -14074, 31348, 66, 83767, -55547, 34160, -59095, -30055, -30083, -4478, -57541, -12601, 65238, -55893, -12966, 91445, 90065, -69595, -38559, 97405, -34276, tail", "row-1860": "head, -39025, 90801, 52801, -27796, 18144, 42099, -44059, 24316, -62923, 78913, -70663, 96882, -22956, -14625, -47556, 67968, 1699, -14253, 76760, -30739, -64453, 13453, -77464, 9495, -32729, -26986, -84433, -60779, 85401, -71588, -18861, 63341, 6938, 26964, 85669, -65968, 25755, 98355, 95275, 38443, -4547, -60943, 3712, 19260, 68277, 51479, -77925, 99080, -97993, 57235, -68897, 69087, -88585, 60800, -910, -99096, -93155, -11005, -82581, 56390, -3065, -89071, -29970, -71921, tail", "row-1861": "head, -11864, 22530, 6246, 92996, -94754, -22623, 26380, 96382, -87248, 13381, -6056, 38107, 92105, 99265, 4462, -14240, -30633, 46542, 74081, -53684, 71172, -81348, 92625, 52164, 47145, -68189, 23702, -44178, 11813, 92582, 49147, -73521, 16088, 67304, -3687, 14624, 18131, 72363, 75020, -13998, 41197, -9324, 38130, -49482, 5937, 67427, -83613, -33002, 25758, -5809, -61407, 62026, 41630, -8455, 12183, -28720, -86251, 1546, -84095, 92913, -29293, -2356, -76006, -41767, tail", "row-1862": "head, -69396, -31363, -97981, 7343, -75720, -68800, -18919, -4113, 20527, 16548, -89172, 17405, -68177, -45127, -95051, 84939, 18214, 7626, -16480, 47530, 11187, 77806, 21241, 58643, 24899, 26583, -95551, 21577, -82499, 44069, 85860, 94451, -32814, 23681, 72702, 45615, 70766, 55974, 49489, 51935, 40550, -86149, 91503, 14655, 44571, 6551, 13427, -65000, -6744, 19568, -9699, 19265, -79944, 730, -99250, 2181, -26903, 4624, 9930, 24199, 65347, -54998, -98424, 51882, tail", "row-1863": "head, 72168, 88181, -71591, -64187, -92718, 94210, 29775, 10912, -46324, 36088, -547, -70755, -28344, -56318, -81688, -41, -83023, -85714, 34958, 76463, 24770, -44469, -42126, 2276, 18826, 47456, -63440, 19732, 19554, -68558, 36438, -24169, 38188, -71757, -34652, -75285, 95197, -13604, 9047, 40091, 24990, -42920, 54961, 58237, -88881, 61789, -27963, -44755, -7091, -47472, 86875, 80677, 29389, 4551, 38330, 82373, 37598, -84758, -91989, 48845, -22126, -93203, -64129, 60655, tail", "row-1864": "head, 8157, -17454, -56008, 36102, -32460, 72328, 36832, 97942, -38832, -14437, -827, 36902, -59615, -69865, -54035, 22129, -97154, -44842, 
83323, -70299, -21375, 50089, -89923, 30082, -48740, -35221, -30636, 2199, 78412, -43713, -42017, -80361, 53674, 62483, 64439, -82956, -52365, -54358, -85246, -8856, 93297, 15232, 28449, -75290, 17762, 36627, 29793, -22269, 43472, -52319, -95618, -96393, 45488, 42222, 33384, 68985, 2809, -10466, 64010, -48273, 41293, 35235, 22036, -27727, tail", "row-1865": "head, -10310, -59397, 95732, 72319, -50863, 69575, -26575, -87653, 82302, 3440, 12231, 24834, -75526, -73113, 69938, 34290, -64311, -25477, 32698, 96242, -88424, 8583, -49157, -79418, 28482, 6168, -54843, -86584, 79754, -1687, -23814, -53634, -97700, -67637, 71929, -35938, -91073, 65272, 1616, 20304, 17825, 97728, 76786, -92844, 70889, -73701, -54117, -3690, -67335, -85962, 54062, -10519, 98854, -54056, 944, 90622, -77284, -33073, 8112, 50922, -51692, -46208, -15409, -8781, tail", "row-1866": "head, -19969, -54665, -78745, 61142, 42927, -70031, -55037, 60822, -4968, -82471, 34770, -22263, -39488, -64495, 92878, -30603, -36917, -13526, -60728, -82974, -29342, 90081, 6157, 22258, 76799, 14626, 78053, -18676, 46667, 46807, 27370, 97318, 82834, -44604, 87339, -38228, -56007, -31523, 8075, 30265, -97594, 19164, -39498, 3329, -23597, -80681, -5781, 41619, 21237, -48507, 37104, 24162, -64321, 79644, -25215, 16209, 62600, -49197, 71518, 31377, 62652, -12541, -12844, -91255, tail", "row-1867": "head, -77510, 43031, -36855, 89905, 98336, -36518, -67290, -19881, 48312, 98969, -8835, 62875, -99269, 89743, 50982, -41671, -93512, 9766, 59125, -70544, -75062, -56237, -90813, 52500, -9874, -80803, -50432, 10903, 49069, 87620, 94490, -7397, -46170, 55136, 80058, 73047, 95975, -20258, 50132, -53942, 42063, -25769, -43348, -65450, 58462, 32330, 70874, 72241, 27428, -64843, 22426, -67041, 93844, -24016, 86924, -58751, 32762, -78291, 76632, -59871, -53851, 43168, -82036, -71153, tail", "row-1868": "head, -97798, -35574, 29105, -19870, 7831, 90598, 39206, 29189, -54526, 60086, 87984, -6598, 47527, -49021, -88442, 96187, 72463, -95805, 78816, 88928, 75691, 31534, 92447, 24108, -7411, -38394, 14350, -5063, -4748, 16650, 7548, -12394, -34287, -33881, 9977, -50006, 87812, -55091, 79253, -64862, -77389, -6027, -45118, -93993, -59552, -10353, -80810, 66864, 49047, -55675, 77665, 919, 38060, 63068, -71973, -49810, -66499, -61879, 92451, 45881, 7584, -62382, -88788, -54024, tail", "row-1869": "head, -36472, -2804, 2778, -51603, -86708, 95610, -83332, -92227, 35507, -25781, 68138, 2838, -89669, -30331, -72409, 29875, 8968, -86110, -9368, 98991, 23771, -65925, -21109, -22203, -29601, 23871, 1715, 17088, 5896, 73002, -56512, 82418, 48494, 72014, -18193, -35807, 38112, -74618, 51635, 33791, 25165, 37164, 75131, 28684, 99625, 33890, -84340, 83700, 71303, 10892, 7730, 17483, 27000, 10823, 30277, -31046, -65010, -28146, -66144, -93940, 53576, -67671, 88405, 43682, tail", "row-1870": "head, -32540, -30528, -98510, -49116, 225, -37805, 79276, 70684, -65086, -52847, 1789, -92630, -36818, -32393, -56377, -68607, -27502, -67642, 17467, -20290, -75263, 50061, -22370, -57771, -19848, 65025, -17464, -11973, 97355, -53288, -57480, 53925, 47233, 71290, -22939, 15740, -17824, -33241, -41465, -56571, -23692, 79095, -8835, 33520, 17753, 30862, 61760, -66931, -38811, 33142, -43056, -13087, -42661, 77856, -69409, 12477, -30989, 98797, 53231, 18706, 3330, -52490, -5894, -41520, tail", "row-1871": "head, -25241, 79728, 96554, -25566, -16298, 17479, -17371, 64539, -26765, 6617, -20478, 97549, -71479, 6501, 90398, -6459, -24889, 26387, -22110, -59825, 40991, 96353, 70553, -14370, -96054, 
-16634, -41206, -74606, -95076, -23935, -55946, 56183, 78070, -91595, -84017, -17508, 14438, -13350, -76365, 16943, 63423, -99498, -3435, -78500, 62020, -42251, -77873, 96832, -9896, -5209, 84700, 47021, -56975, -67120, 53491, 46136, 77975, 39059, 45917, -12897, -59665, 51978, 57471, 20502, tail", "row-1872": "head, -4054, -3769, -90624, -9303, -6873, 55994, 251, -6719, 53994, -48268, -61505, 22120, 70582, -58962, 65002, -21786, -8978, 9525, 23651, -95206, 66599, -80018, -1594, -63596, 73058, 53580, 43761, -79704, 64353, -87095, -40908, 90913, 8506, -55479, -91222, 16273, 7171, 24152, 77276, -53134, 81136, 37115, -61065, 66862, 26671, -80817, 12109, 75891, -76921, -30291, -54526, 61490, -5317, 69596, -3995, -3507, 52859, -88820, -47135, 34319, 72476, 74322, 24762, -5608, tail", "row-1873": "head, 90306, 2861, 55771, -48474, -20498, -43232, 63472, -28803, -53761, 34905, 35638, 98470, -65231, -50759, 86145, -32883, -43468, -16516, -96330, -97515, -98666, 95732, 77760, -87339, -5196, 12902, -4242, 67134, -21197, 41867, 49814, -33764, -9883, 60425, 29156, -57217, 38065, -15865, -57929, 62768, -43551, 36042, -76947, 59715, 95635, -54222, 37743, 42694, -35735, -3494, 63966, 12192, 26710, -71332, -92586, -39738, -23452, 2463, 28366, -56487, -721, 78027, 46259, 96147, tail", "row-1874": "head, -53657, -53870, -99479, 86835, -30170, -74538, 22671, -90708, 4001, -64865, -7968, 77819, -53113, 94899, -90811, 58043, -10949, -20834, 85639, 3989, 10441, 3755, 11582, -66414, 53347, -89744, -20361, -18571, 35650, -39423, -80156, 51341, 42779, 84512, -63208, -42173, 47361, -59734, 93774, -13934, 48384, 7946, -26796, -93800, -9685, -16189, -68230, 89930, 3169, 44102, -37698, -60135, -30752, 17923, -94036, 38466, 8985, 94024, 53402, -42523, -72052, 60141, -16359, 89495, tail", "row-1875": "head, 35927, -87872, 92705, 27849, -14469, 35691, -64701, -55151, 64675, 87664, -3523, 72221, -57224, -37391, 12530, -70250, -14447, -4738, -69061, -8010, 84864, -12992, 74029, -81428, -81925, -10477, -63605, 25329, 9495, -25558, -59715, 74373, 1872, -83776, 80224, -80435, -48039, 39576, -19544, 1335, 57988, 29778, -65225, -3808, 82676, 36643, 38146, 20465, 8413, 57222, 24194, 37155, -10729, 29254, 495, 15312, 28439, 79947, 87273, 57615, 86050, 90383, 90340, -14073, tail", "row-1876": "head, 73953, 63394, -1859, -98165, 76688, -576, 99463, -11755, -89723, 46893, -64502, -19783, -24110, -96547, -7788, -45682, -27351, -14508, 51011, 72253, 35908, -49099, -58593, -55201, -93482, -66723, -62252, 16621, -62236, -11822, -72381, 6066, -3243, 5419, -93547, -78198, 59623, 46030, 74221, -5568, -56036, -66328, 31335, 23306, -47107, 70278, 16016, 96877, -59067, -36005, -48053, -72481, 88990, -256, 35900, -53608, -11195, 34549, 39956, 98997, 45014, -24526, -26674, -59683, tail", "row-1877": "head, 69975, 34558, -58357, 89998, 54670, -1420, -43620, -45718, -51083, 83057, 87705, -94465, 68460, 43555, -14513, 33795, -49425, 73132, 60456, -3321, -38068, -16357, 83215, -65149, 9755, 97238, -67270, 67262, -45653, -93618, 19389, 78647, -17683, 21841, -77641, -4836, 218, -54194, 17899, 17666, -85123, -61174, 87094, 52868, -88328, 2610, -75982, -61166, 35757, 87656, -63404, -58049, -78414, 25486, 93651, 55926, -89184, -49546, -7760, 25717, 40420, -86883, 2838, -79469, tail", "row-1878": "head, -34062, -36249, 2361, 24883, 59505, 835, 92549, -32592, 89483, 58254, -90000, -79132, 52569, 56271, -91876, -64601, 42822, 18137, -57179, -39785, -75687, 7294, 28194, -32179, 7730, 2086, -39291, -11479, 28046, 46185, -40001, -72777, 70985, 82180, -63356, 
-28994, 94344, 36127, 68860, 24455, -10372, -10722, -48208, 81247, -88062, 99336, 37148, 13094, 4933, -24979, -57285, -39579, -90921, 30178, -6297, -5609, 19572, 2453, 35033, 48340, -61060, 44611, -26788, -18077, tail", "row-1879": "head, 32502, -73840, 65595, 61104, -5406, -55355, 41893, 84288, -36084, -53488, -13839, -10181, 72069, 50437, -35969, -37645, -3587, -32351, 26359, 58389, 91590, -14760, 65115, 70215, 92796, -89832, -68197, 31851, 3912, -90413, -96856, -50144, 18925, 48318, 347, 21838, 98432, -6565, 43856, -50116, -58684, -30, -67170, 81115, 60626, -4587, -24531, 70268, 82675, -9263, 52664, 4118, 51951, -4406, 13185, 46124, -45689, 61649, -47458, -12557, 30318, 63454, 64610, -8042, tail", "row-1880": "head, -73073, -46876, -20949, -73457, 29172, -87331, 98135, -15197, -21409, -32916, -19388, -45529, -15551, -46351, 73243, -82599, 90191, -65231, -84659, 34228, 84396, 71672, 75339, 90227, 63017, 79236, -6409, 75362, 5817, 11686, -12841, -17094, 50137, -51968, -16324, -35433, 33718, -24819, -88095, 76675, 76655, 86978, -4432, 83118, -8021, 62461, 83017, -72472, -96659, -45293, 8044, -2120, -99845, 96965, 4035, 65061, 58065, -22285, 68168, 86703, -65734, 85372, 96553, 27406, tail", "row-1881": "head, 5689, 52932, -95811, 28503, -2144, -64318, -76999, -45147, -24785, -33518, 65007, -31620, -55093, 25053, 53723, 37970, -66666, 51256, -80432, -27107, 41881, -57082, -89196, 79128, 57614, 45814, -38863, 56549, -19008, -18001, -62009, 11086, 91982, 9263, 25655, 90731, 98780, -60748, 20667, -35505, -37825, 9309, 160, 14963, 53926, 54929, 67066, -20788, -99427, -68750, -77006, -89428, 79105, 29004, 59309, 28883, -15229, -17426, -48415, -13939, 73445, 17463, -64271, -13937, tail", "row-1882": "head, -48949, -19974, 24393, -93065, 45512, -37101, 69440, 31444, 46543, -93169, 4063, -75921, 89377, 62830, -14492, 75859, -86953, 50090, -41876, -95035, -24832, 96792, 11549, -81278, 66899, 43310, -83207, 81531, -52952, -57813, 74026, 2454, 66292, 18843, -41278, -31885, 40867, 7536, -96680, 91312, -79503, 94524, 51013, 11432, 5895, -52213, -80388, 53448, -80613, -81660, -26130, 86379, -1634, 7067, -87324, -11815, 80570, 93381, -41449, -76219, -55225, -91620, 9890, -81097, tail", "row-1883": "head, 49160, 19304, 73732, 13747, 18229, 8670, -44622, 87764, 3240, -41034, 9804, 25500, -19838, 22809, 49330, -48949, 97195, 15962, 66391, -29798, 24707, 46174, 87435, 71666, 91119, -28002, -48075, -77905, 8034, -77908, 33819, 65970, -32857, -76183, -32905, -97871, 10577, 39272, 59738, 10702, 58802, -29835, 2758, -82195, 55120, 37144, 96238, 22817, 81829, 18536, 73305, -15163, 41467, 37480, -35514, 92289, 28072, -30234, -74637, 67876, 93465, -33926, -42027, -57240, tail", "row-1884": "head, -41586, -73831, -64894, -17077, 40536, -63174, 35325, -64478, -56732, -3580, -23758, -58847, 74494, -18487, 98425, 32984, -72350, 47976, 21616, 63662, 80121, 68708, 16698, 78026, -8654, -8861, 66501, 82583, 58548, -34464, -44565, 45819, -88564, 54767, 15574, -62342, -32403, -85362, 42314, 6077, -83196, 47994, -71621, 85370, 15199, -32627, 58589, 1631, -90590, 2387, -26291, 57100, -76008, 56191, 67111, -6704, 69706, 51346, -52152, -64907, -14765, 48748, -29686, -51459, tail", "row-1885": "head, -17413, 78241, 4927, 96648, 88148, -64912, -20984, 45521, 25180, 93095, 31569, 39008, 55150, -37710, -93240, 99536, 12260, -83766, 546, -62215, 29685, -40724, -52468, 19701, 3182, -76628, -68002, -20788, -86579, 13842, -36329, -72552, 70568, -70296, 73056, 41663, 84789, 60587, 57185, -76624, 48863, 50526, -95096, -11831, 32788, -83110, 
-1708, 72785, -29076, 90362, -78983, 72147, -52291, 15805, 42957, -49288, -25487, -43708, -85265, 26539, 66162, 10565, -14009, 1199, tail", "row-1886": "head, -93737, 94000, 71398, -2190, -19581, 45531, 21805, 94803, 67428, 83033, -96453, -46861, -17168, 14720, 27583, -57395, -30836, -89986, 53966, -19394, -48952, 72656, 35464, 2567, -66837, -79394, 20296, 98260, -97065, -11639, -32082, 292, 42885, 32431, -82071, 5271, 20540, -58244, 44087, 39909, 55459, 52223, 13533, -49934, -25204, -45813, -5542, -87358, -59573, -11385, 17122, -7678, -32994, 77348, 82751, 4437, 93275, 20598, 55423, -82852, -60691, -79658, -95192, 2401, tail", "row-1887": "head, -46267, 94184, -33997, -50305, -60767, 62048, -73278, 14093, -93823, 41398, -45608, -35793, -67943, -22687, -3225, 9352, -80657, -19376, 4299, -51726, 13380, 31289, 97734, 77523, -24382, 62306, -17398, -94511, 86404, 49015, -60482, -94609, 51859, 81665, 21216, -89618, -31196, 60056, -16467, 58536, -57924, -31548, 32793, 26149, -69210, 66356, 15143, 4056, 30546, -73724, 88461, 29240, 46642, 15098, 33936, 91974, 61353, 96766, -5276, 54349, 85497, 70944, -68490, -5734, tail", "row-1888": "head, -19001, 27788, -25357, -5140, -32753, 26715, 17206, -83406, -48246, -81542, 33191, 97145, -69359, 57498, 45329, 1967, -40738, 2413, 54756, 66718, -71810, 81701, -17897, 42581, -35866, 31309, 63755, -49962, -2575, 14222, 12098, -72679, 1432, -90487, 33410, -23003, -87040, -67840, -763, -64367, 19011, -67670, 22431, -27498, 8147, -73110, -26530, 53336, -3727, 37744, 87584, -96371, -65045, 8618, -49137, 37584, -71917, 28669, 66948, -11189, -3956, -68163, -31064, 97454, tail", "row-1889": "head, -25709, 43002, 64688, 36208, 33304, 72122, -11694, -10289, -706, 83271, -34506, 58834, 33203, 46990, -20172, 58033, -22578, 43451, 83503, -19450, 65690, 32861, 22604, -19433, -87216, -30988, -27988, -63190, 96618, -83635, -77242, 79313, -75590, -98913, -65324, 94556, -15616, 40663, -26114, 78826, 46345, -92321, 9642, 66104, -19551, -91973, 21116, 36408, 49854, -89240, -25542, 63529, 65375, -79566, -67224, -5269, 80453, -33122, -24699, -95946, 50431, 62004, -56265, 96621, tail", "row-1890": "head, -40700, 86441, -7798, -60553, -14078, -92073, -17722, -48346, -74097, 36780, 67138, -68086, -3214, 3674, 71787, 23353, 73947, -62815, 24189, 54844, 47277, 65221, 61407, -13350, 35565, 23072, 45484, -8372, -45029, -81820, 25660, -31030, 47491, -39415, 28602, 76952, -51377, -80239, 34353, 67849, -44961, 27807, 48018, -95409, 59901, 78620, -18832, -10371, 88988, 16977, -55056, 45163, 11206, -60183, 87033, 53288, 76293, 88633, -93181, -44114, 27560, 96269, 17102, 1661, tail", "row-1891": "head, 56944, 60017, -64248, 88118, 76491, 51230, -14791, -97417, -14969, -71571, 5315, -54721, -60079, -17806, -68003, -96816, 26052, 13340, 41409, 35165, 13459, 49600, -26657, 94115, -50738, 98250, 93881, 23066, 60630, -36867, -67846, -4184, -31036, 67693, 75196, -71324, 67735, -159, -56202, -52460, -1993, -2134, -48209, -11536, 87219, -5042, -21616, -71562, 63409, -11549, -94546, -97648, 95112, -10162, 10541, 24168, 25912, 23972, -40100, -43174, 65386, 91313, 41110, 21218, tail", "row-1892": "head, -37489, 75344, -2972, 94908, -98705, -69521, 95233, -39747, 45255, 54837, 91329, 71922, 33941, 70449, -7140, -75385, -42958, 92652, -34266, -33835, 82365, 79846, 79742, -60728, -92280, -78107, -11868, 99185, -39305, -7482, -93528, -37575, -62433, -66511, -11325, 4530, -72236, 87707, -41376, -39061, -42544, -71011, 20929, 49169, 9220, -11961, 31043, 17494, -72220, 66871, 282, -21866, -32973, -99850, 
70582, 6140, -55190, -37508, 16246, -65149, -68774, 41798, 55747, -23519, tail", "row-1893": "head, -20558, -37336, 84512, -92484, -25963, -83450, 50728, 58102, 52646, 81846, 97754, -49149, -64064, 94561, -28105, -31432, 61113, -82300, -10566, 32492, -1252, -48420, 73964, -94102, -73219, 50287, -3218, -46330, -78584, -90558, 35413, -98441, -21077, -42399, -60626, -56220, 44756, -67236, -73224, -12714, -80990, -43139, -38390, 71045, 22999, 63863, 4927, -75123, -26103, -50694, 65345, 79772, -68154, 24567, -81815, 59133, 94088, 86475, 24340, 34985, -57500, 67113, -47255, 85564, tail", "row-1894": "head, 4051, 13517, -34853, -11939, -93279, 29036, -40815, -66607, 74907, -86496, -54352, -10351, 86753, 94634, -78355, -66781, 47879, -96984, -98034, -68573, -23332, -22303, 83955, -25884, -20854, -54807, -6506, 44131, -97640, -58048, -41289, 7580, -67373, -73609, -76351, 6819, 67321, 65374, 64255, -76325, 47120, -10764, -20135, -45167, 96632, -29447, 91406, 95949, -5806, -35086, -82225, 56366, 69703, -94701, 97312, 78634, -58133, -68644, 62307, -39943, -54591, -45051, 85943, -59074, tail", "row-1895": "head, 67196, -25395, 37743, -35562, 80325, 34383, -79530, -94859, -39431, -58636, -96203, 28676, 5652, 24834, 47201, -88598, -18127, 92019, 22904, -54265, -76313, -158, 86965, 94074, -22914, 40275, 56252, 67476, -69997, 26086, -95654, -42432, 50319, -4267, -40097, -21293, 5951, -68843, 27260, -22804, -54576, -1375, 49922, -47707, 98445, -91754, 71496, 30796, -33439, 7994, -28019, 49083, 89947, 72798, 85696, 15509, -21025, -35401, -91553, 77148, 43371, -41466, -33501, -54506, tail", "row-1896": "head, 4503, -64086, -12583, 64990, 8969, 76026, -19614, -87996, 67898, -98242, 94733, 7434, -79325, 21199, 54552, -48672, 62288, -54522, 46482, -52320, 26311, 40410, -45524, 97466, 27604, -56851, 15275, 33162, 58029, 87291, 96080, 92008, -27182, -47518, -25041, 84117, -65512, -91076, 33723, 81550, 99032, 25974, 61586, -58311, -59116, -71537, 76364, 70924, 55937, 26043, 75164, -76500, -66231, -9005, -4757, 16682, -85023, -79016, -56963, 40087, 47521, 49855, 3057, -17942, tail", "row-1897": "head, -24760, -67293, 58131, 19491, -15774, -20578, 89704, -8524, -20291, 7339, -58548, 6473, -83113, -97013, -83313, 89945, -47014, 77106, 44595, 12415, -94922, 85635, -72316, 44423, 6776, -93311, -65376, 25526, 32665, -34194, 68889, 77971, -80050, -33361, 38799, -64165, -17204, -34039, -51672, 13925, -15030, 96200, -10756, -40189, -38589, -96842, 27378, -83832, 23726, 44681, -11064, -78819, 98607, 61883, 57719, -71722, -22633, 95436, -74871, 23343, -51009, -76410, 31120, -35405, tail", "row-1898": "head, 83968, 73847, 14435, 85558, -33452, -40992, -78658, 9247, -73830, -56437, -81821, 90324, 24961, -39699, 63635, -84565, 98437, 18927, 52892, 81204, 9007, -64809, -57554, -91667, -91717, -87502, 37503, 42585, 45631, -96571, -70690, -10327, -29569, -72625, -61536, 8604, 25461, -96612, -45990, 73280, -68555, -28158, 71427, -61902, -42909, 98464, -47492, -73509, -91146, 25261, -58911, 7913, -18830, -83480, -78207, 2346, -58655, 8483, -23761, -62952, 57944, 23757, -14452, -95132, tail", "row-1899": "head, -20237, -65785, -66851, -30270, -32780, -74839, 6541, 87383, -37090, -87018, 45284, -16125, 32387, -28845, 44101, -35770, 40522, -9965, -71665, 83677, 68719, 45566, -67933, 52647, 55812, 74445, -4779, 7169, -4373, 98283, 73101, 99314, -29298, 66741, 88491, 21616, -10951, 81266, -98255, -99287, 909, -37370, -61011, -52418, -4745, -26439, -91019, 16113, -57061, -80722, -83590, -19233, 34851, 78453, 26782, 46016, -95076, -75454, 
-47123, 89464, -25527, -37976, -18252, 90731, tail", "row-1900": "head, -28369, 84147, -92948, 99400, -29838, 65795, -14149, -73252, -22874, -9684, 58826, -8841, 24630, 7619, -4343, -87422, 88301, -46302, -22818, -7816, -10554, 97148, -81547, 13884, -10731, -1251, -18539, 51146, -52352, 36323, 64822, 54529, -78961, 81419, 23107, 68596, -82000, 27849, -88451, -8497, 59944, -88747, -74961, 24292, -24754, 54996, 93790, -75800, -22086, 82416, 77135, -25182, 25675, 80746, 7903, 94416, 99976, 63046, 29371, 9371, -37631, 16216, 36793, -53724, tail", "row-1901": "head, 88666, 37739, -92703, -14397, -8455, -98239, 80764, 35494, 53172, -39772, -7630, -3638, 81546, 36116, -8609, 36562, -69453, 81628, -34938, -70690, 38918, 86916, 55809, -81468, 96569, -36644, 66899, -17026, -50563, -29382, -87617, -14143, -56920, 67223, 39411, 17965, 10732, -47218, -78005, 4929, -5356, -5744, 88694, -45753, 61596, 91320, -98436, 23077, 12392, -12848, -27729, 67883, -82059, -78939, 14605, -66197, -36611, 88257, 59628, 10671, 17998, 49372, -73104, 61890, tail", "row-1902": "head, 66038, 72449, -16268, -58909, 63223, -3647, -76731, 51115, 27086, -64019, 83609, -44868, -32302, -55100, 91611, 41101, -68454, -15205, 96498, 9482, -28074, 7635, 38614, -75061, -80192, 99637, 16022, 79590, -74478, -81033, -50124, -38002, 81265, -51767, 71963, 79101, 56579, 64998, -12339, 95037, 31102, -76249, 10663, 58741, -47499, 85572, 55747, -75846, -58252, -29066, 51475, 40451, 94703, -84985, -446, -83829, 84808, -14593, -93876, 80516, -38376, 45632, -66356, 42040, tail", "row-1903": "head, 42529, -26669, -74577, 82072, 73653, 6464, 28771, -68653, -31688, 63514, -71043, -4345, -1441, 65475, -99956, 63551, 71692, -71557, -22241, -78130, -99594, 52100, 60685, 8748, 3172, 75346, 61329, -63368, 997, 22592, 68563, 55814, 11573, -42581, -70229, 52000, -37342, 53041, 38290, -89403, -27710, -32211, 97389, -26357, 86754, -28868, -91889, -7761, 89133, -17017, 7285, 17806, 35640, -74605, 98367, -29914, 60350, -37432, 39051, -45169, 71057, 3909, -90537, 65663, tail", "row-1904": "head, -35955, 4632, -66262, -20471, -51465, -45905, -89805, -91582, 86417, -20321, 71732, 96209, 96679, -5611, -79032, 68518, -74493, -54604, 43712, 41912, -28087, 43424, -66762, 29720, 63148, 11619, 10187, 79709, 61228, -78944, 31613, 97662, -76084, 7127, -75915, -82543, -97667, 51392, 65664, 18463, -46770, 67464, 21054, 4456, 33981, 98939, 66364, -3266, -76137, 27585, -30932, -38857, 67531, -14614, 68054, 95867, -81053, 43283, 90658, 16445, 72106, -68904, 71979, -74273, tail", "row-1905": "head, 54795, -6181, -65016, 95638, -80260, -14001, -97624, 54327, 7489, 48639, -85184, 56546, 39489, 46474, 27675, -18790, 69039, 46543, 69489, 39874, 41808, -88626, -82774, -54847, 97298, -73195, 22472, -21147, -17203, -7659, 44871, 92214, -74037, 92905, 84273, -59972, -25351, 50568, -34752, -20325, 99852, -82351, 75274, 51607, 75396, -49737, 59430, 39123, -2667, 93352, -60589, -22937, 29572, -98204, 73292, 70994, 39408, 69073, 71837, 81321, -31050, 24404, -69217, -15028, tail", "row-1906": "head, 73874, 99584, 30865, 66354, -8319, 43508, -63242, 5086, 97916, -5827, -84060, -96947, 29582, -3990, -52144, 1025, 29860, 62925, 21961, 60943, -74319, -18221, 60348, 83767, 70137, -25707, -18147, -95866, -82726, 36745, -23133, 66377, 68002, 91758, 3760, 2876, -14119, -49278, 65697, 98896, 61369, -42014, -24333, -87596, -23029, 64401, -56704, 26001, 6728, -6954, -74846, -12293, -11983, -55673, -37622, 32896, -77712, 59505, 10918, -4854, 59041, 82084, 56482, 45487, tail", "row-1907": "head, 
-96925, -18538, 4503, -4185, 6069, 49257, 24287, 5977, 35706, 91011, 12849, -43959, 50556, -72947, 9425, 8479, -95880, 2790, 47961, -80153, 50922, 59557, 49232, -96708, -32395, 74994, 68505, 95316, 11654, 4299, -72114, 69207, -88461, -46606, -26398, 81147, 92447, -92981, -2320, -75470, -17308, -7444, 28334, -13557, 46248, -36402, -26454, -63418, 36212, -84574, -35484, -4245, -25039, -6191, -98682, 21368, -62099, -98974, -96196, -8142, 3461, 28209, -42000, 87865, tail", "row-1908": "head, -4287, 16040, -72612, -42852, -29244, -95549, 44607, -38846, -67608, -16176, 88727, -95621, -87814, 84137, -99504, -59694, -80079, 69040, 91601, 89400, 67195, -45110, -65526, 15170, -43349, 16768, 80071, -79301, 43651, 59028, 12535, -4942, 56312, -82354, -430, 25805, -56905, 51015, -75926, -1620, 80580, 4440, -8830, 49965, 56258, -2510, -35505, -27898, 39637, -27909, 60988, -43627, -43176, 32766, 97713, 91851, 15084, 95551, 96315, 99110, -46034, 67154, 11423, -44472, tail", "row-1909": "head, -11512, -94399, -45348, 82823, -47096, -18036, 54298, -63577, 77079, -54910, -7426, -89700, 37125, 9571, -41817, -87007, 33393, 43364, 95447, 68139, -74385, 87259, 6441, 96690, -5267, -25567, 19483, -44824, -90445, -34375, 82397, -7294, -54338, -86535, 92056, -75283, -49833, 81211, 45972, -46664, 77754, 69456, 49006, 46576, -60850, 34713, 67949, 25552, 76975, -66873, 40441, 28503, 70090, -3134, 20821, 56238, -203, 20998, 3638, -46713, -3652, 41273, -21252, 6026, tail", "row-1910": "head, -49272, 30056, -30766, -89675, -35567, -90218, 66818, 34466, 73681, 27604, 25911, 37385, 30413, 85033, 34231, 89113, 83364, 47538, 58850, 69292, -86833, 36736, 9284, 337, -28965, -22022, -62698, 88138, 61545, 74072, -6676, -24391, -83150, -8310, -73538, -29489, -63480, 48066, -20077, -15129, -59824, 94743, 44961, -17316, 80402, 20988, -8580, -52239, -1974, 13864, 59628, 1473, 12215, -45147, 43135, 46476, 65894, 20017, -78716, 97302, -60302, 90402, 22338, -36973, tail", "row-1911": "head, -63418, -29312, 97258, 17940, -91947, -66593, -87173, 30336, -68223, 51068, -60170, -31402, 88498, 93507, -49602, -9722, -1626, 1295, 7940, -87781, 43769, -741, -59287, 72469, 21870, -2770, -66084, -32940, -99793, 94311, 65036, 33457, -63564, -23272, 30311, 19719, 48441, 99621, -63617, -45327, 73817, 25879, 93474, -54971, -2346, 13507, -28878, 65703, -26692, -75310, -34056, 69552, 59133, 85802, -33683, -52567, 7445, 77391, -59797, -45296, 36959, -81005, -79754, 3414, tail", "row-1912": "head, 40582, -44436, -87083, 82242, 9489, 14477, -86328, -42606, -31269, -27875, -65659, -39728, 42286, 28877, 7409, 48513, -62079, 87974, -98036, 29687, 51360, -77913, -54169, 77062, -48577, -18973, 94454, -74074, -76380, -20652, 83440, 12119, 44726, -17291, -58454, 41660, -72611, -52250, -88001, 90288, -33155, 95223, 59752, -55643, -58960, -49895, -20208, 34410, -58582, -23374, -41264, 63999, -7208, 48551, 48991, 21487, 87464, -71707, -21138, -35130, 55200, 9795, 55763, -88921, tail", "row-1913": "head, 17953, 18877, -1564, 52111, 402, -10997, -29086, 18195, -21066, 26678, -90614, -83415, -55171, -39483, -71003, -47699, 2230, 853, -54351, -71148, 2704, 65014, -39550, -15139, 54320, -16464, 90043, 10217, 52018, 84285, 69402, 75000, -86687, 63562, -58172, 16406, -79749, 94230, -56147, -44828, 20925, 37188, -23078, -40783, 86663, -40409, -64828, 71766, 46760, 87478, 92924, -8944, -2036, 87814, 60454, 16700, 4982, 41731, 27386, 70874, 91521, 66677, 46743, 54285, tail", "row-1914": "head, -8405, 98145, -6913, 30285, 19882, -59478, -9378, -49503, 83637, -73263, -76047, 
-82355, -77465, 43460, 48741, -96539, 16468, -67898, 31288, 10413, -9576, 34253, 54092, -73332, -61696, -54724, 84117, -58642, -3286, -68977, -94710, 19123, 69950, -807, 44541, -72306, -78501, 30341, 72725, 71447, 36740, -62759, -8667, -81863, -97441, 53297, -22190, 9946, 22542, 19176, 49860, -60587, -51755, 17711, 61871, 59307, 69377, 90817, -38703, -53786, -38151, 73270, 90169, -20844, tail", "row-1915": "head, -69661, -78202, -33823, 85066, -12656, -56733, -47526, 20887, 27304, 19484, -73367, -42060, -39661, -51928, -17991, -21737, 42960, 54612, -64983, 35672, 95336, -2396, -33706, 23400, -6137, -9702, 33457, 76887, -78432, -75935, 97334, -33570, 40133, -94264, -52063, -88237, -10038, -49678, 35840, -19214, 64233, 88516, 69508, 94388, 57670, -24998, 6818, -58117, -77584, -40880, 59806, 26770, -4812, 92015, -87006, 4140, 84333, -28719, 83666, 32028, -78773, -78538, 80650, 44711, tail", "row-1916": "head, -61928, 67471, -48836, -25370, -24799, 27213, -15302, 75514, 371, -75397, -29489, -24574, 29051, 21844, -47760, 98645, 76360, -81164, 84178, -19791, 13355, 25527, -93814, -70214, 41308, 55717, -17180, 44954, 51640, 66706, 64332, -28938, 85295, -3978, 23826, -69569, -24243, 87559, -14511, -37622, 88048, 49579, 2304, -84003, -61961, 94356, 99799, 68741, 61616, -41233, -81732, -78295, -54090, -85840, -97366, -72915, -88648, -1187, 39917, 49330, -16391, -8444, -77753, 67274, tail", "row-1917": "head, 95079, -66221, -5712, 19345, 52668, -80008, 91077, -76385, -33014, -42652, -76304, 29404, -8666, -4243, 82601, 30110, 16290, -28125, 20466, 73878, -78284, -67674, -87996, -94864, 35554, -1877, -4715, -67560, 53436, 16182, -52204, -6915, 6289, 53013, 89721, -72641, 58534, -56427, -90495, -79848, -55208, 93304, -28317, -69676, -26001, -51992, -57090, 77828, 26716, -74155, 77152, -55484, 2427, 35001, -8204, -84137, -74289, -50832, -33249, -41903, 15187, 23595, 6666, -36977, tail", "row-1918": "head, -91955, 71448, 54325, 82264, -94267, 44415, 26864, -35154, 18753, -47701, 94434, -97373, -79709, -60117, 8044, -40480, 90698, 14533, 11037, 36336, 83103, 56582, 23002, 43249, 63038, 41504, -7608, -88010, -62189, -71429, 50036, -23708, -54479, 85220, -17440, -67953, -6794, -92730, -40975, -84051, 47752, -72274, 37039, 49038, -73915, 91276, -67232, -76022, -31239, 85141, 90046, 36697, 13365, 52262, -32447, -69721, 70920, -37554, 29533, -40467, -90318, 67810, -69021, 21532, tail", "row-1919": "head, 21769, 92722, 31870, -90565, -24652, 9417, -67624, 25238, -41415, 99576, -10728, -74294, -6908, 71063, -58625, 5200, -65547, 75910, 67073, -47127, -94811, 44824, -30899, 90074, 56065, -74636, 32206, 94953, 11058, -3540, 66869, -2896, 28187, -56996, -2670, 19735, 49290, -35877, 38017, 63009, -96173, 51069, 55978, 98777, -64443, -21277, -32784, -37379, -51367, -34114, -7614, 99050, 2494, -94766, -23474, -17965, 92701, 15418, -54908, 37951, 97030, 4945, 13044, -38449, tail", "row-1920": "head, 89039, -54829, -81971, 47863, 28156, -42426, 26072, -86230, -46831, -31780, 18941, 28451, -5785, 77591, -70670, 18144, 65243, -82788, -32068, -69318, 44752, 1887, -71295, 85430, -87701, 98237, -23851, -21333, -80206, 75907, 24834, -83955, 56708, 31846, 68810, -82038, 57224, -77014, -91632, -63351, -81571, -73204, -13998, 30147, -36619, 64825, 40416, 45039, -46447, -66460, 35777, 44300, -38456, 43704, 48756, -42269, -84437, 52873, -34822, 7417, -34720, -12127, -80300, -79478, tail", "row-1921": "head, -263, -69209, 19926, 5478, 89766, -91169, 68623, 963, -96390, -18398, 89912, -56590, -24433, -13941, -34926, -62422, 51769, 
-22802, -69927, 14996, -70928, 61891, -20551, -43133, -81051, 1359, -78890, -28712, -38509, 77045, 97865, -48689, -63830, 52895, 72644, -68612, -81361, -79689, 34181, -36750, -95684, 77146, -90820, -64119, -15466, -57408, -18780, 82540, -66129, -34952, -47206, 26870, -10237, 44380, 29056, -59888, 14351, -72666, -4439, 74910, -87228, -66843, 23946, -46095, tail", "row-1922": "head, 54210, 79743, -86883, -56304, 58078, -2232, 15530, -52322, 13739, 25195, 22315, 82286, -19472, -74676, 94605, -52761, 38123, 65351, 52987, 38274, 25613, 47286, 23736, 88581, -52791, 57702, -67321, -96526, 96688, -90485, -39361, 32355, 28016, -48243, 42235, -60974, -89857, 82524, 5273, 37853, 88140, -75217, -38769, -1665, -55682, -14493, -12976, -95269, -65890, -82183, -71937, 27507, 15534, -48267, -40349, -71304, 31502, 78356, 80260, 76431, 76034, -33088, 11140, -22012, tail", "row-1923": "head, -47936, 97166, -25915, 3701, -28872, 78381, 29401, -33686, 78950, -83439, 75052, 62966, -98460, -63149, -28269, -31726, -85163, 48280, 64277, -49927, -30814, -80411, -36371, -33310, 92311, 9800, -71341, -99250, -84562, 90352, 55263, 15518, -18224, -72261, -6192, 60184, 19301, 47558, -42248, 48507, 19375, 89797, 99428, -47112, -67498, -53219, -85405, 68164, 40084, -58056, 41321, 28620, -26984, -85855, -61739, -3319, -39952, -80408, -29637, -7060, -97629, -60918, 75290, -35773, tail", "row-1924": "head, -3772, 27896, -47048, -19984, 82612, 82518, -76828, -24066, 22776, -13305, 93047, -6227, -72625, -1833, 41654, 26367, -86301, 22977, -2400, 92666, 55115, -35075, -43864, -23107, 58975, -92372, -74753, 65673, 1108, 50257, 66039, -8709, -94794, -6015, -53855, -43583, 90873, -85749, 18581, -88085, 21077, 26823, 1724, 67374, -31136, -76496, -35158, -20500, 61682, -65225, -5679, -44387, 15208, 28499, -51676, 96835, 7196, 69476, -81292, 19427, -68106, 90325, 60240, 8321, tail", "row-1925": "head, 48234, -85933, -23128, -30124, -35851, 15806, 2940, 57085, -29839, 48580, -41235, 78968, -1501, -74823, -99903, 708, -45112, 61919, 96681, -2975, -37388, 31443, -3936, 57915, -71723, -20545, -51463, 68663, 95162, 10917, -94892, 64538, -44841, -60288, -53071, 92900, -98977, -28385, 16928, 39903, 22465, 66135, 19974, -8987, -47026, 35810, 77809, 31084, -84259, -32448, -8841, -84295, 53871, -39005, -28545, 19947, 36080, 31310, -72175, 86981, -24438, 5906, -35527, -59556, tail", "row-1926": "head, 28939, -49976, 29015, -29921, -60035, -43886, 54144, -39851, 69974, 96243, 56200, -98429, -25874, 89547, -13683, 81102, -59270, -34071, -27433, 53475, -32971, 39063, 14015, -17943, 41778, 71264, 87378, -65930, -18976, 98116, -9824, 45669, -65654, 14378, 93097, -74239, -43035, -91033, 82218, 94357, -67562, -61687, -95561, 91180, 73238, 51145, 94478, -15092, -54601, 14236, -33181, 96200, 70761, 59319, -92738, 33753, 53783, -73264, -4645, 36414, -94191, 69500, -5384, -92286, tail", "row-1927": "head, 38521, 64600, 9988, 78454, 17351, 75531, -91695, 18448, 89063, -20318, 67142, -3269, 1678, -29937, -77726, -88894, 59480, 4115, -62752, -85088, 81957, -49325, -27797, -78612, -20029, -33639, -67183, 20549, 80967, -57576, -7103, 59993, 12974, -86123, 62087, 47251, -16014, 45364, -21636, -84993, 72568, 8341, -69113, -17939, 35275, 46973, 33037, 14046, 48829, 151, -58824, 39896, 68776, -38642, -12137, 9203, -37042, -99578, 96021, 8399, 956, -97161, -96069, -48429, tail", "row-1928": "head, -32390, 73534, 88719, -14383, -64744, 24371, 61230, 79242, 16017, 18838, 61616, 95793, 71753, -67715, -16432, -48524, -1846, -28507, -33039, 84345, 97841, 95210, 40995, 
9343, 63856, -83663, -40100, -73514, 59475, -89908, -16806, -93402, 32821, -26079, -11625, 40333, -44847, -33538, 38453, -69938, -73729, 3815, -48942, 77279, 7493, -33112, 43195, 54112, -27178, -40739, 49540, 7707, 29733, 51979, 75580, -17701, 96532, -57266, 53190, 85619, 97381, -88242, 49323, -56713, tail", "row-1929": "head, -57648, 61602, 91075, 89662, 82839, -77477, 11891, -82582, 70142, 88817, 91203, -15322, -1353, -32321, -40556, -28514, -38646, -83951, -14806, 61990, -66285, -87539, 58225, -6608, 46706, -90142, -70252, -69574, 4036, 58524, 62665, 7208, 30271, 74327, 21878, 69238, 81724, -66754, 42928, 82759, -85830, -65087, -98379, 68891, -89021, 6511, -87222, 23112, -29888, -93960, 24207, 70326, -27512, 31599, -45892, -86948, -87960, 30409, 20111, 12179, -82595, 34624, -4576, -71994, tail", "row-1930": "head, -45502, 27103, -61010, 96341, 5988, 61584, 10359, 92519, -92012, -93170, 1877, -86150, 57931, 61225, 36433, 20043, 43669, -84328, -58357, -49523, 262, 28650, 51152, -80259, 76266, 69405, -55452, -98884, 7274, -72930, 64798, -76175, -75178, 64974, 94723, 20396, -9308, -10758, 48439, 52282, 79455, 19737, 61786, -98817, 2617, -22518, -60778, 19645, 86431, -30263, 35361, -62938, -21662, -60477, -1293, 26100, -61857, -25274, -38359, 41265, 45541, -35923, 39358, -29364, tail", "row-1931": "head, -49487, -59944, -14751, 97317, -43663, 22225, 50939, 27934, 90202, -45361, 42981, 45675, -27420, -15684, -56694, 946, -63357, 42168, 29654, -86681, 94668, 50033, -44288, 8876, -30403, 37810, -56600, 37431, -37412, -54662, 27686, -13506, -28490, 30029, -4960, 44508, 74333, -84248, 62035, 82, -19013, 70528, 64632, -19032, 31287, 4569, -31539, 35383, -63312, -81134, 2883, 33312, -34550, 22750, -98863, 94527, -99293, 70975, -36961, 42323, 24542, 18452, 90370, 58195, tail", "row-1932": "head, 91924, -53997, -85191, -80755, -27530, -86915, 91245, 83094, 84611, -92387, 71986, -98570, 69870, 99509, 68863, -49849, -31228, 47211, 72271, 70977, -25751, 52443, 16024, 44688, -52947, 89938, -75823, 50435, -88866, -24205, 58769, -2962, 22522, -53723, 53451, -53465, 47218, 9378, 44998, -72357, -38444, 23266, -13767, -10681, -24566, 15941, 83108, 55970, -9810, -6270, -66156, 41082, 32385, -24683, 23013, 23504, 94161, -88459, -49258, 60493, 20561, -25155, 75757, 4831, tail", "row-1933": "head, 3684, -32333, -72596, -30734, 86363, 6088, -48700, -21522, 17150, -60689, -35931, 64630, -27855, -31964, 81946, 80372, 51838, 90205, 28736, 92016, -79652, 59142, 79141, -75751, 31374, -50424, 99096, 86262, 47144, -26260, 35714, 5385, 6384, 4083, 4416, 14017, 56832, 52295, -52004, 65063, 43618, -54500, 43243, 19257, 7551, -44157, -52909, -61917, 82797, -16840, 35401, 5228, 68816, 24843, 32729, 90390, 89896, 95897, 37772, 86524, 41884, 92876, 48113, 20503, tail", "row-1934": "head, 80032, -39334, -42939, -28997, -45767, 91762, 14520, -33688, -35012, -21273, -66791, 49322, -11489, -63269, 88072, -93305, 46348, -49860, 60365, 95043, -7918, 53930, 46129, -15425, -18882, 10083, -31716, 56329, 28253, 7072, 79022, -68412, 22658, -88384, -15370, 82060, 61882, 42811, 78900, -45684, -68135, -66140, -16588, -95330, 91007, -58444, 46, 27424, 13173, 86493, -79549, 10473, 29755, -8800, 28167, 69335, -54758, -35071, -87957, 79972, -71307, -46527, 43806, 473, tail", "row-1935": "head, 11315, -21325, 81884, -20441, -40756, -86366, 45423, -35432, 34252, -45932, -14782, -995, -30205, -37795, 12545, 69418, -38429, 47863, -9757, -25558, -24008, 93566, -41052, 27772, -6822, 86641, 94483, -95689, 33642, 25546, 95856, -38358, 63520, -78589, 
69834, -81862, 11881, 89305, -48795, -52521, -49538, -88439, -50271, 14140, -65178, 43315, 39016, -84321, -83744, -14854, 82316, 38098, -8982, 75835, -29142, -78198, 52466, -96328, 10422, -55680, 30211, -66235, 96718, -1764, tail", "row-1936": "head, 25778, -58462, -2136, 24333, 99891, 15926, -9384, -15889, -43159, -42516, 60350, -40915, -21867, -66063, -39215, -87604, 19381, 79460, -88068, -38909, -22529, -62309, 59904, -74997, -89977, -74371, 41205, -8083, -82495, -65211, -79278, 97327, -50980, -94560, -13251, -27034, -63021, -33876, -2508, -22535, 46228, -12858, 95579, -42116, -74639, 83732, -9880, 75317, -61257, -14309, -64216, 62199, -58051, 14159, 46454, 82375, -67192, 47977, -49824, -13050, 81666, -84784, 92229, -50616, tail", "row-1937": "head, 9422, -39262, -92701, -25193, -73559, -34901, 35640, 96279, 81885, 71389, 63978, -50226, -12975, -51212, 19148, -77648, 47471, -82776, 5759, -79903, -33416, -49272, -65470, -33401, -4942, -12604, -14798, -19612, -36228, -506, 37085, 81370, 3730, 56244, -2321, -91521, -43255, 42391, 70796, -63327, -57557, -65713, -21860, -35028, 683, 14116, 92771, -29398, -69486, 11495, -27529, -52444, 92024, 11559, 51881, 31557, -37743, -17676, -59571, -60072, 49547, -33932, -2263, -56380, tail", "row-1938": "head, 45100, 99582, 81340, -18781, -30702, 15823, -33330, -80092, 22521, 29770, -3253, -41602, 68234, -17781, 6081, 72460, -53375, 56720, 54705, 22781, -33890, -15019, -36775, -38074, -88703, 15244, 35434, 22661, -9014, 29659, 34022, -65294, 5289, 83387, -8472, 16007, 9087, 56112, 86525, -46671, 58481, -77227, -60506, -52878, -38749, 24736, 25773, -86706, -95963, 93864, -68781, -66671, -52356, 69821, -5652, -93379, 68913, 47673, -38104, 82443, 52408, 65865, -84110, 74979, tail", "row-1939": "head, 66178, -42935, 76307, 52529, 96476, 49304, 23290, -73988, -19252, -50932, 83713, -50269, -62633, 37216, -12431, 35807, -41010, -43315, 22745, -52062, 34436, -91641, 86844, 31181, -92719, 87959, 30327, -37880, 72815, -28851, -52017, -6749, -52123, 10119, -22, 31481, 87232, 67793, -61151, -28476, 2624, -3525, -8584, 16788, 42291, -40171, 12865, 71025, -93366, 98018, -93811, -92245, -15620, -52229, 4690, 49505, 6855, -8122, -13784, -45993, -40357, 54310, -29732, -5699, tail", "row-1940": "head, 25630, -27995, -74386, -46085, 24487, -62199, -49976, -12034, -92681, 11827, -57942, 4454, -5984, -36468, 21076, -30, -95446, -11318, -54628, -73747, -53592, 9133, 54227, -44951, -10264, -50794, -34962, -78083, 10348, 38854, 36116, -26924, -53175, -61498, 10184, -34719, 10244, -34172, -49904, -24645, 86903, 88699, 31306, 99047, -21136, -30492, 51943, 1663, -86466, 59093, -19188, -889, 24680, 34063, 70716, -33458, -34244, 30304, 27303, -40401, -35985, 69983, -37305, -29431, tail", "row-1941": "head, -20953, 62407, 32204, 34458, -16143, 38105, -50242, -70978, -85246, 99338, -12062, -60344, 19303, -78417, 16279, -11115, -4701, 12673, 54370, -47290, -51660, -89231, -63856, -2886, -24676, 74244, -69534, 83237, 72535, -97041, 3266, -87012, 41038, -7498, 31239, 50740, 39769, 29589, -12415, 88795, 45764, 38456, -71101, -26926, -15283, 47827, -22840, -16475, 76246, 83969, 83994, -23705, 52513, 63887, 52019, -10560, 62543, 4061, 78594, 95428, 36063, 11018, 24793, 94887, tail", "row-1942": "head, 25355, 96223, 93124, -15331, 4335, 65615, 92739, -43277, -99805, 79549, 69601, 65367, -54974, 29284, -28074, 38296, -38040, -18077, -61573, -6598, 11007, -17936, -83163, 54262, 38202, 26672, -59244, 96957, 2812, 64590, 31294, -98434, -49468, -15182, -82655, -76090, 6175, 65115, -54531, 
78191, -54721, 81948, -66616, -86201, 32845, 17270, -65405, -75421, 17416, -59278, -4760, 72035, -23491, 72305, -2254, 67538, 74533, -26592, 38964, 26001, -93225, -90006, 46052, -37859, tail", "row-1943": "head, -24759, -65837, 7585, -92735, 75183, 16302, 46893, -269, 10997, 19607, -23295, 75498, -25374, 19729, 88893, 44028, 8474, -86266, 82903, 50296, -52153, -72147, 88739, 71252, 75050, -33113, -22387, -76768, -82629, 14942, -71579, 26463, -62253, 79180, -82057, -86367, -22551, 77248, -76398, -8174, 37412, -64832, -26777, -37325, 99537, 75755, -77480, 74765, -26128, 43409, -12753, -65357, -7781, -6019, -77176, 6536, 2139, 70797, 55883, 40517, 43259, -91232, 1764, -31585, tail", "row-1944": "head, -51957, -95481, -88332, -65333, 74485, -15706, 16152, 55255, -56911, -23358, 5451, -3784, -60847, -40340, 36250, -67106, 60022, 83648, 60321, -55464, -89851, 35162, -84328, -98640, 77185, -91267, -74200, -68618, -24418, -15612, 90814, 1114, 63748, 37665, 53761, 76440, 61232, 17394, 36451, 50955, -90490, -45952, -89902, -41110, 67562, 37357, -27416, 31665, 93702, -96510, 11433, 93796, 61002, 95394, 36613, 44640, -17941, -47690, 19950, -80686, -23473, -74631, -37454, 5377, tail", "row-1945": "head, 21168, 93084, 24854, -87620, -84669, -11, 90792, -36355, -93469, 80091, -45422, -46920, -49645, 51064, -38964, 54606, 94732, 18789, -94423, 34358, -24279, -95708, -19827, -27596, 2273, 58381, -97286, 12178, 17897, 20939, -28008, -12683, -94805, 3231, -82045, -11485, -77016, -57351, 93245, -72442, 71475, 75795, 52657, -56904, -37818, 21693, 90752, 23926, -71924, -89097, 53642, 82134, -88735, 8506, -1506, 53379, -59391, 44930, 84981, 24995, -41337, -88207, -26673, -4065, tail", "row-1946": "head, 72740, 67288, 85533, 58364, -39718, 87065, 12942, 78654, -84375, 72597, -43984, 75738, -29059, 11670, -76670, 36018, -66295, -21858, -42318, -43130, -34058, 2684, -36288, -73722, -58246, 67913, -50299, -2400, -55437, 1039, 68594, 33060, -42563, -5869, -89108, -52272, -22937, 82209, 43041, -7179, -14607, -44510, 45072, -25578, 48495, -51383, -53496, -11346, -50736, -86854, -14370, 52764, 93243, -20771, 98240, -32993, -28058, -92429, -67596, 23836, 68237, -27326, 91606, -93490, tail", "row-1947": "head, 20546, 35842, -20079, -90957, 59434, -91123, -6738, -57713, -36974, -49360, -78971, 20710, -44411, -82511, 80137, -563, 43303, 22087, 79689, -3799, -99668, -62373, 26106, 2587, 213, 87099, 32446, -8713, 30494, -72111, 85739, -26277, 27037, 22338, 35538, 80256, 9703, -40495, -44377, 79235, -66704, 58557, 20809, 17393, 99652, 64948, 23504, -96497, 89147, -50648, 82776, -68421, -74173, -17584, 19256, 32839, 41989, 28156, 82971, 19208, 61687, -76858, 82165, 28927, tail", "row-1948": "head, 10615, -71464, 43555, -60987, -34802, 52118, -83721, -13663, -69466, 10243, 93500, 98982, -35902, -47619, 67575, -84821, 59868, -31501, -46753, -39275, -15441, -82581, 18158, 42788, -83672, -23614, 11558, 33688, -48962, 38900, 98696, 92740, -78432, 26910, 68413, -88322, 10390, -58971, 53033, 42728, 21796, -1980, 66631, -67570, 39640, 10777, -9898, 81457, -20141, 50653, 37967, 2887, -79355, -87650, -87257, 58618, -95632, 12239, -81937, -52370, 4473, -25271, -85111, -16319, tail", "row-1949": "head, -10474, 5655, -18154, -94074, -21423, 20439, -70392, 69927, 50020, -67114, -6628, -67517, -67963, -98085, -24082, 59858, 89393, 70582, 74733, -82309, 99422, 72829, -15508, -44039, 60857, -83069, 85752, 51391, -37921, 81933, 92166, -991, 80584, 13333, 84627, -40590, 4617, 41681, 11646, 12555, -76419, -78930, -78086, 30616, 83750, 49103, 
-18792, 54456, -70332, -94568, 34006, 58501, 86504, 19147, -39391, -69816, -61233, -20020, 50516, 45076, -53480, -37869, -54259, 11786, tail", "row-1950": "head, 51039, 85872, -3865, -60415, 56195, -72250, -66309, 3595, 2824, 52378, 72550, 79072, -54605, 91018, -59187, -42364, 67510, 12853, 24515, -78990, 14546, 77373, 80324, 45888, -2383, -50411, -69082, 79680, 20538, -63972, -15149, 94736, -58178, 97980, -5832, -8364, -51679, 80082, -33061, -91719, 5563, 71433, -83234, -95146, -90210, -14072, -29360, 51124, -30192, 16681, -82457, -51831, -2218, 96931, 45096, 7197, 29266, 40729, 95174, 23925, -38581, -80783, -13910, -10104, tail", "row-1951": "head, -43908, 95992, -5126, 72509, -40024, -50121, 92275, -63061, 35128, 17208, -49539, 47383, 2184, -14320, 13472, 99007, -76059, 91269, -52430, -32026, -7528, 32934, 19992, 68711, 59892, -13815, -98015, 24461, 40914, -13915, -85207, 82623, -81998, 50039, 10981, 22606, -29369, 21726, -28664, 74364, 1132, 82008, 5696, 31739, 12691, 57889, 4149, -26721, -2040, -53210, -78136, 66085, -9832, -42555, -38926, 61592, 14627, -63971, -46462, 38929, 97193, -96089, 55372, 9812, tail", "row-1952": "head, 45257, -68131, -11869, -94139, 14789, -25865, -792, 46861, -11392, 8730, -67493, 29552, 89567, 43710, 86807, 82013, -50664, -64368, -63718, -36123, 14184, 30809, 81006, -6978, -7059, 68891, 13089, -77799, 3663, 29170, 30251, -78432, -29, 43140, 17610, -80931, -79294, -27029, 12765, -48424, 28626, -74244, -87339, -91619, 81170, -52879, -22373, 80313, -20403, 19030, 58799, -78715, 86208, 3723, -82808, -70714, -22684, 48451, 59456, -29990, 16171, 60558, -97283, 89829, tail", "row-1953": "head, -50456, -31808, -92164, -34580, 37753, 31169, 4028, 61260, -17013, -61793, 34294, -41793, 76779, 58047, 9723, -61713, 81639, 47865, -34177, 42735, -73025, -41536, -86296, 12806, 76346, -35936, -60471, -1563, 77106, 7647, 33583, -41531, 86774, 60803, 21236, -50463, 83164, -28287, -24884, -23578, -24439, -19319, 99990, 19019, -15113, -59994, 33864, 41215, -87419, 15, -79905, -5747, -28077, 17640, -23706, -14629, 52324, -34245, -65446, 28443, 91232, 968, -37660, 64416, tail", "row-1954": "head, -49644, 18462, -60034, 52019, 18536, 59843, -68773, -24257, -57233, 75373, -61302, 84195, -36186, -38681, -16698, -53364, -38458, -59687, -37569, -21196, 86483, 23266, -68077, -6638, -39123, -14818, 35845, -82440, -86386, 41870, -77604, -58235, 72730, 82763, 42903, -94489, 95284, 35799, 41358, -91221, 92358, -67319, 53452, 64717, 45726, 28580, -44161, 49401, 63786, 85070, 63538, 24586, 49779, 51010, -49939, 9222, 78900, 57836, -66353, 96997, 23110, -64014, -84851, 23651, tail", "row-1955": "head, 44425, -35881, -44166, 29305, 6596, 27459, -14024, -64162, -64490, -46849, -27118, -85682, 62801, 56404, 87035, -45094, 66144, 96547, 81076, -79028, 99907, -86241, -6424, 73016, 56758, 11353, -91721, -82224, -2595, 64774, 60723, -48730, 75817, 59288, 84203, 43682, 31067, -61482, 46246, -41276, 82041, 95518, -82635, -71794, 7622, 39108, -80005, -89674, -40093, -76747, -63650, 50735, -49226, -82022, 13479, -10134, -18099, -15805, 58527, -57575, 70765, 71734, 78173, 29549, tail", "row-1956": "head, 13627, 27127, -97514, 33023, 84837, 90895, 42853, -63851, -16709, -41704, -72582, 92329, -53283, 25552, 51709, 10497, -3218, 40136, 91155, -2572, 2680, 87648, -95746, 54695, 15093, 67558, -77680, -55947, 31834, -44044, -45028, 75092, 84825, -63057, -57024, -83869, 49664, 61166, 98542, -47505, -15578, 42972, 20293, 21713, 88007, 65783, -45856, 30899, 76006, 67974, 65325, 73909, -67091, -97352, -10132, 
45335, 63289, -45213, -76015, -18194, 12594, -36277, 14109, -24160, tail", "row-1957": "head, -635, 91821, 47946, -24800, 13629, 64798, 44571, 58574, -33395, 3098, 39931, 88004, -18755, 33645, -84647, 65051, -95780, -93056, 72207, -81565, 34361, 91240, 46327, 1430, -60624, 96564, 91010, 5081, -23535, -32094, -309, -37235, -2592, 56393, 54440, -98010, 17627, -2741, -82855, 74974, 19311, -78516, 32049, 57022, 46518, 4767, 84742, -78007, 57324, -4990, 61637, 13053, -99028, -67332, 70750, -67704, 53521, -22364, -50855, -4593, 73411, -72457, -27851, 14514, tail", "row-1958": "head, -99165, -25631, -48022, 55928, -57301, 359, 45815, 46808, -52969, 93701, 59276, -52819, 21852, 71148, 77204, -23457, 47824, 3059, 39949, -63521, -2758, -92273, 9158, 79517, 24130, 53853, 69327, 98296, -27612, -47100, -46602, -31596, 18111, -74390, -24632, 82634, 43632, 62763, 74873, 23049, 64240, 78142, 6784, 88340, -80414, 40741, -53270, -9552, -81413, -12683, 70172, 71017, 59309, -94071, 36112, -15751, 45309, -60088, 39506, -66258, -4826, -26466, 44282, 73991, tail", "row-1959": "head, -39878, -56919, -72159, 13949, 99576, -86448, -56665, -12463, -26155, -23614, -46306, -88666, -37401, -22157, 57880, -69231, 38321, 1500, 15875, 52757, -79914, 81330, 85532, -31218, 96276, 41660, 57288, -40388, -71993, -87578, -96833, 82027, 20337, -7335, 69881, 55539, 63612, 15197, -63809, -80966, -7439, -48453, 50208, -62872, 95377, 90202, -27692, 78551, -85284, -14592, 85874, -27399, -6060, 11364, 20817, 84689, 10580, -24799, 89403, 53092, -2684, -59779, -34878, -85379, tail", "row-1960": "head, -21283, 88994, -23538, 33865, 47054, -25511, 9857, -69462, -33925, 52506, 67292, 3117, -67324, -84338, -36664, -86085, -78691, 56351, 56357, 44280, -32492, -18111, -43061, -31346, 16103, -17909, -41081, 33944, 69842, 14605, -51689, -75222, -98381, -96178, -7859, -4211, 88513, 566, -70881, 12164, 19544, 6962, -49986, -39021, 38479, 39099, -4277, 32089, -5137, -81685, -85892, 11925, -18254, 48816, -69123, -64058, -20435, -88180, 98608, 99151, 71276, -96448, -83702, 19054, tail", "row-1961": "head, -96950, -28861, 54133, -37648, 20839, 17489, 50863, 76855, 27368, -35773, 31893, -32708, 11638, 40828, 33710, -92458, 9151, 57028, 80737, 1733, 82495, 22785, -88446, -99213, -72563, -54827, -28262, -73446, 90246, 14192, 46160, -80844, -61229, -87514, 47651, -32403, 42535, -7472, 15605, 91912, -27647, 74456, -6315, 87972, -65569, 52328, 81545, -1142, 39426, 26771, -54135, -12876, 11417, 19108, 63260, -44449, 41987, 58863, -10718, -88595, 88073, -38642, -3334, 45608, tail", "row-1962": "head, -75365, -765, -35525, -5741, -683, -76881, 33114, 73036, -87311, 83420, -99056, 53234, 54822, 57311, 57966, 74836, 74957, -86777, 30205, 16732, -88715, -3209, -48052, 56822, 71172, -94863, 30443, 14760, 46911, 21748, -91553, 99722, 86396, 65087, 73584, -6701, 42117, 13578, 46149, 31056, -67584, 79606, 43869, 3222, 28147, 58886, -1932, 58999, -81700, 35450, -32066, -64592, -64293, -58011, 86896, -83393, 13763, -3780, 50485, -13496, -27496, 31046, 45211, 81439, tail", "row-1963": "head, 61056, 12755, -1907, 4541, 26921, 71651, 87176, -42668, 84818, 29994, 31487, 73855, -43719, -13176, -81138, -44105, -101, 48712, 84279, 76886, 54905, 57866, -31309, 52213, -3480, 35502, 86310, -15864, -12115, -49737, 84497, -21626, 47653, 87690, -82631, 67418, -36539, -81116, -9713, 91959, 39390, -27982, 7983, -75233, 13706, -42449, 28867, -56568, 82222, -80304, 9463, 68484, 98812, -97396, -47915, 64572, -71732, -14307, 74025, 57914, 84757, -89355, -11252, -95965, tail", 
"row-1964": "head, -34309, 94254, -67364, 97493, -85186, 74895, 22039, 70673, -45073, 34895, 80820, -63558, 92455, -75447, 50344, 79139, -62185, 7674, -66650, -71005, -45668, 37, -94809, 91150, 75640, 59169, -92977, -35136, -84064, -6648, -92399, 45464, 22843, 48565, 66450, 30583, -28621, 96217, -15107, -9844, 72309, -23686, 31049, -69761, -12246, 12040, 60898, -43219, 20574, -37265, 57869, -44285, 17807, -39175, 52931, 36693, 73375, 82228, 25076, 13209, -18706, -54704, 5054, 10964, tail", "row-1965": "head, 90637, -59767, 21806, 41028, -50478, -5425, 37749, 85381, -2291, -27122, 60828, -84690, -16158, -6203, -31439, 99244, 67802, 85276, -31865, -68246, -4015, -53237, 37409, 41484, -47869, 24910, 78625, 26176, 86831, -4080, 70853, -76274, -12906, -52936, 48600, -47255, 67325, -82145, 36270, -93181, 71101, -64555, 89928, 53975, 47180, 92397, 58998, -96121, 61929, -78548, 23594, -24480, 4665, -10568, 38920, 97311, -60934, -16233, -92469, 93354, -409, 3117, -18280, -36936, tail", "row-1966": "head, -6637, -57497, 42395, 69854, 70312, -42514, 12423, -77671, -91335, -51898, 69232, -39727, 15025, 66243, -42477, 11147, 60700, -12816, -97068, 25123, -87444, 91139, 93837, -69918, 53707, 45946, -64974, -25973, 19350, 47196, 56004, 77675, -35574, 49141, -21578, 4973, -65298, -55252, 30304, 20282, 13613, -22571, 94804, -76694, -17609, -22011, 68333, 15424, -98590, -72012, 28076, -98754, -83878, -15669, -95146, -36705, 29513, -219, -51222, -40137, -43416, -78083, -75395, -50793, tail", "row-1967": "head, -8551, 39844, -62314, 69757, -91706, 44248, -69224, -24188, 67671, 92814, 31694, 81567, 8389, 75742, 70884, -79197, -79413, -96406, 58826, 79602, 96513, -23030, 17129, -73628, -3639, 91020, -91499, -73138, -96189, -27469, -71900, 96695, 815, 53375, -36882, -90991, -89523, -48452, -87022, -11682, -40823, -65382, -32632, 79528, -48160, -61192, 83980, -39450, -71230, 35953, -13081, -70120, 38142, -49280, -32909, -82358, 2499, 125, -2951, 2175, -32043, -6685, -24731, 71341, tail", "row-1968": "head, -63488, 82888, -86292, 98045, 94803, -6120, 55297, -19147, -93719, 58240, -99993, 32590, 17784, 18852, 29271, 27136, 80154, -82559, -76885, 98762, 28919, 33806, -41858, -5707, -70799, 734, 84221, -87926, 94215, -34843, -7959, -15644, -10595, 97783, -66083, 95933, -91932, 83000, -53737, 62595, -97167, 49977, 15741, 24608, -68898, 78236, 69682, -2470, -48740, 73544, -4004, 77351, -64971, 44311, 18920, -1446, -48078, -44223, -33419, -13936, -2522, -62465, -36083, 37077, tail", "row-1969": "head, 62277, -96211, 98509, 44244, 60875, 83248, 23722, -50203, -82572, 18957, -58370, 24702, 68457, 99477, 94591, 32681, 19015, -77738, -92836, 41080, 93347, 95452, -41743, 36966, -51689, 45487, 55395, 14624, 52536, 52806, -49939, -74493, -98123, -71156, 7456, 72920, -73152, 56160, -83045, 48020, -46968, -85019, 84474, -33561, 96081, -90852, 61644, -67906, 31927, -75051, -29637, -48106, -28563, 86810, -70476, -24416, 94511, -52400, 54884, -99405, -70977, 45316, -28386, -92875, tail", "row-1970": "head, -31742, 74637, 78221, 20809, 87141, -72013, -70143, -48528, 34928, -40613, 17071, 28464, 94521, 66496, 80599, 19871, 37621, 76751, -25644, 34163, 34400, 75085, 47584, -49176, 2812, -60133, 30382, -39272, 96252, -91930, 26585, -32687, -92804, 83711, 67042, -93543, -80585, -8673, 47533, -38767, 97588, -66263, -54951, -13127, -65810, 99151, -74806, -44648, -94920, 33011, -3523, -38253, -33035, 14337, -51801, -25891, 25618, 54454, -94326, 93965, 30925, -90258, 39059, 94522, tail", "row-1971": "head, 50661, 17368, 63505, 51617, 70427, 
-1312, -5538, 11804, 7552, 79692, -92945, -17897, -73924, 40438, -95347, 40539, -27283, 44822, 26084, 99440, -96768, -99464, 36446, -38832, -20377, 48062, -81338, -13891, 41430, -67575, -68123, -34050, -8668, -38453, 65264, -82582, 19926, -72403, -57709, 98056, 13029, 93225, 35928, -17630, -51030, 50929, -93413, 22675, -16433, 48936, -74242, -16669, -72868, -37125, 20397, -25966, 33099, -14345, -88603, -55833, -95273, -79854, -43709, 88838, tail", "row-1972": "head, 15817, -47295, -31180, 1110, -29615, 12155, -43125, -2782, 77140, 57751, 96527, -28248, 94784, 78302, 54413, 28859, 84000, 94888, -93835, -6410, -68282, -38619, 59859, 30601, 77103, -66640, 4919, 75205, 15261, 27979, -78580, 54288, -87302, 26605, -70979, 32803, -22453, 55241, 52861, -21100, 84388, -25933, 52485, -36122, 1735, 27967, 17032, 75390, 9961, 51366, -76974, 54163, 80851, 89545, 3312, -81544, 70004, 42809, 97064, -89192, -30101, -92284, -54261, -40450, tail", "row-1973": "head, -10375, -27793, 87069, 39772, -68634, 97712, 65022, 85128, 64431, -8324, 6197, -28160, 84243, -77855, 73241, -55758, 10217, 54892, -91799, 12806, 48357, 75455, -860, -8095, 70430, 95564, 4506, 41480, -8354, -66069, 94447, 6285, 3380, -92719, 68207, -46529, -56764, -17001, 32265, 73540, 22432, 88248, -20255, -73938, 65209, -38941, 57269, -66009, -35200, -30460, -77338, 23977, 39716, -69891, 50807, 59377, 26363, 73014, -56530, 29338, 12309, 46470, -99156, -7914, tail", "row-1974": "head, 2461, -5160, -85802, -87857, 70052, 30, -71774, 89828, 66482, -18976, 94099, -83256, 29246, 40954, -93520, -2433, 56382, 61450, 99797, 98862, 24509, -67340, 52293, 3971, 2277, 60536, 67674, -53623, 85121, 50808, -9745, -5187, -54469, -29449, -8024, 89381, -92137, -73778, 63972, 41687, -64864, -92701, -53683, 30057, -60350, 67233, 34099, 42173, -6323, 80250, 28332, 83499, -59990, 31548, 94303, 55089, -10643, -83269, -41213, -57566, -62107, 22730, 20373, 83384, tail", "row-1975": "head, -69810, -44215, -49688, -55062, 63261, 39992, -15376, -97326, -81677, 81959, 50127, -2376, -84075, 22164, -59157, 13362, 32180, 99644, 97299, 95296, 35332, -82128, -4085, -6021, -7725, -33586, 98703, -4730, -77253, -51217, 35455, -85008, -90024, -24571, 89174, -72606, -25830, 4725, -9919, -62644, -37051, 33317, -21992, 41371, -25765, 32360, -14577, -91814, -28042, -94203, 18313, -50328, 58069, -70259, 2545, 71175, 61222, -63010, 33669, 69896, 11840, 45445, -68897, 82845, tail", "row-1976": "head, -12240, -25153, -63515, -53414, -92089, -61604, -2477, 95975, -82431, -43913, -38391, 58163, -38789, 33952, -34768, 21842, 79121, 43155, -62520, -17248, 18521, -55635, 42996, 55477, 96986, -36677, -69705, 92841, -80943, -58590, 43017, 41385, 22684, 80730, 28900, -43229, 75965, 5348, 20872, 42486, -26379, 66418, 17707, 80417, 36127, -60180, 92655, 62351, 25137, 94285, -82144, 85441, 27082, -49391, -12838, 12838, 23931, -36926, 86345, 35336, 27312, 29334, 89955, -14953, tail", "row-1977": "head, -52579, 66709, -43368, 85021, 64564, 48389, -93027, 43321, -11766, 37514, 11039, 97016, 92053, -75852, -46475, 45735, 99037, 8809, -74992, 25220, 79251, -22104, -3638, -68444, -50028, -2503, 78438, 42854, -26206, -76840, 33984, -20065, 1627, -71399, 65650, -17743, 99078, 80819, 55964, -51160, 18169, -42237, -24949, 62154, 2187, 24463, -49946, -9198, -49348, -33012, -42954, 96855, 3939, 91181, -85324, 69957, -73308, -23260, -98284, 75775, 55181, -43205, 25037, -9340, tail", "row-1978": "head, 42130, 24778, 57817, 84222, 35515, 50571, 61625, -47999, 77015, -65539, -65264, 94985, -62404, 48825, -20369, 
32220, 24890, -62843, 32898, -13368, -48359, -85627, 52995, -21402, -48677, -70239, 21075, 41748, -3778, 80822, -29230, 74724, -82247, -864, 29740, -96906, -8953, -69770, 35062, 87392, -38589, -55130, 20865, -87862, -14085, 74079, -72738, 42708, -70164, 13620, 44172, -1954, 21739, -7303, -71608, 98346, 49099, 54694, 85214, 2500, 12529, 54752, 80851, -98066, tail", "row-1979": "head, -88427, 42864, -68908, -60985, -36082, -7961, -9194, -75710, 83889, -22674, -25316, -19574, -55140, 84268, -44260, -10163, 28201, -67799, -24141, 13793, -74056, -69548, 36106, 80562, 75283, 8959, -20618, 21244, 86523, -56872, -50043, 41938, -16455, -55702, -98877, 32180, 97743, -8291, -25981, 67742, -83675, -50290, 58276, -59233, -42760, -23717, 50768, 11507, -5604, 1369, 6489, -11727, -33059, -42938, -26220, 97808, -39717, -28120, 17831, -12408, 44643, -4882, -25681, -7754, tail", "row-1980": "head, -18385, 13055, -3748, 91097, 75066, 18737, 4704, 92434, -18903, -270, -86550, -77704, 16414, -56444, -37453, -19409, 20676, -75439, -13392, 51119, 32528, -33568, 3594, 60766, 88567, 96381, 36322, 30593, 19217, -37942, 1329, -85402, -35990, 16462, -42938, -69817, -37133, -13795, 85556, 10543, 79040, -81413, -33174, 703, -31162, -32571, 43294, 74473, 49067, -81274, 58261, 19335, -20419, -24436, 3984, 1324, -94915, -29954, -9811, 56601, -86613, -17714, -29682, -60406, tail", "row-1981": "head, 11928, 49310, -93832, 74817, -77624, -51121, -54520, -32943, -3227, -79812, 71374, 78530, -43308, -51049, 75466, 82461, -55996, 21417, -28846, 47205, -59204, -20223, -76689, 59830, 20838, 41155, -74688, 86244, -83336, -35528, -36761, -20580, 91571, 11595, -86120, 4694, 92106, 45578, -6003, -94736, -28154, -65006, -76868, 14456, 22876, -98957, -47186, 11233, -76339, -76684, -64546, -47899, 13463, 92611, 60119, -63400, -69818, 28083, 4742, 96612, -25141, 52240, 22205, 50828, tail", "row-1982": "head, -28639, -61916, -43130, 15792, -79607, -54067, 65789, 71353, 66566, -30591, 12648, 80901, -37995, -73556, -59448, -62673, -73999, 62920, -32877, -74801, 94641, -66397, 5729, 99405, -3472, -56527, -69399, 99059, -59983, 47649, -28796, 35940, -36585, -91919, 71872, -46566, -57226, -9400, -72296, -5574, -64066, -95069, 91031, -62114, 72018, -9236, 53542, 84197, 62031, -33776, 48130, -27418, 6225, -72770, -23557, 1772, 35077, -82282, 82515, -54997, -50420, 26355, 46048, 58174, tail", "row-1983": "head, -25083, -67112, -79208, 37574, 35654, 12448, 51149, -95227, 57298, -61573, -4923, -78019, -90772, 24190, 7326, -13303, -30609, 59867, 76438, 20739, -80360, 45243, -68084, -84848, 87708, -58409, 68662, -46351, 66994, -24249, 32212, -21604, -79042, 33787, 31006, -88881, 15622, 33754, -90797, 50406, -85778, -47729, -8171, -39500, 32625, -32492, 12289, -66472, -93986, 23842, -57637, 70429, -47152, 99886, 70822, 38167, -90953, 80740, -12996, -2260, 46039, -19936, 10891, -46537, tail", "row-1984": "head, 88994, -42739, -84163, -46792, 39792, 94687, -8171, -23887, 80764, 4316, 75911, -87692, -21738, -15134, -59895, -35345, -89194, 77467, -65086, -7049, 36, 19032, -58479, -54785, 29242, 38279, -21100, -21472, -67415, -91560, 94190, -57318, 40306, 46737, -81171, 95823, 95588, 97082, -85564, -29455, 64820, -98403, 40476, -88731, -88978, 11891, 8864, 17954, -90090, 40655, 96969, 7622, 23060, -38282, 42686, 60537, -97987, 9084, -44564, -43900, 27217, -96534, -89155, -16787, tail", "row-1985": "head, 54776, -84796, -946, -79044, -87094, -60297, -93278, 59900, 4922, 43195, -54012, -83499, -99732, 11995, 31500, 14111, -45207, -88045, -36889, -8584, 
-49238, 88080, -96610, 60677, 45008, 92916, -44565, -45131, -89213, -84148, 58567, 37107, 53132, 19241, 61886, -11864, -35577, 93136, 26732, 80560, -60731, 81630, 5510, -25100, 89764, -67363, 87780, -96529, -39263, 16428, -95429, 59368, 91750, 50447, 89846, -68927, -60349, 31773, -75592, 74708, 30043, -42169, 25548, -62193, tail", "row-1986": "head, 75140, 39518, -68283, 9696, 2322, -66853, -67160, 84733, 43744, 46908, -88596, -31449, 51638, 97683, -9387, -87333, -62813, -83474, -24286, -76908, -69843, -27754, 75652, 33490, 18770, -62987, -41938, -55870, 83480, 17954, 88768, 21998, -61304, -18934, 26065, -59500, 68121, -67347, -63822, 29311, 63694, 57034, -45169, -97116, 18834, -16953, -92516, -45832, 42844, 74024, 77017, -60078, 42746, -79282, -48940, -1121, 49485, 19122, -95600, 60015, -10933, 41689, -79754, 97743, tail", "row-1987": "head, 25937, 58521, -71488, -88280, 68625, 88149, -57171, -14044, -61809, -71518, 5763, 86213, -72899, 24851, -63058, 18932, -97414, 28267, -28166, 81267, -13202, 72744, 10839, -85513, -31067, -23592, 50937, -40884, 81277, -35561, -42427, 59144, -73324, -80481, 97954, -96588, -72007, -59847, -21894, 38992, -61411, 45330, -68625, 6625, -26939, -91911, 13666, 42727, -94044, -2808, 10615, 26676, 5985, 89003, -90893, 61932, 35604, -6518, 3583, -14792, -38285, 26538, 418, -26070, tail", "row-1988": "head, 47195, 89412, 22468, 39279, 68259, -71654, -80000, 15502, 99765, -88468, -45890, 82280, -49186, 7868, -53178, 82074, -96978, 91082, -57584, -7001, -24038, -20335, 73129, -77652, -29534, 54010, -31323, -37347, -88304, -35569, 51684, -57174, -50325, -27156, -93299, -57154, -16641, -58393, -83608, 95715, -8652, 48013, -40771, 76933, -33543, -31808, 79856, 74863, 6357, -57041, -7659, -95532, 30281, 64860, 23772, 39625, 25611, 16145, 46405, -88208, 89081, 24627, -21456, -65667, tail", "row-1989": "head, -56938, -72500, -16829, 45123, 7178, 7314, -30066, 95742, 14586, -22808, 73171, 11586, 13851, -9970, 53413, 55578, -54034, -72421, -10099, 60555, -55478, -42390, -4472, 80081, -10555, 70631, -23374, 31578, -22556, 74054, -27534, -88534, -75672, 80832, 59425, -15782, 43390, 773, -52095, -37828, -71161, 6032, 49790, 64465, 91954, 78431, 70721, 40626, 95219, 71621, -22700, 88875, -32441, 44940, -81997, -84991, -1254, 38535, -11892, 13043, 4635, -2533, -58747, -55204, tail", "row-1990": "head, 3110, -77079, -95524, 58319, -75223, -58266, -14113, -23063, 65533, -80256, 38774, -88394, -15488, -20946, -79901, -99659, 21685, -40351, 93311, 87667, 58182, 7556, -38191, 6188, 47330, -32093, 90207, -28313, 59975, 32504, 31067, 17976, -78164, -65907, -79290, 66592, -92088, 14917, 6398, 93086, -66815, 50693, -61660, -95521, 79853, 46699, 88358, 46926, -34970, 69265, -40338, 53939, 75649, 36006, -45807, -99543, -47257, 96023, -28638, 79001, -74847, -70972, -57974, 93687, tail", "row-1991": "head, -42815, -32701, 86643, 68743, -84973, -36950, -76084, -73433, -54330, -62933, 20764, -46490, -39995, 4985, 59718, -62980, 48405, -99780, 1071, 32908, -39274, -62342, 447, -54845, 2607, 65012, 43191, 40360, -29338, -47343, 96320, 94373, 8629, 25803, -21700, -35393, -12215, 84091, 70380, 1880, -71664, -1166, -53891, -55518, -42203, 48192, 65057, -16450, 98928, -54303, 26122, -37158, 13572, 90786, 94065, 49902, 71767, -33510, 29471, -67789, -12565, -30739, -25391, -99020, tail", "row-1992": "head, -40, -67669, 55807, 63109, 51709, -3199, -72017, 21667, 99069, -51284, -10155, -87097, -79016, -38379, 89779, 16314, -8334, 92976, 40456, 26283, 20437, 56029, -74909, -50981, -20015, 63487, -6495, 
-20280, 48775, 79867, 37653, 3334, -66005, -52517, 63146, -19464, 20957, -78405, 7901, -31077, 79853, -40341, -57056, -5994, 75508, -4667, -9225, -67178, -8385, -90479, -40364, -17462, -9949, -28731, 3876, -82397, -717, 84270, -87480, -91555, 18038, -37637, 74090, -16044, tail", "row-1993": "head, -2141, 60975, 3797, -10697, 42402, -56775, -73615, -92247, 78057, 20683, 81015, 12648, 30035, -93691, 45000, 30407, 11015, 49404, 7557, 63816, 811, 36748, 12612, -72604, -92088, 8064, -6625, 26193, -89485, -54701, 21429, -10304, -20508, 46027, 6886, -89020, -16100, 43098, 95609, 3554, 86827, -22012, -37218, -87551, -98591, -82943, -18918, -98011, 82403, -41609, -48640, -4517, 76358, 98396, 54753, -47664, 1569, -67110, -43366, 530, 53924, 4345, -34671, 28010, tail", "row-1994": "head, -88145, -76714, -66146, 87113, -42002, 77738, 96988, 45558, 23870, 16776, 1242, -89701, -50935, -99118, -42705, 96994, -37934, 63310, 17813, 10333, -25924, -59599, 43679, 81706, -36946, 24164, 98845, 55408, 42415, 28897, 32787, -18788, -65400, -70042, -53821, 77689, 39964, 4982, -52576, 24644, -67124, 79221, -7201, -26259, -24060, -82951, -14432, -68330, 90795, 94711, 14278, -46252, 11665, -2832, -68682, 56787, 96695, 31847, 47270, 8851, 93825, 2357, -22163, 26687, tail", "row-1995": "head, -8485, 21293, -27729, -12798, 38531, 44963, -93474, 90452, -80623, -53142, -53455, 24413, -91255, 51660, 52264, -79558, 16492, -80480, 12814, -13221, 45943, -1189, 31188, -78804, 24843, 8656, 61998, -69906, 40771, -84189, -33835, 40616, -62537, 8418, -86913, -11822, -1601, -32947, -12389, -42579, 15838, 67108, -20007, -5851, -43807, 79467, 99236, -45008, 49374, 27845, 77856, -8536, -15072, 79402, -73435, -48462, 81612, -2281, 76919, -86597, -87411, -57371, 91649, -26427, tail", "row-1996": "head, 13005, 26326, 38041, 68274, 27476, 3459, -78850, 47529, -83799, -55073, -72716, -54505, -1432, -93329, 60772, -50230, 66024, -17500, 94758, -83743, 19630, 68270, 7279, 3071, -60171, 85472, 99290, -56099, -58347, -54230, -73350, 2959, -68805, 46959, 71936, 88789, 88307, 93861, 60691, 4351, 31875, -23082, 29322, -96076, 18989, 17327, 42271, -95131, -42272, -76060, 86533, -72412, 89397, -91715, -30584, -24015, -86523, -55864, 21173, -64131, 20060, 75409, 46112, 25248, tail", "row-1997": "head, 62436, 67645, 89487, -58092, -56558, 11428, 59212, 93180, 60981, 42038, 93032, 75806, -65443, 95733, -68110, -94271, 25728, 26812, -91010, -30136, -37712, -64304, -60726, -51709, 41389, -73403, -86557, 90799, 87637, -58357, -64513, 96851, -65317, -65832, -96697, -23388, -49736, -28739, 15179, -55842, 59828, 89137, -7848, 72197, 95090, -92681, 12285, -93949, 51157, -40454, -12152, 31562, 20066, -68124, 70948, 61901, 16806, -55673, 13682, 53887, -51375, -67086, 15179, -95268, tail", "row-1998": "head, -43120, 62227, 12206, -45843, -52261, 63371, 94136, -9756, 73904, -60155, 52168, 75881, 5728, 1942, -42857, -19839, 559, 23118, -19632, 22863, -50319, -46704, 6123, 35229, 60819, 39810, -72068, -48357, 17552, 21744, -92744, -28842, 64227, -12926, -35447, -1505, 44162, 32689, 16554, -23735, -62490, -57858, 12393, 51408, 60358, 57913, -67687, 87084, 16978, -2451, -34934, 38164, 30347, 97200, -52488, -27790, -29843, -92795, -14478, -76196, -91323, -4858, -41809, 13860, tail", "row-1999": "head, 7343, -81159, 4491, 39214, -55396, 31879, -59602, -9943, 45883, 34509, -9488, 12934, -55109, -49348, -65256, 74359, 98648, 44462, 17873, 10620, -15532, -47306, -29582, 9791, -52559, -67801, 22642, -57472, 15003, -95068, 88515, 863, -8200, -33883, 76316, -29185, 86777, 
-16194, 25730, -25983, -69365, 94677, -5909, -97460, -76464, 63324, -50419, 11873, 81855, -50578, -97137, 59482, 37683, 74179, 74825, 94572, -69352, 61234, 41874, -72789, 31220, 33968, 11443, 36326, tail", "row-2000": "head, 46636, -56623, 94468, 94314, 89899, -11380, 13509, 3784, 42117, -67081, 37533, 53124, -12003, -42287, 2891, -74971, -57145, 2189, -29521, 30481, -73934, 72436, 93205, -76519, -75924, -58378, 18494, -8587, 57223, 12422, -29919, 72626, 26671, -61422, -82263, 15338, 7563, -26733, -82515, -88532, -71557, -49012, 55678, -27323, 11990, 17103, -8445, -77919, 83194, 99735, -50625, 962, -13368, 81018, -40248, 69547, 60193, 78769, -10654, 12086, 91639, 3874, -70326, -74495, tail", "row-2001": "head, -12482, 53313, -36372, -71902, -98067, -96682, 29263, -14854, 79465, 86355, -34022, 76812, 51563, -29058, 8131, -86483, -16989, -25455, 40169, 78912, 5747, -17156, 89191, -45618, 67187, -47381, -35847, -62958, 81569, -8862, 16367, 68787, 1854, -13146, -62119, 55505, -10024, -34496, -54466, -58196, -14133, 21964, -54702, -98022, 18138, -18578, -92412, -80290, -48407, 6281, -1315, 28438, 14076, 35271, -66188, 13547, -69692, -27209, -59524, 45755, 27416, -25908, 67564, -68972, tail", "row-2002": "head, 95759, 30507, 7881, -95842, 26722, -4838, 15965, 36854, 58030, 64992, 71958, -11502, 53453, -26486, -71632, -67564, -73253, -19543, 6271, -85510, 16791, -6135, -95147, -51843, -35506, 91802, 69955, 11236, 82152, -12232, -41829, -43365, 70061, -15740, -63058, 49223, -44265, -80015, 27389, 95232, -84374, -29763, -25825, 22814, -85699, 99878, 42395, 99681, -60607, 93032, 68312, -7092, -82267, -517, 68255, 68639, -97930, -30076, 58816, -38116, 75200, -9760, -31509, -80223, tail", "row-2003": "head, 54037, 62391, 10943, 30843, 47793, -17077, 1771, -66650, -64375, -74693, 94268, -53502, 17961, -37270, -89809, 33678, 91238, -9905, -72042, -64904, -91655, -37860, 33811, -81804, 35435, -69457, -21862, 35823, -76502, 19732, 77155, -96221, -25349, -64336, 6559, 50459, 51522, 82275, -8386, 20988, -22605, -93965, -32936, -63023, -61843, -74331, 37178, 61934, 17191, 37150, 58922, 80119, 29357, -52604, -3218, -72560, 13375, 6280, 43531, -96235, 9095, -32861, -38802, -49568, tail", "row-2004": "head, -18796, -36454, 75144, -27850, 7609, 38279, -24010, 4408, -53320, 73766, -27523, -28398, 78305, -57393, -27244, 56553, 47137, -41053, -78959, -76004, -45829, 8862, 48916, 9741, -71345, -63299, 17560, 21584, -47032, -42327, -86068, 27812, 2008, -77943, 72721, -68713, 59285, 23006, 39311, -12379, 48958, 49707, 80284, -33468, 40878, 2692, -57098, 76898, -9551, 35141, -79754, 65359, 87450, -16186, -38801, 30824, -59797, 96129, 92766, 75671, -91507, -53384, -31605, -12765, tail", "row-2005": "head, -68451, -34640, -38816, -46400, -89067, 46102, 33725, 45658, -93077, 41010, -25798, -20978, 45021, -45231, 62533, -85685, 63467, -38084, -36448, 57519, -36135, -4229, -710, -92062, 497, 41277, 46892, 92551, -95526, -82705, 8583, -89209, 62597, -34377, 77472, -26016, 26738, -63477, -62723, 12182, -49467, 55788, -37650, 53393, 72783, 96202, -31495, 66954, 31588, -59584, -27581, -14822, 42131, 47033, -67229, 12585, 45825, -74462, 376, 54634, 39161, 48378, -83702, -55212, tail", "row-2006": "head, -18595, -27392, 59993, -58673, 35791, 77818, 4822, 75645, -89933, 379, 24018, 97810, -90038, 24094, 21659, 89436, -37417, 97516, -46116, -67397, -72329, 3302, 89629, 18801, -71190, -88812, 50914, 37760, -88106, -87985, -83094, -3576, 88116, 9486, -50216, -3591, -70684, 65677, 76824, 66756, -6173, 30807, -67946, -11520, -26831, 
-94243, 1985, 40770, 24430, -13169, 97802, 9972, -98924, 25536, -52385, -71700, 20921, -54711, -39759, -54787, -78532, 11377, 7658, -47103, tail", "row-2007": "head, -18485, 65192, -39521, 7752, 25705, -90281, 12437, -16236, -87862, 45073, 3126, 50348, 92543, 8120, 64657, 93359, 19236, -48445, -94520, -83517, 82831, 77041, 60266, 33171, 63734, -11823, 43234, 42893, 64514, 49479, -99432, 95704, 89659, 61146, 61866, -4091, -88200, -89438, 5347, -87749, 41784, -7833, 6952, 2104, 77857, 18129, -20470, 98734, -96478, 12958, 42068, -30818, -24283, -4883, 9640, -63319, -80164, 88379, -13588, -90730, -82740, 42479, 71482, -11882, tail", "row-2008": "head, 2146, 30235, -50000, -15444, -55256, -15590, 48508, -56227, -28379, 30647, -55609, -48494, 3312, 94711, 33792, 14704, -44989, -98727, -80252, -91472, -10571, 18485, -66443, 9067, 99131, 72283, -64758, -71423, 75235, -68830, 74957, 82243, -77335, 35982, 42193, 93300, 63170, -61565, -52370, -96241, -65949, 36739, 81384, 40388, 71579, 29706, 33092, 3810, 94244, -21078, -74080, -26042, -12141, 32927, -40853, -94672, -46940, -62762, -97818, 47967, -1472, -23001, 79315, -13598, tail", "row-2009": "head, -26486, 1625, 81575, -74021, 98576, 36672, -31723, 1870, 69374, 10738, -88050, -30879, 9239, -49868, 81099, -2761, -9253, 18861, -78453, -67822, -54543, 80196, -45580, -60596, 19495, -96341, -40866, 65912, 54638, -33823, 27275, -6748, 32197, -91111, 7290, 70229, 59602, -99710, 5306, 97915, 47439, -85098, -4816, 69697, 76795, 4335, 68978, 73367, 62475, 91328, -52629, 23039, 45809, 38274, 25043, 26481, -55065, 97718, 79693, -89141, 92432, -57261, -71978, -42763, tail", "row-2010": "head, 31134, 15669, 34565, -1246, -69642, 46121, -15911, 73273, -79375, 5321, 7955, 76532, 78275, 16150, 62379, -8350, -32763, 32141, 10138, 37109, -34106, 30865, 1403, -20023, 2, 57581, 90845, 28089, -45434, -9644, 81520, 62849, -64213, 62974, -43385, 63604, -63765, 38855, 86539, 48710, 68588, 57196, 31252, -55149, -96851, 58610, -55998, -50225, -90920, -60424, -53436, 84681, -14436, 98900, -48899, 96363, -85170, -36969, -61709, -20471, 85888, -11709, 95823, -15347, tail", "row-2011": "head, -23347, 52361, -80758, 4258, 60307, 58081, -11517, -61596, 83687, -14775, 3269, 19417, 25146, -59485, -51378, 13, 46251, -61908, -5526, -82738, -39773, -76436, 48, -57971, 31213, 2563, -63798, -6263, 66075, -33513, 81098, 7255, 10268, -24646, 6764, -62814, 33428, -69190, 21415, 22548, 6111, 96053, 74192, -87484, -40356, -21864, -22142, -89050, 5179, -35939, -84490, -39750, 13970, -46092, -93331, 76067, -88130, 12698, 50791, -55606, -19288, 84953, -24895, -63913, tail", "row-2012": "head, -56329, -3182, -95021, 47636, -91767, -77675, -70912, 49145, -5143, 95123, 87674, -89584, 15356, -5153, -31678, -63357, 24484, 13094, -10182, -85861, -41851, -4383, -11421, 194, -29134, 93459, -17600, -41608, -3149, 896, 52622, 3496, -51586, 34754, 67284, -51444, 68723, -16221, 14044, 94295, -98186, 25353, -39110, -15393, 68956, -34635, 58969, 32433, -92916, -76407, 13570, -43252, -99872, -71666, 60360, 16957, -41574, -86774, 4183, -56961, 21471, -26648, 86390, 42167, tail", "row-2013": "head, -46160, -55291, 54698, 9329, -45260, -95471, -94755, 98527, -44724, 60481, -94183, -78957, 51767, 48781, 23579, -36953, 7845, -63625, -24076, 32813, 17058, -83979, -38285, -64424, 38760, -10381, -85389, 6120, -66269, 97200, -59870, 19778, -3707, -72290, -53512, -75819, 22670, -23736, -92165, -85118, -87561, -39439, -23620, -18642, -91861, 32077, 19351, -53064, 97555, -75936, 94839, -56241, -94436, -44020, -60727, 
-75142, -44063, -97305, 34780, 85666, 12182, 54395, 58469, -91848, tail", "row-2014": "head, -44981, -49925, -1232, -59047, -76435, 82535, -62528, -37155, 69119, 10839, -42622, -27922, 12133, 7044, -30264, 73430, -64766, -38608, -87566, 57808, 30467, -78493, -16872, 51507, 23442, 99612, -56923, 51486, 36177, 99444, -77787, -34709, 18458, 14433, 59616, -43678, -48714, -42273, -15913, -53770, 18226, -25120, 2286, 81328, -89107, 94066, -65966, -13171, 11219, -65308, -26521, 18012, -71564, -77231, -56317, 17459, -45521, 98137, 66869, -85231, -44979, 27874, -57051, 20342, tail", "row-2015": "head, 89889, -96322, 5088, 68950, 32368, 57681, 69264, 63848, 40425, 95216, 61533, -29095, 89459, 4751, 91512, 21272, 40441, 86576, -12599, 77748, 88438, -23977, 61769, -13354, 6776, 7134, -92335, 23754, 89940, -21395, -65126, -93752, -44731, -25674, -79962, -26817, 31657, 76385, -4519, -27602, -39821, -89642, 59658, -94010, -95945, 68672, -94170, -40015, 77415, -16492, -76521, 75999, 23992, -54656, -29100, -74258, 5351, 34031, 69568, 5167, -69155, 6775, 41454, -23479, tail", "row-2016": "head, 62801, 79899, 10966, -1316, 42582, -82931, -18350, -52969, -82519, 63989, 48715, -64474, 26150, -23899, 24672, -79543, -77713, -79237, -83834, 66837, -13409, -6843, -48340, 58765, 14238, 12925, 61758, -86769, 62784, 8096, 87619, -67479, -8364, 47404, -22700, -3072, -46787, 24967, -43568, -82213, -61139, 76574, 77161, 13339, 59598, 52617, -8073, 57791, 51797, -70696, -81714, 85117, -91663, -30670, -76970, -55869, -59959, -47418, 15727, -16707, -40109, -17254, -64594, 8958, tail", "row-2017": "head, 60541, 73007, 22802, 49816, -10553, 50180, -53842, 17307, 27919, 92151, -59341, 3279, -28000, 35465, 68772, -37040, 25209, 79625, -86901, -88086, -46986, 76932, -2028, 84145, -11263, 6880, 65367, 63951, 59712, 53057, 69697, 16875, -54063, -52370, 89791, -49050, -71653, -29821, 67250, -17195, -23458, 29583, 83546, -50906, -25322, 66316, 98344, -51015, 91537, 47480, 8560, 66422, 29264, -17082, -44505, 16370, 77750, 15214, 10864, 90904, -89060, -46492, -23158, 88035, tail", "row-2018": "head, -84692, 74411, 72020, -16958, 55684, -11445, 90930, -9934, -46983, -48991, 12355, 23060, -82230, -70188, -63797, 64093, -89046, 93351, 14860, 97318, 18848, 86638, 22977, -21426, 85905, 50552, -11293, -52898, 20222, -17017, -54690, -56569, 89336, 13171, -24258, -43783, -75968, -4025, 13564, -91741, -95292, -94054, -21454, 88863, -35193, 65813, 59831, -77798, 53408, 99244, 75373, 3511, 55629, -31896, 59629, 75580, -4386, 11884, 40606, 13704, -23841, 32071, -21745, -4766, tail", "row-2019": "head, 15531, -54987, -31960, -82294, 50021, 27328, -58832, 73593, 29945, 96582, -19247, 37258, 10540, 10284, 16017, 94679, 56480, -51028, -58294, 96221, -5728, -18047, 20077, -47141, 29304, -88, -68236, -66683, 39979, 41318, -65068, 19318, -13478, 59133, 47659, -62117, -94972, 60900, 59064, 49113, -75917, 47561, 67121, -24543, -72058, 70623, 16047, -21054, 53925, 92537, 19890, -90557, -87655, -94632, 33974, -21095, 44847, 30184, -65731, -36767, 25246, -84297, -6658, -84965, tail", "row-2020": "head, 36547, 92846, 34996, -7017, 99548, 17790, -30095, 14997, 14541, -27158, 31837, -44661, -52422, -82633, -5166, 69250, -50089, 26355, -42785, 48630, -66195, -90689, 46679, -79023, 83079, -54543, 74709, 94273, -83358, -10739, 73197, 14259, -45086, 82341, -80215, 69290, -21116, 8678, -22927, -73300, -93884, 91812, -66444, -57033, -6764, -98458, 72744, 71152, -31354, 81848, 4084, 49228, 58889, 83884, -56260, -52350, 7578, -45202, -58755, -51960, -5685, 77992, 
-87098, 12979, tail", "row-2021": "head, -6666, 65300, -6746, 56621, -35963, 13174, 97727, -65745, 97222, -32025, -18299, -27308, 49374, -70813, 22053, 26971, 14767, 24608, 61125, 62420, 37593, 56219, 29476, 31137, 1145, 6900, -55305, -29907, 3504, 93435, 85240, -44500, -427, -20349, -94850, -56359, 13176, 33110, 55945, -67322, -96481, -49815, 83222, -78954, -18562, 27356, 19008, 73757, 7074, -36412, -66335, 43582, -42323, 41828, 1671, -41849, -53100, -56601, 50611, 22204, -88831, 34076, 4714, 28113, tail", "row-2022": "head, 99063, 81710, 82789, -45097, -82471, -17137, 71729, -2555, 19483, 46765, -32456, 94675, 53214, 43873, 49635, -41519, 95052, 71866, -64976, -74373, 68768, 70809, 24540, -27273, -67784, 91390, 24694, -26140, -22136, 54232, -70003, -86671, 50180, 70364, 93317, -17977, -6201, -61319, 43445, 88358, 18742, 36467, -88642, -8428, 27839, -69088, -74146, -83703, -42271, -33564, -86858, 54316, -46784, -97964, 20024, 84150, 44035, 90049, -90164, -1999, -80403, 28047, -53917, 98801, tail", "row-2023": "head, 30017, -90191, -91228, -51800, -63831, 90242, -49840, 69661, 97637, 14131, -79639, -77028, 19948, -31908, -54355, -97791, 93455, 84193, 31131, 50096, -57356, -32621, -51319, -10179, 29008, 3164, 88017, 8922, -3830, -98485, -10168, -46083, -87652, -14416, 90818, -82779, 35339, 1636, -39359, -60634, 45442, 34762, -89580, 11445, -20120, 47450, -47401, -56857, -86842, -99923, 61393, 47106, -67956, 71911, -63327, 98891, 17671, -52648, 91892, -86066, 98013, -25006, 54812, 74882, tail", "row-2024": "head, -33128, 75516, -8959, 35421, -25046, -60415, 56227, 14172, 53354, 40415, 91373, -46503, -71803, -46777, 15788, 27059, 86519, 93606, 19007, 77507, -5157, 77199, 46162, 53355, -68936, -53394, 52464, 20753, -79372, 46146, 94666, 65085, 41024, -8691, 93601, -87168, 36816, 8803, 48931, 70718, -9579, -93022, 86982, 33493, -76685, 44236, -99152, -23803, -46546, -76118, -40255, 85594, 76491, 19159, 55765, -19453, -33620, -55316, 64198, 82748, 80543, -40917, 36739, -55857, tail", "row-2025": "head, 61619, 47812, 1907, 21652, -531, 97909, 38941, -29843, -69930, -2225, -69629, -66529, 48853, 73889, -29230, 25795, 88550, -2345, 18683, -15436, -26933, -97443, -52378, -4391, 40859, -44838, -9306, 8134, -35304, -27083, 51387, 99505, -20894, -68008, -4424, 47165, -85947, -64105, -68220, -64785, -54582, 61253, 3321, 90749, -21902, 12910, 20806, -42619, -20735, -87231, -24709, -95047, -95769, -45265, 51536, -59232, -61113, -99230, 97378, -7809, -97607, -86853, 60665, 57748, tail", "row-2026": "head, -8703, 96854, -14581, -22303, -61884, 80026, 31170, 85180, 84185, -92066, -48379, 53870, 70067, -55089, 44187, 87801, 55352, -40503, 71104, -58047, 38848, 21830, 26497, 90423, 64289, 98495, 862, -94572, -37764, -32159, 60883, -25788, 26713, -88000, 35629, 92029, -10548, -71652, 43932, -88330, -17871, 90341, -48235, 35151, -82220, 6199, -72851, 82615, -60746, 48165, 45258, 65909, -54962, -33179, 19362, 54548, 56940, -84700, -56095, -42217, -98661, -46775, -77452, -35941, tail", "row-2027": "head, -24036, -27643, 9832, -82655, -47521, -84263, 58477, -79983, -44713, 95399, -96685, 1253, 88659, -53412, 51202, -36668, 95673, 36086, 64126, -70146, -69567, 25571, 79096, 14148, 1609, 35511, 15744, -31622, 65325, -73533, 6998, 92827, -64380, 69138, -38398, -25545, 35178, -69476, 97146, 17649, 53502, -25576, 89938, -77619, -52112, 6920, 94114, 65220, 2328, -86882, 54569, -77506, -61507, 18987, -62532, 48481, -23411, 65127, 77343, 16852, 91808, 36701, -5795, -19656, tail", "row-2028": "head, 24260, 35283, -67542, 
25103, 45321, 99980, -37128, 47944, -49700, -86095, 70011, -9723, -50345, 98643, -40433, -66894, 80876, 32877, 26367, 12290, 18560, -82587, 17386, -41789, 85949, 38918, -80010, -78657, 40993, -70734, -36882, 8759, -50146, 5594, -50777, 69297, -45611, -5153, 59833, -68589, -20267, 56384, 91549, 94374, 56684, -54186, 20718, -96609, -77505, 3466, 39157, 86436, -89880, 61040, -67726, 72885, 88375, -48518, -13229, 30518, 42915, -79221, -28423, 43872, tail", "row-2029": "head, 16036, 24388, -44578, 31665, 55321, -99763, 37562, 10683, -25974, 91674, -11098, 61063, -71529, -94428, -6750, -85292, 70572, 71118, -49066, -52202, -8594, -40408, 32479, -60158, 28424, 704, -13202, 39237, 63497, -98813, -87805, 81249, 48350, -57115, 96555, 6951, -9870, 79505, 65198, -14365, -25754, 889, 59646, 2545, -84868, -84909, 72709, -18990, -8613, 21229, 87042, -50639, -34093, -86011, 18595, 55090, -32134, -7659, -96372, 7140, -67022, 19608, -15651, -8148, tail", "row-2030": "head, -3202, 15409, -70477, -59594, -25181, -56277, -61944, -6651, 25004, -79631, 53539, -11391, -68463, 53931, 91391, 35232, 25649, 46796, -77935, 54154, 61937, -93785, 36136, 71709, -17324, 13302, -36889, -33723, -92134, 94478, -55744, -64661, 3812, 86159, -84981, -39648, -60601, -157, 19192, 36227, 69284, 4584, 21607, -74048, -14780, -45786, 98567, -25624, 8942, 73385, -3701, -58722, 56810, 16582, 36492, 47271, 32321, 25643, 76658, 8600, -27675, -14987, 9280, 74884, tail", "row-2031": "head, -23945, 10856, -85222, -60123, 38684, -29950, -75078, 60960, 86174, 84536, -17157, 67359, 2852, 93235, 43408, -99010, 13802, -54909, -66518, 80492, -11738, 97357, -42031, -7466, -86079, -37585, -76051, -64030, -77395, 3595, -11865, 66682, -55235, 15827, 88591, 98704, 87418, 35209, 7415, 14788, 88009, 8827, 57821, 34316, -85068, -86508, -66951, 88094, -72204, 61190, 92235, 26422, -24435, 98487, -30079, 10483, 35045, -6476, -66206, -11947, -45063, 16882, 71650, 17549, tail", "row-2032": "head, -79897, -76302, -78228, 12598, 19296, 542, 30446, 79083, -16587, -99966, 62823, 41766, 70389, -68130, 99737, 85324, 2021, 83730, -42333, -8803, 41902, 62862, -28500, 21138, -37269, -54543, -2184, -56328, -66651, -64855, -35665, 40676, 53115, -39489, -35364, 3980, -33636, -68078, 34504, 4603, 84354, -7762, 23306, -58731, -68817, -8965, 56553, 40657, -43890, 46441, 43738, -26753, -58673, 82277, -56623, 85887, -40333, -49687, -3296, -59014, 16724, 66954, -81859, -4542, tail", "row-2033": "head, 65743, -554, -36802, 80419, -79781, -33977, 85900, 31549, -40926, 53099, 34826, -69292, -41004, 59510, 7529, -34817, 45112, -89585, -16129, -55194, 31909, 78179, 84062, 14859, -67535, 54286, -98387, -63163, 73181, -10840, -70670, 20585, -69808, -55094, -1700, 56816, -18569, 6857, -21319, -8721, -93756, 20450, -17555, -45688, -32323, -19939, 94542, 40817, 51228, -4734, 7405, -79388, 32840, -33361, -2532, 21141, 43903, -40259, -87128, -50965, 54651, -25411, -64964, 90989, tail", "row-2034": "head, 7386, -67239, -20563, 57678, -44401, -5686, 54917, -32258, 3517, -93566, -37218, -89526, 3590, -31546, 64019, 64512, -47867, 10542, -16363, 1294, -96371, 79500, 94432, -45360, 55235, -58045, 51535, 61055, 73829, 39176, -49627, -17826, 62780, 52269, -58363, -65226, -83499, -49082, -87046, -9327, 74784, -66806, -5073, 85982, -52655, -30493, 12145, 94245, 81841, -94195, -11540, 29365, -63876, 48819, 40181, 84090, 7927, -21959, 15840, -47397, 86649, -67254, -73294, 9644, tail", "row-2035": "head, -23865, 76544, 5328, 66968, 93668, 85060, 69933, 74366, -41843, -35076, 30237, 95127, -24931, 
-57979, -50762, -20919, 64574, -30307, -60715, 18256, -60791, 58477, -74997, 71658, 98506, 46985, 95376, 682, -51953, 16864, 34761, 50440, 22116, 92609, 53677, -23840, 59607, 84222, 90246, -10654, -80221, 94334, -91883, 20821, -23817, 14104, 76587, -81306, -58154, -53949, -38134, -21564, 31223, 84913, 54070, 81028, 52683, 14818, 14822, 97234, -79900, 76421, -49125, 13686, tail", "row-2036": "head, -33471, 84012, 82933, 57957, 44877, 17102, 83563, -31842, 97838, 58285, 24104, -56862, 45677, 92508, 96817, -65764, -56088, -837, 46477, 76216, -56430, -28457, 59778, -86434, 86330, -57324, 69777, -19027, -22630, -11599, 27250, 38774, -19180, -9121, 50199, 96922, 30138, 68650, -92134, 20594, 79367, 2841, -19280, -95053, 57095, -35404, 44691, 46801, 55668, -58079, -33734, -53922, 36914, -84586, -78334, 23412, -33154, 46602, -57593, 32256, 80588, -43378, 1655, 6997, tail", "row-2037": "head, 48962, 39184, -356, 80338, -3583, 74581, -16673, -10387, 25130, -82987, 52704, -27233, -7347, 7285, 73798, -82600, 99383, 49869, 95422, -76636, -84081, 15661, 94942, 41533, -68902, -51260, -9419, -7895, 20618, 41510, 59782, -24846, 7440, -38693, -3549, 50332, 1703, -72065, -17304, 42745, -80577, -61265, -49344, 75428, -79720, 54972, 23700, 10623, -19261, 10333, -96998, -521, 79513, 25825, -46375, -46000, -49276, -75767, 84192, -41081, -87672, -52, 92453, -80282, tail", "row-2038": "head, 96950, -76561, -13240, -98382, -6518, 61215, 943, -10476, -39393, 53926, -92178, -32928, 37770, 74413, 12825, 6367, 82616, 57541, 62135, 12506, 31058, -18751, -66080, -76741, 13612, -12556, -2162, -50384, -52552, -4618, 45765, -4734, -58178, -73621, -11828, -5370, 89067, -8272, -60236, -76371, 38891, -34160, 37770, -38051, 31039, 71770, 25614, 62913, 22519, -67944, 60978, 9151, -38582, 1110, 21157, -8566, 21221, 27136, 33498, 61397, 93572, 97024, 60360, 48609, tail", "row-2039": "head, -15370, 83924, 37905, -62150, 556, -23057, -40434, -15892, 53761, -59887, 27933, 46231, -49479, -76489, -78680, -40673, -94572, 20515, 72253, -51810, 71004, -10930, -69684, -98376, 92768, 20660, 70605, 43810, 47856, -68165, -64377, 13154, -87354, -73635, -75260, 46756, -87181, -60378, -12247, -53428, -66283, -82405, 81393, 38009, -61633, 62191, 25317, 75657, 74326, 13661, -53949, -31885, 25149, -95367, 95915, -35230, -96438, -9725, 63233, 43768, 74500, 93581, 32154, -85367, tail", "row-2040": "head, -87456, -70947, -99998, 44549, 71431, 56449, -67070, 41779, -94110, 17843, -13924, -65666, 28297, -71739, -46762, -30867, 4482, -26527, 94172, 45927, 82476, -41468, 44031, -90621, 86004, 51905, 34439, -13921, -40466, -80253, 6935, 50989, -92673, 21748, 98716, -94823, -57619, 7220, -93959, 86574, 9133, 70546, -37288, 66692, -35129, -56879, 62263, 92487, -80398, 88190, -61900, -9709, 16382, 2459, 36638, 12635, -31886, -89095, 73362, 70171, -38847, -79778, -37844, -7249, tail", "row-2041": "head, -61588, -2642, -49979, -28672, -86476, -11759, 30824, 37021, -70681, 25659, -21251, -83052, -13998, -66803, 1163, -22075, 60181, 80583, -71309, -90331, -12513, 57500, -95159, -14747, 30847, 71425, 38795, 96975, 13054, -98625, -81089, 68828, 47222, 78591, -76403, -88271, 3257, -36711, -92702, 9562, -77204, -92054, 17725, -27543, -62165, -89826, -49903, 40203, -44656, -55451, -13433, 88168, 20106, -68961, -3021, -79124, 88960, -94729, -59373, -3672, 40094, -68871, 98666, 74550, tail", "row-2042": "head, 52202, -23010, 51739, 68458, -87503, -27058, -40913, 54180, 58816, -96637, 65438, 4238, 32353, 96523, 22666, 31112, 30994, 48680, 5964, -97423, 5842, 57960, 3604, 
-48701, -53285, 11597, 80923, -97312, 94107, -84196, 28786, -87094, -77546, 53339, 17017, -54314, -16393, 84169, 31960, 70277, -56565, 98481, -17858, -35613, 35287, -38916, -87705, 50929, 42038, -53999, 41111, -18782, 68643, -8050, 4928, 72225, -55043, -49248, 41662, -20051, 58336, 96103, -53339, -69179, tail", "row-2043": "head, -86228, -23071, -80404, -76581, -74350, 52469, 71187, -47821, 22413, 20153, -59356, -44135, -89159, -78340, 77352, 8587, -55510, 67623, 89338, 3977, 70345, 88984, -59091, -74025, 88100, -67920, 8291, -95821, -15893, 94709, -32702, -4228, -67676, -43247, -53472, 51919, -49645, -57704, -17464, -1583, 47428, 27919, -69569, 57243, -39740, -75492, -2984, -7281, 16532, -5904, -85692, 91538, -33605, 54867, 14822, -21700, -34226, 4097, 59092, -86964, 17048, -75101, 93630, 94232, tail", "row-2044": "head, -69380, -90340, -92921, -5726, 13322, 89217, -19386, -88785, -85312, 94460, -77429, 31526, -51066, -69314, -24348, 54052, -42348, 65392, -21760, 86999, -78063, 81261, -74387, -74789, 88665, 1675, 37164, -12764, -31521, -27506, 52875, 44006, 27269, 52727, 69665, 53167, 46542, -12640, -79612, 37375, 83828, 72052, -24365, -3006, 45462, -66517, 57746, -37226, -2140, 6433, 69991, 56443, -70145, -89742, 96165, 90657, 99796, -88761, 59082, -89792, 85146, -29254, -49172, 72136, tail", "row-2045": "head, -80113, 39196, 82730, 25096, 46211, 51488, -35678, -53866, -61728, 11541, 63226, -78375, 75298, 26144, -23260, 67844, -62314, -48753, 59717, 25428, 46863, -57409, 41335, 85317, -8380, 72011, 27271, 75651, 66564, 6050, 44305, 8525, 48451, 20610, 18687, -1141, 51111, -83222, 20455, -90936, -83120, 27293, -65981, -25929, -84833, -86222, -32233, 9196, -63938, -36646, -58051, -83223, -51334, -50463, 20752, -81473, -12092, -7911, 93200, 62156, 9667, 5188, 70343, -55757, tail", "row-2046": "head, 38385, -19912, -54240, -53412, 81561, 31508, 24780, 23765, -23522, -69320, 63424, -65280, -20643, -64633, -21160, 80389, 53836, 48447, 4697, 66122, -98976, -81673, -59501, -84902, 88171, -71861, -42368, -7493, 7473, -12185, 23404, 12545, -96885, 41388, -67858, 1863, 43515, -65594, -99222, -92416, 9412, 85339, 21789, 56581, 55518, 4577, -14182, 49051, -93720, 20107, 25451, -89283, -18841, -55853, -63243, 60355, -46941, 1110, -32125, 41865, 77851, -16763, 78122, -6730, tail", "row-2047": "head, 81122, -48610, 83969, -31249, 72042, -7864, 27462, 51390, 10238, -86343, 10778, 88153, -3023, 10513, 70133, 25964, 23216, -58136, 97174, -84807, -38094, 62418, 65366, 63254, -57326, -4342, 10009, -59977, 31376, 82521, 44062, 9900, 5471, 91289, -10854, 74075, -42891, 18964, -27419, -23183, 83282, -21769, 52083, -33853, 48580, 97942, -22832, 29203, 80129, -5298, -5818, 65057, 87559, -64298, 98833, -38851, -36813, 57012, -73258, 44379, -30906, 32943, 83375, -84405, tail"} diff --git a/test/data/multiple_statements.sql b/test/data/multiple_statements.sql index bbdfaf053..236cd5977 100644 --- a/test/data/multiple_statements.sql +++ b/test/data/multiple_statements.sql @@ -1,3 +1,3 @@ select 1; select 2; -select 3; \ No newline at end of file +select 3; diff --git a/test/data/multiple_statements_negative.sql b/test/data/multiple_statements_negative.sql index 4238acd80..7d550a234 100644 --- a/test/data/multiple_statements_negative.sql +++ b/test/data/multiple_statements_negative.sql @@ -1,3 +1,3 @@ select 987; select 1345 -select 9876; \ No newline at end of file +select 9876; diff --git a/test/data/rsa_keys/rsa_key_encrypted.p8 b/test/data/rsa_keys/rsa_key_encrypted.p8 new file mode 100644 index 
000000000..53f9edd04 --- /dev/null +++ b/test/data/rsa_keys/rsa_key_encrypted.p8 @@ -0,0 +1,29 @@ +-----BEGIN ENCRYPTED PRIVATE KEY----- +MIIE6TAbBgkqhkiG9w0BBQMwDgQI6xVPyzHaGbYCAggABIIEyJ40XLJ4ifNL5iu0 +dYv0ksu7JtO8tBl/02DNED0P8T/mF8miJrfWq7C++4bGgRnJgeo/g4REi7GifLwg +kRiNGibwfGVXhvJYiHDUCuGlhTe+haNEOO3RfNc6I0YL0uDrgIxlu6sGu9QGvdfr +UOD+zRWlhJKIxTuXLRQb7sXQUAr1Xjkg+mSCaG4jnit37LSaWxUYu7FZYYpONKYG +iwErlzLhSfNiIRkaWwd5SDeXOMSsVrSbLm/4PtQoB5tCZMYNRQXySl7Sn35R0xiI +n2wzOhUkjeaXUjjmVYoRImKrq5bShMYR0xJHSj5jNggfDqATYj7Y7sa5ZEmSR+T1 +OCdV6ARKnwRnHmuTgNvuCqsT1qipxGdhtxSlO7RyDJIPC32jJWKhH8pDrwTVqZrb +ytsDOjravQclf5l2rmBsChI0oNiE3ZmhF8O05T+5CUkcpkKTwQssH4zp0ctUfYCC +9lrSOLEzIKePuKvKsU1wNKZbbJlYN7svlDW0LwISc9VS9cwKs2s8yNTqTykg1CSs +eGTZBtoG9yPatLl4CoSe1O1CQ09eVX+03OPj7wFGIuw8S/odXaVhlziIcBeL0MCv +hTHMyMJiyI3A57n+Q/644Q8Hw6g29dGoLtR+rACfNoeE+cBgs0Iq0FMkXEQe5Ae5 +QxvE0gpgH/KFUiuA8RjIC4wtkjBPCFNA8lsVIgGNEbCF7QmMB7exg1Orm4FxjYRv +M4KAQLjZEeGHaLp+3zXtUazl+34GWuaQlfmvjgg0MOL8R5mKIAKU4yeze2twQ2CZ +yrqZgZOsbXsBcAxe6/5x3NqHKJ2ZN/kqHFnRHIIX1R8U9TE9zmfJ3THYjH+pYS9P +4CPhfanfNbQxQCJIctR/dvUiyIkk+fji3peidlKVgcYFFs9XB5CInzx4o3hOt6n5 +R5vVsFgUSesqzfBm1JKn8Pkl5hXvhTHJx0QL9CoRJ+M1MMIUN94vaDywjX/RTJp+ +syaSW6CMmQkGhhfZKv7P9egHwyvUvsWQKvATLFlG7pJcEhIrnY7zDWUXvlHWAV+b +4ohN9B1ds2BR+rV9WcikEZVBHZLK8HxkmOmyppWZJd3V8kHRNGKClZgF0+0TTcyl +nQQl8U76vUtQx9gx2bbB8jZ3AWxU3mknnoG8cnBVH67XA4jnusWZDSrydoBiFw1l +wSDsFmYIlxYdg+KsAm2bciYZu/QUkk6EBw38zOgsKG++QB9C6SGVer98Lehb9I82 +PD9lp5ca7Q0iCd8ynMTUY47nabLvYydQ3iv6vi00m83tu4gBjEvIceg1cqzNz/Tv +ISL+rpqJl+zm+aCob3fQIrm/MgFZTbP5EKDIE1UDOyS94v1H+L9H+Jr2VNLVgC6B +SS9W7+gz0CjhKgC1N2uxcw1dZ2fRUarmPHBgCizJWIkxHMWOtQnOlvltIWI9eeZU +f3SYrEPaTF0cRpRZGFXf9JDiDTYVKFh+RO5y2QmZszxZTIpQoI0lN8J6mpUqQLlJ +agdfrPiG29Kv1NR5Os++Z/5J9JncNF/BdAoALqQXynwAl0GWpNNlV8TYyP1YuOvP +Q0DvGtuiXBOxZj4u55ba5kM0QE5X26Z5LwXEeZBFvM35wwlLh20rAcPJnaL+aqrP +zTOyyzibyoLzbvnk7A== +-----END ENCRYPTED PRIVATE KEY----- diff --git a/test/generate_test_files.py b/test/generate_test_files.py new file mode 100644 index 000000000..7bb0c2726 --- /dev/null +++ b/test/generate_test_files.py @@ -0,0 +1,98 @@ +#!/usr/bin/env python3 +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +import argparse +import os +import platform +import random +import subprocess +import tempfile +import time + +IS_WINDOWS = platform.system() == "Windows" + + +def generate_k_lines_of_n_files( + k: int, n: int, compress: bool = False, tmp_dir: str | None = None +) -> str: + """Creates testing files. + + Notes: + Returned path has to be deleted by caller. + + Args: + k: Number of lines per file to generate. + n: Number of files to generate. + compress: Whether to compress the files (Default value = False). + tmp_dir: Location where the files should be generated, if not supplied a temp directory will be created + (Default value = None). + + Returns: + Path to parent folder to newly generated files. 
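+
+    Example (illustrative; assumes ``import shutil`` for the cleanup the Notes require):
+
+        path = generate_k_lines_of_n_files(10, 2)
+        # -> path now holds file0 and file1, each with 10 random CSV rows
+        shutil.rmtree(path)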
+ """ + if tmp_dir is None: + tmp_dir = tempfile.mkdtemp(prefix="data") + for i in range(n): + with open(os.path.join(tmp_dir, f"file{i}"), "w", encoding="utf-8") as f: + for _ in range(k): + num = int(random.random() * 10000.0) + tm = time.gmtime(int(random.random() * 30000.0) - 15000) + dt = time.strftime("%Y-%m-%d", tm) + tm = time.gmtime(int(random.random() * 30000.0) - 15000) + ts = time.strftime("%Y-%m-%d %H:%M:%S", tm) + tm = time.gmtime(int(random.random() * 30000.0) - 15000) + tsltz = time.strftime("%Y-%m-%d %H:%M:%S", tm) + tm = time.gmtime(int(random.random() * 30000.0) - 15000) + tsntz = time.strftime("%Y-%m-%d %H:%M:%S", tm) + tm = time.gmtime(int(random.random() * 30000.0) - 15000) + tstz = ( + time.strftime("%Y-%m-%dT%H:%M:%S", tm) + + ("-" if random.random() < 0.5 else "+") + + "{:02d}:{:02d}".format( + int(random.random() * 12.0), int(random.random() * 60.0) + ) + ) + pct = random.random() * 1000.0 + ratio = f"{random.random() * 1000.0:5.2f}" + rec = "{:d},{:s},{:s},{:s},{:s},{:s},{:f},{:s}".format( + num, dt, ts, tsltz, tsntz, tstz, pct, ratio + ) + f.write(rec + "\n") + if compress: + if not IS_WINDOWS: + subprocess.Popen( + ["gzip", os.path.join(tmp_dir, f"file{i}")], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ).communicate() + else: + import gzip + import shutil + + fname = os.path.join(tmp_dir, f"file{i}") + with open(fname, "rb") as f_in, gzip.open(fname + ".gz", "wb") as f_out: + shutil.copyfileobj(f_in, f_out) + os.unlink(fname) + return tmp_dir + + +if __name__ == "__main__": + parser = argparse.ArgumentParser( + description="Generate random testing files for Snowflake" + ) + parser.add_argument( + "k", metavar="K", type=int, help="number of lines to generate in each files" + ) + parser.add_argument("n", metavar="N", type=int, help="number of files to generate") + parser.add_argument( + "--dir", + action="store", + default=None, + help="the directory in which to generate files", + ) + args = vars(parser.parse_args()) + print(generate_k_lines_of_n_files(k=args["k"], n=args["n"], tmp_dir=args["dir"])) diff --git a/test/helpers.py b/test/helpers.py new file mode 100644 index 000000000..05bd8c03d --- /dev/null +++ b/test/helpers.py @@ -0,0 +1,45 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# +from __future__ import annotations + +from typing import Pattern, Sequence +from unittest.mock import Mock + +from snowflake.connector.compat import OK + + +def create_mock_response(status_code: int) -> Mock: + """Create a Mock "Response" with a given status code. See `test_result_batch.py` for examples. + Args: + status_code: the status code of the response. + Returns: + A Mock object that can be used as a Mock Response in tests. + """ + mock_resp = Mock() + mock_resp.status_code = status_code + mock_resp.raw = "success" if status_code == OK else "fail" + return mock_resp + + +def verify_log_tuple( + module: str, + level: int, + message: str | Pattern, + log_tuples: Sequence[tuple[str, int, str]], +): + """Convenience function to be able to search for regex patterns in log messages. + + Designed to search caplog.record_tuples. 
+
+    Notes:
+        - module could be extended to take a pattern too
+    """
+    for _module, _level, _message in log_tuples:
+        if _module == module and _level == level:
+            if _message == message or (
+                isinstance(message, Pattern) and message.search(_message)
+            ):
+                return True
+    return False
diff --git a/test/integ/__init__.py b/test/integ/__init__.py
new file mode 100644
index 000000000..d689e9c10
--- /dev/null
+++ b/test/integ/__init__.py
@@ -0,0 +1,3 @@
+#
+# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved.
+#
diff --git a/test/integ/conftest.py b/test/integ/conftest.py
new file mode 100644
index 000000000..1d2ce723f
--- /dev/null
+++ b/test/integ/conftest.py
@@ -0,0 +1,244 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved.
+#
+
+from __future__ import annotations
+
+import os
+import sys
+import time
+import uuid
+from contextlib import contextmanager
+from logging import getLogger
+from typing import Any, Callable, ContextManager, Generator
+
+import pytest
+
+import snowflake.connector
+from snowflake.connector.compat import IS_WINDOWS
+from snowflake.connector.connection import DefaultConverterClass
+
+from .. import running_on_public_ci
+from ..parameters import CONNECTION_PARAMETERS
+
+try:
+    from ..parameters import CLIENT_FAILOVER_PARAMETERS  # type: ignore
+except ImportError:
+    CLIENT_FAILOVER_PARAMETERS: dict[str, Any] = {}  # type: ignore
+
+MYPY = False
+if MYPY:  # from typing import TYPE_CHECKING once 3.5 is deprecated
+    from snowflake.connector import SnowflakeConnection
+
+RUNNING_ON_GH = os.getenv("GITHUB_ACTIONS") == "true"
+
+if not isinstance(CONNECTION_PARAMETERS["host"], str):
+    raise Exception("default host is not a string in parameters.py")
+RUNNING_AGAINST_LOCAL_SNOWFLAKE = CONNECTION_PARAMETERS["host"].endswith("local")
+
+try:
+    from ..parameters import CONNECTION_PARAMETERS_ADMIN  # type: ignore
+except ImportError:
+    CONNECTION_PARAMETERS_ADMIN: dict[str, Any] = {}  # type: ignore
+
+logger = getLogger(__name__)
+
+if RUNNING_ON_GH:
+    TEST_SCHEMA = "GH_JOB_{}".format(str(uuid.uuid4()).replace("-", "_"))
+else:
+    TEST_SCHEMA = "python_connector_tests_" + str(uuid.uuid4()).replace("-", "_")
+
+DEFAULT_PARAMETERS: dict[str, Any] = {
+    "account": "<account_name>",
+    "user": "<user_name>",
+    "password": "<password>",
+    "database": "<database_name>",
+    "schema": "<schema_name>",
+    "protocol": "https",
+    "host": "<host>",
+    "port": "443",
+}
+
+
+def print_help() -> None:
+    print(
+        """Connection parameter must be specified in parameters.py,
+    for example:
+CONNECTION_PARAMETERS = {
+    'account': 'testaccount',
+    'user': 'user1',
+    'password': 'test',
+    'database': 'testdb',
+    'schema': 'public',
+}
+"""
+    )
+
+
+@pytest.fixture(scope="session")
+def is_public_test() -> bool:
+    return is_public_testaccount()
+
+
+def is_public_testaccount() -> bool:
+    db_parameters = get_db_parameters()
+    if not isinstance(db_parameters.get("account"), str):
+        raise Exception("default account is not a string in parameters.py")
+    return running_on_public_ci() or db_parameters["account"].startswith("sfctest0")
+
+
+@pytest.fixture(scope="session")
+def db_parameters() -> dict[str, str]:
+    return get_db_parameters()
+
+
+def get_db_parameters(connection_name: str = "default") -> dict[str, Any]:
+    """Sets the db connection parameters.
+
+    We do this by reading out values from parameters.py and then inserting some
+    hard-coded values into them. Dummy values are also inserted in case these
+    dictionaries were printed by mistake.
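+
+    Example of fields layered on top of parameters.py (illustrative):
+
+        params = get_db_parameters()
+        params["schema"]  # the per-session TEST_SCHEMA
+        params["name"]    # a unique table name for this test run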
+ """ + os.environ["TZ"] = "UTC" + if not IS_WINDOWS: + time.tzset() + + connections = { + "default": CONNECTION_PARAMETERS, + "client_failover": CLIENT_FAILOVER_PARAMETERS, + } + + chosen_connection = connections[connection_name] + if "account" not in chosen_connection: + pytest.skip(f"{connection_name} connection is unavailable in parameters.py") + + # testaccount connection info + ret = {**DEFAULT_PARAMETERS, **chosen_connection} + + # snowflake admin account. Not available in GH actions + for k, v in CONNECTION_PARAMETERS_ADMIN.items(): + ret["sf_" + k] = v + + if "host" in ret and ret["host"] == DEFAULT_PARAMETERS["host"]: + ret["host"] = ret["account"] + ".snowflakecomputing.com" + + if "account" in ret and ret["account"] == DEFAULT_PARAMETERS["account"]: + print_help() + sys.exit(2) + + # a unique table name + ret["name"] = "python_tests_" + str(uuid.uuid4()).replace("-", "_") + ret["name_wh"] = ret["name"] + "wh" + + ret["schema"] = TEST_SCHEMA + + # This reduces a chance to exposing password in test output. + ret["a00"] = "dummy parameter" + ret["a01"] = "dummy parameter" + ret["a02"] = "dummy parameter" + ret["a03"] = "dummy parameter" + ret["a04"] = "dummy parameter" + ret["a05"] = "dummy parameter" + ret["a06"] = "dummy parameter" + ret["a07"] = "dummy parameter" + ret["a08"] = "dummy parameter" + ret["a09"] = "dummy parameter" + ret["a10"] = "dummy parameter" + ret["a11"] = "dummy parameter" + ret["a12"] = "dummy parameter" + ret["a13"] = "dummy parameter" + ret["a14"] = "dummy parameter" + ret["a15"] = "dummy parameter" + ret["a16"] = "dummy parameter" + return ret + + +@pytest.fixture(scope="session", autouse=True) +def init_test_schema(db_parameters) -> Generator[None, None, None]: + """Initializes and destroys the schema specific to this pytest session. + + This is automatically called per test session. + """ + ret = db_parameters + with snowflake.connector.connect( + user=ret["user"], + password=ret["password"], + host=ret["host"], + port=ret["port"], + database=ret["database"], + account=ret["account"], + protocol=ret["protocol"], + ) as con: + con.cursor().execute(f"CREATE SCHEMA IF NOT EXISTS {TEST_SCHEMA}") + yield + con.cursor().execute(f"DROP SCHEMA IF EXISTS {TEST_SCHEMA}") + + +def create_connection(connection_name: str, **kwargs) -> SnowflakeConnection: + """Creates a connection using the parameters defined in parameters.py. + + You can select from the different connections by supplying the appropiate + connection_name parameter and then anything else supplied will overwrite the values + from parameters.py. 
+ """ + ret = get_db_parameters(connection_name) + ret.update(kwargs) + connection = snowflake.connector.connect(**ret) + return connection + + +@contextmanager +def db( + connection_name: str = "default", + **kwargs, +) -> Generator[SnowflakeConnection, None, None]: + if not kwargs.get("timezone"): + kwargs["timezone"] = "UTC" + if not kwargs.get("converter_class"): + kwargs["converter_class"] = DefaultConverterClass() + cnx = create_connection(connection_name, **kwargs) + try: + yield cnx + finally: + cnx.close() + + +@contextmanager +def negative_db( + connection_name: str = "default", + **kwargs, +) -> Generator[SnowflakeConnection, None, None]: + if not kwargs.get("timezone"): + kwargs["timezone"] = "UTC" + if not kwargs.get("converter_class"): + kwargs["converter_class"] = DefaultConverterClass() + cnx = create_connection(connection_name, **kwargs) + if not is_public_testaccount(): + cnx.cursor().execute("alter session set SUPPRESS_INCIDENT_DUMPS=true") + try: + yield cnx + finally: + cnx.close() + + +@pytest.fixture() +def conn_testaccount(request) -> SnowflakeConnection: + connection = create_connection("default") + + def fin(): + connection.close() # close when done + + request.addfinalizer(fin) + return connection + + +@pytest.fixture() +def conn_cnx() -> Callable[..., ContextManager[SnowflakeConnection]]: + return db + + +@pytest.fixture() +def negative_conn_cnx() -> Callable[..., ContextManager[SnowflakeConnection]]: + """Use this if an incident is expected and we don't want GS to create a dump file about the incident.""" + return negative_db diff --git a/test/integ/lambda/__init__.py b/test/integ/lambda/__init__.py new file mode 100644 index 000000000..d689e9c10 --- /dev/null +++ b/test/integ/lambda/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# diff --git a/test/integ/lambda/test_basic_query.py b/test/integ/lambda/test_basic_query.py new file mode 100644 index 000000000..083266be7 --- /dev/null +++ b/test/integ/lambda/test_basic_query.py @@ -0,0 +1,22 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + + +def test_connection(conn_cnx): + """Test basic connection.""" + with conn_cnx() as cnx: + cur = cnx.cursor() + result = cur.execute("select 1;").fetchall() + assert result == [(1,)] + + +def test_large_resultset(conn_cnx): + """Test large resultset.""" + with conn_cnx() as cnx: + cur = cnx.cursor() + result = cur.execute( + "select seq8(), randstr(1000, random()) from table(generator(rowcount=>10000));" + ).fetchall() + assert len(result) == 10000 diff --git a/test/integ/pandas/__init__.py b/test/integ/pandas/__init__.py new file mode 100644 index 000000000..d689e9c10 --- /dev/null +++ b/test/integ/pandas/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# diff --git a/test/integ/pandas/test_arrow_chunk_iterator.py b/test/integ/pandas/test_arrow_chunk_iterator.py new file mode 100644 index 000000000..b31be4b5f --- /dev/null +++ b/test/integ/pandas/test_arrow_chunk_iterator.py @@ -0,0 +1,78 @@ +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
+#
+
+import datetime
+import random
+from typing import Callable
+
+import pytest
+
+try:
+    from snowflake.connector.options import installed_pandas
+except ImportError:
+    installed_pandas = False
+
+try:
+    import snowflake.connector.arrow_iterator  # NOQA
+
+    no_arrow_iterator_ext = False
+except ImportError:
+    no_arrow_iterator_ext = True
+
+
+@pytest.mark.skipif(
+    not installed_pandas or no_arrow_iterator_ext,
+    reason="arrow_iterator extension is not built, or pandas option is not installed.",
+)
+@pytest.mark.parametrize("timestamp_type", ("TZ", "LTZ", "NTZ"))
+def test_iterate_over_timestamp_chunk(conn_cnx, timestamp_type):
+    seed = datetime.datetime.now().timestamp()
+    row_numbers = 10
+    random.seed(seed)
+
+    # Generate random test data
+    def generator_test_data(scale: int) -> Callable[[], int]:
+        def generate_test_data() -> int:
+            nonlocal scale
+            epoch = random.randint(-100_355_968, 2_534_023_007)
+            frac = random.randint(0, 10 ** scale - 1)
+            if scale == 8:
+                frac *= 10 ** (9 - scale)
+                scale = 9
+            return int(f"{epoch}{str(frac).rjust(scale, '0')}")
+
+        return generate_test_data
+
+    test_generators = [generator_test_data(i) for i in range(10)]
+    test_data = [[g() for g in test_generators] for _ in range(row_numbers)]
+
+    with conn_cnx(
+        session_parameters={
+            "PYTHON_CONNECTOR_QUERY_RESULT_FORMAT": "ARROW_FORCE",
+            "TIMESTAMP_TZ_OUTPUT_FORMAT": "YYYY-MM-DD HH24:MI:SS.FF6 TZHTZM",
+            "TIMESTAMP_LTZ_OUTPUT_FORMAT": "YYYY-MM-DD HH24:MI:SS.FF6 TZHTZM",
+            "TIMESTAMP_NTZ_OUTPUT_FORMAT": "YYYY-MM-DD HH24:MI:SS.FF6 ",
+        }
+    ) as conn:
+        with conn.cursor() as cur:
+            results = cur.execute(
+                "select "
+                + ", ".join(
+                    f"to_timestamp_{timestamp_type}(${s + 1}, {s if s != 8 else 9}) c_{s}"
+                    for s in range(10)
+                )
+                + ", "
+                + ", ".join(f"c_{i}::varchar" for i in range(10))
+                + f" from values {', '.join(str(tuple(e)) for e in test_data)}"
+            ).fetch_arrow_all()
+            retrieved_results = [
+                list(map(lambda e: e.as_py().strftime("%Y-%m-%d %H:%M:%S.%f %z"), line))
+                for line in list(results)[:10]
+            ]
+            retrieved_strings = [
+                list(map(lambda e: e.as_py().replace("Z", "+0000"), line))
+                for line in list(results)[10:]
+            ]
+
+            assert retrieved_results == retrieved_strings
diff --git a/test/integ/pandas/test_arrow_pandas.py b/test/integ/pandas/test_arrow_pandas.py
new file mode 100644
index 000000000..16fa23344
--- /dev/null
+++ b/test/integ/pandas/test_arrow_pandas.py
@@ -0,0 +1,1189 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved.
+# +from __future__ import annotations + +import decimal +import itertools +import random +import time +from datetime import datetime +from decimal import Decimal +from enum import Enum +from unittest import mock + +import numpy +import pytest +import pytz +from numpy.testing import assert_equal + +try: + from snowflake.connector.constants import ( + PARAMETER_PYTHON_CONNECTOR_QUERY_RESULT_FORMAT, + IterUnit, + ) +except ImportError: + # This is because of olddriver tests + class IterUnit(Enum): + ROW_UNIT = "row" + TABLE_UNIT = "table" + + +try: + from snowflake.connector.options import installed_pandas, pandas, pyarrow +except ImportError: + installed_pandas = False + pandas = None + pyarrow = None + +try: + from snowflake.connector.arrow_iterator import PyArrowIterator # NOQA + + no_arrow_iterator_ext = False +except ImportError: + no_arrow_iterator_ext = True + +SQL_ENABLE_ARROW = "alter session set python_connector_query_result_format='ARROW';" + +EPSILON = 1e-8 + + +@pytest.mark.skipif( + not installed_pandas or no_arrow_iterator_ext, + reason="arrow_iterator extension is not built, or pandas is missing.", +) +def test_num_one(conn_cnx): + print("Test fetching one single dataframe") + row_count = 50000 + col_count = 2 + random_seed = get_random_seed() + sql_exec = ( + f"select seq4() as c1, uniform(1, 10, random({random_seed})) as c2 from " + f"table(generator(rowcount=>{row_count})) order by c1, c2" + ) + fetch_pandas(conn_cnx, sql_exec, row_count, col_count, "one") + + +@pytest.mark.skipif( + not installed_pandas or no_arrow_iterator_ext, + reason="arrow_iterator extension is not built, or pandas is missing.", +) +def test_scaled_tinyint(conn_cnx): + cases = ["NULL", 0.11, -0.11, "NULL", 1.27, -1.28, "NULL"] + table = "test_arrow_tiny_int" + column = "(a number(5,2))" + values = ",".join([f"({i}, {c})" for i, c in enumerate(cases)]) + with conn_cnx() as conn: + init(conn, table, column, values) + sql_text = f"select a from {table} order by s" + validate_pandas(conn, sql_text, cases, 1, "one") + finish(conn, table) + + +@pytest.mark.skipif( + not installed_pandas or no_arrow_iterator_ext, + reason="arrow_iterator extension is not built, or pandas is missing.", +) +def test_scaled_smallint(conn_cnx): + cases = ["NULL", 0, 0.11, -0.11, "NULL", 32.767, -32.768, "NULL"] + table = "test_arrow_small_int" + column = "(a number(5,3))" + values = ",".join([f"({i}, {c})" for i, c in enumerate(cases)]) + with conn_cnx() as conn: + init(conn, table, column, values) + sql_text = f"select a from {table} order by s" + validate_pandas(conn, sql_text, cases, 1, "one") + finish(conn, table) + + +@pytest.mark.skipif( + not installed_pandas or no_arrow_iterator_ext, + reason="arrow_iterator extension is not built, or pandas is missing.", +) +def test_scaled_int(conn_cnx): + cases = [ + "NULL", + 0, + "NULL", + 0.123456789, + -0.123456789, + 2.147483647, + -2.147483648, + "NULL", + ] + table = "test_arrow_int" + column = "(a number(10,9))" + values = ",".join([f"({i}, {c})" for i, c in enumerate(cases)]) + with conn_cnx() as conn: + init(conn, table, column, values) + sql_text = f"select a from {table} order by s" + validate_pandas(conn, sql_text, cases, 1, "one") + finish(conn, table) + + +@pytest.mark.skipif( + not installed_pandas or no_arrow_iterator_ext, + reason="arrow_iterator extension is not built, or pandas is not installed.", +) +def test_scaled_bigint(conn_cnx): + cases = [ + "NULL", + 0, + "NULL", + "1.23456789E-10", + "-1.23456789E-10", + "2.147483647E-9", + "-2.147483647E-9", + "-1e-9", + 
"1e-9", + "1e-8", + "-1e-8", + "NULL", + ] + table = "test_arrow_big_int" + column = "(a number(38,18))" + values = ",".join([f"({i}, {c})" for i, c in enumerate(cases)]) + with conn_cnx() as conn: + init(conn, table, column, values) + sql_text = f"select a from {table} order by s" + validate_pandas(conn, sql_text, cases, 1, "one", epsilon=EPSILON) + finish(conn, table) + + +@pytest.mark.skipif( + not installed_pandas or no_arrow_iterator_ext, + reason="arrow_iterator extension is not built, or pandas is missing.", +) +def test_decimal(conn_cnx): + cases = [ + "NULL", + 0, + "NULL", + "10000000000000000000000000000000000000", + "12345678901234567890123456789012345678", + "99999999999999999999999999999999999999", + "-1000000000000000000000000000000000000", + "-2345678901234567890123456789012345678", + "-9999999999999999999999999999999999999", + "NULL", + ] + table = "test_arrow_decimal" + column = "(a number(38,0))" + values = ",".join([f"({i}, {c})" for i, c in enumerate(cases)]) + with conn_cnx() as conn: + init(conn, table, column, values) + sql_text = f"select a from {table} order by s" + validate_pandas(conn, sql_text, cases, 1, "one", data_type="decimal") + finish(conn, table) + + +@pytest.mark.skipif( + not installed_pandas or no_arrow_iterator_ext, + reason="arrow_iterator extension is not built, or pandas is not installed.", +) +def test_scaled_decimal(conn_cnx): + cases = [ + "NULL", + 0, + "NULL", + "1.0000000000000000000000000000000000000", + "1.2345678901234567890123456789012345678", + "9.9999999999999999999999999999999999999", + "-1.000000000000000000000000000000000000", + "-2.345678901234567890123456789012345678", + "-9.999999999999999999999999999999999999", + "NULL", + ] + table = "test_arrow_decimal" + column = "(a number(38,37))" + values = ",".join([f"({i}, {c})" for i, c in enumerate(cases)]) + with conn_cnx() as conn: + init(conn, table, column, values) + sql_text = f"select a from {table} order by s" + validate_pandas(conn, sql_text, cases, 1, "one", data_type="decimal") + finish(conn, table) + + +@pytest.mark.skipif( + not installed_pandas or no_arrow_iterator_ext, + reason="arrow_iterator extension is not built, or pandas is not installed.", +) +def test_scaled_decimal_SNOW_133561(conn_cnx): + cases = [ + "NULL", + 0, + "NULL", + "1.2345", + "2.1001", + "2.2001", + "2.3001", + "2.3456", + "-9.999", + "-1.000", + "-3.4567", + "3.4567", + "4.5678", + "5.6789", + "-0.0012", + "NULL", + ] + table = "test_scaled_decimal_SNOW_133561" + column = "(a number(38,10))" + values = ",".join([f"({i}, {c})" for i, c in enumerate(cases)]) + with conn_cnx() as conn: + init(conn, table, column, values) + sql_text = f"select a from {table} order by s" + validate_pandas(conn, sql_text, cases, 1, "one", data_type="float") + finish(conn, table) + + +@pytest.mark.skipif( + not installed_pandas or no_arrow_iterator_ext, + reason="arrow_iterator extension is not built, or pandas is missing.", +) +def test_boolean(conn_cnx): + cases = ["NULL", True, "NULL", False, True, True, "NULL", True, False, "NULL"] + table = "test_arrow_boolean" + column = "(a boolean)" + values = ",".join([f"({i}, {c})" for i, c in enumerate(cases)]) + with conn_cnx() as conn: + init(conn, table, column, values) + sql_text = f"select a from {table} order by s" + validate_pandas(conn, sql_text, cases, 1, "one") + finish(conn, table) + + +@pytest.mark.skipif( + not installed_pandas or no_arrow_iterator_ext, + reason="arrow_iterator extension is not built, or pandas is missing.", +) +def test_double(conn_cnx): + cases = [ 
+ "NULL", + # SNOW-31249 + "-86.6426540296895", + "3.14159265359", + # SNOW-76269 + "1.7976931348623157E308", + "1.7E308", + "1.7976931348623151E308", + "-1.7976931348623151E308", + "-1.7E308", + "-1.7976931348623157E308", + "NULL", + ] + table = "test_arrow_double" + column = "(a double)" + values = ",".join([f"({i}, {c})" for i, c in enumerate(cases)]) + with conn_cnx() as conn: + init(conn, table, column, values) + sql_text = f"select a from {table} order by s" + validate_pandas(conn, sql_text, cases, 1, "one") + finish(conn, table) + + +@pytest.mark.skipif( + not installed_pandas or no_arrow_iterator_ext, + reason="arrow_iterator extension is not built, or pandas is missing.", +) +def test_semi_struct(conn_cnx): + sql_text = """ + select array_construct(10, 20, 30), + array_construct(null, 'hello', 3::double, 4, 5), + array_construct(), + object_construct('a',1,'b','BBBB', 'c',null), + object_construct('Key_One', parse_json('NULL'), 'Key_Two', null, 'Key_Three', 'null'), + to_variant(3.2), + parse_json('{ "a": null}'), + 100::variant; + """ + res = [ + "[\n" + " 10,\n" + " 20,\n" + " 30\n" + "]", + "[\n" + + " undefined,\n" + + ' "hello",\n' + + " 3.000000000000000e+00,\n" + + " 4,\n" + + " 5\n" + + "]", + "[]", + "{\n" + ' "a": 1,\n' + ' "b": "BBBB"\n' + "}", + "{\n" + ' "Key_One": null,\n' + ' "Key_Three": "null"\n' + "}", + "3.2", + "{\n" + ' "a": null\n' + "}", + "100", + ] + with conn_cnx() as cnx_table: + # fetch dataframe with new arrow support + cursor_table = cnx_table.cursor() + cursor_table.execute(SQL_ENABLE_ARROW) + cursor_table.execute(sql_text) + df_new = cursor_table.fetch_pandas_all() + col_new = df_new.iloc[0] + for j, c_new in enumerate(col_new): + assert res[j] == c_new, ( + "{} column: original value is {}, new value is {}, " + "values are not equal".format(j, res[j], c_new) + ) + + +@pytest.mark.skipif( + not installed_pandas or no_arrow_iterator_ext, + reason="arrow_iterator extension is not built, or pandas is missing.", +) +def test_date(conn_cnx): + cases = [ + "NULL", + "2017-01-01", + "2014-01-02", + "2014-01-02", + "1970-01-01", + "1970-01-01", + "NULL", + "1969-12-31", + "0200-02-27", + "NULL", + "0200-02-28", + # "0200-02-29", # day is out of range + # "0000-01-01", # year 0 is out of range + "0001-12-31", + "NULL", + ] + table = "test_arrow_date" + column = "(a date)" + values = ",".join( + [f"({i}, {c})" if c == "NULL" else f"({i}, '{c}')" for i, c in enumerate(cases)] + ) + with conn_cnx() as conn: + init(conn, table, column, values) + sql_text = f"select a from {table} order by s" + validate_pandas(conn, sql_text, cases, 1, "one", data_type="date") + finish(conn, table) + + +@pytest.mark.skipif( + not installed_pandas or no_arrow_iterator_ext, + reason="arrow_iterator extension is not built, or pandas is missing.", +) +@pytest.mark.parametrize("scale", [i for i in range(10)]) +def test_time(conn_cnx, scale): + cases = [ + "NULL", + "00:00:51", + "01:09:03.100000", + "02:23:23.120000", + "03:56:23.123000", + "04:56:53.123400", + "09:01:23.123450", + "11:03:29.123456", + # note: Python's max time precision is microsecond, rest of them will lose precision + # "15:31:23.1234567", + # "19:01:43.12345678", + # "23:59:59.99999999", + "NULL", + ] + table = "test_arrow_time" + column = f"(a time({scale}))" + values = ",".join( + [f"({i}, {c})" if c == "NULL" else f"({i}, '{c}')" for i, c in enumerate(cases)] + ) + with conn_cnx() as conn: + init(conn, table, column, values) + sql_text = f"select a from {table} order by s" + validate_pandas(conn, sql_text, 
cases, 1, "one", data_type="time", scale=scale) + finish(conn, table) + + +@pytest.mark.skipif( + not installed_pandas or no_arrow_iterator_ext, + reason="arrow_iterator extension is not built, or pandas is missing.", +) +@pytest.mark.parametrize("scale", [i for i in range(10)]) +def test_timestampntz(conn_cnx, scale): + cases = [ + "NULL", + "1970-01-01 00:00:00", + "1970-01-01 00:00:01", + "1970-01-01 00:00:10", + "2014-01-02 16:00:00", + "2014-01-02 12:34:56", + "2017-01-01 12:00:00.123456789", + "2014-01-02 16:00:00.000000001", + "NULL", + "2014-01-02 12:34:57.1", + "1969-12-31 23:59:59.000000001", + "1970-01-01 00:00:00.123412423", + "1970-01-01 00:00:01.000001", + "1969-12-31 11:59:59.001", + # "0001-12-31 11:59:59.11", + # pandas._libs.tslibs.np_datetime.OutOfBoundsDatetime: + # Out of bounds nanosecond timestamp: 1-12-31 11:59:59 + "NULL", + ] + table = "test_arrow_timestamp" + column = f"(a timestampntz({scale}))" + + values = ",".join( + [f"({i}, {c})" if c == "NULL" else f"({i}, '{c}')" for i, c in enumerate(cases)] + ) + with conn_cnx() as conn: + init(conn, table, column, values) + sql_text = f"select a from {table} order by s" + validate_pandas( + conn, sql_text, cases, 1, "one", data_type="timestamp", scale=scale + ) + finish(conn, table) + + +@pytest.mark.skipif( + not installed_pandas or no_arrow_iterator_ext, + reason="arrow_iterator extension is not built, or pandas is missing.", +) +@pytest.mark.parametrize( + "scale, timezone", + itertools.product( + [i for i in range(10)], ["UTC", "America/New_York", "Australia/Sydney"] + ), +) +def test_timestamptz(conn_cnx, scale, timezone): + cases = [ + "NULL", + "1971-01-01 00:00:00", + "1971-01-11 00:00:01", + "1971-01-01 00:00:10", + "2014-01-02 16:00:00", + "2014-01-02 12:34:56", + "2017-01-01 12:00:00.123456789", + "2014-01-02 16:00:00.000000001", + "NULL", + "2014-01-02 12:34:57.1", + "1969-12-31 23:59:59.000000001", + "1970-01-01 00:00:00.123412423", + "1970-01-01 00:00:01.000001", + "1969-12-31 11:59:59.001", + # "0001-12-31 11:59:59.11", + # pandas._libs.tslibs.np_datetime.OutOfBoundsDatetime: + # Out of bounds nanosecond timestamp: 1-12-31 11:59:59 + "NULL", + ] + table = "test_arrow_timestamp" + column = f"(a timestamptz({scale}))" + values = ",".join( + [f"({i}, {c})" if c == "NULL" else f"({i}, '{c}')" for i, c in enumerate(cases)] + ) + with conn_cnx() as conn: + init(conn, table, column, values, timezone=timezone) + sql_text = f"select a from {table} order by s" + validate_pandas( + conn, + sql_text, + cases, + 1, + "one", + data_type="timestamptz", + scale=scale, + timezone=timezone, + ) + finish(conn, table) + + +@pytest.mark.skipif( + not installed_pandas or no_arrow_iterator_ext, + reason="arrow_iterator extension is not built, or pandas is missing.", +) +@pytest.mark.parametrize( + "scale, timezone", + itertools.product( + [i for i in range(10)], ["UTC", "America/New_York", "Australia/Sydney"] + ), +) +def test_timestampltz(conn_cnx, scale, timezone): + cases = [ + "NULL", + "1970-01-01 00:00:00", + "1970-01-01 00:00:01", + "1970-01-01 00:00:10", + "2014-01-02 16:00:00", + "2014-01-02 12:34:56", + "2017-01-01 12:00:00.123456789", + "2014-01-02 16:00:00.000000001", + "NULL", + "2014-01-02 12:34:57.1", + "1969-12-31 23:59:59.000000001", + "1970-01-01 00:00:00.123412423", + "1970-01-01 00:00:01.000001", + "1969-12-31 11:59:59.001", + # "0001-12-31 11:59:59.11", + # pandas._libs.tslibs.np_datetime.OutOfBoundsDatetime: + # Out of bounds nanosecond timestamp: 1-12-31 11:59:59 + "NULL", + ] + table = 
"test_arrow_timestamp" + column = f"(a timestampltz({scale}))" + values = ",".join( + [f"({i}, {c})" if c == "NULL" else f"({i}, '{c}')" for i, c in enumerate(cases)] + ) + with conn_cnx() as conn: + init(conn, table, column, values, timezone=timezone) + sql_text = f"select a from {table} order by s" + validate_pandas( + conn, + sql_text, + cases, + 1, + "one", + data_type="timestamp", + scale=scale, + timezone=timezone, + ) + finish(conn, table) + + +def validate_pandas( + cnx_table, + sql, + cases, + col_count, + method="one", + data_type="float", + epsilon=None, + scale=0, + timezone=None, +): + """Tests that parameters can be customized. + + Args: + cnx_table: Connection object. + sql: SQL command for execution. + cases: Test cases. + col_count: Number of columns in dataframe. + method: If method is 'batch', we fetch dataframes in batch. If method is 'one', we fetch a single dataframe + containing all data (Default value = 'one'). + data_type: Defines how to compare values (Default value = 'float'). + epsilon: For comparing double values (Default value = None). + scale: For comparing time values with scale (Default value = 0). + timezone: For comparing timestamp ltz (Default value = None). + """ + + row_count = len(cases) + assert col_count != 0, "# of columns should be larger than 0" + + cursor_table = cnx_table.cursor() + cursor_table.execute(SQL_ENABLE_ARROW) + cursor_table.execute(sql) + + # build dataframe + total_rows, total_batches = 0, 0 + start_time = time.time() + + if method == "one": + df_new = cursor_table.fetch_pandas_all() + total_rows = df_new.shape[0] + else: + for df_new in cursor_table.fetch_pandas_batches(): + total_rows += df_new.shape[0] + total_batches += 1 + end_time = time.time() + + print(f"new way (fetching {method}) took {end_time - start_time}s") + if method == "batch": + print(f"new way has # of batches : {total_batches}") + cursor_table.close() + assert ( + total_rows == row_count + ), f"there should be {row_count} rows, but {total_rows} rows" + + # verify the correctness + # only do it when fetch one dataframe + if method == "one": + assert (row_count, col_count) == df_new.shape, ( + "the shape of old dataframe is {}, " + "the shape of new dataframe is {}, " + "shapes are not equal".format((row_count, col_count), df_new.shape) + ) + + for i in range(row_count): + for j in range(col_count): + c_new = df_new.iat[i, j] + if cases[i] == "NULL": + assert c_new is None or pandas.isnull(c_new), ( + "{} row, {} column: original value is NULL, " + "new value is {}, values are not equal".format(i, j, c_new) + ) + else: + if data_type == "float": + c_case = float(cases[i]) + elif data_type == "decimal": + c_case = Decimal(cases[i]) + elif data_type == "date": + c_case = datetime.strptime(cases[i], "%Y-%m-%d").date() + elif data_type == "time": + time_str_len = 8 if scale == 0 else 9 + scale + c_case = cases[i].strip()[:time_str_len] + c_new = str(c_new).strip()[:time_str_len] + assert c_new == c_case, ( + "{} row, {} column: original value is {}, " + "new value is {}, " + "values are not equal".format(i, j, cases[i], c_new) + ) + break + elif data_type.startswith("timestamp"): + time_str_len = 19 if scale == 0 else 20 + scale + if timezone: + c_case = pandas.Timestamp( + cases[i][:time_str_len], tz=timezone + ) + if data_type == "timestamptz": + c_case = c_case.tz_convert("UTC") + else: + c_case = pandas.Timestamp(cases[i][:time_str_len]) + assert c_case == c_new, ( + "{} row, {} column: original value is {}, new value is {}, " + "values are not equal".format(i, j, 
+                    else:
+                        c_case = cases[i]
+                        if epsilon is None:
+                            assert c_case == c_new, (
+                                "{} row, {} column: original value is {}, new value is {}, "
+                                "values are not equal".format(i, j, cases[i], c_new)
+                            )
+                        else:
+                            assert abs(c_case - c_new) < epsilon, (
+                                "{} row, {} column: original value is {}, "
+                                "new value is {}, epsilon is {}, "
+                                "values are not equal".format(
+                                    i, j, cases[i], c_new, epsilon
+                                )
+                            )
+
+
+@pytest.mark.skipif(
+    not installed_pandas or no_arrow_iterator_ext,
+    reason="arrow_iterator extension is not built, or pandas is missing.",
+)
+def test_num_batch(conn_cnx):
+    print("Test fetching dataframes in batch")
+    row_count = 1000000
+    col_count = 2
+    random_seed = get_random_seed()
+    sql_exec = (
+        f"select seq4() as c1, uniform(1, 10, random({random_seed})) as c2 from "
+        f"table(generator(rowcount=>{row_count})) order by c1, c2"
+    )
+    fetch_pandas(conn_cnx, sql_exec, row_count, col_count, "batch")
+
+
+@pytest.mark.skipif(
+    not installed_pandas or no_arrow_iterator_ext,
+    reason="arrow_iterator extension is not built, or pandas is missing.",
+)
+@pytest.mark.parametrize(
+    "result_format",
+    ["pandas", "arrow"],
+)
+def test_empty(conn_cnx, result_format):
+    print("Test fetch empty dataframe")
+    with conn_cnx() as cnx:
+        cursor = cnx.cursor()
+        cursor.execute(SQL_ENABLE_ARROW)
+        cursor.execute(
+            "select seq4() as foo, seq4() as bar from table(generator(rowcount=>1)) limit 0"
+        )
+        fetch_all_fn = getattr(cursor, f"fetch_{result_format}_all")
+        fetch_batches_fn = getattr(cursor, f"fetch_{result_format}_batches")
+        result = fetch_all_fn()
+        if result_format == "pandas":
+            assert len(list(result)) == 2
+            assert list(result)[0] == "FOO"
+            assert list(result)[1] == "BAR"
+        else:
+            assert result is None
+
+        cursor.execute(
+            "select seq4() as foo from table(generator(rowcount=>1)) limit 0"
+        )
+        df_count = 0
+        for _ in fetch_batches_fn():
+            df_count += 1
+        assert df_count == 0
+
+
+def get_random_seed():
+    random.seed(datetime.now().timestamp())
+    return random.randint(0, 10000)
+
+
+def fetch_pandas(conn_cnx, sql, row_count, col_count, method="one"):
+    """Fetches the same result row by row and via pandas, then compares the outcomes.
+
+    Args:
+        conn_cnx: Connection object.
+        sql: SQL command for execution.
+        row_count: Number of total rows combining all dataframes.
+        col_count: Number of columns in dataframe.
+        method: If method is 'batch', we fetch dataframes in batch. If method is 'one', we fetch a single dataframe
+            containing all data (Default value = 'one').
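+
+    Example (illustrative; mirrors the calls the tests above make):
+
+        sql = "select seq4() as c1 from table(generator(rowcount=>100))"
+        fetch_pandas(conn_cnx, sql, 100, 1)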
+ """ + assert row_count != 0, "# of rows should be larger than 0" + assert col_count != 0, "# of columns should be larger than 0" + + with conn_cnx() as conn: + # fetch dataframe by fetching row by row + cursor_row = conn.cursor() + cursor_row.execute(SQL_ENABLE_ARROW) + cursor_row.execute(sql) + + # build dataframe + # actually its exec time would be different from `pandas.read_sql()` via sqlalchemy as most people use + # further perf test can be done separately + start_time = time.time() + rows = 0 + if method == "one": + df_old = pandas.DataFrame( + cursor_row.fetchall(), + columns=[f"c{i}" for i in range(col_count)], + ) + else: + print("use fetchmany") + while True: + dat = cursor_row.fetchmany(10000) + if not dat: + break + else: + df_old = pandas.DataFrame( + dat, columns=[f"c{i}" for i in range(col_count)] + ) + rows += df_old.shape[0] + end_time = time.time() + print(f"The original way took {end_time - start_time}s") + cursor_row.close() + + # fetch dataframe with new arrow support + cursor_table = conn.cursor() + cursor_table.execute(SQL_ENABLE_ARROW) + cursor_table.execute(sql) + + # build dataframe + total_rows, total_batches = 0, 0 + start_time = time.time() + if method == "one": + df_new = cursor_table.fetch_pandas_all() + total_rows = df_new.shape[0] + else: + for df_new in cursor_table.fetch_pandas_batches(): + total_rows += df_new.shape[0] + total_batches += 1 + end_time = time.time() + print(f"new way (fetching {method}) took {end_time - start_time}s") + if method == "batch": + print(f"new way has # of batches : {total_batches}") + cursor_table.close() + assert total_rows == row_count, "there should be {} rows, but {} rows".format( + row_count, total_rows + ) + + # verify the correctness + # only do it when fetch one dataframe + if method == "one": + assert ( + df_old.shape == df_new.shape + ), "the shape of old dataframe is {}, the shape of new dataframe is {}, \ + shapes are not equal".format( + df_old.shape, df_new.shape + ) + + for i in range(row_count): + col_old = df_old.iloc[i] + col_new = df_new.iloc[i] + for j, (c_old, c_new) in enumerate(zip(col_old, col_new)): + assert c_old == c_new, ( + f"{i} row, {j} column: old value is {c_old}, new value " + f"is {c_new} values are not equal" + ) + else: + assert ( + rows == total_rows + ), f"the number of rows are not equal {rows} vs {total_rows}" + + +def init(json_cnx, table, column, values, timezone=None): + cursor_json = json_cnx.cursor() + if timezone is not None: + cursor_json.execute(f"ALTER SESSION SET TIMEZONE = '{timezone}'") + column_with_seq = column[0] + "s number, " + column[1:] + cursor_json.execute(f"create or replace table {table} {column_with_seq}") + cursor_json.execute(f"insert into {table} values {values}") + + +def finish(json_cnx, table): + cursor_json = json_cnx.cursor() + cursor_json.execute(f"drop table if exists {table};") + + +@pytest.mark.skipif( + not installed_pandas or no_arrow_iterator_ext, + reason="arrow_iterator extension is not built, or pandas is missing.", +) +def test_arrow_fetch_result_scan(conn_cnx): + with conn_cnx() as cnx: + cur = cnx.cursor() + cur.execute("alter session set query_result_format='ARROW_FORCE'") + cur.execute( + "alter session set python_connector_query_result_format='ARROW_FORCE'" + ) + res = cur.execute("select 1, 2, 3").fetch_pandas_all() + assert tuple(res) == ("1", "2", "3") + result_scan_res = cur.execute( + f"select * from table(result_scan('{cur.sfqid}'));" + ).fetch_pandas_all() + assert tuple(result_scan_res) == ("1", "2", "3") + + 
+@pytest.mark.parametrize("query_format", ("JSON", "ARROW")) +@pytest.mark.parametrize("resultscan_format", ("JSON", "ARROW")) +def test_query_resultscan_combos(conn_cnx, query_format, resultscan_format): + if query_format == "JSON" and resultscan_format == "ARROW": + pytest.xfail("fix not yet released to test deployment") + with conn_cnx() as cnx: + sfqid = None + results = None + scanned_results = None + with cnx.cursor() as query_cur: + query_cur.execute( + "alter session set python_connector_query_result_format='{}'".format( + query_format + ) + ) + query_cur.execute( + "select seq8(), randstr(1000,random()) from table(generator(rowcount=>100))" + ) + sfqid = query_cur.sfqid + assert query_cur._query_result_format.upper() == query_format + if query_format == "JSON": + results = query_cur.fetchall() + else: + results = query_cur.fetch_pandas_all() + with cnx.cursor() as resultscan_cur: + resultscan_cur.execute( + "alter session set python_connector_query_result_format='{}'".format( + resultscan_format + ) + ) + resultscan_cur.execute(f"select * from table(result_scan('{sfqid}'))") + if resultscan_format == "JSON": + scanned_results = resultscan_cur.fetchall() + else: + scanned_results = resultscan_cur.fetch_pandas_all() + assert resultscan_cur._query_result_format.upper() == resultscan_format + if isinstance(results, pandas.DataFrame): + results = [tuple(e) for e in results.values.tolist()] + if isinstance(scanned_results, pandas.DataFrame): + scanned_results = [tuple(e) for e in scanned_results.values.tolist()] + assert results == scanned_results + + +@pytest.mark.parametrize( + "use_decimal,expected", + [ + (False, numpy.float64), + pytest.param(True, decimal.Decimal, marks=pytest.mark.skipolddriver), + ], +) +def test_number_fetchall_retrieve_type(conn_cnx, use_decimal, expected): + with conn_cnx(arrow_number_to_decimal=use_decimal) as con: + with con.cursor() as cur: + cur.execute("SELECT 12345600.87654301::NUMBER(18, 8) a") + result_df = cur.fetch_pandas_all() + a_column = result_df["A"] + assert isinstance(a_column.values[0], expected), type(a_column.values[0]) + + +@pytest.mark.parametrize( + "use_decimal,expected", + [ + ( + False, + numpy.float64, + ), + pytest.param(True, decimal.Decimal, marks=pytest.mark.skipolddriver), + ], +) +def test_number_fetchbatches_retrieve_type(conn_cnx, use_decimal: bool, expected: type): + with conn_cnx(arrow_number_to_decimal=use_decimal) as con: + with con.cursor() as cur: + cur.execute("SELECT 12345600.87654301::NUMBER(18, 8) a") + for batch in cur.fetch_pandas_batches(): + a_column = batch["A"] + assert isinstance(a_column.values[0], expected), type( + a_column.values[0] + ) + + +def test_simple_async_arrow(conn_cnx): + """Simple test to that shows the most simple usage of fire and forget. + + This test also makes sure that wait_until_ready function's sleeping is tested and + that some fields are copied over correctly from the original query. 
+ """ + with conn_cnx() as con: + with con.cursor() as cur: + cur.execute_async("select count(*) from table(generator(timeLimit => 5))") + cur.get_results_from_sfqid(cur.sfqid) + assert len(cur.fetch_pandas_all()) == 1 + assert cur.rowcount + assert cur.description + + +@pytest.mark.parametrize( + "use_decimal,expected", + [ + ( + True, + decimal.Decimal, + ), + pytest.param(False, numpy.float64, marks=pytest.mark.xfail), + ], +) +def test_number_iter_retrieve_type(conn_cnx, use_decimal: bool, expected: type): + with conn_cnx(arrow_number_to_decimal=use_decimal) as con: + with con.cursor() as cur: + cur.execute("SELECT 12345600.87654301::NUMBER(18, 8) a") + for row in cur: + assert isinstance(row[0], expected), type(row[0]) + + +def test_resultbatches_pandas_functionality(conn_cnx): + """Fetch ArrowResultBatches as pandas dataframes and check its result.""" + rowcount = 100000 + expected_df = pandas.DataFrame(data={"A": range(rowcount)}) + with conn_cnx() as con: + with con.cursor() as cur: + cur.execute( + f"select seq4() a from table(generator(rowcount => {rowcount}));" + ) + assert cur._result_set.total_row_index() == rowcount + result_batches = cur.get_result_batches() + assert cur.fetch_pandas_all().index[-1] == rowcount - 1 + assert len(result_batches) > 1 + tables = itertools.chain.from_iterable( + list(b.create_iter(iter_unit=IterUnit.TABLE_UNIT, structure="arrow")) + for b in result_batches + ) + final_df = pyarrow.concat_tables(tables).to_pandas() + assert numpy.array_equal(expected_df, final_df) + + +@pytest.mark.skipolddriver +@pytest.mark.skipif( + not installed_pandas or no_arrow_iterator_ext, + reason="arrow_iterator extension is not built, or pandas is missing. or no new telemetry defined - skipolddrive", +) +@pytest.mark.parametrize( + "fetch_method, expected_telemetry_type", + [ + ("one", "client_fetch_pandas_all"), # TelemetryField.PANDAS_FETCH_ALL + ("batch", "client_fetch_pandas_batches"), # TelemetryField.PANDAS_FETCH_BATCHES + ], +) +def test_pandas_telemetry( + conn_cnx, capture_sf_telemetry, fetch_method, expected_telemetry_type +): + cases = ["NULL", 0.11, -0.11, "NULL", 1.27, -1.28, "NULL"] + table = "test_telemetry" + column = "(a number(5,2))" + values = ",".join([f"({i}, {c})" for i, c in enumerate(cases)]) + with conn_cnx() as conn, capture_sf_telemetry.patch_connection( + conn, False + ) as telemetry_test: + init(conn, table, column, values) + sql_text = f"select a from {table} order by s" + + validate_pandas( + conn, + sql_text, + cases, + 1, + fetch_method, + ) + + occurence = 0 + for t in telemetry_test.records: + if t.message["type"] == expected_telemetry_type: + occurence += 1 + assert occurence == 1 + + finish(conn, table) + + +@pytest.mark.parametrize("result_format", ["pandas", "arrow"]) +def test_batch_to_pandas_arrow(conn_cnx, result_format): + rowcount = 10 + with conn_cnx() as cnx: + with cnx.cursor() as cur: + cur.execute(SQL_ENABLE_ARROW) + cur.execute( + f"select seq4() as foo, seq4() as bar from table(generator(rowcount=>{rowcount})) order by foo asc" + ) + batches = cur.get_result_batches() + assert len(batches) == 1 + batch = batches[0] + + # check that size, columns, and FOO column data is correct + if result_format == "pandas": + df = batch.to_pandas() + assert type(df) == pandas.DataFrame + assert df.shape == (10, 2) + assert all(df.columns == ["FOO", "BAR"]) + assert list(df.FOO) == list(range(rowcount)) + elif result_format == "arrow": + arrow_table = batch.to_arrow() + assert type(arrow_table) == pyarrow.Table + assert 
arrow_table.shape == (10, 2) + assert arrow_table.column_names == ["FOO", "BAR"] + assert arrow_table.to_pydict()["FOO"] == list(range(rowcount)) + + +def test_simple_arrow_fetch(conn_cnx): + rowcount = 250_000 + with conn_cnx() as cnx: + with cnx.cursor() as cur: + cur.execute(SQL_ENABLE_ARROW) + cur.execute( + f"select seq4() as foo from table(generator(rowcount=>{rowcount})) order by foo asc" + ) + arrow_table = cur.fetch_arrow_all() + assert arrow_table.shape == (rowcount, 1) + assert arrow_table.to_pydict()["FOO"] == list(range(rowcount)) + + cur.execute( + f"select seq4() as foo from table(generator(rowcount=>{rowcount})) order by foo asc" + ) + assert len(cur.get_result_batches()) > 1 # non-trivial number of batches + + # the start and end points of each batch + lo, hi = 0, 0 + for table in cur.fetch_arrow_batches(): + assert type(table) == pyarrow.Table # sanity type check + + # check that data is correct + length = len(table) + hi += length + assert table.to_pydict()["FOO"] == list(range(lo, hi)) + lo += length + + assert lo == rowcount + + +@pytest.mark.parametrize("fetch_fn_name", ["to_arrow", "to_pandas", "create_iter"]) +@pytest.mark.parametrize("pass_connection", [True, False]) +def test_sessions_used(conn_cnx, fetch_fn_name, pass_connection): + rowcount = 250_000 + with conn_cnx() as cnx: + with cnx.cursor() as cur: + cur.execute(SQL_ENABLE_ARROW) + cur.execute(f"select seq1() from table(generator(rowcount=>{rowcount}))") + batches = cur.get_result_batches() + assert len(batches) > 1 + batch = batches[-1] + + connection = cnx if pass_connection else None + fetch_fn = getattr(batch, fetch_fn_name) + + # check that sessions are used when connection is supplied + with mock.patch( + "snowflake.connector.network.SnowflakeRestful._use_requests_session", + side_effect=cnx._rest._use_requests_session, + ) as get_session_mock: + fetch_fn(connection=connection) + assert get_session_mock.call_count == (1 if pass_connection else 0) + + +def assert_dtype_equal(a, b): + """Pandas method of asserting the same numpy dtype of variables by computing hash.""" + assert_equal(a, b) + assert_equal( + hash(a), hash(b), "two equivalent types do not hash to the same value !" 
+ ) + + +def test_pandas_dtypes(conn_cnx): + with conn_cnx( + session_parameters={ + PARAMETER_PYTHON_CONNECTOR_QUERY_RESULT_FORMAT: "arrow_force" + } + ) as cnx: + with cnx.cursor() as cur: + cur.execute( + "select 1::integer, 2.3::double, 'foo'::string, current_timestamp()::timestamp where 1=0" + ) + batches = cur.get_result_batches() + batch = batches[0].to_pandas() + + assert batch.dtypes is not None + assert batches[0].to_arrow() is not True + + pandas_dtypes = batch.dtypes + expected_types = [numpy.int64, float, object, numpy.datetime64] + # pd.string is represented as an np.object + # np.dtype string is not the same as pd.string (python) + for i, typ in enumerate(expected_types): + assert_dtype_equal(pandas_dtypes[i].type, numpy.dtype(typ).type) + + +def test_timestamp_tz(conn_cnx): + with conn_cnx( + session_parameters={ + PARAMETER_PYTHON_CONNECTOR_QUERY_RESULT_FORMAT: "arrow_force" + } + ) as cnx: + with cnx.cursor() as cur: + cur.execute("select '1990-01-04 10:00:00 +1100'::timestamp_tz as d") + res = cur.fetchall() + assert res[0][0].tzinfo is not None + res_pd = cur.fetch_pandas_all() + assert res_pd.D.dt.tz is pytz.UTC + res_pa = cur.fetch_arrow_all() + assert res_pa.field("D").type.tz == "UTC" + + +def test_arrow_number_to_decimal(conn_cnx): + with conn_cnx( + session_parameters={ + PARAMETER_PYTHON_CONNECTOR_QUERY_RESULT_FORMAT: "arrow_force" + }, + arrow_number_to_decimal=True, + ) as cnx: + with cnx.cursor() as cur: + cur.execute("select -3.20 as num") + df = cur.fetch_pandas_all() + val = df.NUM[0] + assert val == Decimal("-3.20") + assert isinstance(val, decimal.Decimal) + + +@pytest.mark.parametrize( + "timestamp_type", + [ + "TIMESTAMP_TZ", + "TIMESTAMP_NTZ", + "TIMESTAMP_LTZ", + ], +) +def test_time_interval_microsecond(conn_cnx, timestamp_type): + with conn_cnx( + session_parameters={ + PARAMETER_PYTHON_CONNECTOR_QUERY_RESULT_FORMAT: "arrow_force" + } + ) as cnx: + with cnx.cursor() as cur: + res = cur.execute( + f"SELECT TO_{timestamp_type}('2010-06-25 12:15:30.747000')+INTERVAL '8999999999999998 MICROSECONDS'" + ).fetchone() + assert res[0].microsecond == 746998 + res = cur.execute( + f"SELECT TO_{timestamp_type}('2010-06-25 12:15:30.747000')+INTERVAL '8999999999999999 MICROSECONDS'" + ).fetchone() + assert res[0].microsecond == 746999 diff --git a/test/integ/pandas/test_logging.py b/test/integ/pandas/test_logging.py new file mode 100644 index 000000000..d811dfb67 --- /dev/null +++ b/test/integ/pandas/test_logging.py @@ -0,0 +1,47 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
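
As a usage note, the decimal behavior the tests above pin down can be reproduced in a short standalone script. A minimal sketch, assuming placeholder credentials (`my_account`, `my_user`, and the password are not real values):

```python
import decimal

import snowflake.connector

# Placeholder connection parameters -- substitute real ones.
with snowflake.connector.connect(
    account="my_account",
    user="my_user",
    password="***",
    # With this flag, fixed-point NUMBER columns are fetched as decimal.Decimal
    # instead of floats, avoiding binary floating-point rounding.
    arrow_number_to_decimal=True,
) as con:
    with con.cursor() as cur:
        cur.execute("select -3.20::number(38, 2) as num")
        df = cur.fetch_pandas_all()
        assert isinstance(df.NUM[0], decimal.Decimal)
```
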
+# + +from __future__ import annotations + +import logging + + +def test_rand_table_log(caplog, conn_cnx, db_parameters): + with conn_cnx() as conn: + caplog.set_level(logging.DEBUG, "snowflake.connector") + + num_of_rows = 10 + with conn.cursor() as cur: + cur.execute( + "select randstr(abs(mod(random(), 100)), random()) from table(generator(rowcount => {}));".format( + num_of_rows + ) + ).fetchall() + + # make assertions + has_batch_read = has_batch_size = has_chunk_info = has_batch_index = False + for record in caplog.records: + if "Batches read:" in record.msg: + has_batch_read = True + assert "arrow_iterator" in record.filename + assert "__cinit__" in record.funcName + + if "Arrow BatchSize:" in record.msg: + has_batch_size = True + assert "CArrowIterator.cpp" in record.filename + assert "CArrowIterator" in record.funcName + + if "Arrow chunk info:" in record.msg: + has_chunk_info = True + assert "CArrowChunkIterator.cpp" in record.filename + assert "CArrowChunkIterator" in record.funcName + + if "Current batch index:" in record.msg: + has_batch_index = True + assert "CArrowChunkIterator.cpp" in record.filename + assert "next" in record.funcName + + # each of these records appear at least once in records + assert has_batch_read and has_batch_size and has_chunk_info and has_batch_index diff --git a/test/integ/pandas/test_pandas_tools.py b/test/integ/pandas/test_pandas_tools.py new file mode 100644 index 000000000..3ed00756d --- /dev/null +++ b/test/integ/pandas/test_pandas_tools.py @@ -0,0 +1,468 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +import math +from datetime import datetime, timedelta, timezone +from typing import TYPE_CHECKING, Callable, Generator +from unittest import mock + +import pytest + +from snowflake.connector import DictCursor + +from ...lazy_var import LazyVar +from ...randomize import random_string + +try: + from snowflake.connector.options import pandas + from snowflake.connector.pandas_tools import write_pandas +except ImportError: + pandas = None + write_pandas = None + + +if TYPE_CHECKING: + from snowflake.connector import SnowflakeConnection + +sf_connector_version_data = [ + ("snowflake-connector-python", "1.2.23"), + ("snowflake-sqlalchemy", "1.1.1"), + ("snowflake-connector-go", "0.0.1"), + ("snowflake-go", "1.0.1"), + ("snowflake-odbc", "3.12.3"), +] + +sf_connector_version_df = LazyVar( + lambda: pandas.DataFrame( + sf_connector_version_data, columns=["name", "newest_version"] + ) +) + + +@pytest.mark.parametrize("chunk_size", [5, 1]) +@pytest.mark.parametrize( + "compression", + [ + "gzip", + ], +) +@pytest.mark.parametrize("quote_identifiers", [True, False]) +@pytest.mark.parametrize("auto_create_table", [True, False]) +@pytest.mark.parametrize("create_temp_table", [True, False]) +def test_write_pandas( + conn_cnx: Callable[..., Generator[SnowflakeConnection, None, None]], + db_parameters: dict[str, str], + compression: str, + chunk_size: int, + quote_identifiers: bool, + auto_create_table: bool, + create_temp_table: bool, +): + num_of_chunks = math.ceil(len(sf_connector_version_data) / chunk_size) + + with conn_cnx( + user=db_parameters["user"], + account=db_parameters["account"], + password=db_parameters["password"], + ) as cnx: + table_name = "driver_versions" + + if quote_identifiers: + create_sql = 'CREATE OR REPLACE TABLE "{}" ("name" STRING, "newest_version" STRING)'.format( + table_name + ) + select_sql = f'SELECT * FROM "{table_name}"' + 
drop_sql = f'DROP TABLE IF EXISTS "{table_name}"'
+        else:
+            create_sql = "CREATE OR REPLACE TABLE {} (name STRING, newest_version STRING)".format(
+                table_name
+            )
+            select_sql = f"SELECT * FROM {table_name}"
+            drop_sql = f"DROP TABLE IF EXISTS {table_name}"
+
+        if not auto_create_table:
+            cnx.execute_string(create_sql)
+        try:
+            success, nchunks, nrows, _ = write_pandas(
+                cnx,
+                sf_connector_version_df.get(),
+                table_name,
+                compression=compression,
+                chunk_size=chunk_size,
+                quote_identifiers=quote_identifiers,
+                auto_create_table=auto_create_table,
+                create_temp_table=create_temp_table,
+            )
+
+            if num_of_chunks == 1:
+                # Note: since we used one chunk, order is preserved
+                assert (
+                    cnx.cursor().execute(select_sql).fetchall()
+                    == sf_connector_version_data
+                )
+            else:
+                # Note: since we used more than one chunk, order is NOT preserved
+                assert set(cnx.cursor().execute(select_sql).fetchall()) == set(
+                    sf_connector_version_data
+                )
+
+            # Make sure all files were loaded and no error occurred
+            assert success
+            # Make sure overall as many rows were ingested as we tried to insert
+            assert nrows == len(sf_connector_version_data)
+            # Make sure we uploaded in as many chunks as we wanted to
+            assert nchunks == num_of_chunks
+            # Check whether this is a temporary or a regular table if we auto-created it
+            if auto_create_table:
+                table_info = (
+                    cnx.cursor(DictCursor)
+                    .execute(f"show tables like '{table_name}'")
+                    .fetchall()
+                )
+                assert table_info[0]["kind"] == (
+                    "TEMPORARY" if create_temp_table else "TABLE"
+                )
+        finally:
+            cnx.execute_string(drop_sql)
+
+
+@pytest.mark.parametrize("quote_identifiers", [True, False])
+def test_location_building_db_schema(conn_cnx, quote_identifiers: bool):
+    """This tests that write_pandas constructs location correctly with database, schema and table name."""
+    from snowflake.connector.cursor import SnowflakeCursor
+
+    with conn_cnx() as cnx:
+
+        def mocked_execute(*args, **kwargs):
+            if len(args) >= 1 and args[0].startswith("COPY INTO"):
+                location = args[0].split(" ")[2]
+                if quote_identifiers:
+                    assert location == '"database"."schema"."table"'
+                else:
+                    assert location == "database.schema.table"
+            cur = SnowflakeCursor(cnx)
+            cur._result = iter([])
+            return cur
+
+        with mock.patch(
+            "snowflake.connector.cursor.SnowflakeCursor.execute",
+            side_effect=mocked_execute,
+        ) as m_execute:
+            success, nchunks, nrows, _ = write_pandas(
+                cnx,
+                sf_connector_version_df.get(),
+                "table",
+                database="database",
+                schema="schema",
+                quote_identifiers=quote_identifiers,
+            )
+            assert m_execute.called and any(
+                map(lambda e: "COPY INTO" in str(e[0]), m_execute.call_args_list)
+            )
+
+
+@pytest.mark.parametrize("quote_identifiers", [True, False])
+def test_location_building_schema(conn_cnx, quote_identifiers: bool):
+    """This tests that write_pandas constructs location correctly with schema and table name."""
+    from snowflake.connector.cursor import SnowflakeCursor
+
+    with conn_cnx() as cnx:
+
+        def mocked_execute(*args, **kwargs):
+            if len(args) >= 1 and args[0].startswith("COPY INTO"):
+                location = args[0].split(" ")[2]
+                if quote_identifiers:
+                    assert location == '"schema"."table"'
+                else:
+                    assert location == "schema.table"
+            cur = SnowflakeCursor(cnx)
+            cur._result = iter([])
+            return cur
+
+        with mock.patch(
+            "snowflake.connector.cursor.SnowflakeCursor.execute",
+            side_effect=mocked_execute,
+        ) as m_execute:
+            success, nchunks, nrows, _ = write_pandas(
+                cnx,
+                sf_connector_version_df.get(),
+                "table",
+                schema="schema",
+                quote_identifiers=quote_identifiers,
+            )
+            assert m_execute.called and any(
+                map(lambda e: "COPY INTO" in str(e[0]), m_execute.call_args_list)
+            )
+
+
+@pytest.mark.parametrize("quote_identifiers", [True, False])
+def test_location_building(conn_cnx, quote_identifiers: bool):
+    """This tests that write_pandas constructs location correctly with a dotted table name."""
+    from snowflake.connector.cursor import SnowflakeCursor
+
+    with conn_cnx() as cnx:
+
+        def mocked_execute(*args, **kwargs):
+            if len(args) >= 1 and args[0].startswith("COPY INTO"):
+                location = args[0].split(" ")[2]
+                if quote_identifiers:
+                    assert location == '"teble.table"'
+                else:
+                    assert location == "teble.table"
+            cur = SnowflakeCursor(cnx)
+            cur._result = iter([])
+            return cur
+
+        with mock.patch(
+            "snowflake.connector.cursor.SnowflakeCursor.execute",
+            side_effect=mocked_execute,
+        ) as m_execute:
+            success, nchunks, nrows, _ = write_pandas(
+                cnx,
+                sf_connector_version_df.get(),
+                "teble.table",
+                quote_identifiers=quote_identifiers,
+            )
+            assert m_execute.called and any(
+                map(lambda e: "COPY INTO" in str(e[0]), m_execute.call_args_list)
+            )
+
+
+@pytest.mark.parametrize("quote_identifiers", [True, False])
+def test_default_value_insertion(
+    conn_cnx: Callable[..., Generator[SnowflakeConnection, None, None]],
+    quote_identifiers: bool,
+):
+    """Tests whether default values can be successfully inserted with the pandas writeback."""
+    table_name = "users"
+    df_data = [("Mark", 10), ("Luke", 20)]
+
+    # Create a DataFrame containing data about customers
+    df = pandas.DataFrame(df_data, columns=["name", "balance"])
+    # The statements assume quote_identifiers is True; when it is False, strip the double quotes
+    create_sql = """CREATE OR REPLACE TABLE "{}"
+    ("name" STRING, "balance" INT,
+    "id" varchar(36) default uuid_string(),
+    "ts" timestamp_ltz default current_timestamp)""".format(
+        table_name
+    )
+    select_sql = f'SELECT * FROM "{table_name}"'
+    drop_sql = f'DROP TABLE IF EXISTS "{table_name}"'
+    if not quote_identifiers:
+        create_sql = create_sql.replace('"', "")
+        select_sql = select_sql.replace('"', "")
+        drop_sql = drop_sql.replace('"', "")
+    with conn_cnx() as cnx:  # type: SnowflakeConnection
+        cnx.execute_string(create_sql)
+        try:
+            success, nchunks, nrows, _ = write_pandas(
+                cnx, df, table_name, quote_identifiers=quote_identifiers
+            )
+
+            # Check write_pandas output
+            assert success
+            assert nrows == len(df_data)
+            assert nchunks == 1
+            # Check table's contents
+            result = cnx.cursor(DictCursor).execute(select_sql).fetchall()
+            for row in result:
+                assert (
+                    row["id" if quote_identifiers else "ID"] is not None
+                )  # ID (UUID String)
+                assert len(row["id" if quote_identifiers else "ID"]) == 36
+                assert (
+                    row["ts" if quote_identifiers else "TS"] is not None
+                )  # TS (Current Timestamp)
+                assert isinstance(row["ts" if quote_identifiers else "TS"], datetime)
+                assert (
+                    row["name" if quote_identifiers else "NAME"],
+                    row["balance" if quote_identifiers else "BALANCE"],
+                ) in df_data
+        finally:
+            cnx.execute_string(drop_sql)
+
+
+@pytest.mark.parametrize("quote_identifiers", [True, False])
+def test_autoincrement_insertion(
+    conn_cnx: Callable[..., Generator[SnowflakeConnection, None, None]],
+    quote_identifiers: bool,
+):
+    """Tests whether autoincrement values can be successfully inserted with the pandas writeback."""
+    table_name = "users"
+    df_data = [("Mark", 10), ("Luke", 20)]
+
+    # Create a DataFrame containing data about customers
+    df = pandas.DataFrame(df_data, columns=["name", "balance"])
+    # The statements assume quote_identifiers is True; when it is False, strip the double quotes
+    create_sql = (
+        'CREATE OR REPLACE TABLE "{}"'
+        '("name" STRING, "balance" INT, "id" INT AUTOINCREMENT)'
+    ).format(table_name)
+    select_sql = f'SELECT * FROM "{table_name}"'
+    drop_sql = f'DROP TABLE IF EXISTS "{table_name}"'
+    if not quote_identifiers:
+        create_sql = create_sql.replace('"', "")
+        select_sql = select_sql.replace('"', "")
+        drop_sql = drop_sql.replace('"', "")
+    with conn_cnx() as cnx:  # type: SnowflakeConnection
+        cnx.execute_string(create_sql)
+        try:
+            success, nchunks, nrows, _ = write_pandas(
+                cnx, df, table_name, quote_identifiers=quote_identifiers
+            )
+
+            # Check write_pandas output
+            assert success
+            assert nrows == len(df_data)
+            assert nchunks == 1
+            # Check table's contents
+            result = cnx.cursor(DictCursor).execute(select_sql).fetchall()
+            for row in result:
+                assert row["id" if quote_identifiers else "ID"] in (1, 2)
+                assert (
+                    row["name" if quote_identifiers else "NAME"],
+                    row["balance" if quote_identifiers else "BALANCE"],
+                ) in df_data
+        finally:
+            cnx.execute_string(drop_sql)
+
+
+@pytest.mark.parametrize("auto_create_table", [True, False])
+def test_special_name_quoting(
+    conn_cnx: Callable[..., Generator[SnowflakeConnection, None, None]],
+    auto_create_table: bool,
+):
+    """Tests whether special column names get quoted as expected."""
+    table_name = "users"
+    df_data = [("Mark", 10), ("Luke", 20)]
+
+    df = pandas.DataFrame(df_data, columns=["00name", "bAlance"])
+    create_sql = (
+        f'CREATE OR REPLACE TABLE "{table_name}"'
+        '("00name" STRING, "bAlance" INT, "id" INT AUTOINCREMENT)'
+    )
+    select_sql = f'SELECT * FROM "{table_name}"'
+    drop_sql = f'DROP TABLE IF EXISTS "{table_name}"'
+    with conn_cnx() as cnx:  # type: SnowflakeConnection
+        if not auto_create_table:
+            cnx.execute_string(create_sql)
+        try:
+            success, nchunks, nrows, _ = write_pandas(
+                cnx,
+                df,
+                table_name,
+                quote_identifiers=True,
+                auto_create_table=auto_create_table,
+            )
+
+            # Check write_pandas output
+            assert success
+            assert nrows == len(df_data)
+            assert nchunks == 1
+            # Check table's contents
+            result = cnx.cursor(DictCursor).execute(select_sql).fetchall()
+            for row in result:
+                # The auto create table functionality does not auto-create an incrementing ID
+                if not auto_create_table:
+                    assert row["id"] in (1, 2)
+                assert (
+                    row["00name"],
+                    row["bAlance"],
+                ) in df_data
+        finally:
+            cnx.execute_string(drop_sql)
+
+
+def test_auto_create_table_similar_column_names(
+    conn_cnx: Callable[..., Generator[SnowflakeConnection, None, None]],
+):
+    """Tests whether similar names do not cause issues when auto-creating a table as expected."""
+    table_name = random_string(5, "numbas_")
+    df_data = [(10, 11), (20, 21)]
+
+    df = pandas.DataFrame(df_data, columns=["number", "Number"])
+    select_sql = f'SELECT * FROM "{table_name}"'
+    drop_sql = f'DROP TABLE IF EXISTS "{table_name}"'
+    with conn_cnx() as cnx:
+        try:
+            success, nchunks, nrows, _ = write_pandas(
+                cnx, df, table_name, quote_identifiers=True, auto_create_table=True
+            )
+
+            # Check write_pandas output
+            assert success
+            assert nrows == len(df_data)
+            assert nchunks == 1
+            # Check table's contents
+            result = cnx.cursor(DictCursor).execute(select_sql).fetchall()
+            for row in result:
+                assert (
+                    row["number"],
+                    row["Number"],
+                ) in df_data
+        finally:
+            cnx.execute_string(drop_sql)
+
+
+def test_all_pandas_types(
+    conn_cnx: Callable[..., Generator[SnowflakeConnection, None, None]]
+):
+    table_name = random_string(5, "all_types_")
+    datetime_with_tz = 
datetime( + 1997, 6, 3, 14, 21, 32, 00, tzinfo=timezone(timedelta(hours=+10)) + ) + datetime_with_ntz = datetime(1997, 6, 3, 14, 21, 32, 00) + df_data = [ + (1, 1.1, "1string1", True, datetime_with_tz, datetime_with_ntz), + (2, 2.2, "2string2", False, datetime_with_tz, datetime_with_ntz), + ] + df_data_no_timestamps = [ + ( + row[0], + row[1], + row[2], + row[3], + ) + for row in df_data + ] + + df = pandas.DataFrame( + df_data, + columns=["int", "float", "string", "bool", "timestamp_tz", "timestamp_ntz"], + ) + + select_sql = f'SELECT * FROM "{table_name}"' + drop_sql = f'DROP TABLE IF EXISTS "{table_name}"' + with conn_cnx() as cnx: + try: + success, nchunks, nrows, _ = write_pandas( + cnx, df, table_name, quote_identifiers=True, auto_create_table=True + ) + + # Check write_pandas output + assert success + assert nrows == len(df_data) + assert nchunks == 1 + # Check table's contents + result = cnx.cursor(DictCursor).execute(select_sql).fetchall() + for row in result: + assert ( + row["int"], + row["float"], + row["string"], + row["bool"], + ) in df_data_no_timestamps + # TODO: Schema detection on the server-side has bugs dealing with timestamp_ntz and timestamp_tz. + # After the bugs are fixed, change the assertion to `data[0]["tm_tz"] == datetime_with_tz` + # and `data[0]["tm_ntz"] == datetime_with_ntz`, + # JIRA https://snowflakecomputing.atlassian.net/browse/SNOW-524865 + # JIRA https://snowflakecomputing.atlassian.net/browse/SNOW-359205 + # JIRA https://snowflakecomputing.atlassian.net/browse/SNOW-507644 + assert row["timestamp_tz"] is not None + assert row["timestamp_ntz"] is not None + finally: + cnx.execute_string(drop_sql) diff --git a/test/integ/pandas/test_unit_arrow_chunk_iterator.py b/test/integ/pandas/test_unit_arrow_chunk_iterator.py new file mode 100644 index 000000000..1b50476d0 --- /dev/null +++ b/test/integ/pandas/test_unit_arrow_chunk_iterator.py @@ -0,0 +1,444 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
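
The chunk-iterator tests below all follow the same recipe: serialize `RecordBatch`es into an in-memory Arrow IPC stream, then read the rows back through the connector's iterator. The pyarrow half of that recipe looks roughly like this; a standalone sketch using only public pyarrow APIs:

```python
from io import BytesIO

import pyarrow

# Build a one-column batch and serialize it into an IPC stream in memory.
arr = pyarrow.array([1, 2, None], type=pyarrow.int64())
batch = pyarrow.RecordBatch.from_arrays([arr], ["column_0"])
stream = BytesIO()
with pyarrow.RecordBatchStreamWriter(stream, batch.schema) as writer:
    writer.write_batch(batch)

# Rewind and read it back, much as the connector's PyArrowIterator does internally.
stream.seek(0)
with pyarrow.RecordBatchStreamReader(stream) as reader:
    assert reader.read_all().to_pydict()["column_0"] == [1, 2, None]
```
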
+# + +from __future__ import annotations + +import datetime +import decimal +import os +import random +from io import BytesIO + +import pytest +import pytz + +from snowflake.connector.arrow_context import ArrowConverterContext + +try: + from snowflake.connector.options import installed_pandas +except ImportError: + installed_pandas = False + +try: + import tzlocal +except ImportError: + tzlocal = None + +try: + import pyarrow + from pyarrow import RecordBatchStreamReader # NOQA + from pyarrow import RecordBatch, RecordBatchStreamWriter +except ImportError: + pass + +try: + from snowflake.connector.arrow_iterator import IterUnit, PyArrowIterator + + no_arrow_iterator_ext = False +except ImportError: + no_arrow_iterator_ext = True + + +@pytest.mark.skipif( + not installed_pandas or no_arrow_iterator_ext, + reason="arrow_iterator extension is not built, or pandas option is not installed.", +) +def test_iterate_over_string_chunk(): + random.seed(datetime.datetime.now().timestamp()) + column_meta = [{"logicalType": "TEXT"}, {"logicalType": "TEXT"}] + field_foo = pyarrow.field("column_foo", pyarrow.string(), True, column_meta[0]) + field_bar = pyarrow.field("column_bar", pyarrow.string(), True, column_meta[1]) + pyarrow.schema([field_foo, field_bar]) + + def str_generator(): + return str(random.randint(-100, 100)) + + iterate_over_test_chunk( + [pyarrow.string(), pyarrow.string()], column_meta, str_generator + ) + + +@pytest.mark.skipif( + not installed_pandas or no_arrow_iterator_ext, + reason="arrow_iterator extension is not built, or pandas option is not installed.", +) +def test_iterate_over_int64_chunk(): + random.seed(datetime.datetime.now().timestamp()) + column_meta = [ + {"logicalType": "FIXED", "precision": "38", "scale": "0"}, + {"logicalType": "FIXED", "precision": "38", "scale": "0"}, + ] + + def int64_generator(): + return random.randint(-9223372036854775808, 9223372036854775807) + + iterate_over_test_chunk( + [pyarrow.int64(), pyarrow.int64()], column_meta, int64_generator + ) + + +@pytest.mark.skipif( + not installed_pandas or no_arrow_iterator_ext, + reason="arrow_iterator extension is not built, or pandas option is not installed.", +) +def test_iterate_over_int32_chunk(): + random.seed(datetime.datetime.now().timestamp()) + column_meta = [ + {"logicalType": "FIXED", "precision": "10", "scale": "0"}, + {"logicalType": "FIXED", "precision": "10", "scale": "0"}, + ] + + def int32_generator(): + return random.randint(-2147483648, 2147483637) + + iterate_over_test_chunk( + [pyarrow.int32(), pyarrow.int32()], column_meta, int32_generator + ) + + +@pytest.mark.skipif( + not installed_pandas or no_arrow_iterator_ext, + reason="arrow_iterator extension is not built, or pandas option is not installed.", +) +def test_iterate_over_int16_chunk(): + random.seed(datetime.datetime.now().timestamp()) + column_meta = [ + {"logicalType": "FIXED", "precision": "5", "scale": "0"}, + {"logicalType": "FIXED", "precision": "5", "scale": "0"}, + ] + + def int16_generator(): + return random.randint(-32768, 32767) + + iterate_over_test_chunk( + [pyarrow.int16(), pyarrow.int16()], column_meta, int16_generator + ) + + +@pytest.mark.skipif( + not installed_pandas or no_arrow_iterator_ext, + reason="arrow_iterator extension is not built, or pandas option is not installed.", +) +def test_iterate_over_int8_chunk(): + random.seed(datetime.datetime.now().timestamp()) + column_meta = [ + {"logicalType": "FIXED", "precision": "3", "scale": "0"}, + {"logicalType": "FIXED", "precision": "3", "scale": "0"}, + ] + + 
def int8_generator(): + return random.randint(-128, 127) + + iterate_over_test_chunk( + [pyarrow.int8(), pyarrow.int8()], column_meta, int8_generator + ) + + +@pytest.mark.skipif( + not installed_pandas or no_arrow_iterator_ext, + reason="arrow_iterator extension is not built, or pandas option is not installed.", +) +def test_iterate_over_bool_chunk(): + random.seed(datetime.datetime.now().timestamp()) + column_meta = {"logicalType": "BOOLEAN"} + + def bool_generator(): + return bool(random.getrandbits(1)) + + iterate_over_test_chunk( + [pyarrow.bool_(), pyarrow.bool_()], [column_meta, column_meta], bool_generator + ) + + +@pytest.mark.skipif( + not installed_pandas or no_arrow_iterator_ext, + reason="arrow_iterator extension is not built, or pandas option is not installed.", +) +def test_iterate_over_float_chunk(): + random.seed(datetime.datetime.now().timestamp()) + column_meta = [{"logicalType": "REAL"}, {"logicalType": "FLOAT"}] + + def float_generator(): + return random.uniform(-100.0, 100.0) + + iterate_over_test_chunk( + [pyarrow.float64(), pyarrow.float64()], column_meta, float_generator + ) + + +@pytest.mark.skipif( + not installed_pandas or no_arrow_iterator_ext, + reason="arrow_iterator extension is not built, or pandas option is not installed.", +) +def test_iterate_over_decimal_chunk(): + random.seed(datetime.datetime.now().timestamp()) + precision = random.randint(1, 38) + scale = random.randint(0, precision) + datatype = None + if precision <= 2: + datatype = pyarrow.int8() + elif precision <= 4: + datatype = pyarrow.int16() + elif precision <= 9: + datatype = pyarrow.int32() + elif precision <= 19: + datatype = pyarrow.int64() + else: + datatype = pyarrow.decimal128(precision, scale) + + def decimal_generator(_precision, _scale): + def decimal128_generator(precision, scale): + data = [] + for _ in range(precision): + data.append(str(random.randint(0, 9))) + + if scale: + data.insert(-scale, ".") + return decimal.Decimal("".join(data)) + + def int64_generator(precision): + data = random.randint(-9223372036854775808, 9223372036854775807) + return int(str(data)[: precision if data >= 0 else precision + 1]) + + def int32_generator(precision): + data = random.randint(-2147483648, 2147483637) + return int(str(data)[: precision if data >= 0 else precision + 1]) + + def int16_generator(precision): + data = random.randint(-32768, 32767) + return int(str(data)[: precision if data >= 0 else precision + 1]) + + def int8_generator(precision): + data = random.randint(-128, 127) + return int(str(data)[: precision if data >= 0 else precision + 1]) + + if _precision <= 2: + return int8_generator(_precision) + elif _precision <= 4: + return int16_generator(_precision) + elif _precision <= 9: + return int32_generator(_precision) + elif _precision <= 19: + return int64_generator(_precision) + else: + return decimal128_generator(_precision, _scale) + + def expected_data_transform_decimal(_precision, _scale): + def expected_data_transform_decimal_impl( + data, precision=_precision, scale=_scale + ): + if precision <= 19: + return decimal.Decimal(data).scaleb(-scale) + else: + return data + + return expected_data_transform_decimal_impl + + column_meta = { + "logicalType": "FIXED", + "precision": str(precision), + "scale": str(scale), + } + iterate_over_test_chunk( + [datatype, datatype], + [column_meta, column_meta], + lambda: decimal_generator(precision, scale), + expected_data_transform_decimal(precision, scale), + ) + + +@pytest.mark.skipif( + not installed_pandas or no_arrow_iterator_ext, + 
reason="arrow_iterator extension is not built, or pandas option is not installed.", +) +def test_iterate_over_date_chunk(): + random.seed(datetime.datetime.now().timestamp()) + column_meta = { + "byteLength": "4", + "logicalType": "DATE", + "precision": "38", + "scale": "0", + "charLength": "0", + } + + def date_generator(): + return datetime.date.fromordinal(random.randint(1, 1000000)) + + iterate_over_test_chunk( + [pyarrow.date32(), pyarrow.date32()], [column_meta, column_meta], date_generator + ) + + +@pytest.mark.skipif( + not installed_pandas or no_arrow_iterator_ext, + reason="arrow_iterator extension is not built, or pandas option is not installed.", +) +def test_iterate_over_binary_chunk(): + random.seed(datetime.datetime.now().timestamp()) + column_meta = { + "byteLength": "100", + "logicalType": "BINARY", + "precision": "0", + "scale": "0", + "charLength": "0", + } + + def byte_array_generator(): + return bytearray(os.urandom(1000)) + + iterate_over_test_chunk( + [pyarrow.binary(), pyarrow.binary()], + [column_meta, column_meta], + byte_array_generator, + ) + + +@pytest.mark.skipif( + not installed_pandas or no_arrow_iterator_ext, + reason="arrow_iterator extension is not built, or pandas option is not installed.", +) +def test_iterate_over_time_chunk(): + random.seed(datetime.datetime.now().timestamp()) + column_meta_int64 = [ + {"logicalType": "TIME", "scale": "9"}, + {"logicalType": "TIME", "scale": "9"}, + ] + + column_meta_int32 = [ + {"logicalType": "TIME", "scale": "4"}, + {"logicalType": "TIME", "scale": "4"}, + ] + + def time_generator_int64(): + return random.randint(0, 86399999999999) + + def time_generator_int32(): + return random.randint(0, 863999999) + + def expected_data_transform_int64(data): + milisec = data % (10**9) + milisec //= 10**3 + data //= 10**9 + second = data % 60 + data //= 60 + minute = data % 60 + hour = data // 60 + return datetime.time(hour, minute, second, milisec) + + def expected_data_transform_int32(data): + milisec = data % (10**4) + milisec *= 10**2 + data //= 10**4 + second = data % 60 + data //= 60 + minute = data % 60 + hour = data // 60 + return datetime.time(hour, minute, second, milisec) + + iterate_over_test_chunk( + [pyarrow.int64(), pyarrow.int64()], + column_meta_int64, + time_generator_int64, + expected_data_transform_int64, + ) + + iterate_over_test_chunk( + [pyarrow.int32(), pyarrow.int32()], + column_meta_int32, + time_generator_int32, + expected_data_transform_int32, + ) + + +def iterate_over_test_chunk( + pyarrow_type, column_meta, source_data_generator, expected_data_transformer=None +): + stream = BytesIO() + + assert len(pyarrow_type) == len(column_meta) + + column_size = len(pyarrow_type) + batch_row_count = 10 + batch_count = 9 + + fields = [] + for i in range(column_size): + fields.append( + pyarrow.field(f"column_{i}", pyarrow_type[i], True, column_meta[i]) + ) + schema = pyarrow.schema(fields) + + expected_data = [] + writer = RecordBatchStreamWriter(stream, schema) + + for i in range(batch_count): + column_arrays = [] + py_arrays = [] + for j in range(column_size): + column_data = [] + not_none_cnt = 0 + while not_none_cnt == 0: + column_data = [] + for _ in range(batch_row_count): + data = ( + None if bool(random.getrandbits(1)) else source_data_generator() + ) + if data is not None: + not_none_cnt += 1 + column_data.append(data) + column_arrays.append(column_data) + py_arrays.append(pyarrow.array(column_data, type=pyarrow_type[j])) + + if expected_data_transformer: + for i in range(len(column_arrays)): + 
column_arrays[i] = [
+                    expected_data_transformer(_data) if _data is not None else None
+                    for _data in column_arrays[i]
+                ]
+        expected_data.append(column_arrays)
+
+        column_names = [f"column_{i}" for i in range(column_size)]
+        rb = RecordBatch.from_arrays(py_arrays, column_names)
+        writer.write_batch(rb)
+
+    writer.close()
+
+    # seek stream to beginning so that we can read from stream
+    stream.seek(0)
+    context = ArrowConverterContext()
+    it = PyArrowIterator(None, stream, context, False, False, False)
+    it.init(IterUnit.ROW_UNIT.value)
+
+    count = 0
+    while True:
+        try:
+            val = next(it)
+            for i in range(column_size):
+                batch_index = int(count / batch_row_count)
+                assert (
+                    val[i]
+                    == expected_data[batch_index][i][
+                        count - batch_row_count * batch_index
+                    ]
+                )
+            count += 1
+        except StopIteration:
+            assert count == (batch_count * batch_row_count)
+            break
+
+
+def get_timezone(timezone=None):
+    """Gets the session timezone or falls back to the local computer's timezone."""
+    try:
+        tz = "UTC" if not timezone else timezone
+        return pytz.timezone(tz)
+    except pytz.exceptions.UnknownTimeZoneError:
+        if tzlocal is not None:
+            return tzlocal.get_localzone()
+        else:
+            try:
+                return datetime.timezone.utc
+            except AttributeError:
+                return pytz.timezone("UTC")
diff --git a/test/integ/pandas/test_unit_options.py b/test/integ/pandas/test_unit_options.py
new file mode 100644
index 000000000..963b83ad7
--- /dev/null
+++ b/test/integ/pandas/test_unit_options.py
@@ -0,0 +1,44 @@
+#
+# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved.
+#
+from __future__ import annotations
+
+import logging
+from copy import deepcopy
+from unittest import mock
+
+import pytest
+from pkg_resources import working_set
+
+try:
+    from snowflake.connector.options import (
+        MissingPandas,
+        _import_or_missing_pandas_option,
+    )
+except ImportError:
+    MissingPandas = None
+    _import_or_missing_pandas_option = None
+
+
+@pytest.mark.skipif(
+    MissingPandas is None or _import_or_missing_pandas_option is None,
+    reason="No snowflake.connector.options is available. This can be the case when running old driver tests",
+)
+def test_pandas_option_reporting(caplog):
+    """Tests for the weird case where someone can import pyarrow, but setuptools doesn't know about it.
+
+    This issue was brought to attention in: https://github.com/snowflakedb/snowflake-connector-python/issues/412
+    """
+    modified_by_key = deepcopy(working_set.by_key)
+    modified_by_key.pop("snowflake-connector-python")
+    modified_by_key.pop("pyarrow")
+    with mock.patch.object(working_set, "by_key", modified_by_key):
+        caplog.set_level(logging.DEBUG, "snowflake.connector")
+        pandas, pyarrow, installed_pandas = _import_or_missing_pandas_option()
+        assert installed_pandas
+        assert not isinstance(pandas, MissingPandas)
+        assert not isinstance(pyarrow, MissingPandas)
+        assert (
+            "Cannot determine if compatible pyarrow is installed because of missing package(s) "
+            "from dict_keys(["
+        ) in caplog.text
diff --git a/test/integ/sso/__init__.py b/test/integ/sso/__init__.py
new file mode 100644
index 000000000..d689e9c10
--- /dev/null
+++ b/test/integ/sso/__init__.py
@@ -0,0 +1,3 @@
+#
+# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved.
+#
diff --git a/test/integ/sso/test_connection_manual.py b/test/integ/sso/test_connection_manual.py
new file mode 100644
index 000000000..45e0af515
--- /dev/null
+++ b/test/integ/sso/test_connection_manual.py
@@ -0,0 +1,185 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved.
+#
+
+from __future__ import annotations
+
+# This test requires the SSO and Snowflake admin connection parameters.
+#
+# CONNECTION_PARAMETERS_SSO = {
+#     'account': 'testaccount',
+#     'user': 'qa@snowflakecomputing.com',
+#     'protocol': 'http',
+#     'host': 'testaccount.reg.snowflakecomputing.com',
+#     'port': '8082',
+#     'authenticator': 'externalbrowser',
+#     'timezone': 'UTC',
+# }
+#
+# CONNECTION_PARAMETERS_ADMIN = { ... Snowflake admin ... }
+import os
+import sys
+
+import pytest
+
+import snowflake.connector
+
+try:
+    from snowflake.connector.auth import delete_temporary_credential
+except ImportError:
+    delete_temporary_credential = None
+
+sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+
+try:
+    from parameters import CONNECTION_PARAMETERS_SSO
+except ImportError:
+    CONNECTION_PARAMETERS_SSO = {}
+
+try:
+    from parameters import CONNECTION_PARAMETERS_ADMIN
+except ImportError:
+    CONNECTION_PARAMETERS_ADMIN = {}
+
+ID_TOKEN = "ID_TOKEN"
+
+
+@pytest.fixture
+def token_validity_test_values(request):
+    with snowflake.connector.connect(**CONNECTION_PARAMETERS_ADMIN) as cnx:
+        cnx.cursor().execute(
+            """
+ALTER SYSTEM SET
+    MASTER_TOKEN_VALIDITY=60,
+    SESSION_TOKEN_VALIDITY=5,
+    ID_TOKEN_VALIDITY=60
+"""
+        )
+        # ALLOW_UNPROTECTED_ID_TOKEN is going to be deprecated in the future
+        # cnx.cursor().execute("alter account testaccount set ALLOW_UNPROTECTED_ID_TOKEN=true;")
+        cnx.cursor().execute("alter account testaccount set ALLOW_ID_TOKEN=true;")
+        cnx.cursor().execute(
+            "alter account testaccount set ID_TOKEN_FEATURE_ENABLED=true;"
+        )
+
+    def fin():
+        with snowflake.connector.connect(**CONNECTION_PARAMETERS_ADMIN) as cnx:
+            cnx.cursor().execute(
+                """
+ALTER SYSTEM SET
+    MASTER_TOKEN_VALIDITY=default,
+    SESSION_TOKEN_VALIDITY=default,
+    ID_TOKEN_VALIDITY=default
+"""
+            )
+
+    request.addfinalizer(fin)
+    return None
+
+
+@pytest.mark.skipif(
+    not (
+        CONNECTION_PARAMETERS_SSO
+        and CONNECTION_PARAMETERS_ADMIN
+        and delete_temporary_credential
+    ),
+    reason="SSO and ADMIN connection parameters must be provided.",
+)
+def test_connect_externalbrowser(token_validity_test_values):
+    """SSO Id Token Cache tests. This test should only be run if the keyring optional dependency is installed.
+
+    To run this test, provide the connection parameters above and run it. It will pop up a browser window once,
+    but the rest of the connections should not create popups.
+    """
+    delete_temporary_credential(
+        host=CONNECTION_PARAMETERS_SSO["host"],
+        user=CONNECTION_PARAMETERS_SSO["user"],
+        cred_type=ID_TOKEN,
+    )  # delete existing temporary credential
+    CONNECTION_PARAMETERS_SSO["client_store_temporary_credential"] = True
+
+    # change database and schema to non-default ones
+    print(
+        "[INFO] 1st connection gets an id token and stores it in the local cache (keychain/credential manager/cache file). "
+        "This pops up a browser for SSO login"
+    )
+    cnx = snowflake.connector.connect(**CONNECTION_PARAMETERS_SSO)
+    assert cnx.database == "TESTDB"
+    assert cnx.schema == "PUBLIC"
+    assert cnx.role == "SYSADMIN"
+    assert cnx.warehouse == "REGRESS"
+    ret = (
+        cnx.cursor()
+        .execute(
+            "select current_database(), current_schema(), "
+            "current_role(), current_warehouse()"
+        )
+        .fetchall()
+    )
+    assert ret[0][0] == "TESTDB"
+    assert ret[0][1] == "PUBLIC"
+    assert ret[0][2] == "SYSADMIN"
+    assert ret[0][3] == "REGRESS"
+    cnx.close()
+
+    print(
+        "[INFO] 2nd connection reads the local cache and uses the id token. "
+        "This should not pop up a browser."
+ ) + CONNECTION_PARAMETERS_SSO["database"] = "testdb" + CONNECTION_PARAMETERS_SSO["schema"] = "testschema" + cnx = snowflake.connector.connect(**CONNECTION_PARAMETERS_SSO) + print( + "[INFO] Running a 10 seconds query. If the session expires in 10 " + "seconds, the query should renew the token in the middle, " + "and the current objects should be refreshed." + ) + cnx.cursor().execute("select seq8() from table(generator(timelimit=>10))") + assert cnx.database == "TESTDB" + assert cnx.schema == "TESTSCHEMA" + assert cnx.role == "SYSADMIN" + assert cnx.warehouse == "REGRESS" + + print("[INFO] Running a 1 second query. ") + cnx.cursor().execute("select seq8() from table(generator(timelimit=>1))") + assert cnx.database == "TESTDB" + assert cnx.schema == "TESTSCHEMA" + assert cnx.role == "SYSADMIN" + assert cnx.warehouse == "REGRESS" + + print( + "[INFO] Running a 90 seconds query. This pops up a browser in the " + "middle of the query." + ) + cnx.cursor().execute("select seq8() from table(generator(timelimit=>90))") + assert cnx.database == "TESTDB" + assert cnx.schema == "TESTSCHEMA" + assert cnx.role == "SYSADMIN" + assert cnx.warehouse == "REGRESS" + + cnx.close() + + # change database and schema again to ensure they are overridden + CONNECTION_PARAMETERS_SSO["database"] = "testdb" + CONNECTION_PARAMETERS_SSO["schema"] = "testschema" + cnx = snowflake.connector.connect(**CONNECTION_PARAMETERS_SSO) + assert cnx.database == "TESTDB" + assert cnx.schema == "TESTSCHEMA" + assert cnx.role == "SYSADMIN" + assert cnx.warehouse == "REGRESS" + cnx.close() + + with snowflake.connector.connect(**CONNECTION_PARAMETERS_ADMIN) as cnx_admin: + # cnx_admin.cursor().execute("alter account testaccount set ALLOW_UNPROTECTED_ID_TOKEN=false;") + cnx_admin.cursor().execute( + "alter account testaccount set ALLOW_ID_TOKEN=false;" + ) + cnx_admin.cursor().execute( + "alter account testaccount set ID_TOKEN_FEATURE_ENABLED=false;" + ) + print( + "[INFO] Login again with ALLOW_UNPROTECTED_ID_TOKEN unset. Please make sure this pops up the browser" + ) + cnx = snowflake.connector.connect(**CONNECTION_PARAMETERS_SSO) + cnx.close() diff --git a/test/integ/sso/test_unit_mfa_cache.py b/test/integ/sso/test_unit_mfa_cache.py new file mode 100644 index 000000000..6b8b316c3 --- /dev/null +++ b/test/integ/sso/test_unit_mfa_cache.py @@ -0,0 +1,183 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
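
The manual test above reduces to the following connection pattern. A minimal sketch with placeholder parameters (`my_account` and the user are illustrative; `client_store_temporary_credential` is the flag the test exercises):

```python
import snowflake.connector

# Placeholder parameters -- substitute real ones.
params = {
    "account": "my_account",
    "user": "user@example.com",
    "authenticator": "externalbrowser",
    # Cache the id token locally (keychain / credential manager / cache file)
    # so that only the first connect pops up a browser.
    "client_store_temporary_credential": True,
}

for _ in range(2):  # the second iteration should reuse the cached id token
    with snowflake.connector.connect(**params) as cnx:
        cnx.cursor().execute("select current_user()")
```
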
+#
+
+from __future__ import annotations
+
+import json
+import os
+from unittest.mock import Mock, patch
+
+import pytest
+
+import snowflake.connector
+from snowflake.connector.compat import IS_LINUX
+from snowflake.connector.errors import DatabaseError
+
+try:
+    from snowflake.connector.compat import IS_MACOS
+except ImportError:
+    import platform
+
+    IS_MACOS = platform.system() == "Darwin"
+try:
+    from snowflake.connector.auth import delete_temporary_credential
+except ImportError:
+    delete_temporary_credential = None
+
+MFA_TOKEN = "MFATOKEN"
+
+
+# Although this is a unit test, we put it under test/integ/sso, since it needs the keyring package installed
+@pytest.mark.skipif(
+    delete_temporary_credential is None,
+    reason="delete_temporary_credential is not available.",
+)
+@patch("snowflake.connector.network.SnowflakeRestful._post_request")
+def test_mfa_cache(mockSnowflakeRestfulPostRequest):
+    """Connects with (username, pwd, mfa) mock."""
+    os.environ["SF_TEMPORARY_CREDENTIAL_CACHE_DIR"] = os.getenv(
+        "WORKSPACE", os.path.expanduser("~")
+    )
+
+    LOCAL_CACHE = dict()
+
+    def mock_post_request(url, headers, json_body, **kwargs):
+        global mock_post_req_cnt
+        ret = None
+        body = json.loads(json_body)
+        if mock_post_req_cnt == 0:
+            # issue MFA token for a successful login
+            assert (
+                body["data"]["SESSION_PARAMETERS"].get("CLIENT_REQUEST_MFA_TOKEN")
+                is True
+            )
+            ret = {
+                "success": True,
+                "message": None,
+                "data": {
+                    "token": "TOKEN",
+                    "masterToken": "MASTER_TOKEN",
+                    "mfaToken": "MFA_TOKEN",
+                },
+            }
+        elif mock_post_req_cnt == 2:
+            # check the associated mfa token and issue a new mfa token
+            # note: normally the backend doesn't issue a new mfa token in this case; we do it here only to test
+            # whether the driver can replace the old token when the server provides a new one
+            assert (
+                body["data"]["SESSION_PARAMETERS"].get("CLIENT_REQUEST_MFA_TOKEN")
+                is True
+            )
+            assert body["data"]["TOKEN"] == "MFA_TOKEN"
+            ret = {
+                "success": True,
+                "message": None,
+                "data": {
+                    "token": "NEW_TOKEN",
+                    "masterToken": "NEW_MASTER_TOKEN",
+                    "mfaToken": "NEW_MFA_TOKEN",
+                },
+            }
+        elif mock_post_req_cnt == 4:
+            # check new mfa token
+            assert (
+                body["data"]["SESSION_PARAMETERS"].get("CLIENT_REQUEST_MFA_TOKEN")
+                is True
+            )
+            assert body["data"]["TOKEN"] == "NEW_MFA_TOKEN"
+            ret = {
+                "success": True,
+                "message": None,
+                "data": {
+                    "token": "NEW_TOKEN",
+                    "masterToken": "NEW_MASTER_TOKEN",
+                },
+            }
+        elif mock_post_req_cnt == 6:
+            # mock a failed login
+            ret = {"success": False, "message": None, "data": {}}
+        elif mock_post_req_cnt == 7:
+            assert (
+                body["data"]["SESSION_PARAMETERS"].get("CLIENT_REQUEST_MFA_TOKEN")
+                is True
+            )
+            assert "TOKEN" not in body["data"]
+            ret = {
+                "success": True,
+                "data": {"token": "TOKEN", "masterToken": "MASTER_TOKEN"},
+            }
+        elif mock_post_req_cnt in [1, 3, 5, 8]:
+            # connection.close()
+            ret = {"success": True}
+        mock_post_req_cnt += 1
+        return ret
+
+    def mock_del_password(system, user):
+        LOCAL_CACHE.pop(system + user, None)
+
+    def mock_set_password(system, user, pwd):
+        LOCAL_CACHE[system + user] = pwd
+
+    def mock_get_password(system, user):
+        return LOCAL_CACHE.get(system + user, None)
+
+    global mock_post_req_cnt
+    mock_post_req_cnt = 0
+
+    # POST requests mock
+    mockSnowflakeRestfulPostRequest.side_effect = mock_post_request
+
+    def test_body(conn_cfg):
+        delete_temporary_credential(
+            host=conn_cfg["host"], user=conn_cfg["user"], cred_type=MFA_TOKEN
+        )
+
+        # first connection, no mfa token cache
+        con = snowflake.connector.connect(**conn_cfg)
+        
assert con._rest.token == "TOKEN" + assert con._rest.master_token == "MASTER_TOKEN" + assert con._rest.mfa_token == "MFA_TOKEN" + con.close() + + # second connection that uses the mfa token issued for first connection to login + con = snowflake.connector.connect(**conn_cfg) + assert con._rest.token == "NEW_TOKEN" + assert con._rest.master_token == "NEW_MASTER_TOKEN" + assert con._rest.mfa_token == "NEW_MFA_TOKEN" + con.close() + + # third connection which is expected to login with new mfa token + con = snowflake.connector.connect(**conn_cfg) + assert con._rest.mfa_token is None + con.close() + + with pytest.raises(DatabaseError): + # A failed login will be forced by a mocked response for this connection + # Under authentication failed exception, mfa cache is expected to be cleaned up + con = snowflake.connector.connect(**conn_cfg) + + # no mfa cache token should be sent at this connection + con = snowflake.connector.connect(**conn_cfg) + con.close() + + conn_cfg = { + "account": "testaccount", + "user": "testuser", + "password": "testpwd", + "authenticator": "username_password_mfa", + "host": "testaccount.snowflakecomputing.com", + } + if IS_LINUX: + conn_cfg["client_request_mfa_token"] = True + + if IS_MACOS: + with patch( + "keyring.delete_password", Mock(side_effect=mock_del_password) + ), patch("keyring.set_password", Mock(side_effect=mock_set_password)), patch( + "keyring.get_password", Mock(side_effect=mock_get_password) + ): + test_body(conn_cfg) + else: + test_body(conn_cfg) diff --git a/test/integ/sso/test_unit_sso_connection.py b/test/integ/sso/test_unit_sso_connection.py new file mode 100644 index 000000000..34eb3ebe9 --- /dev/null +++ b/test/integ/sso/test_unit_sso_connection.py @@ -0,0 +1,161 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
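
For reference, the client-side configuration that the mocked flow above corresponds to looks roughly like this; a sketch with placeholder credentials (note the test only sets `client_request_mfa_token` explicitly on Linux, where no OS keystore is available):

```python
import snowflake.connector

# Placeholder credentials -- substitute real ones.
cnx = snowflake.connector.connect(
    account="my_account",
    user="my_user",
    password="***",
    # Ask the server for a reusable MFA token on the first (MFA-prompted) login...
    authenticator="username_password_mfa",
    # ...and opt into the local cache (needed explicitly on Linux).
    client_request_mfa_token=True,
)
cnx.close()
```
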
+#
+
+from __future__ import annotations
+
+import os
+from unittest.mock import Mock, patch
+
+import pytest
+
+import snowflake.connector
+
+try:
+    from snowflake.connector.compat import IS_MACOS
+except ImportError:
+    IS_MACOS = False
+try:
+    from snowflake.connector.auth import delete_temporary_credential
+except ImportError:
+    delete_temporary_credential = None
+
+ID_TOKEN = "ID_TOKEN"
+
+
+@pytest.mark.skipif(
+    delete_temporary_credential is None,
+    reason="delete_temporary_credential is not available.",
+)
+@patch("snowflake.connector.auth_webbrowser.AuthByWebBrowser.authenticate")
+@patch("snowflake.connector.network.SnowflakeRestful._post_request")
+def test_connect_externalbrowser(
+    mockSnowflakeRestfulPostRequest, mockAuthByBrowserAuthenticate
+):
+    """Connects with authenticator=externalbrowser mock."""
+    os.environ["SF_TEMPORARY_CREDENTIAL_CACHE_DIR"] = os.getenv(
+        "WORKSPACE", os.path.expanduser("~")
+    )
+
+    def mock_post_request(url, headers, json_body, **kwargs):
+        global mock_post_req_cnt
+        ret = None
+        if mock_post_req_cnt == 0:
+            # return from /v1/login-request
+            ret = {
+                "success": True,
+                "message": None,
+                "data": {
+                    "token": "TOKEN",
+                    "masterToken": "MASTER_TOKEN",
+                    "idToken": "ID_TOKEN",
+                },
+            }
+        elif mock_post_req_cnt == 1:
+            # return from /v1/login-request
+            ret = {
+                "success": True,
+                "message": None,
+                "data": {
+                    "token": "NEW_TOKEN",
+                    "masterToken": "NEW_MASTER_TOKEN",
+                },
+            }
+        elif mock_post_req_cnt == 2:
+            # return from USE WAREHOUSE TESTWH_NEW
+            ret = {
+                "success": True,
+                "message": None,
+                "data": {
+                    "finalDatabase": "TESTDB",
+                    "finalWarehouse": "TESTWH_NEW",
+                },
+            }
+        elif mock_post_req_cnt == 3:
+            # return from USE DATABASE TESTDB_NEW
+            ret = {
+                "success": True,
+                "message": None,
+                "data": {
+                    "finalDatabase": "TESTDB_NEW",
+                    "finalWarehouse": "TESTWH_NEW",
+                },
+            }
+        elif mock_post_req_cnt == 4:
+            # return from SELECT 1
+            ret = {
+                "success": True,
+                "message": None,
+                "data": {
+                    "finalDatabase": "TESTDB_NEW",
+                    "finalWarehouse": "TESTWH_NEW",
+                },
+            }
+        mock_post_req_cnt += 1
+        return ret
+
+    def mock_get_password(service, user):
+        global mock_get_pwd_cnt
+        ret = None
+        if mock_get_pwd_cnt == 1:
+            # second connection
+            ret = "ID_TOKEN"
+        mock_get_pwd_cnt += 1
+        return ret
+
+    global mock_post_req_cnt, mock_get_pwd_cnt
+    mock_post_req_cnt, mock_get_pwd_cnt = 0, 0
+
+    # pre-authentication doesn't matter
+    mockAuthByBrowserAuthenticate.return_value = None
+
+    # POST requests mock
+    mockSnowflakeRestfulPostRequest.side_effect = mock_post_request
+
+    def test_body():
+        account = "testaccount"
+        user = "testuser"
+        authenticator = "externalbrowser"
+        host = "testaccount.snowflakecomputing.com"
+
+        delete_temporary_credential(host=host, user=user, cred_type=ID_TOKEN)
+
+        # first connection
+        con = snowflake.connector.connect(
+            account=account,
+            user=user,
+            host=host,
+            authenticator=authenticator,
+            database="TESTDB",
+            warehouse="TESTWH",
+            client_store_temporary_credential=True,
+        )
+        assert con._rest.token == "TOKEN"
+        assert con._rest.master_token == "MASTER_TOKEN"
+        assert con._rest.id_token == "ID_TOKEN"
+
+        # second connection that uses the id token to get the session token
+        con = snowflake.connector.connect(
+            account=account,
+            user=user,
+            host=host,
+            authenticator=authenticator,
+            database="TESTDB_NEW",  # override the database
+            warehouse="TESTWH_NEW",  # override the warehouse
+            client_store_temporary_credential=True,
+        )
+
+        assert con._rest.token == "NEW_TOKEN"
+        assert con._rest.master_token == "NEW_MASTER_TOKEN"
+        
assert con._rest.id_token is None + assert con.database == "TESTDB_NEW" + assert con.warehouse == "TESTWH_NEW" + + if IS_MACOS: + with patch("keyring.delete_password", Mock(return_value=None)), patch( + "keyring.set_password", Mock(return_value=None) + ), patch("keyring.get_password", Mock(side_effect=mock_get_password)): + test_body() + else: + test_body() diff --git a/test/integ/test_arrow_result.py b/test/integ/test_arrow_result.py new file mode 100644 index 000000000..29c6b5012 --- /dev/null +++ b/test/integ/test_arrow_result.py @@ -0,0 +1,664 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +import random +from datetime import datetime, timedelta + +import numpy +import pytest + +import snowflake.connector + +pytestmark = pytest.mark.skipolddriver # old test driver tests won't run this module + +try: + from snowflake.connector.arrow_iterator import PyArrowIterator # NOQA + + no_arrow_iterator_ext = False +except ImportError: + no_arrow_iterator_ext = True + + +def test_select_tinyint(conn_cnx): + cases = [0, 1, -1, 127, -128] + table = "test_arrow_tiny_int" + column = "(a int)" + values = ( + "(-1, NULL), (" + + "),(".join([f"{i}, {c}" for i, c in enumerate(cases)]) + + f"), ({len(cases)}, NULL)" + ) + init(conn_cnx, table, column, values) + sql_text = f"select a from {table} order by s" + row_count = len(cases) + 2 + col_count = 1 + iterate_over_test_chunk("num", conn_cnx, sql_text, row_count, col_count) + finish(conn_cnx, table) + + +def test_select_scaled_tinyint(conn_cnx): + cases = [0.0, 0.11, -0.11, 1.27, -1.28] + table = "test_arrow_tiny_int" + column = "(a number(5,3))" + values = ( + "(-1, NULL), (" + + "),(".join([f"{i}, {c}" for i, c in enumerate(cases)]) + + f"), ({len(cases)}, NULL)" + ) + init(conn_cnx, table, column, values) + sql_text = f"select a from {table} order by s" + row_count = len(cases) + 2 + col_count = 1 + iterate_over_test_chunk("num", conn_cnx, sql_text, row_count, col_count) + finish(conn_cnx, table) + + +def test_select_smallint(conn_cnx): + cases = [0, 1, -1, 127, -128, 128, -129, 32767, -32768] + table = "test_arrow_small_int" + column = "(a int)" + values = ( + "(-1, NULL), (" + + "),(".join([f"{i}, {c}" for i, c in enumerate(cases)]) + + f"), ({len(cases)}, NULL)" + ) + init(conn_cnx, table, column, values) + sql_text = f"select a from {table} order by s" + row_count = len(cases) + 2 + col_count = 1 + iterate_over_test_chunk("num", conn_cnx, sql_text, row_count, col_count) + finish(conn_cnx, table) + + +def test_select_scaled_smallint(conn_cnx): + cases = ["0", "2.0", "-2.0", "32.767", "-32.768"] + table = "test_arrow_small_int" + column = "(a number(5,3))" + values = ( + "(-1, NULL), (" + + "),(".join([f"{i}, {c}" for i, c in enumerate(cases)]) + + f"), ({len(cases)}, NULL)" + ) + init(conn_cnx, table, column, values) + sql_text = f"select a from {table} order by s" + row_count = len(cases) + 2 + col_count = 1 + iterate_over_test_chunk("num", conn_cnx, sql_text, row_count, col_count) + finish(conn_cnx, table) + + +def test_select_int(conn_cnx): + cases = [ + 0, + 1, + -1, + 127, + -128, + 128, + -129, + 32767, + -32768, + 32768, + -32769, + 2147483647, + -2147483648, + ] + table = "test_arrow_int" + column = "(a int)" + values = ( + "(-1, NULL), (" + + "),(".join([f"{i}, {c}" for i, c in enumerate(cases)]) + + f"), ({len(cases)}, NULL)" + ) + init(conn_cnx, table, column, values) + sql_text = f"select a from {table} order by s" + row_count = len(cases) + 
2 + col_count = 1 + iterate_over_test_chunk("num", conn_cnx, sql_text, row_count, col_count) + finish(conn_cnx, table) + + +def test_select_scaled_int(conn_cnx): + cases = ["0", "0.123456789", "-0.123456789", "0.2147483647", "-0.2147483647"] + table = "test_arrow_int" + column = "(a number(10,9))" + values = ( + "(-1, NULL), (" + + "),(".join([f"{i}, {c}" for i, c in enumerate(cases)]) + + f"), ({len(cases)}, NULL)" + ) + init(conn_cnx, table, column, values) + sql_text = f"select a from {table} order by s" + row_count = len(cases) + 2 + col_count = 1 + iterate_over_test_chunk("num", conn_cnx, sql_text, row_count, col_count) + finish(conn_cnx, table) + + +def test_select_bigint(conn_cnx): + cases = [ + 0, + 1, + -1, + 127, + -128, + 128, + -129, + 32767, + -32768, + 32768, + -32769, + 2147483647, + -2147483648, + 2147483648, + -2147483649, + 9223372036854775807, + -9223372036854775808, + ] + table = "test_arrow_bigint" + column = "(a int)" + values = ( + "(-1, NULL), (" + + "),(".join([f"{i}, {c}" for i, c in enumerate(cases)]) + + f"), ({len(cases)}, NULL)" + ) + init(conn_cnx, table, column, values) + sql_text = f"select a from {table} order by s" + row_count = len(cases) + 2 + col_count = 1 + iterate_over_test_chunk("num", conn_cnx, sql_text, row_count, col_count) + finish(conn_cnx, table) + + +def test_select_scaled_bigint(conn_cnx): + cases = [ + "0", + "0.000000000000000001", + "-0.000000000000000001", + "0.000000000000000127", + "-0.000000000000000128", + "0.000000000000000128", + "-0.000000000000000129", + "0.000000000000032767", + "-0.000000000000032768", + "0.000000000000032768", + "-0.000000000000032769", + "0.000000002147483647", + "-0.000000002147483648", + "0.000000002147483648", + "-0.000000002147483649", + "9.223372036854775807", + "-9.223372036854775808", + ] + table = "test_arrow_bigint" + column = "(a number(38,18))" + values = ( + "(-1, NULL), (" + + "),(".join([f"{i}, {c}" for i, c in enumerate(cases)]) + + f"), ({len(cases)}, NULL)" + ) + init(conn_cnx, table, column, values) + sql_text = f"select a from {table} order by s" + row_count = len(cases) + 2 + col_count = 1 + iterate_over_test_chunk("num", conn_cnx, sql_text, row_count, col_count) + finish(conn_cnx, table) + + +def test_select_decimal(conn_cnx): + cases = [ + "10000000000000000000000000000000000000", + "12345678901234567890123456789012345678", + "99999999999999999999999999999999999999", + ] + table = "test_arrow_decimal" + column = "(a number(38,0))" + values = ( + "(-1, NULL), (" + + "),(".join([f"{i}, {c}" for i, c in enumerate(cases)]) + + f"), ({len(cases)}, NULL)" + ) + init(conn_cnx, table, column, values) + sql_text = f"select a from {table} order by s" + row_count = len(cases) + 2 + col_count = 1 + iterate_over_test_chunk("num", conn_cnx, sql_text, row_count, col_count) + finish(conn_cnx, table) + + +def test_select_scaled_decimal(conn_cnx): + cases = [ + "0", + "0.000000000000000001", + "-0.000000000000000001", + "0.000000000000000127", + "-0.000000000000000128", + "0.000000000000000128", + "-0.000000000000000129", + "0.000000000000032767", + "-0.000000000000032768", + "0.000000000000032768", + "-0.000000000000032769", + "0.000000002147483647", + "-0.000000002147483648", + "0.000000002147483648", + "-0.000000002147483649", + "9.223372036854775807", + "-9.223372036854775808", + ] + table = "test_arrow_decimal" + column = "(a number(38,37))" + values = ( + "(-1, NULL), (" + + "),(".join([f"{i}, {c}" for i, c in enumerate(cases)]) + + f"), ({len(cases)}, NULL)" + ) + init(conn_cnx, table, column, 
values) + sql_text = f"select a from {table} order by s" + row_count = len(cases) + 2 + col_count = 1 + iterate_over_test_chunk("num", conn_cnx, sql_text, row_count, col_count) + finish(conn_cnx, table) + + +def test_select_large_scaled_decimal(conn_cnx): + cases = [ + "1.0000000000000000000000000000000000000", + "1.2345678901234567890123456789012345678", + "9.9999999999999999999999999999999999999", + ] + table = "test_arrow_decimal" + column = "(a number(38,37))" + values = ( + "(-1, NULL), (" + + "),(".join([f"{i}, {c}" for i, c in enumerate(cases)]) + + f"), ({len(cases)}, NULL)" + ) + init(conn_cnx, table, column, values) + sql_text = f"select a from {table} order by s" + row_count = len(cases) + 2 + col_count = 1 + iterate_over_test_chunk("num", conn_cnx, sql_text, row_count, col_count) + finish(conn_cnx, table) + + +def test_scaled_decimal_SNOW_133561(conn_cnx): + cases = [ + "0", + "1.2345", + "2.3456", + "-9.999", + "-1.000", + "-3.4567", + "3.4567", + "4.5678", + "5.6789", + "NULL", + ] + table = "test_scaled_decimal_SNOW_133561" + column = "(a number(38,10))" + values = ( + "(-1, NULL), (" + + "),(".join([f"{i}, {c}" for i, c in enumerate(cases)]) + + f"), ({len(cases)}, NULL)" + ) + init(conn_cnx, table, column, values) + sql_text = f"select a from {table} order by s" + row_count = len(cases) + 2 + col_count = 1 + iterate_over_test_chunk("num", conn_cnx, sql_text, row_count, col_count) + finish(conn_cnx, table) + + +def test_select_boolean(conn_cnx): + cases = ["true", "false", "true"] + table = "test_arrow_boolean" + column = "(a boolean)" + values = ( + "(-1, NULL), (" + + "),(".join([f"{i}, {c}" for i, c in enumerate(cases)]) + + f"), ({len(cases)}, NULL)" + ) + init(conn_cnx, table, column, values) + sql_text = f"select a from {table} order by s" + row_count = len(cases) + 2 + col_count = 1 + iterate_over_test_chunk("boolean", conn_cnx, sql_text, row_count, col_count) + finish(conn_cnx, table) + + +@pytest.mark.skipif( + no_arrow_iterator_ext, reason="arrow_iterator extension is not built." 
+) +def test_select_double_precision(conn_cnx): + cases = [ + # SNOW-31249 + "-86.6426540296895", + "3.14159265359", + # SNOW-76269 + "1.7976931348623157e+308", + "1.7e+308", + "1.7976931348623151e+308", + "-1.7976931348623151e+308", + "-1.7e+308", + "-1.7976931348623157e+308", + ] + table = "test_arrow_double" + column = "(a double)" + values = "(" + "),(".join([f"{i}, {c}" for i, c in enumerate(cases)]) + ")" + init(conn_cnx, table, column, values) + sql_text = f"select a from {table} order by s" + row_count = len(cases) + col_count = 1 + iterate_over_test_chunk( + "float", conn_cnx, sql_text, row_count, col_count, expected=cases + ) + finish(conn_cnx, table) + + +def test_select_semi_structure(conn_cnx): + sql_text = """select array_construct(10, 20, 30), + array_construct(null, 'hello', 3::double, 4, 5), + array_construct(), + object_construct('a',1,'b','BBBB', 'c',null), + object_construct('Key_One', parse_json('NULL'), 'Key_Two', null, 'Key_Three', 'null'), + to_variant(3.2), + parse_json('{ "a": null}'), + 100::variant; + """ + row_count = 1 + col_count = 8 + iterate_over_test_chunk("struct", conn_cnx, sql_text, row_count, col_count) + + +def test_select_time(conn_cnx): + for scale in range(10): + select_time_with_scale(conn_cnx, scale) + + +def select_time_with_scale(conn_cnx, scale): + cases = [ + "00:01:23", + "00:01:23.1", + "00:01:23.12", + "00:01:23.123", + "00:01:23.1234", + "00:01:23.12345", + "00:01:23.123456", + "00:01:23.1234567", + "00:01:23.12345678", + "00:01:23.123456789", + ] + table = "test_arrow_time" + column = f"(a time({scale}))" + values = ( + "(-1, NULL), (" + + "),(".join([f"{i}, '{c}'" for i, c in enumerate(cases)]) + + f"), ({len(cases)}, NULL)" + ) + init(conn_cnx, table, column, values) + sql_text = f"select a from {table} order by s" + row_count = len(cases) + 2 + col_count = 1 + iterate_over_test_chunk("time", conn_cnx, sql_text, row_count, col_count) + finish(conn_cnx, table) + + +def test_select_date(conn_cnx): + cases = [ + "2016-07-23", + "1970-01-01", + "1969-12-31", + "0001-01-01", + "9999-12-31", + ] + table = "test_arrow_time" + column = "(a date)" + values = ( + "(-1, NULL), (" + + "),(".join([f"{i}, '{c}'" for i, c in enumerate(cases)]) + + f"), ({len(cases)}, NULL)" + ) + init(conn_cnx, table, column, values) + sql_text = f"select a from {table} order by s" + row_count = len(cases) + 2 + col_count = 1 + iterate_over_test_chunk("date", conn_cnx, sql_text, row_count, col_count) + finish(conn_cnx, table) + + +@pytest.mark.parametrize("scale", range(10)) +@pytest.mark.parametrize("type", ["timestampntz", "timestampltz", "timestamptz"]) +def test_select_timestamp_with_scale(conn_cnx, scale, type): + cases = [ + "2017-01-01 12:00:00", + "2014-01-02 16:00:00", + "2014-01-02 12:34:56", + "2017-01-01 12:00:00.123456789", + "2014-01-02 16:00:00.000000001", + "2014-01-02 12:34:56.1", + "1969-12-31 23:59:59.000000001", + "1969-12-31 23:59:58.000000001", + "1969-11-30 23:58:58.000001001", + "1970-01-01 00:00:00.123412423", + "1970-01-01 00:00:01.000001", + "1969-12-31 11:59:59.001", + "0001-12-31 11:59:59.11", + ] + table = "test_arrow_timestamp" + column = f"(a {type}({scale}))" + values = ( + "(-1, NULL), (" + + "),(".join([f"{i}, '{c}'" for i, c in enumerate(cases)]) + + f"), ({len(cases)}, NULL)" + ) + init(conn_cnx, table, column, values) + sql_text = f"select a from {table} order by s" + row_count = len(cases) + 2 + col_count = 1 + # TODO SNOW-534252 + iterate_over_test_chunk( + type, conn_cnx, sql_text, row_count, col_count, 
eps=timedelta(microseconds=1) + ) + finish(conn_cnx, table) + + +def test_select_with_string(conn_cnx): + col_count = 2 + row_count = 50000 + random_seed = get_random_seed() + length = random.randint(1, 10) + sql_text = ( + "select seq4() as c1, randstr({}, random({})) as c2 from ".format( + length, random_seed + ) + + "table(generator(rowcount=>50000)) order by c1" + ) + iterate_over_test_chunk("string", conn_cnx, sql_text, row_count, col_count) + + +def test_select_with_bool(conn_cnx): + col_count = 2 + row_count = 50000 + random_seed = get_random_seed() + sql_text = ( + "select seq4() as c1, as_boolean(uniform(0, 1, random({}))) as c2 from ".format( + random_seed + ) + + f"table(generator(rowcount=>{row_count})) order by c1" + ) + iterate_over_test_chunk("bool", conn_cnx, sql_text, row_count, col_count) + + +def test_select_with_float(conn_cnx): + col_count = 2 + row_count = 50000 + random_seed = get_random_seed() + pow_val = random.randint(0, 10) + val_len = random.randint(0, 16) + # If val_len were assigned a larger value (e.g. 20), the precision difference between C++ and + # Python would become very obvious, so if this test fails in the future, please check whether + # the failure is caused by the different precision between Python and C++ + val_range = random.randint(0, 10**val_len) + + sql_text = "select seq4() as c1, as_double(uniform({}, {}, random({})))/{} as c2 from ".format( + -val_range, val_range, random_seed, 10**pow_val + ) + "table(generator(rowcount=>{})) order by c1".format( + row_count + ) + iterate_over_test_chunk( + "float", conn_cnx, sql_text, row_count, col_count, eps=10 ** (-pow_val + 1) + ) + + +def test_select_with_empty_resultset(conn_cnx): + with conn_cnx() as cnx: + cursor = cnx.cursor() + cursor.execute("alter session set query_result_format='ARROW_FORCE'") + cursor.execute( + "alter session set python_connector_query_result_format='ARROW_FORCE'" + ) + cursor.execute("select seq4() from table(generator(rowcount=>100)) limit 0") + + assert cursor.fetchone() is None + + +def test_select_with_large_resultset(conn_cnx): + col_count = 5 + row_count = 1000000 + random_seed = get_random_seed() + + sql_text = ( + "select seq4() as c1, " + "uniform(-10000, 10000, random({})) as c2, " + "randstr(5, random({})) as c3, " + "randstr(10, random({})) as c4, " + "uniform(-100000, 100000, random({})) as c5 " + "from table(generator(rowcount=>{}))".format( + random_seed, random_seed, random_seed, random_seed, row_count + ) + ) + + iterate_over_test_chunk("large_resultset", conn_cnx, sql_text, row_count, col_count) + + +def test_dict_cursor(conn_cnx): + with conn_cnx() as cnx: + with cnx.cursor(snowflake.connector.DictCursor) as c: + c.execute("alter session set python_connector_query_result_format='ARROW'") + + # first test small result generated by GS + ret = c.execute("select 1 as foo, 2 as bar").fetchone() + assert ret["FOO"] == 1 + assert ret["BAR"] == 2 + + # test larger result set + row_index = 1 + for row in c.execute( + "select row_number() over (order by val asc) as foo, " + "row_number() over (order by val asc) as bar " + "from (select seq4() as val from table(generator(rowcount=>10000)));" + ): + assert row["FOO"] == row_index + assert row["BAR"] == row_index + row_index += 1 + + +def test_fetch_as_numpy_val(conn_cnx): + with conn_cnx(numpy=True) as cnx: + cursor = cnx.cursor() + cursor.execute("alter session set python_connector_query_result_format='ARROW'") + + val = cursor.execute( + """ +select 1.23456::double, 1.3456::number(10, 4), 1234567::number(10,
0) +""" + ).fetchone() + assert isinstance(val[0], numpy.float64) + assert val[0] == numpy.float64("1.23456") + assert isinstance(val[1], numpy.float64) + assert val[1] == numpy.float64("1.3456") + assert isinstance(val[2], numpy.int64) + assert val[2] == numpy.float64("1234567") + + val = cursor.execute( + """ +select '2019-08-10'::date, '2019-01-02 12:34:56.1234'::timestamp_ntz(4), +'2019-01-02 12:34:56.123456789'::timestamp_ntz(9), '2019-01-02 12:34:56.123456789'::timestamp_ntz(8) +""" + ).fetchone() + assert isinstance(val[0], numpy.datetime64) + assert val[0] == numpy.datetime64("2019-08-10") + assert isinstance(val[1], numpy.datetime64) + assert val[1] == numpy.datetime64("2019-01-02 12:34:56.1234") + assert isinstance(val[2], numpy.datetime64) + assert val[2] == numpy.datetime64("2019-01-02 12:34:56.123456789") + assert isinstance(val[3], numpy.datetime64) + assert val[3] == numpy.datetime64("2019-01-02 12:34:56.12345678") + + +def get_random_seed(): + random.seed(datetime.now().timestamp()) + return random.randint(0, 10000) + + +def iterate_over_test_chunk( + test_name, conn_cnx, sql_text, row_count, col_count, eps=None, expected=None +): + with conn_cnx() as json_cnx: + with conn_cnx() as arrow_cnx: + if expected is None: + cursor_json = json_cnx.cursor() + cursor_json.execute("alter session set query_result_format='JSON'") + cursor_json.execute( + "alter session set python_connector_query_result_format='JSON'" + ) + cursor_json.execute(sql_text) + + cursor_arrow = arrow_cnx.cursor() + cursor_arrow.execute("alter session set use_cached_result=false") + cursor_arrow.execute("alter session set query_result_format='ARROW_FORCE'") + cursor_arrow.execute( + "alter session set python_connector_query_result_format='ARROW_FORCE'" + ) + cursor_arrow.execute(sql_text) + assert cursor_arrow._query_result_format == "arrow" + + if expected is None: + for _ in range(0, row_count): + json_res = cursor_json.fetchone() + arrow_res = cursor_arrow.fetchone() + for j in range(0, col_count): + if test_name == "float" and eps is not None: + assert abs(json_res[j] - arrow_res[j]) <= eps + elif ( + test_name == "timestampltz" + and json_res[j] is not None + and eps is not None + ): + assert abs(json_res[j] - arrow_res[j]) <= eps + else: + assert json_res[j] == arrow_res[j] + else: + # only support single column for now + for i in range(0, row_count): + arrow_res = cursor_arrow.fetchone() + assert str(arrow_res[0]) == expected[i] + + +def init(conn_cnx, table, column, values): + with conn_cnx() as json_cnx: + cursor_json = json_cnx.cursor() + column_with_seq = column[0] + "s number, " + column[1:] + cursor_json.execute(f"create or replace table {table} {column_with_seq}") + cursor_json.execute(f"insert into {table} values {values}") + + +def finish(conn_cnx, table): + with conn_cnx() as json_cnx: + cursor_json = json_cnx.cursor() + cursor_json.execute(f"drop table IF EXISTS {table};") diff --git a/test/integ/test_async.py b/test/integ/test_async.py new file mode 100644 index 000000000..f0100624e --- /dev/null +++ b/test/integ/test_async.py @@ -0,0 +1,223 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
+# + +from __future__ import annotations + +import time + +import pytest + +from snowflake.connector import ProgrammingError + +# Mark all tests in this file to time out after 2 minutes to prevent hanging forever +pytestmark = [pytest.mark.timeout(120), pytest.mark.skipolddriver] + +try: # pragma: no cover + from snowflake.connector.constants import QueryStatus +except ImportError: + QueryStatus = None + + +def test_simple_async(conn_cnx): + """Simple test that shows the most basic usage of fire and forget. + + This test also makes sure that the wait_until_ready function's sleeping is exercised and + that some fields are copied over correctly from the original query. + """ + with conn_cnx() as con: + with con.cursor() as cur: + cur.execute_async("select count(*) from table(generator(timeLimit => 5))") + cur.get_results_from_sfqid(cur.sfqid) + assert len(cur.fetchall()) == 1 + assert cur.rowcount + assert cur.description + + +def test_async_exec(conn_cnx): + """Tests whether simple async query execution works. + + Runs a query that takes a few seconds to finish and then completely closes the connection + to Snowflake. Then waits long enough for that query to finish, opens a new connection + and fetches results. It also tests QueryStatus-related functionality. + + This test tends to hang longer than expected when the testing warehouse is overloaded. + Manually looking at query history reveals that when a full GH actions + Jenkins test load hits one warehouse + it can be queued for 15 seconds, so for now we wait 5 seconds before checking and then we give it another 25 + seconds to finish. + """ + with conn_cnx() as con: + with con.cursor() as cur: + cur.execute_async("select count(*) from table(generator(timeLimit => 5))") + q_id = cur.sfqid + status = con.get_query_status(q_id) + assert con.is_still_running(status) + time.sleep(5) + with conn_cnx() as con: + with con.cursor() as cur: + for _ in range(25): + # Check up to 25 times, once a second, to see if it's done + status = con.get_query_status(q_id) + if status == QueryStatus.SUCCESS: + break + time.sleep(1) + else: + pytest.fail( + f"We should have broken out of this loop, final query status: {status}" + ) + status = con.get_query_status_throw_if_error(q_id) + assert status == QueryStatus.SUCCESS + cur.get_results_from_sfqid(q_id) + assert len(cur.fetchall()) == 1 + + +def test_async_error(conn_cnx): + """Tests whether simple async query error retrieval works. + + Runs a query that will fail to execute and then verifies that trying to retrieve + its results raises an exception. It also tests QueryStatus-related functionality.
+ """ + with conn_cnx() as con: + with con.cursor() as cur: + cur.execute_async("select * from nonexistentTable") + q_id = cur.sfqid + while con.is_still_running(con.get_query_status(q_id)): + time.sleep(1) + status = con.get_query_status(q_id) + assert status == QueryStatus.FAILED_WITH_ERROR + assert con.is_an_error(status) + with pytest.raises(ProgrammingError): + con.get_query_status_throw_if_error(q_id) + with pytest.raises(ProgrammingError): + cur.get_results_from_sfqid(q_id) + + +def test_mix_sync_async(conn_cnx): + with conn_cnx() as con: + with con.cursor() as cur: + # Setup + cur.execute("alter session set CLIENT_TIMESTAMP_TYPE_MAPPING=TIMESTAMP_TZ") + try: + for table in ["smallTable", "uselessTable"]: + cur.execute( + "create or replace table {} (colA string, colB int)".format( + table + ) + ) + cur.execute( + "insert into {} values ('row1', 1), ('row2', 2), ('row3', 3)".format( + table + ) + ) + cur.execute_async("select * from smallTable") + sf_qid1 = cur.sfqid + cur.execute_async("select * from uselessTable") + sf_qid2 = cur.sfqid + # Wait until the 2 queries finish + while con.is_still_running(con.get_query_status(sf_qid1)): + time.sleep(1) + while con.is_still_running(con.get_query_status(sf_qid2)): + time.sleep(1) + cur.execute("drop table uselessTable") + assert cur.fetchall() == [("USELESSTABLE successfully dropped.",)] + cur.get_results_from_sfqid(sf_qid1) + assert cur.fetchall() == [("row1", 1), ("row2", 2), ("row3", 3)] + cur.get_results_from_sfqid(sf_qid2) + assert cur.fetchall() == [("row1", 1), ("row2", 2), ("row3", 3)] + finally: + for table in ["smallTable", "uselessTable"]: + cur.execute(f"drop table if exists {table}") + + +def test_async_qmark(conn_cnx): + """Tests that qmark parameter binding works with async queries.""" + import snowflake.connector + + orig_format = snowflake.connector.paramstyle + snowflake.connector.paramstyle = "qmark" + try: + with conn_cnx() as con: + with con.cursor() as cur: + try: + cur.execute( + "create or replace table qmark_test (aa STRING, bb STRING)" + ) + cur.execute( + "insert into qmark_test VALUES(?, ?)", ("test11", "test12") + ) + cur.execute_async("select * from qmark_test") + async_qid = cur.sfqid + with conn_cnx() as con2: + with con2.cursor() as cur2: + cur2.get_results_from_sfqid(async_qid) + assert cur2.fetchall() == [("test11", "test12")] + finally: + cur.execute("drop table if exists qmark_test") + finally: + snowflake.connector.paramstyle = orig_format + + +def test_done_caching(conn_cnx): + """Tests whether get status caching is working as expected.""" + with conn_cnx() as con: + with con.cursor() as cur: + cur.execute_async("select count(*) from table(generator(timeLimit => 5))") + qid1 = cur.sfqid + cur.execute_async("select count(*) from table(generator(timeLimit => 10))") + qid2 = cur.sfqid + assert len(con._async_sfqids) == 2 + time.sleep(5) + while con.is_still_running(con.get_query_status(qid1)): + time.sleep(1) + assert con.get_query_status(qid1) == QueryStatus.SUCCESS + assert len(con._async_sfqids) == 1 + assert len(con._done_async_sfqids) == 1 + time.sleep(5) + while con.is_still_running(con.get_query_status(qid2)): + time.sleep(1) + assert con.get_query_status(qid2) == QueryStatus.SUCCESS + assert len(con._async_sfqids) == 0 + assert len(con._done_async_sfqids) == 2 + assert con._all_async_queries_finished() + + +def test_invalid_uuid_get_status(conn_cnx): + with conn_cnx() as con: + with con.cursor() as cur: + with pytest.raises( + ValueError, match=r"Invalid UUID: 'doesnt exist, dont even look'" + 
): + cur.get_results_from_sfqid("doesnt exist, dont even look") + + +def test_unknown_sfqid(conn_cnx): + """Tests that no exception is thrown when we attempt to get the status of a nonexistent query.""" + with conn_cnx() as con: + assert ( + con.get_query_status("12345678-1234-4123-A123-123456789012") + == QueryStatus.NO_DATA + ) + + +def test_unknown_sfqid_results(conn_cnx): + """Tests that no exception is thrown when we attempt to get the results of a nonexistent query.""" + with conn_cnx() as con: + with con.cursor() as cur: + cur.get_results_from_sfqid("12345678-1234-4123-A123-123456789012") + + +def test_not_fetching(conn_cnx): + """Tests whether executing a new query cleans up after an async result retrieval. + + If someone tries to retrieve results, the first fetch would have to block. We should not + block once a new query has been executed. + """ + with conn_cnx() as con: + with con.cursor() as cur: + cur.execute_async("select 1") + sf_qid = cur.sfqid + cur.get_results_from_sfqid(sf_qid) + cur.execute("select 2") + assert cur._inner_cursor is None + assert cur._prefetch_hook is None diff --git a/test/integ/test_autocommit.py b/test/integ/test_autocommit.py new file mode 100644 index 000000000..a77fb8607 --- /dev/null +++ b/test/integ/test_autocommit.py @@ -0,0 +1,197 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +import snowflake.connector + + +def exe0(cnx, sql): + return cnx.cursor().execute(sql) + + +def _run_autocommit_off(cnx, db_parameters): + """Runs autocommit off test. + + Args: + cnx: The database connection context. + db_parameters: Database parameters. + """ + + def exe(cnx, sql): + return cnx.cursor().execute(sql.format(name=db_parameters["name"])) + + exe( + cnx, + """ +INSERT INTO {name} VALUES(True), (False), (False) +""", + ) + res = exe0( + cnx, + """ +SELECT CURRENT_TRANSACTION() +""", + ).fetchone() + assert res[0] is not None + res = exe( + cnx, + """ +SELECT COUNT(*) FROM {name} WHERE c1 +""", + ).fetchone() + assert res[0] == 1 + res = exe( + cnx, + """ +SELECT COUNT(*) FROM {name} WHERE NOT c1 +""", + ).fetchone() + assert res[0] == 2 + cnx.rollback() + res = exe0( + cnx, + """ +SELECT CURRENT_TRANSACTION() +""", + ).fetchone() + assert res[0] is None + res = exe( + cnx, + """ +SELECT COUNT(*) FROM {name} WHERE NOT c1 +""", + ).fetchone() + assert res[0] == 0 + exe( + cnx, + """ +INSERT INTO {name} VALUES(True), (False), (False) +""", + ) + cnx.commit() + res = exe( + cnx, + """ +SELECT COUNT(*) FROM {name} WHERE NOT c1 +""", + ).fetchone() + assert res[0] == 2 + cnx.rollback() + res = exe( + cnx, + """ +SELECT COUNT(*) FROM {name} WHERE NOT c1 +""", + ).fetchone() + assert res[0] == 2 + + +def _run_autocommit_on(cnx, db_parameters): + """Run autocommit on test. + + Args: + cnx: The database connection context. + db_parameters: Database parameters. + """ + + def exe(cnx, sql): + return cnx.cursor().execute(sql.format(name=db_parameters["name"])) + + exe( + cnx, + """ +INSERT INTO {name} VALUES(True), (False), (False) +""", + ) + cnx.rollback() + res = exe( + cnx, + """ +SELECT COUNT(*) FROM {name} WHERE NOT c1 +""", + ).fetchone() + assert res[0] == 4 + + +def test_autocommit_attribute(conn_cnx, db_parameters): + """Tests autocommit attribute. + + Args: + conn_cnx: The database connection context. + db_parameters: Database parameters.
+ """ + + def exe(cnx, sql): + return cnx.cursor().execute(sql.format(name=db_parameters["name"])) + + with conn_cnx() as cnx: + exe( + cnx, + """ +CREATE TABLE {name} (c1 boolean) +""", + ) + try: + cnx.autocommit(False) + _run_autocommit_off(cnx, db_parameters) + cnx.autocommit(True) + _run_autocommit_on(cnx, db_parameters) + finally: + exe( + cnx, + """ +DROP TABLE IF EXISTS {name} + """, + ) + + +def test_autocommit_parameters(db_parameters): + """Tests autocommit parameter. + + Args: + db_parameters: Database parameters. + """ + + def exe(cnx, sql): + return cnx.cursor().execute(sql.format(name=db_parameters["name"])) + + with snowflake.connector.connect( + user=db_parameters["user"], + password=db_parameters["password"], + host=db_parameters["host"], + port=db_parameters["port"], + account=db_parameters["account"], + protocol=db_parameters["protocol"], + schema=db_parameters["schema"], + database=db_parameters["database"], + autocommit=False, + ) as cnx: + exe( + cnx, + """ +CREATE TABLE {name} (c1 boolean) +""", + ) + _run_autocommit_off(cnx, db_parameters) + + with snowflake.connector.connect( + user=db_parameters["user"], + password=db_parameters["password"], + host=db_parameters["host"], + port=db_parameters["port"], + account=db_parameters["account"], + protocol=db_parameters["protocol"], + schema=db_parameters["schema"], + database=db_parameters["database"], + autocommit=True, + ) as cnx: + _run_autocommit_on(cnx, db_parameters) + exe( + cnx, + """ +DROP TABLE IF EXISTS {name} +""", + ) diff --git a/test/integ/test_bindings.py b/test/integ/test_bindings.py new file mode 100644 index 000000000..45ccf2ae7 --- /dev/null +++ b/test/integ/test_bindings.py @@ -0,0 +1,554 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
+# + +from __future__ import annotations + +import calendar +import tempfile +import time +from datetime import date, datetime +from datetime import time as datetime_time +from datetime import timedelta +from decimal import Decimal +from unittest.mock import patch + +import pendulum +import pytest +import pytz + +from snowflake.connector.converter import convert_datetime_to_epoch +from snowflake.connector.errors import ForbiddenError, ProgrammingError + +from ..randomize import random_string + +tempfile.gettempdir() + +PST_TZ = "America/Los_Angeles" +JST_TZ = "Asia/Tokyo" +CLIENT_STAGE_ARRAY_BINDING_THRESHOLD = "CLIENT_STAGE_ARRAY_BINDING_THRESHOLD" + + +def test_invalid_binding_option(conn_cnx): + """Invalid paramstyle parameters.""" + with pytest.raises(ProgrammingError): + with conn_cnx(paramstyle="hahaha"): + pass + + # valid cases + for s in ["format", "pyformat", "qmark", "numeric"]: + with conn_cnx(paramstyle=s): + pass + + +@pytest.mark.parametrize( + "bulk_array_optimization", + [pytest.param(True, marks=pytest.mark.skipolddriver), False], +) +def test_binding(conn_cnx, db_parameters, bulk_array_optimization): + """Paramstyle qmark binding tests to cover basic data types.""" + CREATE_TABLE = """create or replace table {name} ( + c1 BOOLEAN, + c2 INTEGER, + c3 NUMBER(38,2), + c4 VARCHAR(1234), + c5 FLOAT, + c6 BINARY, + c7 BINARY, + c8 TIMESTAMP_NTZ, + c9 TIMESTAMP_NTZ, + c10 TIMESTAMP_NTZ, + c11 TIMESTAMP_NTZ, + c12 TIMESTAMP_LTZ, + c13 TIMESTAMP_LTZ, + c14 TIMESTAMP_LTZ, + c15 TIMESTAMP_LTZ, + c16 TIMESTAMP_TZ, + c17 TIMESTAMP_TZ, + c18 TIMESTAMP_TZ, + c19 TIMESTAMP_TZ, + c20 DATE, + c21 TIME, + c22 TIMESTAMP_NTZ, + c23 TIME, + c24 STRING, + c25 STRING, + c26 STRING + ) + """ + INSERT = """ +insert into {name} values( +?,?,?, ?,?,?, ?,?,?, ?,?,?, ?,?,?, ?,?,?, ?,?,?, ?,?,?,?,?) 
+""" + with conn_cnx(paramstyle="qmark") as cnx: + cnx.cursor().execute(CREATE_TABLE.format(name=db_parameters["name"])) + current_utctime = datetime.utcnow() + current_localtime = pytz.utc.localize(current_utctime, is_dst=False).astimezone( + pytz.timezone(PST_TZ) + ) + current_localtime_without_tz = datetime.now() + current_localtime_with_other_tz = pytz.utc.localize( + current_localtime_without_tz, is_dst=False + ).astimezone(pytz.timezone(JST_TZ)) + dt = date(2017, 12, 30) + tm = datetime_time(hour=1, minute=2, second=3, microsecond=456) + struct_time_v = time.strptime("30 Sep 01 11:20:30", "%d %b %y %H:%M:%S") + tdelta = timedelta( + seconds=tm.hour * 3600 + tm.minute * 60 + tm.second, microseconds=tm.microsecond + ) + data = ( + True, + 1, + Decimal("1.2"), + "str1", + 1.2, + # Py2 has bytes in str type, so Python Connector + bytes(b"abc"), + bytearray(b"def"), + current_utctime, + current_localtime, + current_localtime_without_tz, + current_localtime_with_other_tz, + ("TIMESTAMP_LTZ", current_utctime), + ("TIMESTAMP_LTZ", current_localtime), + ("TIMESTAMP_LTZ", current_localtime_without_tz), + ("TIMESTAMP_LTZ", current_localtime_with_other_tz), + ("TIMESTAMP_TZ", current_utctime), + ("TIMESTAMP_TZ", current_localtime), + ("TIMESTAMP_TZ", current_localtime_without_tz), + ("TIMESTAMP_TZ", current_localtime_with_other_tz), + dt, + tm, + ("TIMESTAMP_NTZ", struct_time_v), + ("TIME", tdelta), + ("TEXT", None), + "", + ',an\\\\escaped"line\n', + ) + try: + with conn_cnx(paramstyle="qmark", timezone=PST_TZ) as cnx: + csr = cnx.cursor() + if bulk_array_optimization: + cnx._session_parameters[CLIENT_STAGE_ARRAY_BINDING_THRESHOLD] = 1 + csr.executemany(INSERT.format(name=db_parameters["name"]), [data]) + else: + csr.execute(INSERT.format(name=db_parameters["name"]), data) + + ret = ( + cnx.cursor() + .execute( + """ +select * from {name} where c1=? and c2=? 
+""".format( + name=db_parameters["name"] + ), + (True, 1), + ) + .fetchone() + ) + assert len(ret) == 26 + assert ret[0], "BOOLEAN" + assert ret[2] == Decimal("1.2"), "NUMBER" + assert ret[4] == 1.2, "FLOAT" + assert ret[5] == b"abc" + assert ret[6] == b"def" + assert ret[7] == current_utctime + assert convert_datetime_to_epoch(ret[8]) == convert_datetime_to_epoch( + current_localtime + ) + assert convert_datetime_to_epoch(ret[9]) == convert_datetime_to_epoch( + current_localtime_without_tz + ) + assert convert_datetime_to_epoch(ret[10]) == convert_datetime_to_epoch( + current_localtime_with_other_tz + ) + assert convert_datetime_to_epoch(ret[11]) == convert_datetime_to_epoch( + current_utctime + ) + assert convert_datetime_to_epoch(ret[12]) == convert_datetime_to_epoch( + current_localtime + ) + assert convert_datetime_to_epoch(ret[13]) == convert_datetime_to_epoch( + current_localtime_without_tz + ) + assert convert_datetime_to_epoch(ret[14]) == convert_datetime_to_epoch( + current_localtime_with_other_tz + ) + assert convert_datetime_to_epoch(ret[15]) == convert_datetime_to_epoch( + current_utctime + ) + assert convert_datetime_to_epoch(ret[16]) == convert_datetime_to_epoch( + current_localtime + ) + assert convert_datetime_to_epoch(ret[17]) == convert_datetime_to_epoch( + current_localtime_without_tz + ) + assert convert_datetime_to_epoch(ret[18]) == convert_datetime_to_epoch( + current_localtime_with_other_tz + ) + assert ret[19] == dt + assert ret[20] == tm + assert convert_datetime_to_epoch(ret[21]) == calendar.timegm(struct_time_v) + assert ( + timedelta( + seconds=ret[22].hour * 3600 + ret[22].minute * 60 + ret[22].second, + microseconds=ret[22].microsecond, + ) + == tdelta + ) + assert ret[23] is None + assert ret[24] == "" + assert ret[25] == ',an\\\\escaped"line\n' + finally: + with conn_cnx() as cnx: + cnx.cursor().execute( + """ +drop table if exists {name} +""".format( + name=db_parameters["name"] + ) + ) + + +def test_pendulum_binding(conn_cnx, db_parameters): + pendulum_test = pendulum.now() + try: + with conn_cnx() as cnx: + cnx.cursor().execute( + """ + create or replace table {name} ( + c1 timestamp + ) + """.format( + name=db_parameters["name"] + ) + ) + c = cnx.cursor() + fmt = "insert into {name}(c1) values(%(v1)s)".format( + name=db_parameters["name"] + ) + c.execute(fmt, {"v1": pendulum_test}) + assert ( + len( + cnx.cursor() + .execute( + "select count(*) from {name}".format(name=db_parameters["name"]) + ) + .fetchall() + ) + == 1 + ) + with conn_cnx(paramstyle="qmark") as cnx: + cnx.cursor().execute( + """ + create or replace table {name} (c1 timestamp, c2 timestamp) + """.format( + name=db_parameters["name"] + ) + ) + with conn_cnx(paramstyle="qmark") as cnx: + cnx.cursor().execute( + """ + insert into {name} values(?, ?) + """.format( + name=db_parameters["name"] + ), + (pendulum_test, pendulum_test), + ) + ret = ( + cnx.cursor() + .execute( + """ + select * from {name} + """.format( + name=db_parameters["name"] + ) + ) + .fetchone() + ) + assert convert_datetime_to_epoch(ret[0]) == convert_datetime_to_epoch( + pendulum_test + ) + finally: + with conn_cnx() as cnx: + cnx.cursor().execute( + """ + drop table if exists {name} + """.format( + name=db_parameters["name"] + ) + ) + + +def test_binding_with_numeric(conn_cnx, db_parameters): + """Paramstyle numeric tests. 
Both qmark and numeric leverage server-side bindings.""" + with conn_cnx(paramstyle="numeric") as cnx: + cnx.cursor().execute( + """ +create or replace table {name} (c1 integer, c2 string) +""".format( + name=db_parameters["name"] + ) + ) + + try: + with conn_cnx(paramstyle="numeric") as cnx: + cnx.cursor().execute( + """ +insert into {name}(c1, c2) values(:2, :1) + """.format( + name=db_parameters["name"] + ), + ("str1", 123), + ) + cnx.cursor().execute( + """ +insert into {name}(c1, c2) values(:2, :1) + """.format( + name=db_parameters["name"] + ), + ("str2", 456), + ) + # numeric and qmark can be used in the same session + rec = ( + cnx.cursor() + .execute( + """ +select * from {name} where c1=? +""".format( + name=db_parameters["name"] + ), + (123,), + ) + .fetchall() + ) + assert len(rec) == 1 + assert rec[0][0] == 123 + assert rec[0][1] == "str1" + finally: + with conn_cnx() as cnx: + cnx.cursor().execute( + """ +drop table if exists {name} +""".format( + name=db_parameters["name"] + ) + ) + + +def test_binding_timestamps(conn_cnx, db_parameters): + """Binding a datetime object with TIMESTAMP_LTZ. + + The value is bound as TIMESTAMP_NTZ, but since it is converted to UTC in the backend, + the returned value must be ???. + """ + with conn_cnx() as cnx: + cnx.cursor().execute( + """ +create or replace table {name} ( + c1 integer, + c2 timestamp_ltz) +""".format( + name=db_parameters["name"] + ) + ) + + try: + with conn_cnx(paramstyle="numeric", timezone=PST_TZ) as cnx: + current_localtime = datetime.now() + cnx.cursor().execute( + """ +insert into {name}(c1, c2) values(:1, :2) + """.format( + name=db_parameters["name"] + ), + (123, ("TIMESTAMP_LTZ", current_localtime)), + ) + rec = ( + cnx.cursor() + .execute( + """ +select * from {name} where c1=?
+ """.format( + name=db_parameters["name"] + ), + (123,), + ) + .fetchall() + ) + assert len(rec) == 1 + assert rec[0][0] == 123 + assert convert_datetime_to_epoch(rec[0][1]) == convert_datetime_to_epoch( + current_localtime + ) + finally: + with conn_cnx() as cnx: + cnx.cursor().execute( + """ +drop table if exists {name} +""".format( + name=db_parameters["name"] + ) + ) + + +@pytest.mark.parametrize( + "num_rows", [pytest.param(100000, marks=pytest.mark.skipolddriver), 4] +) +def test_binding_bulk_insert(conn_cnx, db_parameters, num_rows): + """Bulk insert test.""" + with conn_cnx() as cnx: + cnx.cursor().execute( + """ +create or replace table {name} ( + c1 integer, + c2 string +) +""".format( + name=db_parameters["name"] + ) + ) + try: + with conn_cnx(paramstyle="qmark") as cnx: + c = cnx.cursor() + fmt = "insert into {name}(c1,c2) values(?,?)".format( + name=db_parameters["name"] + ) + c.executemany(fmt, [(idx, f"test{idx}") for idx in range(num_rows)]) + assert c.rowcount == num_rows + finally: + with conn_cnx() as cnx: + cnx.cursor().execute( + """ +drop table if exists {name} +""".format( + name=db_parameters["name"] + ) + ) + + +@pytest.mark.skipolddriver +def test_bulk_insert_binding_fallback(conn_cnx): + """When stage creation fails, bulk inserts falls back to server side binding and disables stage optimization.""" + with conn_cnx(paramstyle="qmark") as cnx, cnx.cursor() as csr: + query = f"insert into {random_string(5)}(c1,c2) values(?,?)" + cnx._session_parameters[CLIENT_STAGE_ARRAY_BINDING_THRESHOLD] = 1 + with patch.object(csr, "_execute_helper") as mocked_execute_helper, patch( + "snowflake.connector.cursor.BindUploadAgent._create_stage" + ) as mocked_stage_creation: + mocked_stage_creation.side_effect = ForbiddenError + csr.executemany(query, [(idx, f"test{idx}") for idx in range(4)]) + mocked_stage_creation.assert_called_once() + mocked_execute_helper.assert_called_once() + assert ( + "binding_stage" not in mocked_execute_helper.call_args[1] + ), "Stage binding should fail" + assert ( + "binding_params" in mocked_execute_helper.call_args[1] + ), "Should fall back to server side binding" + assert cnx._session_parameters[CLIENT_STAGE_ARRAY_BINDING_THRESHOLD] == 0 + + +def test_binding_bulk_update(conn_cnx, db_parameters): + """Bulk update test. + + Notes: + UPDATE,MERGE and DELETE are not supported for actual bulk operation + but executemany accepts the multiple rows and iterate DMLs. 
+ """ + with conn_cnx() as cnx: + cnx.cursor().execute( + """ +create or replace table {name} ( + c1 integer, + c2 string +) +""".format( + name=db_parameters["name"] + ) + ) + try: + with conn_cnx(paramstyle="qmark") as cnx: + # short list + c = cnx.cursor() + fmt = "insert into {name}(c1,c2) values(?,?)".format( + name=db_parameters["name"] + ) + c.executemany( + fmt, + [ + (1, "test1"), + (2, "test2"), + (3, "test3"), + (4, "test4"), + ], + ) + assert c.rowcount == 4 + + fmt = "update {name} set c2=:2 where c1=:1".format( + name=db_parameters["name"] + ) + c.executemany( + fmt, + [ + (1, "test5"), + (2, "test6"), + ], + ) + assert c.rowcount == 2 + + fmt = "select * from {name} where c1=?".format(name=db_parameters["name"]) + rec = cnx.cursor().execute(fmt, (1,)).fetchall() + assert rec[0][0] == 1 + assert rec[0][1] == "test5" + + finally: + with conn_cnx() as cnx: + cnx.cursor().execute( + """ +drop table if exists {name} +""".format( + name=db_parameters["name"] + ) + ) + + +def test_binding_identifier(conn_cnx, db_parameters): + """Binding a table name.""" + try: + with conn_cnx(paramstyle="qmark") as cnx: + data = "test" + cnx.cursor().execute( + """ +create or replace table identifier(?) (c1 string) +""", + (db_parameters["name"],), + ) + with conn_cnx(paramstyle="qmark") as cnx: + cnx.cursor().execute( + """ +insert into identifier(?) values(?) +""", + (db_parameters["name"], data), + ) + ret = ( + cnx.cursor() + .execute( + """ +select * from identifier(?) +""", + (db_parameters["name"],), + ) + .fetchall() + ) + assert len(ret) == 1 + assert ret[0][0] == data + finally: + with conn_cnx(paramstyle="qmark") as cnx: + cnx.cursor().execute( + """ +drop table if exists identifier(?) +""", + (db_parameters["name"],), + ) diff --git a/test/integ/test_boolean.py b/test/integ/test_boolean.py new file mode 100644 index 000000000..e3e0674bc --- /dev/null +++ b/test/integ/test_boolean.py @@ -0,0 +1,84 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
+# + +from __future__ import annotations + + +def test_binding_fetching_boolean(conn_cnx, db_parameters): + try: + with conn_cnx() as cnx: + cnx.cursor().execute( + """ +create or replace table {name} (c1 boolean, c2 integer) +""".format( + name=db_parameters["name"] + ) + ) + + with conn_cnx() as cnx: + cnx.cursor().execute( + """ +insert into {name} values(%s,%s), (%s,%s), (%s,%s) +""".format( + name=db_parameters["name"] + ), + (True, 1, False, 2, True, 3), + ) + results = ( + cnx.cursor() + .execute( + """ +select * from {name} order by 1""".format( + name=db_parameters["name"] + ) + ) + .fetchall() + ) + assert not results[0][0] + assert results[1][0] + assert results[2][0] + results = ( + cnx.cursor() + .execute( + """ +select c1 from {name} where c2=2 +""".format( + name=db_parameters["name"] + ) + ) + .fetchall() + ) + assert not results[0][0] + + # SNOW-15905: boolean support + results = ( + cnx.cursor() + .execute( + """ +SELECT CASE WHEN (null LIKE trim(null)) THEN null ELSE null END +""" + ) + .fetchall() + ) + assert not results[0][0] + + finally: + with conn_cnx() as cnx: + cnx.cursor().execute( + """ +drop table if exists {name} +""".format( + name=db_parameters["name"] + ) + ) + + +def test_boolean_from_compiler(conn_cnx): + with conn_cnx() as cnx: + ret = cnx.cursor().execute("SELECT true").fetchone() + assert ret[0] + + ret = cnx.cursor().execute("SELECT false").fetchone() + assert not ret[0] diff --git a/test/test_client_session_keep_alive.py b/test/integ/test_client_session_keep_alive.py similarity index 60% rename from test/test_client_session_keep_alive.py rename to test/integ/test_client_session_keep_alive.py index 122ab7962..b1b9b4ebb 100644 --- a/test/test_client_session_keep_alive.py +++ b/test/integ/test_client_session_keep_alive.py @@ -1,24 +1,24 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. # -# This test requires the Snowflake admin connection parameters. -import pytest +from __future__ import annotations + import time +import pytest + import snowflake.connector -from snowflake.connector.auth import delete_temporary_credential_file try: - from parameters import (CONNECTION_PARAMETERS) -except: + from parameters import CONNECTION_PARAMETERS +except ImportError: CONNECTION_PARAMETERS = {} try: - from parameters import (CONNECTION_PARAMETERS_ADMIN) -except: + from parameters import CONNECTION_PARAMETERS_ADMIN +except ImportError: CONNECTION_PARAMETERS_ADMIN = {} @@ -26,20 +26,24 @@ def token_validity_test_values(request): with snowflake.connector.connect(**CONNECTION_PARAMETERS_ADMIN) as cnx: print("[INFO] Setting token validity to test values") - cnx.cursor().execute(""" -ALTER SYSTEM SET + cnx.cursor().execute( + """ +ALTER SYSTEM SET MASTER_TOKEN_VALIDITY=30, SESSION_TOKEN_VALIDITY=10 -""") +""" + ) def fin(): with snowflake.connector.connect(**CONNECTION_PARAMETERS_ADMIN) as cnx: print("[INFO] Reverting token validity") - cnx.cursor().execute(""" -ALTER SYSTEM SET + cnx.cursor().execute( + """ +ALTER SYSTEM SET MASTER_TOKEN_VALIDITY=default, SESSION_TOKEN_VALIDITY=default -""") +""" + ) request.addfinalizer(fin) return None @@ -47,22 +51,26 @@ def fin(): @pytest.mark.skipif( not (CONNECTION_PARAMETERS_ADMIN), - reason="ADMIN connection parameters must be provided." 
+ reason="ADMIN connection parameters must be provided.", ) def test_client_session_keep_alive(token_validity_test_values): test_connection_parameters = CONNECTION_PARAMETERS.copy() print("[INFO] Connected") - test_connection_parameters['client_session_keep_alive'] = True + test_connection_parameters["client_session_keep_alive"] = True with snowflake.connector.connect(**test_connection_parameters) as con: print("[INFO] Running a query. Ensuring a connection is valid.") con.cursor().execute("select 1") print("[INFO] Sleeping 15s") time.sleep(15) - print("[INFO] Running a query. Both master and session tokens must " - "have been renewed by token request") + print( + "[INFO] Running a query. Both master and session tokens must " + "have been renewed by token request" + ) con.cursor().execute("select 1") print("[INFO] Sleeping 40s") time.sleep(40) - print("[INFO] Running a query. Master token must have been renewed " - "by the heartbeat") + print( + "[INFO] Running a query. Master token must have been renewed " + "by the heartbeat" + ) con.cursor().execute("select 1") diff --git a/test/integ/test_concurrent_create_objects.py b/test/integ/test_concurrent_create_objects.py new file mode 100644 index 000000000..7058b610c --- /dev/null +++ b/test/integ/test_concurrent_create_objects.py @@ -0,0 +1,149 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +from concurrent.futures.thread import ThreadPoolExecutor +from logging import getLogger + +import pytest + +from snowflake.connector import ProgrammingError + +try: + from parameters import CONNECTION_PARAMETERS_ADMIN +except ImportError: + CONNECTION_PARAMETERS_ADMIN = {} + +logger = getLogger(__name__) + + +@pytest.mark.skipif( + not CONNECTION_PARAMETERS_ADMIN, reason="Snowflake admin account is not accessible." 
+) +def test_snow5871(conn_cnx, db_parameters): + _test_snow5871( + conn_cnx, + db_parameters, + number_of_threads=5, + rt_max_outgoing_rate=60, + rt_max_burst_size=5, + rt_max_borrowing_limit=1000, + rt_reset_period=10000, + ) + + _test_snow5871( + conn_cnx, + db_parameters, + number_of_threads=40, + rt_max_outgoing_rate=60, + rt_max_burst_size=1, + rt_max_borrowing_limit=200, + rt_reset_period=1000, + ) + + +def _create_a_table(meta): + cnx = meta["cnx"] + name = meta["name"] + try: + cnx.cursor().execute( + """ +create table {} (aa int) + """.format( + name + ) + ) + # print("Success #" + meta['idx']) + return {"success": True} + except ProgrammingError: + logger.exception("Failed to create a table") + return {"success": False} + + +def _test_snow5871( + conn_cnx, + db_parameters, + number_of_threads=10, + rt_max_outgoing_rate=60, + rt_max_burst_size=1, + rt_max_borrowing_limit=1000, + rt_reset_period=10000, +): + """SNOW-5871: rate limiting for creation of non-recyclable objects.""" + logger.debug( + ( + "number_of_threads = %s, rt_max_outgoing_rate = %s, " + "rt_max_burst_size = %s, rt_max_borrowing_limit = %s, " + "rt_reset_period = %s" + ), + number_of_threads, + rt_max_outgoing_rate, + rt_max_burst_size, + rt_max_borrowing_limit, + rt_reset_period, + ) + with conn_cnx( + user=db_parameters["sf_user"], + password=db_parameters["sf_password"], + account=db_parameters["sf_account"], + ) as cnx: + cnx.cursor().execute( + """ +alter system set + RT_MAX_OUTGOING_RATE={}, + RT_MAX_BURST_SIZE={}, + RT_MAX_BORROWING_LIMIT={}, + RT_RESET_PERIOD={}""".format( + rt_max_outgoing_rate, + rt_max_burst_size, + rt_max_borrowing_limit, + rt_reset_period, + ) + ) + + try: + with conn_cnx() as cnx: + cnx.cursor().execute( + "create or replace database {name}_db".format( + name=db_parameters["name"] + ) + ) + meta = [] + for i in range(number_of_threads): + meta.append( + { + "idx": str(i + 1), + "cnx": cnx, + "name": db_parameters["name"] + "tbl_5871_" + str(i + 1), + } + ) + pool = ThreadPoolExecutor(number_of_threads) + results = list(pool.map(_create_a_table, meta)) + success = 0 + for r in results: + success += 1 if r["success"] else 0 + + # at least one should succeed + assert success >= 1, "at least one query should succeed" + finally: + with conn_cnx() as cnx: + cnx.cursor().execute( + "drop database if exists {name}_db".format(name=db_parameters["name"]) + ) + + with conn_cnx( + user=db_parameters["sf_user"], + password=db_parameters["sf_password"], + account=db_parameters["sf_account"], + ) as cnx: + cnx.cursor().execute( + """ +alter system set + RT_MAX_OUTGOING_RATE=default, + RT_MAX_BURST_SIZE=default, + RT_RESET_PERIOD=default, + RT_MAX_BORROWING_LIMIT=default""" + ) diff --git a/test/integ/test_concurrent_insert.py b/test/integ/test_concurrent_insert.py new file mode 100644 index 000000000..4ce639df4 --- /dev/null +++ b/test/integ/test_concurrent_insert.py @@ -0,0 +1,196 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved.
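+# The tests below fan work out over a thread pool and count successful workers, +# roughly as follows (names refer to the helpers defined in this file): +# +# pool = ThreadPoolExecutor(number_of_threads) +# results = list(pool.map(_concurrent_insert, cnx_array)) +# pool.shutdown() +# success = sum(1 for r in results if r["success"])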
+# + +from __future__ import annotations + +from concurrent.futures.thread import ThreadPoolExecutor +from logging import getLogger + +import pytest + +import snowflake.connector +from snowflake.connector.errors import ProgrammingError + +try: + from parameters import CONNECTION_PARAMETERS_ADMIN +except ImportError: + CONNECTION_PARAMETERS_ADMIN = {} + +logger = getLogger(__name__) + + +def _concurrent_insert(meta): + """Concurrent insert method.""" + cnx = snowflake.connector.connect( + user=meta["user"], + password=meta["password"], + host=meta["host"], + port=meta["port"], + account=meta["account"], + database=meta["database"], + schema=meta["schema"], + timezone="UTC", + protocol="http", + ) + try: + # Bind the table name first so the except handler below can always reference it + table = meta["table"] + cnx.cursor().execute("use warehouse {}".format(meta["warehouse"])) + sql = f"insert into {table} values(%(c1)s, %(c2)s)" + logger.debug(sql) + cnx.cursor().execute( + sql, + { + "c1": meta["idx"], + "c2": "test string " + meta["idx"], + }, + ) + meta["success"] = True + logger.debug("Succeeded process #%s", meta["idx"]) + except Exception: + logger.exception("failed to insert into a table [%s]", table) + meta["success"] = False + finally: + cnx.close() + return meta + + +@pytest.mark.skipif( + not CONNECTION_PARAMETERS_ADMIN, + reason="The user needs the CREATE WAREHOUSE privilege.", +) +def test_concurrent_insert(conn_cnx, db_parameters): + """Concurrent insert tests. Each insert blocks on the one currently running.""" + number_of_threads = 22 # change this to increase the concurrency + expected_success_runs = number_of_threads - 1 + cnx_array = [] + + try: + with conn_cnx() as cnx: + cnx.cursor().execute( + """ +create or replace warehouse {} +warehouse_type=standard +warehouse_size=small +""".format( + db_parameters["name_wh"] + ) + ) + sql = """ +create or replace table {name} (c1 integer, c2 string) +""".format( + name=db_parameters["name"] + ) + cnx.cursor().execute(sql) + for i in range(number_of_threads): + cnx_array.append( + { + "host": db_parameters["host"], + "port": db_parameters["port"], + "user": db_parameters["user"], + "password": db_parameters["password"], + "account": db_parameters["account"], + "database": db_parameters["database"], + "schema": db_parameters["schema"], + "table": db_parameters["name"], + "idx": str(i), + "warehouse": db_parameters["name_wh"], + } + ) + + pool = ThreadPoolExecutor(number_of_threads) + results = list(pool.map(_concurrent_insert, cnx_array)) + pool.shutdown() + success = 0 + for record in results: + success += 1 if record["success"] else 0 + + # 21 threads or more must succeed + assert success >= expected_success_runs, "Number of successful runs" + + c = cnx.cursor() + sql = "select * from {name} order by 1".format(name=db_parameters["name"]) + c.execute(sql) + for rec in c: + logger.debug(rec) + c.close() + + finally: + with conn_cnx() as cnx: + cnx.cursor().execute( + "drop table if exists {}".format(db_parameters["name"]) + ) + cnx.cursor().execute( + "drop warehouse if exists {}".format(db_parameters["name_wh"]) + ) + + +def _concurrent_insert_using_connection(meta): + connection = meta["connection"] + idx = meta["idx"] + name = meta["name"] + try: + connection.cursor().execute( + f"INSERT INTO {name} VALUES(%s, %s)", + (idx, f"test string{idx}"), + ) + except ProgrammingError as e: + if e.errno != 619: # SQL Execution Canceled + raise + + +@pytest.mark.skipif( + not CONNECTION_PARAMETERS_ADMIN, + reason="The user needs the CREATE WAREHOUSE privilege.", +) +def test_concurrent_insert_using_connection(conn_cnx, db_parameters):
"""Concurrent insert tests using the same connection.""" + try: + with conn_cnx() as cnx: + cnx.cursor().execute( + """ +create or replace warehouse {} +warehouse_type=standard +warehouse_size=small +""".format( + db_parameters["name_wh"] + ) + ) + cnx.cursor().execute( + """ +CREATE OR REPLACE TABLE {name} (c1 INTEGER, c2 STRING) +""".format( + name=db_parameters["name"] + ) + ) + number_of_threads = 5 + metas = [] + for i in range(number_of_threads): + metas.append( + { + "connection": cnx, + "idx": i, + "name": db_parameters["name"], + } + ) + pool = ThreadPoolExecutor(number_of_threads) + pool.map(_concurrent_insert_using_connection, metas) + pool.shutdown() + cnt = 0 + for _ in cnx.cursor().execute( + "SELECT * FROM {name} ORDER BY 1".format(name=db_parameters["name"]) + ): + cnt += 1 + assert ( + cnt <= number_of_threads + ), "Number of records should be less than the number of threads" + assert cnt > 0, "Number of records should be one or more number of threads" + finally: + with conn_cnx() as cnx: + cnx.cursor().execute( + "drop table if exists {}".format(db_parameters["name"]) + ) + cnx.cursor().execute( + "drop warehouse if exists {}".format(db_parameters["name_wh"]) + ) diff --git a/test/integ/test_connection.py b/test/integ/test_connection.py new file mode 100644 index 000000000..5081d8132 --- /dev/null +++ b/test/integ/test_connection.py @@ -0,0 +1,1094 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +import gc +import logging +import os +import queue +import threading +import warnings +import weakref +from unittest import mock +from uuid import uuid4 + +import pytest + +import snowflake.connector +from snowflake.connector import DatabaseError, OperationalError, ProgrammingError +from snowflake.connector.auth_okta import AuthByOkta +from snowflake.connector.connection import ( + DEFAULT_CLIENT_PREFETCH_THREADS, + SnowflakeConnection, +) +from snowflake.connector.description import CLIENT_NAME +from snowflake.connector.errorcode import ( + ER_CONNECTION_IS_CLOSED, + ER_FAILED_PROCESSING_PYFORMAT, + ER_INVALID_VALUE, + ER_NO_ACCOUNT_NAME, + ER_NOT_IMPLICITY_SNOWFLAKE_DATATYPE, +) +from snowflake.connector.errors import Error, ForbiddenError +from snowflake.connector.network import APPLICATION_SNOWSQL, ReauthenticationRequest +from snowflake.connector.sqlstate import SQLSTATE_FEATURE_NOT_SUPPORTED + +try: # pragma: no cover + from parameters import CONNECTION_PARAMETERS_ADMIN +except ImportError: + CONNECTION_PARAMETERS_ADMIN = {} + +try: + from snowflake.connector.errorcode import ER_FAILED_PROCESSING_QMARK +except ImportError: # Keep olddrivertest from breaking + ER_FAILED_PROCESSING_QMARK = 252012 + + +def test_basic(conn_testaccount): + """Basic Connection test.""" + assert conn_testaccount, "invalid cnx" + # Test default values + assert conn_testaccount.session_id + + +def test_connection_without_schema(db_parameters): + """Basic Connection test without schema.""" + cnx = snowflake.connector.connect( + user=db_parameters["user"], + password=db_parameters["password"], + host=db_parameters["host"], + port=db_parameters["port"], + account=db_parameters["account"], + database=db_parameters["database"], + protocol=db_parameters["protocol"], + timezone="UTC", + ) + assert cnx, "invalid cnx" + cnx.close() + + +def test_connection_without_database_schema(db_parameters): + """Basic Connection test without database and schema.""" + cnx = snowflake.connector.connect( + 
user=db_parameters["user"], + password=db_parameters["password"], + host=db_parameters["host"], + port=db_parameters["port"], + account=db_parameters["account"], + protocol=db_parameters["protocol"], + timezone="UTC", + ) + assert cnx, "invalid cnx" + cnx.close() + + +def test_connection_without_database2(db_parameters): + """Basic Connection test without database.""" + cnx = snowflake.connector.connect( + user=db_parameters["user"], + password=db_parameters["password"], + host=db_parameters["host"], + port=db_parameters["port"], + account=db_parameters["account"], + schema=db_parameters["schema"], + protocol=db_parameters["protocol"], + timezone="UTC", + ) + assert cnx, "invalid cnx" + cnx.close() + + +def test_with_config(db_parameters): + """Creates a connection with the config parameter.""" + config = { + "user": db_parameters["user"], + "password": db_parameters["password"], + "host": db_parameters["host"], + "port": db_parameters["port"], + "account": db_parameters["account"], + "schema": db_parameters["schema"], + "database": db_parameters["database"], + "protocol": db_parameters["protocol"], + "timezone": "UTC", + } + cnx = snowflake.connector.connect(**config) + try: + assert cnx, "invalid cnx" + assert not cnx.client_session_keep_alive # default is False + finally: + cnx.close() + + +def test_keep_alive_true(db_parameters): + """Creates a connection with client_session_keep_alive parameter.""" + config = { + "user": db_parameters["user"], + "password": db_parameters["password"], + "host": db_parameters["host"], + "port": db_parameters["port"], + "account": db_parameters["account"], + "schema": db_parameters["schema"], + "database": db_parameters["database"], + "protocol": db_parameters["protocol"], + "timezone": "UTC", + "client_session_keep_alive": True, + } + cnx = snowflake.connector.connect(**config) + try: + assert cnx.client_session_keep_alive + finally: + cnx.close() + + +def test_keep_alive_heartbeat_frequency(db_parameters): + """Tests heartbeat setting. + + Creates a connection with client_session_keep_alive_heartbeat_frequency + parameter. + """ + config = { + "user": db_parameters["user"], + "password": db_parameters["password"], + "host": db_parameters["host"], + "port": db_parameters["port"], + "account": db_parameters["account"], + "schema": db_parameters["schema"], + "database": db_parameters["database"], + "protocol": db_parameters["protocol"], + "timezone": "UTC", + "client_session_keep_alive": True, + "client_session_keep_alive_heartbeat_frequency": 1000, + } + cnx = snowflake.connector.connect(**config) + try: + assert cnx.client_session_keep_alive_heartbeat_frequency == 1000 + finally: + cnx.close() + + +def test_keep_alive_heartbeat_frequency_min(db_parameters): + """Tests heartbeat setting with custom frequency. + + Creates a connection with client_session_keep_alive_heartbeat_frequency parameter and set the minimum frequency. 
+ """ + config = { + "user": db_parameters["user"], + "password": db_parameters["password"], + "host": db_parameters["host"], + "port": db_parameters["port"], + "account": db_parameters["account"], + "schema": db_parameters["schema"], + "database": db_parameters["database"], + "protocol": db_parameters["protocol"], + "timezone": "UTC", + "client_session_keep_alive": True, + "client_session_keep_alive_heartbeat_frequency": 10, + } + cnx = snowflake.connector.connect(**config) + try: + # The min value of client_session_keep_alive_heartbeat_frequency + # is 1/16 of master token validity, so 14400 / 4 /4 => 900 + assert cnx.client_session_keep_alive_heartbeat_frequency == 900 + finally: + cnx.close() + + +def test_bad_db(db_parameters): + """Attempts to use a bad DB.""" + cnx = snowflake.connector.connect( + user=db_parameters["user"], + password=db_parameters["password"], + host=db_parameters["host"], + port=db_parameters["port"], + account=db_parameters["account"], + protocol=db_parameters["protocol"], + database="baddb", + ) + assert cnx, "invald cnx" + cnx.close() + + +def test_bogus(db_parameters): + """Attempts to login with invalid user name and password. + + Notes: + This takes a long time. + """ + with pytest.raises(DatabaseError): + snowflake.connector.connect( + protocol="http", + user="bogus", + password="bogus", + host=db_parameters["host"], + port=db_parameters["port"], + account=db_parameters["account"], + login_timeout=5, + ) + + with pytest.raises(DatabaseError): + snowflake.connector.connect( + protocol="http", + user="bogus", + password="bogus", + account="testaccount123", + host=db_parameters["host"], + port=db_parameters["port"], + login_timeout=5, + insecure_mode=True, + ) + + with pytest.raises(DatabaseError): + snowflake.connector.connect( + protocol="http", + user="snowman", + password="", + account="testaccount123", + host=db_parameters["host"], + port=db_parameters["port"], + login_timeout=5, + ) + + with pytest.raises(ProgrammingError): + snowflake.connector.connect( + protocol="http", + user="", + password="password", + account="testaccount123", + host=db_parameters["host"], + port=db_parameters["port"], + login_timeout=5, + ) + + +def test_invalid_application(db_parameters): + """Invalid application name.""" + with pytest.raises(snowflake.connector.Error): + snowflake.connector.connect( + protocol=db_parameters["protocol"], + user=db_parameters["user"], + password=db_parameters["password"], + application="%%%", + ) + + +def test_valid_application(db_parameters): + """Valid application name.""" + application = "Special_Client" + cnx = snowflake.connector.connect( + user=db_parameters["user"], + password=db_parameters["password"], + host=db_parameters["host"], + port=db_parameters["port"], + account=db_parameters["account"], + application=application, + protocol=db_parameters["protocol"], + ) + assert cnx.application == application, "Must be valid application" + cnx.close() + + +def test_invalid_default_parameters(db_parameters): + """Invalid database, schema, warehouse and role name.""" + cnx = snowflake.connector.connect( + user=db_parameters["user"], + password=db_parameters["password"], + host=db_parameters["host"], + port=db_parameters["port"], + account=db_parameters["account"], + protocol=db_parameters["protocol"], + database="neverexists", + schema="neverexists", + warehouse="neverexits", + ) + assert cnx, "Must be success" + + with pytest.raises(snowflake.connector.DatabaseError): + # must not success + snowflake.connector.connect( + 
user=db_parameters["user"], + password=db_parameters["password"], + host=db_parameters["host"], + port=db_parameters["port"], + account=db_parameters["account"], + protocol=db_parameters["protocol"], + database="neverexists", + schema="neverexists", + validate_default_parameters=True, + ) + + with pytest.raises(snowflake.connector.DatabaseError): + # must not success + snowflake.connector.connect( + user=db_parameters["user"], + password=db_parameters["password"], + host=db_parameters["host"], + port=db_parameters["port"], + account=db_parameters["account"], + protocol=db_parameters["protocol"], + database=db_parameters["database"], + schema="neverexists", + validate_default_parameters=True, + ) + + with pytest.raises(snowflake.connector.DatabaseError): + # must not success + snowflake.connector.connect( + user=db_parameters["user"], + password=db_parameters["password"], + host=db_parameters["host"], + port=db_parameters["port"], + account=db_parameters["account"], + protocol=db_parameters["protocol"], + database=db_parameters["database"], + schema=db_parameters["schema"], + warehouse="neverexists", + validate_default_parameters=True, + ) + + # Invalid role name is already validated + with pytest.raises(snowflake.connector.DatabaseError): + # must not success + snowflake.connector.connect( + user=db_parameters["user"], + password=db_parameters["password"], + host=db_parameters["host"], + port=db_parameters["port"], + account=db_parameters["account"], + protocol=db_parameters["protocol"], + database=db_parameters["database"], + schema=db_parameters["schema"], + role="neverexists", + ) + + +@pytest.mark.skipif( + not CONNECTION_PARAMETERS_ADMIN, + reason="The user needs a privilege of create warehouse.", +) +def test_drop_create_user(conn_cnx, db_parameters): + """Drops and creates user.""" + with conn_cnx() as cnx: + + def exe(sql): + return cnx.cursor().execute(sql) + + exe("use role accountadmin") + exe("drop user if exists snowdog") + exe("create user if not exists snowdog identified by 'testdoc'") + exe("use {}".format(db_parameters["database"])) + exe("create or replace role snowdog_role") + exe("grant role snowdog_role to user snowdog") + exe( + "grant all on database {} to role snowdog_role".format( + db_parameters["database"] + ) + ) + exe( + "grant all on schema {} to role snowdog_role".format( + db_parameters["schema"] + ) + ) + + with conn_cnx(user="snowdog", password="testdoc") as cnx2: + + def exe(sql): + return cnx2.cursor().execute(sql) + + exe("use role snowdog_role") + exe("use {}".format(db_parameters["database"])) + exe("use schema {}".format(db_parameters["schema"])) + exe("create or replace table friends(name varchar(100))") + exe("drop table friends") + with conn_cnx() as cnx: + + def exe(sql): + return cnx.cursor().execute(sql) + + exe("use role accountadmin") + exe( + "revoke all on database {} from role snowdog_role".format( + db_parameters["database"] + ) + ) + exe("drop role snowdog_role") + exe("drop user if exists snowdog") + + +@pytest.mark.timeout(15) +def test_invalid_account_timeout(): + with pytest.raises(ForbiddenError): + snowflake.connector.connect( + account="bogus", user="test", password="test", login_timeout=5 + ) + + +@pytest.mark.timeout(15) +def test_invalid_proxy(db_parameters): + with pytest.raises(OperationalError): + snowflake.connector.connect( + protocol="http", + account="testaccount", + user=db_parameters["user"], + password=db_parameters["password"], + host=db_parameters["host"], + port=db_parameters["port"], + login_timeout=5, + 
proxy_host="localhost", + proxy_port="3333", + ) + # NOTE environment variable is set if the proxy parameter is specified. + del os.environ["HTTP_PROXY"] + del os.environ["HTTPS_PROXY"] + + +@pytest.mark.timeout(15) +def test_eu_connection(tmpdir): + """Tests setting custom region. + + If region is specified to eu-central-1, the URL should become + https://testaccount1234.eu-central-1.snowflakecomputing.com/ . + + Notes: + Region is deprecated. + """ + import os + + os.environ["SF_OCSP_RESPONSE_CACHE_SERVER_ENABLED"] = "true" + with pytest.raises(ForbiddenError): + # must reach Snowflake + snowflake.connector.connect( + account="testaccount1234", + user="testuser", + password="testpassword", + region="eu-central-1", + login_timeout=5, + ocsp_response_cache_filename=os.path.join( + str(tmpdir), "test_ocsp_cache.txt" + ), + ) + + +def test_us_west_connection(tmpdir): + """Tests default region setting. + + Region='us-west-2' indicates no region is included in the hostname, i.e., + https://testaccount1234.snowflakecomputing.com. + + Notes: + Region is deprecated. + """ + with pytest.raises(ForbiddenError): + # must reach Snowflake + snowflake.connector.connect( + account="testaccount1234", + user="testuser", + password="testpassword", + region="us-west-2", + login_timeout=5, + ) + + +@pytest.mark.timeout(60) +def test_privatelink(db_parameters): + """Ensure the OCSP cache server URL is overridden if privatelink connection is used.""" + try: + os.environ["SF_OCSP_FAIL_OPEN"] = "false" + os.environ["SF_OCSP_DO_RETRY"] = "false" + snowflake.connector.connect( + account="testaccount", + user="testuser", + password="testpassword", + region="eu-central-1.privatelink", + login_timeout=5, + ) + pytest.fail("should not make connection") + except OperationalError: + ocsp_url = os.getenv("SF_OCSP_RESPONSE_CACHE_SERVER_URL") + assert ocsp_url is not None, "OCSP URL should not be None" + assert ( + ocsp_url == "http://ocsp.testaccount.eu-central-1." 
+ "privatelink.snowflakecomputing.com/" + "ocsp_response_cache.json" + ) + + cnx = snowflake.connector.connect( + user=db_parameters["user"], + password=db_parameters["password"], + host=db_parameters["host"], + port=db_parameters["port"], + account=db_parameters["account"], + database=db_parameters["database"], + protocol=db_parameters["protocol"], + timezone="UTC", + ) + assert cnx, "invalid cnx" + + ocsp_url = os.getenv("SF_OCSP_RESPONSE_CACHE_SERVER_URL") + assert ocsp_url is None, f"OCSP URL should be None: {ocsp_url}" + del os.environ["SF_OCSP_DO_RETRY"] + del os.environ["SF_OCSP_FAIL_OPEN"] + + +def test_disable_request_pooling(db_parameters): + """Creates a connection with client_session_keep_alive parameter.""" + config = { + "user": db_parameters["user"], + "password": db_parameters["password"], + "host": db_parameters["host"], + "port": db_parameters["port"], + "account": db_parameters["account"], + "schema": db_parameters["schema"], + "database": db_parameters["database"], + "protocol": db_parameters["protocol"], + "timezone": "UTC", + "disable_request_pooling": True, + } + cnx = snowflake.connector.connect(**config) + try: + assert cnx.disable_request_pooling + finally: + cnx.close() + + +def test_privatelink_ocsp_url_creation(): + hostname = "testaccount.us-east-1.privatelink.snowflakecomputing.com" + SnowflakeConnection.setup_ocsp_privatelink(APPLICATION_SNOWSQL, hostname) + + ocsp_cache_server = os.getenv("SF_OCSP_RESPONSE_CACHE_SERVER_URL", None) + assert ( + ocsp_cache_server + == "http://ocsp.testaccount.us-east-1.privatelink.snowflakecomputing.com/ocsp_response_cache.json" + ) + + del os.environ["SF_OCSP_RESPONSE_CACHE_SERVER_URL"] + + SnowflakeConnection.setup_ocsp_privatelink(CLIENT_NAME, hostname) + ocsp_cache_server = os.getenv("SF_OCSP_RESPONSE_CACHE_SERVER_URL", None) + assert ( + ocsp_cache_server + == "http://ocsp.testaccount.us-east-1.privatelink.snowflakecomputing.com/ocsp_response_cache.json" + ) + + +def test_privatelink_ocsp_url_multithreaded(): + bucket = queue.Queue() + + hostname = "testaccount.us-east-1.privatelink.snowflakecomputing.com" + expectation = "http://ocsp.testaccount.us-east-1.privatelink.snowflakecomputing.com/ocsp_response_cache.json" + thread_obj = [] + for _ in range(15): + thread_obj.append( + ExecPrivatelinkThread(bucket, hostname, expectation, CLIENT_NAME) + ) + + for t in thread_obj: + t.start() + + fail_flag = False + for t in thread_obj: + t.join() + exc = bucket.get(block=False) + if exc != "Success" and not fail_flag: + fail_flag = True + + if fail_flag: + raise AssertionError() + + if os.getenv("SF_OCSP_RESPONSE_CACHE_SERVER_URL", None) is not None: + del os.environ["SF_OCSP_RESPONSE_CACHE_SERVER_URL"] + + +def test_privatelink_ocsp_url_multithreaded_snowsql(): + bucket = queue.Queue() + + hostname = "testaccount.us-east-1.privatelink.snowflakecomputing.com" + expectation = "http://ocsp.testaccount.us-east-1.privatelink.snowflakecomputing.com/ocsp_response_cache.json" + thread_obj = [] + for _ in range(15): + thread_obj.append( + ExecPrivatelinkThread(bucket, hostname, expectation, APPLICATION_SNOWSQL) + ) + + for t in thread_obj: + t.start() + + fail_flag = False + for i in range(15): + thread_obj[i].join() + exc = bucket.get(block=False) + if exc != "Success" and not fail_flag: + fail_flag = True + + if fail_flag: + raise AssertionError() + + +class ExecPrivatelinkThread(threading.Thread): + def __init__(self, bucket, hostname, expectation, client_name): + threading.Thread.__init__(self) + self.bucket = bucket + self.hostname 
+        self.expectation = expectation
+        self.client_name = client_name
+
+    def run(self):
+        SnowflakeConnection.setup_ocsp_privatelink(self.client_name, self.hostname)
+        ocsp_cache_server = os.getenv("SF_OCSP_RESPONSE_CACHE_SERVER_URL", None)
+        if ocsp_cache_server is not None and ocsp_cache_server != self.expectation:
+            print(f"Got {ocsp_cache_server} Expected {self.expectation}")
+            self.bucket.put("Fail")
+        else:
+            self.bucket.put("Success")
+
+
+@pytest.mark.skipolddriver
+def test_okta_url(db_parameters):
+    orig_authenticator = "https://someaccount.okta.com/snowflake/oO56fExYCGnfV83/2345"
+
+    def mock_auth(self, auth_instance):
+        assert isinstance(auth_instance, AuthByOkta)
+        assert self._authenticator == orig_authenticator
+
+    with mock.patch(
+        "snowflake.connector.connection.SnowflakeConnection._SnowflakeConnection__authenticate",
+        mock_auth,
+    ):
+        cnx = snowflake.connector.connect(
+            user=db_parameters["user"],
+            password=db_parameters["password"],
+            host=db_parameters["host"],
+            port=db_parameters["port"],
+            account=db_parameters["account"],
+            schema=db_parameters["schema"],
+            database=db_parameters["database"],
+            protocol=db_parameters["protocol"],
+            timezone="UTC",
+            authenticator=orig_authenticator,
+        )
+        assert cnx
+
+
+@pytest.mark.skipolddriver
+def test_use_openssl_only(db_parameters):
+    cnx = snowflake.connector.connect(
+        user=db_parameters["user"],
+        password=db_parameters["password"],
+        host=db_parameters["host"],
+        port=db_parameters["port"],
+        account=db_parameters["account"],
+        protocol=db_parameters["protocol"],
+        use_openssl_only=True,
+    )
+    assert cnx
+    assert "SF_USE_OPENSSL_ONLY" in os.environ
+    # Note: during testing, conftest defaults this value to False, so we need to
+    # clear it manually and then test again.
+    del os.environ["SF_USE_OPENSSL_ONLY"]
+    cnx = snowflake.connector.connect(
+        user=db_parameters["user"],
+        password=db_parameters["password"],
+        host=db_parameters["host"],
+        port=db_parameters["port"],
+        account=db_parameters["account"],
+        protocol=db_parameters["protocol"],
+        use_openssl_only=True,
+    )
+    assert cnx
+    assert os.environ["SF_USE_OPENSSL_ONLY"] == "True"
+
+
+def test_dashed_url(db_parameters):
+    """Tests whether dashed URLs get created correctly."""
+    with mock.patch(
+        "snowflake.connector.network.SnowflakeRestful.fetch",
+        return_value={"data": {"token": None, "masterToken": None}, "success": True},
+    ) as mocked_fetch:
+        with snowflake.connector.connect(
+            user="test-user",
+            password="test-password",
+            host="test-host",
+            port="443",
+            account="test-account",
+        ) as cnx:
+            assert cnx
+            cnx.commit = cnx.rollback = lambda: None  # Skip teardown; there's only a mocked REST API
+            assert any(
+                c[0][1].startswith("https://test-host:443")
+                for c in mocked_fetch.call_args_list
+            )
+
+
+def test_dashed_url_account_name(db_parameters):
+    """Tests whether dashed URLs get created correctly when no hostname is provided."""
+    with mock.patch(
+        "snowflake.connector.network.SnowflakeRestful.fetch",
+        return_value={"data": {"token": None, "masterToken": None}, "success": True},
+    ) as mocked_fetch:
+        with snowflake.connector.connect(
+            user="test-user",
+            password="test-password",
+            port="443",
+            account="test-account",
+        ) as cnx:
+            assert cnx
+            cnx.commit = cnx.rollback = lambda: None  # Skip teardown; there's only a mocked REST API
+            assert any(
+                c[0][1].startswith(
+                    "https://test-account.snowflakecomputing.com:443"
+                )
+                for c in mocked_fetch.call_args_list
+            )
+
+
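+# NOTE: the two dashed-URL tests above pin down the hostname-derivation rule that
+# their assertions rely on. As an illustrative sketch only (`_expected_base_url`
+# is a hypothetical helper written for this comment, not part of the connector's
+# API): an explicit host wins, otherwise the hostname is derived from the account
+# name.
+#
+#     def _expected_base_url(account, host=None, port=443):
+#         hostname = host if host is not None else f"{account}.snowflakecomputing.com"
+#         return f"https://{hostname}:{port}"
+#
+#     _expected_base_url("test-account")                    # https://test-account.snowflakecomputing.com:443
+#     _expected_base_url("test-account", host="test-host")  # https://test-host:443
+
+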
+@pytest.mark.skipolddriver
+@pytest.mark.parametrize(
+    "name,value,exc_warn",
+    [
+        # Not existing parameter
+        (
+            "no_such_parameter",
+            True,
+            UserWarning("'no_such_parameter' is an unknown connection parameter"),
+        ),
+        # Typo in parameter name
+        (
+            "applucation",
+            True,
+            UserWarning(
+                "'applucation' is an unknown connection parameter, did you mean 'application'?"
+            ),
+        ),
+        # Single type error
+        (
+            "support_negative_year",
+            "True",
+            UserWarning(
+                "'support_negative_year' connection parameter should be of type "
+                "'bool', but is a 'str'"
+            ),
+        ),
+        # Multiple possible type error
+        (
+            "autocommit",
+            "True",
+            UserWarning(
+                "'autocommit' connection parameter should be of type "
+                "'(NoneType, bool)', but is a 'str'"
+            ),
+        ),
+    ],
+)
+def test_invalid_connection_parameter(db_parameters, name, value, exc_warn):
+    with warnings.catch_warnings(record=True) as w:
+        conn_params = {
+            "account": db_parameters["account"],
+            "user": db_parameters["user"],
+            "password": db_parameters["password"],
+            "schema": db_parameters["schema"],
+            "database": db_parameters["database"],
+            "protocol": db_parameters["protocol"],
+            "host": db_parameters["host"],
+            "port": db_parameters["port"],
+            "validate_default_parameters": True,
+            name: value,
+        }
+        try:
+            conn = snowflake.connector.connect(**conn_params)
+            assert getattr(conn, "_" + name) == value
+            assert len(w) == 1
+            assert str(w[0].message) == str(exc_warn)
+        finally:
+            conn.close()
+
+
+def test_invalid_connection_parameters_turned_off(db_parameters):
+    """Makes sure parameter checking can be turned off."""
+    with warnings.catch_warnings(record=True) as w:
+        conn_params = {
+            "account": db_parameters["account"],
+            "user": db_parameters["user"],
+            "password": db_parameters["password"],
+            "schema": db_parameters["schema"],
+            "database": db_parameters["database"],
+            "protocol": db_parameters["protocol"],
+            "host": db_parameters["host"],
+            "port": db_parameters["port"],
+            "validate_default_parameters": False,
+            "autocommit": "True",  # Wrong type
+            "applucation": "this is a typo or my own variable",  # Wrong name
+        }
+        try:
+            conn = snowflake.connector.connect(**conn_params)
+            assert conn._autocommit == conn_params["autocommit"]
+            assert conn._applucation == conn_params["applucation"]
+            assert len(w) == 0
+        finally:
+            conn.close()
+
+
+def test_invalid_connection_parameters_only_warns(db_parameters):
+    """This test suppresses warnings so that only warehouse, database and schema checking remains."""
+    with warnings.catch_warnings(record=True) as w:
+        conn_params = {
+            "account": db_parameters["account"],
+            "user": db_parameters["user"],
+            "password": db_parameters["password"],
+            "schema": db_parameters["schema"],
+            "database": db_parameters["database"],
+            "protocol": db_parameters["protocol"],
+            "host": db_parameters["host"],
+            "port": db_parameters["port"],
+            "validate_default_parameters": True,
+            "autocommit": "True",  # Wrong type
+            "applucation": "this is a typo or my own variable",  # Wrong name
+        }
+        try:
+            with warnings.catch_warnings():
+                warnings.simplefilter("ignore")
+                conn = snowflake.connector.connect(**conn_params)
+                assert conn._autocommit == conn_params["autocommit"]
+                assert conn._applucation == conn_params["applucation"]
+            assert len(w) == 0
+        finally:
+            conn.close()
+
+
+@pytest.mark.skipolddriver
+def test_region_deprecation(conn_cnx):
+    """Tests whether region raises a deprecation warning."""
+    with conn_cnx() as conn:
+        with warnings.catch_warnings(record=True) as w:
+            conn.region
+        assert len(w) == 1
+        assert issubclass(w[0].category, PendingDeprecationWarning)
+        assert "Region has been deprecated" in str(w[0].message)
+
+
+def test_invalid_errorhandler_error(conn_cnx):
+    """Tests that the errorhandler cannot be set to None."""
+    with conn_cnx() as conn:
+        with pytest.raises(ProgrammingError, match="None errorhandler is specified"):
+            conn.errorhandler = None
+        original_handler = conn.errorhandler
+        conn.errorhandler = original_handler
+        assert conn.errorhandler is original_handler
+
+
+def test_disable_request_pooling_setter(conn_cnx):
+    """Tests whether request pooling can be set successfully."""
+    with conn_cnx() as conn:
+        original_value = conn.disable_request_pooling
+        conn.disable_request_pooling = not original_value
+        assert conn.disable_request_pooling == (not original_value)
+        conn.disable_request_pooling = original_value
+        assert conn.disable_request_pooling == original_value
+
+
+def test_autocommit_closed_already(conn_cnx):
+    """Tests if setting autocommit on an already closed connection raises the right error."""
+    with conn_cnx() as conn:
+        pass
+    with pytest.raises(DatabaseError, match=r"Connection is closed") as dbe:
+        conn.autocommit(True)
+    assert dbe.value.errno == ER_CONNECTION_IS_CLOSED
+
+
+def test_autocommit_invalid_type(conn_cnx):
+    """Tests if setting autocommit to an invalid (non-bool) value raises the right error."""
+    with conn_cnx() as conn:
+        with pytest.raises(ProgrammingError, match=r"Invalid parameter: True") as dbe:
+            conn.autocommit("True")
+        assert dbe.value.errno == ER_INVALID_VALUE
+
+
+def test_autocommit_unsupported(conn_cnx, caplog):
+    """Tests if a server-side error is handled correctly when setting autocommit."""
+    with conn_cnx() as conn:
+        caplog.set_level(logging.DEBUG, "snowflake.connector")
+        with mock.patch(
+            "snowflake.connector.cursor.SnowflakeCursor.execute",
+            side_effect=Error("Test error", sqlstate=SQLSTATE_FEATURE_NOT_SUPPORTED),
+        ):
+            conn.autocommit(True)
+        assert (
+            "snowflake.connector.connection",
+            logging.DEBUG,
+            "Autocommit feature is not enabled for this connection. Ignored",
+        ) in caplog.record_tuples
+
+
+def test_sequence_counter(conn_cnx):
+    """Tests whether setting the sequence counter and increasing it works as expected."""
+    with conn_cnx(sequence_counter=4) as conn:
+        assert conn.sequence_counter == 4
+        with conn.cursor() as cur:
+            assert cur.execute("select 1 ").fetchall() == [(1,)]
+        assert conn.sequence_counter == 5
+
+
+def test_missing_account(conn_cnx):
+    """Tests whether a missing account raises the right exception."""
+    with pytest.raises(ProgrammingError, match="Account must be specified") as pe:
+        with conn_cnx(account=""):
+            pass
+    assert pe.value.errno == ER_NO_ACCOUNT_NAME
+
+
+@pytest.mark.parametrize("resp", [None, {}])
+def test_empty_response(conn_cnx, resp):
+    """Tests that cmd_query returns an empty response when an empty/no response is received from the back-end."""
+    with conn_cnx() as conn:
+        with mock.patch(
+            "snowflake.connector.network.SnowflakeRestful.request", return_value=resp
+        ):
+            assert conn.cmd_query("select 1", 0, uuid4()) == {"data": {}}
+
+
+@pytest.mark.skipolddriver
+def test_authenticate_error(conn_cnx, caplog):
+    """Tests Reauthenticate error handling while authenticating."""
+    mock_auth = mock.MagicMock()
+    mock_auth.authenticate.side_effect = ReauthenticationRequest(None)
+    with conn_cnx() as conn:
+        caplog.set_level(logging.DEBUG, "snowflake.connector")
+        with pytest.raises(ReauthenticationRequest):
+            conn._authenticate(mock_auth)
+        assert (
+            "snowflake.connector.connection",
+            logging.DEBUG,
+            "ID token expired. Reauthenticating...: None",
+        ) in caplog.record_tuples
+
+
+@pytest.mark.skipolddriver
+def test_process_qmark_params_error(conn_cnx):
+    """Tests errors thrown in _process_params_qmarks."""
+    sql = "select 1;"
+    with conn_cnx(paramstyle="qmark") as conn:
+        with conn.cursor() as cur:
+            with pytest.raises(
+                ProgrammingError,
+                match="Binding parameters must be a list: invalid input",
+            ) as pe:
+                cur.execute(sql, params="invalid input")
+            assert pe.value.errno == ER_FAILED_PROCESSING_PYFORMAT
+            with pytest.raises(
+                ProgrammingError,
+                match="Binding parameters must be a list where one element is a single "
+                "value or a pair of Snowflake datatype and a value",
+            ) as pe:
+                cur.execute(
+                    sql,
+                    params=(
+                        (
+                            1,
+                            2,
+                            3,
+                        ),
+                    ),
+                )
+            assert pe.value.errno == ER_FAILED_PROCESSING_QMARK
+            with pytest.raises(
+                ProgrammingError,
+                match=r"Python data type \[magicmock\] cannot be automatically mapped "
+                r"to Snowflake",
+            ) as pe:
+                cur.execute(sql, params=[mock.MagicMock()])
+            assert pe.value.errno == ER_NOT_IMPLICITY_SNOWFLAKE_DATATYPE
+
+
+@pytest.mark.skipolddriver
+def test_process_param_dict_error(conn_cnx):
+    """Tests whether exceptions in __process_params_dict are handled correctly."""
+    with conn_cnx() as conn:
+        with pytest.raises(
+            ProgrammingError, match="Failed processing pyformat-parameters: test"
+        ) as pe:
+            with mock.patch(
+                "snowflake.connector.converter.SnowflakeConverter.to_snowflake",
+                side_effect=Exception("test"),
+            ):
+                conn._process_params_pyformat({"asd": "something"})
+        assert pe.value.errno == ER_FAILED_PROCESSING_PYFORMAT
+
+
+@pytest.mark.skipolddriver
+def test_process_param_error(conn_cnx):
+    """Tests whether exceptions in _process_params_pyformat are handled correctly for non-dict input."""
+    with conn_cnx() as conn:
+        with pytest.raises(
+            ProgrammingError, match="Failed processing pyformat-parameters; test"
+        ) as pe:
+            with mock.patch(
+                "snowflake.connector.converter.SnowflakeConverter.to_snowflake",
+                side_effect=Exception("test"),
+            ):
+                conn._process_params_pyformat(mock.Mock())
+        assert pe.value.errno == ER_FAILED_PROCESSING_PYFORMAT
+
+
+@pytest.mark.parametrize(
+    "auto_commit", [pytest.param(True, marks=pytest.mark.skipolddriver), False]
+)
+def test_autocommit(conn_cnx, db_parameters, auto_commit):
+    conn = snowflake.connector.connect(**db_parameters)
+    with mock.patch.object(conn, "commit") as mocked_commit:
+        with conn:
+            with conn.cursor() as cur:
+                cur.execute(f"alter session set autocommit = {auto_commit}")
+    if auto_commit:
+        assert not mocked_commit.called
+    else:
+        assert mocked_commit.called
+
+
+@pytest.mark.skipolddriver
+def test_client_prefetch_threads_setting(conn_cnx):
+    """Tests whether client_prefetch_threads is updated and propagated to the result set."""
+    with conn_cnx() as conn:
+        assert conn.client_prefetch_threads == DEFAULT_CLIENT_PREFETCH_THREADS
+        new_thread_count = conn.client_prefetch_threads + 1
+        with conn.cursor() as cur:
+            cur.execute(f"alter session set client_prefetch_threads={new_thread_count}")
+            assert cur._result_set.prefetch_thread_num == new_thread_count
+        assert conn.client_prefetch_threads == new_thread_count
+
+
+@pytest.mark.external
+def test_client_failover_connection_url(conn_cnx):
+    with conn_cnx("client_failover") as conn:
+        with conn.cursor() as cur:
+            assert cur.execute("select 1;").fetchall() == [
+                (1,),
+            ]
+
+
+def test_connection_gc(conn_cnx):
+    """This test makes sure that a heartbeat thread doesn't prevent garbage collection of SnowflakeConnection."""
+    conn = conn_cnx(client_session_keep_alive=True).__enter__()
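+    # Hold only a weak reference and drop the sole strong reference; if the
+    # keep-alive heartbeat thread kept its own strong reference to the
+    # connection, the weakref below would survive gc.collect() and the final
+    # assertion would fail.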
conn_wref = weakref.ref(conn) + del conn + gc.collect() + assert conn_wref() is None + + +@pytest.mark.skipolddriver +def test_connection_cant_be_reused(conn_cnx): + row_count = 50_000 + with conn_cnx() as conn: + cursors = conn.execute_string( + f"select seq4() as n from table(generator(rowcount => {row_count}));" + ) + assert len(cursors[0]._result_set.batches) > 1 # We need to have remote results + assert list(cursors[0]) diff --git a/test/integ/test_converter.py b/test/integ/test_converter.py new file mode 100644 index 000000000..810abda4c --- /dev/null +++ b/test/integ/test_converter.py @@ -0,0 +1,541 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +from datetime import time, timedelta + +import pytest +import pytz + +from snowflake.connector.compat import IS_WINDOWS +from snowflake.connector.converter import ZERO_EPOCH, _generate_tzinfo_from_tzoffset +from snowflake.connector.converter_snowsql import SnowflakeConverterSnowSQL + + +def _compose_tz(dt, tzinfo): + ret = ZERO_EPOCH + timedelta(seconds=float(dt)) + ret += tzinfo.utcoffset(ret) + return ret.replace(tzinfo=tzinfo) + + +def _compose_ntz(dt): + return ZERO_EPOCH + timedelta(seconds=float(dt)) + + +def _compose_ltz(dt, tz): + ret = ZERO_EPOCH + timedelta(seconds=float(dt)) + return pytz.utc.localize(ret).astimezone(pytz.timezone(tz)) + + +def test_fetch_timestamps(conn_cnx): + PST_TZ = "America/Los_Angeles" + + tzdiff = 1860 - 1440 # -07:00 + tzinfo = _generate_tzinfo_from_tzoffset(tzdiff) + + # TIMESTAMP_TZ + r0 = _compose_tz("1325568896.123456", tzinfo) + r1 = _compose_tz("1325568896.123456", tzinfo) + r2 = _compose_tz("1325568896.123456", tzinfo) + r3 = _compose_tz("1325568896.123456", tzinfo) + r4 = _compose_tz("1325568896.12345", tzinfo) + r5 = _compose_tz("1325568896.1234", tzinfo) + r6 = _compose_tz("1325568896.123", tzinfo) + r7 = _compose_tz("1325568896.12", tzinfo) + r8 = _compose_tz("1325568896.1", tzinfo) + r9 = _compose_tz("1325568896", tzinfo) + + # TIMESTAMP_NTZ + r10 = _compose_ntz("1325568896.123456") + r11 = _compose_ntz("1325568896.123456") + r12 = _compose_ntz("1325568896.123456") + r13 = _compose_ntz("1325568896.123456") + r14 = _compose_ntz("1325568896.12345") + r15 = _compose_ntz("1325568896.1234") + r16 = _compose_ntz("1325568896.123") + r17 = _compose_ntz("1325568896.12") + r18 = _compose_ntz("1325568896.1") + r19 = _compose_ntz("1325568896") + + # TIMESTAMP_LTZ + r20 = _compose_ltz("1325568896.123456", PST_TZ) + r21 = _compose_ltz("1325568896.123456", PST_TZ) + r22 = _compose_ltz("1325568896.123456", PST_TZ) + r23 = _compose_ltz("1325568896.123456", PST_TZ) + r24 = _compose_ltz("1325568896.12345", PST_TZ) + r25 = _compose_ltz("1325568896.1234", PST_TZ) + r26 = _compose_ltz("1325568896.123", PST_TZ) + r27 = _compose_ltz("1325568896.12", PST_TZ) + r28 = _compose_ltz("1325568896.1", PST_TZ) + r29 = _compose_ltz("1325568896", PST_TZ) + + # TIME + r30 = time(5, 7, 8, 123456) + r31 = time(5, 7, 8, 123456) + r32 = time(5, 7, 8, 123456) + r33 = time(5, 7, 8, 123456) + r34 = time(5, 7, 8, 123450) + r35 = time(5, 7, 8, 123400) + r36 = time(5, 7, 8, 123000) + r37 = time(5, 7, 8, 120000) + r38 = time(5, 7, 8, 100000) + r39 = time(5, 7, 8) + + with conn_cnx() as cnx: + cur = cnx.cursor() + cur.execute( + """ +ALTER SESSION SET TIMEZONE='{tz}'; +""".format( + tz=PST_TZ + ) + ) + cur.execute( + """ +SELECT + '2012-01-03 12:34:56.123456789+07:00'::timestamp_tz(9), + '2012-01-03 12:34:56.12345678+07:00'::timestamp_tz(8), + 
'2012-01-03 12:34:56.1234567+07:00'::timestamp_tz(7), + '2012-01-03 12:34:56.123456+07:00'::timestamp_tz(6), + '2012-01-03 12:34:56.12345+07:00'::timestamp_tz(5), + '2012-01-03 12:34:56.1234+07:00'::timestamp_tz(4), + '2012-01-03 12:34:56.123+07:00'::timestamp_tz(3), + '2012-01-03 12:34:56.12+07:00'::timestamp_tz(2), + '2012-01-03 12:34:56.1+07:00'::timestamp_tz(1), + '2012-01-03 12:34:56+07:00'::timestamp_tz(0), + '2012-01-03 05:34:56.123456789'::timestamp_ntz(9), + '2012-01-03 05:34:56.12345678'::timestamp_ntz(8), + '2012-01-03 05:34:56.1234567'::timestamp_ntz(7), + '2012-01-03 05:34:56.123456'::timestamp_ntz(6), + '2012-01-03 05:34:56.12345'::timestamp_ntz(5), + '2012-01-03 05:34:56.1234'::timestamp_ntz(4), + '2012-01-03 05:34:56.123'::timestamp_ntz(3), + '2012-01-03 05:34:56.12'::timestamp_ntz(2), + '2012-01-03 05:34:56.1'::timestamp_ntz(1), + '2012-01-03 05:34:56'::timestamp_ntz(0), + '2012-01-02 21:34:56.123456789'::timestamp_ltz(9), + '2012-01-02 21:34:56.12345678'::timestamp_ltz(8), + '2012-01-02 21:34:56.1234567'::timestamp_ltz(7), + '2012-01-02 21:34:56.123456'::timestamp_ltz(6), + '2012-01-02 21:34:56.12345'::timestamp_ltz(5), + '2012-01-02 21:34:56.1234'::timestamp_ltz(4), + '2012-01-02 21:34:56.123'::timestamp_ltz(3), + '2012-01-02 21:34:56.12'::timestamp_ltz(2), + '2012-01-02 21:34:56.1'::timestamp_ltz(1), + '2012-01-02 21:34:56'::timestamp_ltz(0), + '05:07:08.123456789'::time(9), + '05:07:08.12345678'::time(8), + '05:07:08.1234567'::time(7), + '05:07:08.123456'::time(6), + '05:07:08.12345'::time(5), + '05:07:08.1234'::time(4), + '05:07:08.123'::time(3), + '05:07:08.12'::time(2), + '05:07:08.1'::time(1), + '05:07:08'::time(0) +""" + ) + ret = cur.fetchone() + assert ret[0] == r0 + assert ret[1] == r1 + assert ret[2] == r2 + assert ret[3] == r3 + assert ret[4] == r4 + assert ret[5] == r5 + assert ret[6] == r6 + assert ret[7] == r7 + assert ret[8] == r8 + assert ret[9] == r9 + assert ret[10] == r10 + assert ret[11] == r11 + assert ret[12] == r12 + assert ret[13] == r13 + assert ret[14] == r14 + assert ret[15] == r15 + assert ret[16] == r16 + assert ret[17] == r17 + assert ret[18] == r18 + assert ret[19] == r19 + assert ret[20] == r20 + assert ret[21] == r21 + assert ret[22] == r22 + assert ret[23] == r23 + assert ret[24] == r24 + assert ret[25] == r25 + assert ret[26] == r26 + assert ret[27] == r27 + assert ret[28] == r28 + assert ret[29] == r29 + assert ret[30] == r30 + assert ret[31] == r31 + assert ret[32] == r32 + assert ret[33] == r33 + assert ret[34] == r34 + assert ret[35] == r35 + assert ret[36] == r36 + assert ret[37] == r37 + assert ret[38] == r38 + assert ret[39] == r39 + + +def test_fetch_timestamps_snowsql(conn_cnx): + PST_TZ = "America/Los_Angeles" + + converter_class = SnowflakeConverterSnowSQL + sql = """ +SELECT + '2012-01-03 12:34:56.123456789+07:00'::timestamp_tz(9), + '2012-01-03 12:34:56.12345678+07:00'::timestamp_tz(8), + '2012-01-03 12:34:56.1234567+07:00'::timestamp_tz(7), + '2012-01-03 12:34:56.123456+07:00'::timestamp_tz(6), + '2012-01-03 12:34:56.12345+07:00'::timestamp_tz(5), + '2012-01-03 12:34:56.1234+07:00'::timestamp_tz(4), + '2012-01-03 12:34:56.123+07:00'::timestamp_tz(3), + '2012-01-03 12:34:56.12+07:00'::timestamp_tz(2), + '2012-01-03 12:34:56.1+07:00'::timestamp_tz(1), + '2012-01-03 12:34:56+07:00'::timestamp_tz(0), + '2012-01-03 05:34:56.123456789'::timestamp_ntz(9), + '2012-01-03 05:34:56.12345678'::timestamp_ntz(8), + '2012-01-03 05:34:56.1234567'::timestamp_ntz(7), + '2012-01-03 05:34:56.123456'::timestamp_ntz(6), + '2012-01-03 
05:34:56.12345'::timestamp_ntz(5), + '2012-01-03 05:34:56.1234'::timestamp_ntz(4), + '2012-01-03 05:34:56.123'::timestamp_ntz(3), + '2012-01-03 05:34:56.12'::timestamp_ntz(2), + '2012-01-03 05:34:56.1'::timestamp_ntz(1), + '2012-01-03 05:34:56'::timestamp_ntz(0), + '2012-01-02 21:34:56.123456789'::timestamp_ltz(9), + '2012-01-02 21:34:56.12345678'::timestamp_ltz(8), + '2012-01-02 21:34:56.1234567'::timestamp_ltz(7), + '2012-01-02 21:34:56.123456'::timestamp_ltz(6), + '2012-01-02 21:34:56.12345'::timestamp_ltz(5), + '2012-01-02 21:34:56.1234'::timestamp_ltz(4), + '2012-01-02 21:34:56.123'::timestamp_ltz(3), + '2012-01-02 21:34:56.12'::timestamp_ltz(2), + '2012-01-02 21:34:56.1'::timestamp_ltz(1), + '2012-01-02 21:34:56'::timestamp_ltz(0), + '05:07:08.123456789'::time(9), + '05:07:08.12345678'::time(8), + '05:07:08.1234567'::time(7), + '05:07:08.123456'::time(6), + '05:07:08.12345'::time(5), + '05:07:08.1234'::time(4), + '05:07:08.123'::time(3), + '05:07:08.12'::time(2), + '05:07:08.1'::time(1), + '05:07:08'::time(0) +""" + with conn_cnx(converter_class=converter_class) as cnx: + cur = cnx.cursor() + cur.execute( + """ +alter session set python_connector_query_result_format='JSON' +""" + ) + cur.execute( + """ +ALTER SESSION SET TIMEZONE='{tz}'; +""".format( + tz=PST_TZ + ) + ) + cur.execute( + """ +ALTER SESSION SET + TIMESTAMP_OUTPUT_FORMAT='YYYY-MM-DD HH24:MI:SS.FF9 TZH:TZM', + TIMESTAMP_NTZ_OUTPUT_FORMAT='YYYY-MM-DD HH24:MI:SS.FF9 TZH:TZM', + TIME_OUTPUT_FORMAT='HH24:MI:SS.FF9'; + """ + ) + cur.execute(sql) + ret = cur.fetchone() + assert ret[0] == "2012-01-03 12:34:56.123456789 +0700" + assert ret[1] == "2012-01-03 12:34:56.123456780 +0700" + assert ret[2] == "2012-01-03 12:34:56.123456700 +0700" + assert ret[3] == "2012-01-03 12:34:56.123456000 +0700" + assert ret[4] == "2012-01-03 12:34:56.123450000 +0700" + assert ret[5] == "2012-01-03 12:34:56.123400000 +0700" + assert ret[6] == "2012-01-03 12:34:56.123000000 +0700" + assert ret[7] == "2012-01-03 12:34:56.120000000 +0700" + assert ret[8] == "2012-01-03 12:34:56.100000000 +0700" + assert ret[9] == "2012-01-03 12:34:56.000000000 +0700" + assert ret[10] == "2012-01-03 05:34:56.123456789 " + assert ret[11] == "2012-01-03 05:34:56.123456780 " + assert ret[12] == "2012-01-03 05:34:56.123456700 " + assert ret[13] == "2012-01-03 05:34:56.123456000 " + assert ret[14] == "2012-01-03 05:34:56.123450000 " + assert ret[15] == "2012-01-03 05:34:56.123400000 " + assert ret[16] == "2012-01-03 05:34:56.123000000 " + assert ret[17] == "2012-01-03 05:34:56.120000000 " + assert ret[18] == "2012-01-03 05:34:56.100000000 " + assert ret[19] == "2012-01-03 05:34:56.000000000 " + assert ret[20] == "2012-01-02 21:34:56.123456789 -0800" + assert ret[21] == "2012-01-02 21:34:56.123456780 -0800" + assert ret[22] == "2012-01-02 21:34:56.123456700 -0800" + assert ret[23] == "2012-01-02 21:34:56.123456000 -0800" + assert ret[24] == "2012-01-02 21:34:56.123450000 -0800" + assert ret[25] == "2012-01-02 21:34:56.123400000 -0800" + assert ret[26] == "2012-01-02 21:34:56.123000000 -0800" + assert ret[27] == "2012-01-02 21:34:56.120000000 -0800" + assert ret[28] == "2012-01-02 21:34:56.100000000 -0800" + assert ret[29] == "2012-01-02 21:34:56.000000000 -0800" + assert ret[30] == "05:07:08.123456789" + assert ret[31] == "05:07:08.123456780" + assert ret[32] == "05:07:08.123456700" + assert ret[33] == "05:07:08.123456000" + assert ret[34] == "05:07:08.123450000" + assert ret[35] == "05:07:08.123400000" + assert ret[36] == "05:07:08.123000000" + assert ret[37] == 
"05:07:08.120000000" + assert ret[38] == "05:07:08.100000000" + assert ret[39] == "05:07:08.000000000" + + cur.execute( + """ +ALTER SESSION SET + TIMESTAMP_OUTPUT_FORMAT='YYYY-MM-DD HH24:MI:SS.FF6 TZH:TZM', + TIMESTAMP_NTZ_OUTPUT_FORMAT='YYYY-MM-DD HH24:MI:SS.FF6 TZH:TZM', + TIME_OUTPUT_FORMAT='HH24:MI:SS.FF6'; + """ + ) + cur.execute(sql) + ret = cur.fetchone() + assert ret[0] == "2012-01-03 12:34:56.123456 +0700" + assert ret[1] == "2012-01-03 12:34:56.123456 +0700" + assert ret[2] == "2012-01-03 12:34:56.123456 +0700" + assert ret[3] == "2012-01-03 12:34:56.123456 +0700" + assert ret[4] == "2012-01-03 12:34:56.123450 +0700" + assert ret[5] == "2012-01-03 12:34:56.123400 +0700" + assert ret[6] == "2012-01-03 12:34:56.123000 +0700" + assert ret[7] == "2012-01-03 12:34:56.120000 +0700" + assert ret[8] == "2012-01-03 12:34:56.100000 +0700" + assert ret[9] == "2012-01-03 12:34:56.000000 +0700" + assert ret[10] == "2012-01-03 05:34:56.123456 " + assert ret[11] == "2012-01-03 05:34:56.123456 " + assert ret[12] == "2012-01-03 05:34:56.123456 " + assert ret[13] == "2012-01-03 05:34:56.123456 " + assert ret[14] == "2012-01-03 05:34:56.123450 " + assert ret[15] == "2012-01-03 05:34:56.123400 " + assert ret[16] == "2012-01-03 05:34:56.123000 " + assert ret[17] == "2012-01-03 05:34:56.120000 " + assert ret[18] == "2012-01-03 05:34:56.100000 " + assert ret[19] == "2012-01-03 05:34:56.000000 " + assert ret[20] == "2012-01-02 21:34:56.123456 -0800" + assert ret[21] == "2012-01-02 21:34:56.123456 -0800" + assert ret[22] == "2012-01-02 21:34:56.123456 -0800" + assert ret[23] == "2012-01-02 21:34:56.123456 -0800" + assert ret[24] == "2012-01-02 21:34:56.123450 -0800" + assert ret[25] == "2012-01-02 21:34:56.123400 -0800" + assert ret[26] == "2012-01-02 21:34:56.123000 -0800" + assert ret[27] == "2012-01-02 21:34:56.120000 -0800" + assert ret[28] == "2012-01-02 21:34:56.100000 -0800" + assert ret[29] == "2012-01-02 21:34:56.000000 -0800" + assert ret[30] == "05:07:08.123456" + assert ret[31] == "05:07:08.123456" + assert ret[32] == "05:07:08.123456" + assert ret[33] == "05:07:08.123456" + assert ret[34] == "05:07:08.123450" + assert ret[35] == "05:07:08.123400" + assert ret[36] == "05:07:08.123000" + assert ret[37] == "05:07:08.120000" + assert ret[38] == "05:07:08.100000" + assert ret[39] == "05:07:08.000000" + + +def test_fetch_timestamps_negative_epoch(conn_cnx): + """Negative epoch.""" + r0 = _compose_ntz("-602594703.876544") + r1 = _compose_ntz("1325594096.123456") + with conn_cnx() as cnx: + cur = cnx.cursor() + cur.execute( + """\ +SELECT + '1950-11-27 12:34:56.123456'::timestamp_ntz(6), + '2012-01-03 12:34:56.123456'::timestamp_ntz(6) +""" + ) + ret = cur.fetchone() + assert ret[0] == r0 + assert ret[1] == r1 + + +def test_date_0001_9999(conn_cnx): + """Test 0001 and 9999 for all platforms.""" + with conn_cnx( + converter_class=SnowflakeConverterSnowSQL, support_negative_year=True + ) as cnx: + cnx.cursor().execute( + """ +ALTER SESSION SET + DATE_OUTPUT_FORMAT='YYYY-MM-DD' +""" + ) + cur = cnx.cursor() + cur.execute( + """ +alter session set python_connector_query_result_format='JSON' +""" + ) + cur.execute( + """ +SELECT + DATE_FROM_PARTS(1900, 1, 1), + DATE_FROM_PARTS(2500, 2, 3), + DATE_FROM_PARTS(1, 10, 31), + DATE_FROM_PARTS(9999, 3, 20) + ; +""" + ) + ret = cur.fetchone() + assert ret[0] == "1900-01-01" + assert ret[1] == "2500-02-03" + assert ret[2] == "0001-10-31" + assert ret[3] == "9999-03-20" + + +@pytest.mark.skipif(IS_WINDOWS, reason="year out of range error") +def 
test_five_or_more_digit_year_date_converter(conn_cnx):
+    """Past and future dates."""
+    with conn_cnx(
+        converter_class=SnowflakeConverterSnowSQL, support_negative_year=True
+    ) as cnx:
+        cnx.cursor().execute(
+            """
+ALTER SESSION SET
+    DATE_OUTPUT_FORMAT='YYYY-MM-DD'
+"""
+        )
+        cur = cnx.cursor()
+        cur.execute(
+            """
+alter session set python_connector_query_result_format='JSON'
+"""
+        )
+        cur.execute(
+            """
+SELECT
+    DATE_FROM_PARTS(10000, 1, 1),
+    DATE_FROM_PARTS(-0001, 2, 5),
+    DATE_FROM_PARTS(56789, 3, 4),
+    DATE_FROM_PARTS(198765, 4, 3),
+    DATE_FROM_PARTS(-234567, 5, 2)
+    ;
+"""
+        )
+        ret = cur.fetchone()
+        assert ret[0] == "10000-01-01"
+        assert ret[1] == "-0001-02-05"
+        assert ret[2] == "56789-03-04"
+        assert ret[3] == "198765-04-03"
+        assert ret[4] == "-234567-05-02"
+
+        cnx.cursor().execute(
+            """
+ALTER SESSION SET
+    DATE_OUTPUT_FORMAT='YY-MM-DD'
+"""
+        )
+        cur = cnx.cursor()
+        cur.execute(
+            """
+SELECT
+    DATE_FROM_PARTS(10000, 1, 1),
+    DATE_FROM_PARTS(-0001, 2, 5),
+    DATE_FROM_PARTS(56789, 3, 4),
+    DATE_FROM_PARTS(198765, 4, 3),
+    DATE_FROM_PARTS(-234567, 5, 2)
+    ;
+"""
+        )
+        ret = cur.fetchone()
+        assert ret[0] == "00-01-01"
+        assert ret[1] == "-01-02-05"
+        assert ret[2] == "89-03-04"
+        assert ret[3] == "65-04-03"
+        assert ret[4] == "-67-05-02"
+
+
+def test_fraction_followed_by_year_format(conn_cnx):
+    """Tests an output format where the fraction precedes the year (both year and fraction are included, but the fraction shows up before the year)."""
+    with conn_cnx(converter_class=SnowflakeConverterSnowSQL) as cnx:
+        cnx.cursor().execute(
+            """
+alter session set python_connector_query_result_format='JSON'
+"""
+        )
+        cnx.cursor().execute(
+            """
+ALTER SESSION SET
+    TIMESTAMP_OUTPUT_FORMAT='HH24:MI:SS.FF6 MON DD, YYYY',
+    TIMESTAMP_NTZ_OUTPUT_FORMAT='HH24:MI:SS.FF6 MON DD, YYYY'
+"""
+        )
+        for rec in cnx.cursor().execute(
+            """
+SELECT
+    '2012-01-03 05:34:56.123456'::TIMESTAMP_NTZ(6)
+"""
+        ):
+            assert rec[0] == "05:34:56.123456 Jan 03, 2012"
+
+
+def test_fetch_fraction_timestamp(conn_cnx):
+    """Additional fetch timestamp tests.
Mainly used for SnowSQL which converts to string representations.""" + PST_TZ = "America/Los_Angeles" + + converter_class = SnowflakeConverterSnowSQL + sql = """ +SELECT + '1900-01-01T05:00:00.000Z'::timestamp_tz(7), + '1900-01-01T05:00:00.000'::timestamp_ntz(7), + '1900-01-01T05:00:01.000Z'::timestamp_tz(7), + '1900-01-01T05:00:01.000'::timestamp_ntz(7), + '1900-01-01T05:00:01.012Z'::timestamp_tz(7), + '1900-01-01T05:00:01.012'::timestamp_ntz(7), + '1900-01-01T05:00:00.012Z'::timestamp_tz(7), + '1900-01-01T05:00:00.012'::timestamp_ntz(7), + '2100-01-01T05:00:00.012Z'::timestamp_tz(7), + '2100-01-01T05:00:00.012'::timestamp_ntz(7), + '1970-01-01T00:00:00Z'::timestamp_tz(7), + '1970-01-01T00:00:00'::timestamp_ntz(7) +""" + with conn_cnx(converter_class=converter_class) as cnx: + cur = cnx.cursor() + cur.execute( + """ +alter session set python_connector_query_result_format='JSON' +""" + ) + cur.execute( + """ +ALTER SESSION SET TIMEZONE='{tz}'; +""".format( + tz=PST_TZ + ) + ) + cur.execute( + """ +ALTER SESSION SET + TIMESTAMP_OUTPUT_FORMAT='YYYY-MM-DD HH24:MI:SS.FF9 TZH:TZM', + TIMESTAMP_NTZ_OUTPUT_FORMAT='YYYY-MM-DD HH24:MI:SS.FF9', + TIME_OUTPUT_FORMAT='HH24:MI:SS.FF9'; + """ + ) + cur.execute(sql) + ret = cur.fetchone() + assert ret[0] == "1900-01-01 05:00:00.000000000 +0000" + assert ret[1] == "1900-01-01 05:00:00.000000000" + assert ret[2] == "1900-01-01 05:00:01.000000000 +0000" + assert ret[3] == "1900-01-01 05:00:01.000000000" + assert ret[4] == "1900-01-01 05:00:01.012000000 +0000" + assert ret[5] == "1900-01-01 05:00:01.012000000" + assert ret[6] == "1900-01-01 05:00:00.012000000 +0000" + assert ret[7] == "1900-01-01 05:00:00.012000000" + assert ret[8] == "2100-01-01 05:00:00.012000000 +0000" + assert ret[9] == "2100-01-01 05:00:00.012000000" + assert ret[10] == "1970-01-01 00:00:00.000000000 +0000" + assert ret[11] == "1970-01-01 00:00:00.000000000" diff --git a/test/integ/test_converter_more_timestamp.py b/test/integ/test_converter_more_timestamp.py new file mode 100644 index 000000000..e8a1ca3f3 --- /dev/null +++ b/test/integ/test_converter_more_timestamp.py @@ -0,0 +1,133 @@ +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +from datetime import datetime, timedelta + +import pytz +from dateutil.parser import parse + +from snowflake.connector.converter import ZERO_EPOCH, _generate_tzinfo_from_tzoffset + + +def test_fetch_various_timestamps(conn_cnx): + """More coverage of timestamp. + + Notes: + Currently TIMESTAMP_LTZ is not tested. 
+ """ + PST_TZ = "America/Los_Angeles" + epoch_times = ["1325568896", "-2208943503", "0", "-1"] + timezones = ["+07:00", "+00:00", "-01:00", "-09:00"] + fractions = "123456789" + data_types = ["TIMESTAMP_TZ", "TIMESTAMP_NTZ"] + + data = [] + for dt in data_types: + for et in epoch_times: + if dt == "TIMESTAMP_TZ": + for tz in timezones: + tzdiff = (int(tz[1:3]) * 60 + int(tz[4:6])) * ( + -1 if tz[0] == "-" else 1 + ) + tzinfo = _generate_tzinfo_from_tzoffset(tzdiff) + try: + ts = datetime.fromtimestamp(float(et), tz=tzinfo) + except (OSError, ValueError): + ts = ZERO_EPOCH + timedelta(seconds=float(et)) + if pytz.utc != tzinfo: + ts += tzinfo.utcoffset(ts) + ts = ts.replace(tzinfo=tzinfo) + data.append( + { + "scale": 0, + "dt": dt, + "inp": ts.strftime(f"%Y-%m-%d %H:%M:%S{tz}"), + "out": ts, + } + ) + for idx in range(len(fractions)): + scale = idx + 1 + if idx + 1 != 6: # SNOW-28597 + try: + ts0 = datetime.fromtimestamp(float(et), tz=tzinfo) + except (OSError, ValueError): + ts0 = ZERO_EPOCH + timedelta(seconds=float(et)) + if pytz.utc != tzinfo: + ts0 += tzinfo.utcoffset(ts0) + ts0 = ts0.replace(tzinfo=tzinfo) + ts0_str = ts0.strftime( + "%Y-%m-%d %H:%M:%S.{ff}{tz}".format( + ff=fractions[: idx + 1], tz=tz + ) + ) + ts1 = parse(ts0_str) + data.append( + {"scale": scale, "dt": dt, "inp": ts0_str, "out": ts1} + ) + elif dt == "TIMESTAMP_LTZ": + # WIP. this test work in edge case + tzinfo = pytz.timezone(PST_TZ) + ts0 = datetime.fromtimestamp(float(et)) + ts0 = pytz.utc.localize(ts0).astimezone(tzinfo) + ts0_str = ts0.strftime("%Y-%m-%d %H:%M:%S") + ts1 = ts0 + data.append({"scale": 0, "dt": dt, "inp": ts0_str, "out": ts1}) + for idx in range(len(fractions)): + ts0 = datetime.fromtimestamp(float(et)) + ts0 = pytz.utc.localize(ts0).astimezone(tzinfo) + ts0_str = ts0.strftime(f"%Y-%m-%d %H:%M:%S.{fractions[: idx + 1]}") + ts1 = ts0 + timedelta(seconds=float(f"0.{fractions[: idx + 1]}")) + data.append( + {"scale": idx + 1, "dt": dt, "inp": ts0_str, "out": ts1} + ) + else: + # TIMESTAMP_NTZ + try: + ts0 = datetime.fromtimestamp(float(et)) + except (OSError, ValueError): + ts0 = ZERO_EPOCH + timedelta(seconds=(float(et))) + ts0_str = ts0.strftime("%Y-%m-%d %H:%M:%S") + ts1 = parse(ts0_str) + data.append({"scale": 0, "dt": dt, "inp": ts0_str, "out": ts1}) + for idx in range(len(fractions)): + try: + ts0 = datetime.fromtimestamp(float(et)) + except (OSError, ValueError): + ts0 = ZERO_EPOCH + timedelta(seconds=(float(et))) + ts0_str = ts0.strftime(f"%Y-%m-%d %H:%M:%S.{fractions[: idx + 1]}") + ts1 = parse(ts0_str) + data.append( + {"scale": idx + 1, "dt": dt, "inp": ts0_str, "out": ts1} + ) + sql = "SELECT " + for d in data: + sql += "'{inp}'::{dt}({scale}), ".format( + inp=d["inp"], dt=d["dt"], scale=d["scale"] + ) + sql += "1" + with conn_cnx() as cnx: + cur = cnx.cursor() + cur.execute( + """ +ALTER SESSION SET TIMEZONE='{tz}'; +""".format( + tz=PST_TZ + ) + ) + rec = cur.execute(sql).fetchone() + for idx, d in enumerate(data): + comp, lower, higher = _in_range(d["out"], rec[idx]) + assert ( + comp + ), "data: {d}: target={target}, lower={lower}, higher={" "higher}".format( + d=d, target=rec[idx], lower=lower, higher=higher + ) + + +def _in_range(reference, target): + lower = reference - timedelta(microseconds=1) + higher = reference + timedelta(microseconds=1) + return lower <= target <= higher, lower, higher diff --git a/test/integ/test_converter_null.py b/test/integ/test_converter_null.py new file mode 100644 index 000000000..99d431bc9 --- /dev/null +++ b/test/integ/test_converter_null.py @@ 
-0,0 +1,65 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +import re +from datetime import datetime, timedelta + +import snowflake.connector +from snowflake.connector.converter import ZERO_EPOCH +from snowflake.connector.converter_null import SnowflakeNoConverterToPython + +NUMERIC_VALUES = re.compile(r"-?[\d.]*\d$") + + +def test_converter_no_converter_to_python(db_parameters): + """Tests no converter. + + This should not translate the Snowflake internal data representation to the Python native types. + """ + con = snowflake.connector.connect( + user=db_parameters["user"], + password=db_parameters["password"], + host=db_parameters["host"], + port=db_parameters["port"], + account=db_parameters["account"], + database=db_parameters["database"], + schema=db_parameters["schema"], + protocol=db_parameters["protocol"], + timezone="UTC", + converter_class=SnowflakeNoConverterToPython, + ) + con.cursor().execute( + """ +alter session set python_connector_query_result_format='JSON' +""" + ) + + ret = ( + con.cursor() + .execute( + """ +select current_timestamp(), + 1::NUMBER, + 2.0::FLOAT, + 'test1' +""" + ) + .fetchone() + ) + assert isinstance(ret[0], str) + assert NUMERIC_VALUES.match(ret[0]) + assert isinstance(ret[1], str) + assert NUMERIC_VALUES.match(ret[1]) + con.cursor().execute("create or replace table testtb(c1 timestamp_ntz(6))") + try: + current_time = datetime.utcnow() + # binding value should have no impact + con.cursor().execute("insert into testtb(c1) values(%s)", (current_time,)) + ret = con.cursor().execute("select * from testtb").fetchone()[0] + assert ZERO_EPOCH + timedelta(seconds=(float(ret))) == current_time + finally: + con.cursor().execute("drop table if exists testtb") diff --git a/test/integ/test_cursor.py b/test/integ/test_cursor.py new file mode 100644 index 000000000..ee176178e --- /dev/null +++ b/test/integ/test_cursor.py @@ -0,0 +1,1636 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
+# + +from __future__ import annotations + +import decimal +import json +import logging +import os +import pickle +import time +from datetime import date, datetime +from typing import TYPE_CHECKING, NamedTuple +from unittest import mock + +import pytest +import pytz + +import snowflake.connector +from snowflake.connector import ( + DictCursor, + InterfaceError, + NotSupportedError, + ProgrammingError, + constants, + errorcode, + errors, +) +from snowflake.connector.compat import IS_WINDOWS +from snowflake.connector.cursor import SnowflakeCursor + +try: + from snowflake.connector.cursor import ResultMetadata +except ImportError: + + class ResultMetadata(NamedTuple): + name: str + type_code: int + display_size: int + internal_size: int + precision: int + scale: int + is_nullable: bool + + +from snowflake.connector.errorcode import ( + ER_FAILED_TO_REWRITE_MULTI_ROW_INSERT, + ER_NOT_POSITIVE_SIZE, +) +from snowflake.connector.sqlstate import SQLSTATE_FEATURE_NOT_SUPPORTED +from snowflake.connector.telemetry import TelemetryField + +from ..randomize import random_string + +try: + from snowflake.connector.constants import ( + FIELD_ID_TO_NAME, + PARAMETER_PYTHON_CONNECTOR_QUERY_RESULT_FORMAT, + ) + from snowflake.connector.errorcode import ( + ER_NO_ARROW_RESULT, + ER_NO_PYARROW, + ER_NO_PYARROW_SNOWSQL, + ) + from snowflake.connector.result_batch import ArrowResultBatch, JSONResultBatch +except ImportError: + PARAMETER_PYTHON_CONNECTOR_QUERY_RESULT_FORMAT = None + ER_NO_ARROW_RESULT = None + ER_NO_PYARROW = None + ER_NO_PYARROW_SNOWSQL = None + ArrowResultBatch = JSONResultBatch = None + FIELD_ID_TO_NAME = {} + +if TYPE_CHECKING: # pragma: no cover + from snowflake.connector.result_batch import ResultBatch + +try: # pragma: no cover + from snowflake.connector.constants import QueryStatus +except ImportError: + QueryStatus = None + + +def _drop_warehouse(conn, db_parameters): + conn.cursor().execute( + "drop warehouse if exists {}".format(db_parameters["name_wh"]) + ) + + +@pytest.fixture() +def conn(request, conn_cnx, db_parameters): + def fin(): + with conn_cnx() as cnx: + cnx.cursor().execute( + "use {db}.{schema}".format( + db=db_parameters["database"], schema=db_parameters["schema"] + ) + ) + cnx.cursor().execute("drop table {name}".format(name=db_parameters["name"])) + + request.addfinalizer(fin) + + with conn_cnx() as cnx: + cnx.cursor().execute( + """ +create table {name} ( +aa int, +dt date, +tm time, +ts timestamp, +tsltz timestamp_ltz, +tsntz timestamp_ntz, +tstz timestamp_tz, +pct float, +ratio number(5,2), +b binary) +""".format( + name=db_parameters["name"] + ) + ) + + return conn_cnx + + +def _check_results(cursor, results): + assert cursor.sfqid, "Snowflake query id is None" + assert cursor.rowcount == 3, "the number of records" + assert results[0] == 65432, "the first result was wrong" + assert results[1] == 98765, "the second result was wrong" + assert results[2] == 123456, "the third result was wrong" + + +def test_insert_select(conn, db_parameters): + """Inserts and selects integer data.""" + with conn() as cnx: + c = cnx.cursor() + try: + c.execute( + "insert into {name}(aa) values(123456)," + "(98765),(65432)".format(name=db_parameters["name"]) + ) + cnt = 0 + for rec in c: + cnt += int(rec[0]) + assert cnt == 3, "wrong number of records were inserted" + assert c.rowcount == 3, "wrong number of records were inserted" + finally: + c.close() + + try: + c = cnx.cursor() + c.execute( + "select aa from {name} order by aa".format(name=db_parameters["name"]) + ) + results = [] + 
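+            # The cursor itself is iterable after execute(); each rec is a
+            # tuple of column values, here the single AA column per row.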
for rec in c: + results.append(rec[0]) + _check_results(c, results) + finally: + c.close() + + with cnx.cursor(snowflake.connector.DictCursor) as c: + c.execute( + "select aa from {name} order by aa".format(name=db_parameters["name"]) + ) + results = [] + for rec in c: + results.append(rec["AA"]) + _check_results(c, results) + + +def test_insert_and_select_by_separate_connection(conn, db_parameters): + """Inserts a record and select it by a separate connection.""" + with conn() as cnx: + result = cnx.cursor().execute( + "insert into {name}(aa) values({value})".format( + name=db_parameters["name"], value="1234" + ) + ) + cnt = 0 + for rec in result: + cnt += int(rec[0]) + assert cnt == 1, "wrong number of records were inserted" + assert result.rowcount == 1, "wrong number of records were inserted" + + cnx2 = snowflake.connector.connect( + user=db_parameters["user"], + password=db_parameters["password"], + host=db_parameters["host"], + port=db_parameters["port"], + account=db_parameters["account"], + database=db_parameters["database"], + schema=db_parameters["schema"], + protocol=db_parameters["protocol"], + timezone="UTC", + ) + try: + c = cnx2.cursor() + c.execute("select aa from {name}".format(name=db_parameters["name"])) + results = [] + for rec in c: + results.append(rec[0]) + c.close() + assert results[0] == 1234, "the first result was wrong" + assert result.rowcount == 1, "wrong number of records were selected" + finally: + cnx2.close() + + +def _total_milliseconds_from_timedelta(td): + """Returns the total number of milliseconds contained in the duration object.""" + return (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) // 10**3 + + +def _total_seconds_from_timedelta(td): + """Returns the total number of seconds contained in the duration object.""" + return _total_milliseconds_from_timedelta(td) // 10**3 + + +def test_insert_timestamp_select(conn, db_parameters): + """Inserts and gets timestamp, timestamp with tz, date, and time. + + Notes: + Currently the session parameter TIMEZONE is ignored. 
+ """ + PST_TZ = "America/Los_Angeles" + JST_TZ = "Asia/Tokyo" + current_timestamp = datetime.utcnow() + current_timestamp = current_timestamp.replace(tzinfo=pytz.timezone(PST_TZ)) + current_date = current_timestamp.date() + current_time = current_timestamp.time() + + other_timestamp = current_timestamp.replace(tzinfo=pytz.timezone(JST_TZ)) + + with conn() as cnx: + cnx.cursor().execute("alter session set TIMEZONE=%s", (PST_TZ,)) + c = cnx.cursor() + try: + fmt = ( + "insert into {name}(aa, tsltz, tstz, tsntz, dt, tm) " + "values(%(value)s,%(tsltz)s, %(tstz)s, %(tsntz)s, " + "%(dt)s, %(tm)s)" + ) + c.execute( + fmt.format(name=db_parameters["name"]), + { + "value": 1234, + "tsltz": current_timestamp, + "tstz": other_timestamp, + "tsntz": current_timestamp, + "dt": current_date, + "tm": current_time, + }, + ) + cnt = 0 + for rec in c: + cnt += int(rec[0]) + assert cnt == 1, "wrong number of records were inserted" + assert c.rowcount == 1, "wrong number of records were selected" + finally: + c.close() + + cnx2 = snowflake.connector.connect( + user=db_parameters["user"], + password=db_parameters["password"], + host=db_parameters["host"], + port=db_parameters["port"], + account=db_parameters["account"], + database=db_parameters["database"], + schema=db_parameters["schema"], + protocol=db_parameters["protocol"], + timezone="UTC", + ) + try: + c = cnx2.cursor() + c.execute( + "select aa, tsltz, tstz, tsntz, dt, tm from {name}".format( + name=db_parameters["name"] + ) + ) + + result_numeric_value = [] + result_timestamp_value = [] + result_other_timestamp_value = [] + result_ntz_timestamp_value = [] + result_date_value = [] + result_time_value = [] + + for (aa, ts, tstz, tsntz, dt, tm) in c: + result_numeric_value.append(aa) + result_timestamp_value.append(ts) + result_other_timestamp_value.append(tstz) + result_ntz_timestamp_value.append(tsntz) + result_date_value.append(dt) + result_time_value.append(tm) + c.close() + assert result_numeric_value[0] == 1234, "the integer result was wrong" + + td_diff = _total_milliseconds_from_timedelta( + current_timestamp - result_timestamp_value[0] + ) + assert td_diff == 0, "the timestamp result was wrong" + + td_diff = _total_milliseconds_from_timedelta( + other_timestamp - result_other_timestamp_value[0] + ) + assert td_diff == 0, "the other timestamp result was wrong" + + td_diff = _total_milliseconds_from_timedelta( + current_timestamp.replace(tzinfo=None) - result_ntz_timestamp_value[0] + ) + assert td_diff == 0, "the other timestamp result was wrong" + + assert current_date == result_date_value[0], "the date result was wrong" + + assert current_time == result_time_value[0], "the time result was wrong" + + desc = c.description + assert len(desc) == 6, "invalid number of column meta data" + assert desc[0][0].upper() == "AA", "invalid column name" + assert desc[1][0].upper() == "TSLTZ", "invalid column name" + assert desc[2][0].upper() == "TSTZ", "invalid column name" + assert desc[3][0].upper() == "TSNTZ", "invalid column name" + assert desc[4][0].upper() == "DT", "invalid column name" + assert desc[5][0].upper() == "TM", "invalid column name" + assert ( + constants.FIELD_ID_TO_NAME[desc[0][1]] == "FIXED" + ), f"invalid column name: {constants.FIELD_ID_TO_NAME[desc[0][1]]}" + assert ( + constants.FIELD_ID_TO_NAME[desc[1][1]] == "TIMESTAMP_LTZ" + ), "invalid column name" + assert ( + constants.FIELD_ID_TO_NAME[desc[2][1]] == "TIMESTAMP_TZ" + ), "invalid column name" + assert ( + constants.FIELD_ID_TO_NAME[desc[3][1]] == "TIMESTAMP_NTZ" + ), "invalid 
column name" + assert constants.FIELD_ID_TO_NAME[desc[4][1]] == "DATE", "invalid column name" + assert constants.FIELD_ID_TO_NAME[desc[5][1]] == "TIME", "invalid column name" + finally: + cnx2.close() + + +def test_insert_timestamp_ltz(conn, db_parameters): + """Inserts and retrieve timestamp ltz.""" + tzstr = "America/New_York" + # sync with the session parameter + with conn() as cnx: + cnx.cursor().execute(f"alter session set timezone='{tzstr}'") + + current_time = datetime.now() + current_time = current_time.replace(tzinfo=pytz.timezone(tzstr)) + + c = cnx.cursor() + try: + fmt = "insert into {name}(aa, tsltz) values(%(value)s,%(ts)s)" + c.execute( + fmt.format(name=db_parameters["name"]), + { + "value": 8765, + "ts": current_time, + }, + ) + cnt = 0 + for rec in c: + cnt += int(rec[0]) + assert cnt == 1, "wrong number of records were inserted" + finally: + c.close() + + try: + c = cnx.cursor() + c.execute("select aa,tsltz from {name}".format(name=db_parameters["name"])) + result_numeric_value = [] + result_timestamp_value = [] + for (aa, ts) in c: + result_numeric_value.append(aa) + result_timestamp_value.append(ts) + + td_diff = _total_milliseconds_from_timedelta( + current_time - result_timestamp_value[0] + ) + + assert td_diff == 0, "the first result was wrong" + finally: + c.close() + + +def test_struct_time(conn, db_parameters): + """Binds struct_time object for updating timestamp.""" + tzstr = "America/New_York" + os.environ["TZ"] = tzstr + if not IS_WINDOWS: + time.tzset() + test_time = time.strptime("30 Sep 01 11:20:30", "%d %b %y %H:%M:%S") + + with conn() as cnx: + c = cnx.cursor() + try: + fmt = "insert into {name}(aa, tsltz) values(%(value)s,%(ts)s)" + c.execute( + fmt.format(name=db_parameters["name"]), + { + "value": 87654, + "ts": test_time, + }, + ) + cnt = 0 + for rec in c: + cnt += int(rec[0]) + finally: + c.close() + os.environ["TZ"] = "UTC" + if not IS_WINDOWS: + time.tzset() + assert cnt == 1, "wrong number of records were inserted" + + try: + result = cnx.cursor().execute( + "select aa, tsltz from {name}".format(name=db_parameters["name"]) + ) + for (_, _tsltz) in result: + pass + + _tsltz -= _tsltz.tzinfo.utcoffset(_tsltz) + + assert test_time.tm_year == _tsltz.year, "Year didn't match" + assert test_time.tm_mon == _tsltz.month, "Month didn't match" + assert test_time.tm_mday == _tsltz.day, "Day didn't match" + assert test_time.tm_hour == _tsltz.hour, "Hour didn't match" + assert test_time.tm_min == _tsltz.minute, "Minute didn't match" + assert test_time.tm_sec == _tsltz.second, "Second didn't match" + finally: + os.environ["TZ"] = "UTC" + if not IS_WINDOWS: + time.tzset() + + +def test_insert_binary_select(conn, db_parameters): + """Inserts and get a binary value.""" + value = b"\x00\xFF\xA1\xB2\xC3" + + with conn() as cnx: + c = cnx.cursor() + try: + fmt = "insert into {name}(b) values(%(b)s)" + c.execute(fmt.format(name=db_parameters["name"]), {"b": value}) + count = sum(int(rec[0]) for rec in c) + assert count == 1, "wrong number of records were inserted" + assert c.rowcount == 1, "wrong number of records were selected" + finally: + c.close() + + cnx2 = snowflake.connector.connect( + user=db_parameters["user"], + password=db_parameters["password"], + host=db_parameters["host"], + port=db_parameters["port"], + account=db_parameters["account"], + database=db_parameters["database"], + schema=db_parameters["schema"], + protocol=db_parameters["protocol"], + ) + try: + c = cnx2.cursor() + c.execute("select b from {name}".format(name=db_parameters["name"])) + + 
results = [b for (b,) in c] + assert value == results[0], "the binary result was wrong" + + desc = c.description + assert len(desc) == 1, "invalid number of column meta data" + assert desc[0][0].upper() == "B", "invalid column name" + assert constants.FIELD_ID_TO_NAME[desc[0][1]] == "BINARY", "invalid column name" + finally: + cnx2.close() + + +def test_insert_binary_select_with_bytearray(conn, db_parameters): + """Inserts and get a binary value using the bytearray type.""" + value = bytearray(b"\x00\xFF\xA1\xB2\xC3") + + with conn() as cnx: + c = cnx.cursor() + try: + fmt = "insert into {name}(b) values(%(b)s)" + c.execute(fmt.format(name=db_parameters["name"]), {"b": value}) + count = sum(int(rec[0]) for rec in c) + assert count == 1, "wrong number of records were inserted" + assert c.rowcount == 1, "wrong number of records were selected" + finally: + c.close() + + cnx2 = snowflake.connector.connect( + user=db_parameters["user"], + password=db_parameters["password"], + host=db_parameters["host"], + port=db_parameters["port"], + account=db_parameters["account"], + database=db_parameters["database"], + schema=db_parameters["schema"], + protocol=db_parameters["protocol"], + ) + try: + c = cnx2.cursor() + c.execute("select b from {name}".format(name=db_parameters["name"])) + + results = [b for (b,) in c] + assert bytes(value) == results[0], "the binary result was wrong" + + desc = c.description + assert len(desc) == 1, "invalid number of column meta data" + assert desc[0][0].upper() == "B", "invalid column name" + assert constants.FIELD_ID_TO_NAME[desc[0][1]] == "BINARY", "invalid column name" + finally: + cnx2.close() + + +def test_variant(conn, db_parameters): + """Variant including JSON object.""" + name_variant = db_parameters["name"] + "_variant" + with conn() as cnx: + cnx.cursor().execute( + """ +create table {name} ( +created_at timestamp, data variant) +""".format( + name=name_variant + ) + ) + + try: + with conn() as cnx: + current_time = datetime.now() + c = cnx.cursor() + try: + fmt = ( + "insert into {name}(created_at, data) " + "select column1, parse_json(column2) " + "from values(%(created_at)s, %(data)s)" + ) + c.execute( + fmt.format(name=name_variant), + { + "created_at": current_time, + "data": ( + '{"SESSION-PARAMETERS":{' + '"TIMEZONE":"UTC", "SPECIAL_FLAG":true}}' + ), + }, + ) + cnt = 0 + for rec in c: + cnt += int(rec[0]) + assert cnt == 1, "wrong number of records were inserted" + assert c.rowcount == 1, "wrong number of records were inserted" + finally: + c.close() + + result = cnx.cursor().execute( + f"select created_at, data from {name_variant}" + ) + _, data = result.fetchone() + data = json.loads(data) + assert data["SESSION-PARAMETERS"]["SPECIAL_FLAG"], ( + "JSON data should be parsed properly. 
" "Invalid JSON data" + ) + finally: + with conn() as cnx: + cnx.cursor().execute(f"drop table {name_variant}") + + +@pytest.mark.skipolddriver +def test_geography(conn, db_parameters): + """Variant including JSON object.""" + name_geo = random_string(5, "test_geography_") + with conn() as cnx: + cnx.cursor().execute( + f"""\ +create table {name_geo} (geo geography) +""" + ) + cnx.cursor().execute( + f"""\ +insert into {name_geo} values ('POINT(0 0)'), ('LINESTRING(1 1, 2 2)') +""" + ) + expected_data = [ + {"coordinates": [0, 0], "type": "Point"}, + {"coordinates": [[1, 1], [2, 2]], "type": "LineString"}, + ] + + try: + with conn() as cnx: + c = cnx.cursor() + c.execute("alter session set GEOGRAPHY_OUTPUT_FORMAT='geoJson'") + + # Test with GEOGRAPHY return type + result = c.execute(f"select * from {name_geo}") + metadata = result.description + assert FIELD_ID_TO_NAME[metadata[0].type_code] == "GEOGRAPHY" + data = result.fetchall() + for raw_data in data: + row = json.loads(raw_data[0]) + assert row in expected_data + finally: + with conn() as cnx: + cnx.cursor().execute(f"drop table {name_geo}") + + +def test_callproc(conn_cnx): + """Callproc test. + + Notes: + It's a nop as of now. + """ + with conn_cnx() as cnx: + with pytest.raises(errors.NotSupportedError): + cnx.cursor().callproc("whatever the stored procedure") + + +def test_invalid_bind_data_type(conn_cnx): + """Invalid bind data type.""" + with conn_cnx() as cnx: + with pytest.raises(errors.ProgrammingError): + cnx.cursor().execute("select 1 from dual where 1=%s", ([1, 2, 3],)) + + +def test_timeout_query(conn_cnx): + with conn_cnx() as cnx: + with cnx.cursor() as c: + with pytest.raises(errors.ProgrammingError) as err: + c.execute( + "select seq8() as c1 from table(generator(timeLimit => 60))", + timeout=5, + ) + assert err.value.errno == 604, "Invalid error code" + + +def test_executemany(conn, db_parameters): + """Executes many statements. Client binding is supported by either dict, or list data types. + + Notes: + The binding data type is dict and tuple, respectively. 
+ """ + with conn() as cnx: + c = cnx.cursor() + fmt = "insert into {name}(aa) values(%(value)s)".format( + name=db_parameters["name"] + ) + c.executemany( + fmt, + [ + {"value": "1234"}, + {"value": "234"}, + {"value": "34"}, + {"value": "4"}, + ], + ) + cnt = 0 + for rec in c: + cnt += int(rec[0]) + assert cnt == 4, "number of records" + assert c.rowcount == 4, "wrong number of records were inserted" + c.close() + + c = cnx.cursor() + fmt = "insert into {name}(aa) values(%s)".format(name=db_parameters["name"]) + c.executemany( + fmt, + [ + (12345,), + (1234,), + (234,), + (34,), + (4,), + ], + ) + rec = c.fetchone() + assert rec[0] == 5, "number of records" + assert c.rowcount == 5, "wrong number of records were inserted" + c.close() + + +@pytest.mark.skipolddriver +def test_executemany_qmark_types(conn, db_parameters): + table_name = random_string(5, "date_test_") + with conn(paramstyle="qmark") as cnx: + with cnx.cursor() as cur: + cur.execute(f"create table {table_name} (birth_date date)") + + insert_qy = f"INSERT INTO {table_name} (birth_date) values (?)" + date_1, date_2 = date(1969, 2, 7), date(1969, 1, 1) + + try: + # insert two dates, one in tuple format which specifies + # the snowflake type similar to how we support it in this + # example: + # https://docs.snowflake.com/en/user-guide/python-connector-example.html#using-qmark-or-numeric-binding-with-datetime-objects + cur.executemany( + insert_qy, + [[date_1], [("DATE", date_2)]], + ) + + cur.execute(f"select * from {table_name}") + inserted_dates = [row[0] for row in cur.fetchall()] + assert date_1 in inserted_dates + assert date_2 in inserted_dates + finally: + cur.execute(f"drop table if exists {table_name}") + + +@pytest.mark.skipolddriver +def test_executemany_params_iterator(conn): + """Cursor.executemany() works with an interator of params.""" + table_name = random_string(5, "executemany_params_iterator") + with conn() as cnx: + c = cnx.cursor() + c.execute(f"create temp table {table_name}(bar integer)") + fmt = f"insert into {table_name}(bar) values(%(value)s)" + c.executemany(fmt, ({"value": x} for x in ("1234", "234", "34", "4"))) + cnt = 0 + for rec in c: + cnt += int(rec[0]) + assert cnt == 4, "number of records" + assert c.rowcount == 4, "wrong number of records were inserted" + c.close() + + c = cnx.cursor() + fmt = f"insert into {table_name}(bar) values(%s)" + c.executemany(fmt, ((x,) for x in (12345, 1234, 234, 34, 4))) + rec = c.fetchone() + assert rec[0] == 5, "number of records" + assert c.rowcount == 5, "wrong number of records were inserted" + c.close() + + +@pytest.mark.skipolddriver +def test_executemany_empty_params(conn): + """Cursor.executemany() does nothing if params is empty.""" + table_name = random_string(5, "executemany_empty_params") + with conn() as cnx: + c = cnx.cursor() + # The table isn't created, so if this were executed, it would error. + fmt = f"insert into {table_name}(aa) values(%(value)s)" + c.executemany(fmt, []) + assert c.query is None + c.close() + + +@pytest.mark.skipolddriver( + reason="old driver raises DatabaseError instead of InterfaceError" +) +def test_closed_cursor(conn, db_parameters): + """Attempts to use the closed cursor. It should raise errors. + + Notes: + The binding data type is scalar. 
+ """ + with conn() as cnx: + c = cnx.cursor() + fmt = "insert into {name}(aa) values(%s)".format(name=db_parameters["name"]) + c.executemany( + fmt, + [ + 12345, + 1234, + 234, + 34, + 4, + ], + ) + rec = c.fetchone() + assert rec[0] == 5, "number of records" + assert c.rowcount == 5, "number of records" + c.close() + + fmt = "select aa from {name}".format(name=db_parameters["name"]) + with pytest.raises(InterfaceError, match="Cursor is closed in execute") as err: + c.execute(fmt) + assert err.value.errno == errorcode.ER_CURSOR_IS_CLOSED + + +def test_fetchmany(conn, db_parameters): + with conn() as cnx: + c = cnx.cursor() + fmt = "insert into {name}(aa) values(%(value)s)".format( + name=db_parameters["name"] + ) + c.executemany( + fmt, + [ + {"value": "3456789"}, + {"value": "234567"}, + {"value": "1234"}, + {"value": "234"}, + {"value": "34"}, + {"value": "4"}, + ], + ) + cnt = 0 + for rec in c: + cnt += int(rec[0]) + assert cnt == 6, "number of records" + assert c.rowcount == 6, "number of records" + c.close() + + c = cnx.cursor() + fmt = "select aa from {name} order by aa desc".format( + name=db_parameters["name"] + ) + c.execute(fmt) + + rows = c.fetchmany(2) + assert len(rows) == 2, "The number of records" + assert rows[1][0] == 234567, "The second record" + + rows = c.fetchmany(1) + assert len(rows) == 1, "The number of records" + assert rows[0][0] == 1234, "The first record" + + rows = c.fetchmany(5) + assert len(rows) == 3, "The number of records" + assert rows[-1][0] == 4, "The last record" + + rows = c.fetchmany(15) + assert len(rows) == 0, "The number of records" + + c.close() + + +def test_process_params(conn, db_parameters): + """Binds variables for insert and other queries.""" + with conn() as cnx: + c = cnx.cursor() + fmt = "insert into {name}(aa) values(%(value)s)".format( + name=db_parameters["name"] + ) + c.executemany( + fmt, + [ + {"value": "3456789"}, + {"value": "234567"}, + {"value": "1234"}, + {"value": "234"}, + {"value": "34"}, + {"value": "4"}, + ], + ) + cnt = 0 + for rec in c: + cnt += int(rec[0]) + c.close() + assert cnt == 6, "number of records" + + fmt = "select count(aa) from {name} where aa > %(value)s".format( + name=db_parameters["name"] + ) + + c = cnx.cursor() + c.execute(fmt, {"value": 1233}) + for (_cnt,) in c: + pass + assert _cnt == 3, "the number of records" + c.close() + + fmt = "select count(aa) from {name} where aa > %s".format( + name=db_parameters["name"] + ) + c = cnx.cursor() + c.execute(fmt, (1234,)) + for (_cnt,) in c: + pass + assert _cnt == 2, "the number of records" + c.close() + + +@pytest.mark.skipolddriver +@pytest.mark.parametrize( + ("interpolate_empty_sequences", "expected_outcome"), [(False, "%%s"), (True, "%s")] +) +def test_process_params_empty(conn_cnx, interpolate_empty_sequences, expected_outcome): + """SQL is interpolated if params aren't None.""" + with conn_cnx(interpolate_empty_sequences=interpolate_empty_sequences) as cnx: + with cnx.cursor() as cursor: + cursor.execute("select '%%s'", None) + assert cursor.fetchone() == ("%%s",) + cursor.execute("select '%%s'", ()) + assert cursor.fetchone() == (expected_outcome,) + + +def test_real_decimal(conn, db_parameters): + with conn() as cnx: + c = cnx.cursor() + fmt = ("insert into {name}(aa, pct, ratio) " "values(%s,%s,%s)").format( + name=db_parameters["name"] + ) + c.execute(fmt, (9876, 12.3, decimal.Decimal("23.4"))) + for (_cnt,) in c: + pass + assert _cnt == 1, "the number of records" + c.close() + + c = cnx.cursor() + fmt = "select aa, pct, ratio from 
{name}".format(name=db_parameters["name"]) + c.execute(fmt) + for (_aa, _pct, _ratio) in c: + pass + assert _aa == 9876, "the integer value" + assert _pct == 12.3, "the float value" + assert _ratio == decimal.Decimal("23.4"), "the decimal value" + c.close() + + with cnx.cursor(snowflake.connector.DictCursor) as c: + fmt = "select aa, pct, ratio from {name}".format(name=db_parameters["name"]) + c.execute(fmt) + rec = c.fetchone() + assert rec["AA"] == 9876, "the integer value" + assert rec["PCT"] == 12.3, "the float value" + assert rec["RATIO"] == decimal.Decimal("23.4"), "the decimal value" + + +def test_none_errorhandler(conn_testaccount): + c = conn_testaccount.cursor() + with pytest.raises(errors.ProgrammingError): + c.errorhandler = None + + +def test_nope_errorhandler(conn_testaccount): + def user_errorhandler(connection, cursor, errorclass, errorvalue): + pass + + c = conn_testaccount.cursor() + c.errorhandler = user_errorhandler + c.execute("select * foooooo never_exists_table") + c.execute("select * barrrrr never_exists_table") + c.execute("select * daaaaaa never_exists_table") + assert c.messages[0][0] == errors.ProgrammingError, "One error was recorded" + assert len(c.messages) == 1, "should be one error" + + +@pytest.mark.internal +def test_binding_negative(negative_conn_cnx, db_parameters): + with negative_conn_cnx() as cnx: + with pytest.raises(TypeError): + cnx.cursor().execute( + "INSERT INTO {name}(aa) VALUES(%s)".format(name=db_parameters["name"]), + (1, 2, 3), + ) + with pytest.raises(errors.ProgrammingError): + cnx.cursor().execute( + "INSERT INTO {name}(aa) VALUES(%s)".format(name=db_parameters["name"]), + (), + ) + with pytest.raises(errors.ProgrammingError): + cnx.cursor().execute( + "INSERT INTO {name}(aa) VALUES(%s)".format(name=db_parameters["name"]), + (["a"],), + ) + + +@pytest.mark.skipolddriver +def test_execute_stores_query(conn_cnx): + with conn_cnx() as cnx: + with cnx.cursor() as cursor: + assert cursor.query is None + cursor.execute("select 1") + assert cursor.query == "select 1" + + +def test_execute_after_close(conn_testaccount): + """SNOW-13588: Raises an error if executing after the connection is closed.""" + cursor = conn_testaccount.cursor() + conn_testaccount.close() + with pytest.raises(errors.Error): + cursor.execute("show tables") + + +def test_multi_table_insert(conn, db_parameters): + try: + with conn() as cnx: + cur = cnx.cursor() + cur.execute( + """ + INSERT INTO {name}(aa) VALUES(1234),(9876),(2345) + """.format( + name=db_parameters["name"] + ) + ) + assert cur.rowcount == 3, "the number of records" + + cur.execute( + """ +CREATE OR REPLACE TABLE {name}_foo (aa_foo int) + """.format( + name=db_parameters["name"] + ) + ) + + cur.execute( + """ +CREATE OR REPLACE TABLE {name}_bar (aa_bar int) + """.format( + name=db_parameters["name"] + ) + ) + + cur.execute( + """ +INSERT ALL + INTO {name}_foo(aa_foo) VALUES(aa) + INTO {name}_bar(aa_bar) VALUES(aa) + SELECT aa FROM {name} + """.format( + name=db_parameters["name"] + ) + ) + assert cur.rowcount == 6 + finally: + with conn() as cnx: + cnx.cursor().execute( + """ +DROP TABLE IF EXISTS {name}_foo +""".format( + name=db_parameters["name"] + ) + ) + cnx.cursor().execute( + """ +DROP TABLE IF EXISTS {name}_bar +""".format( + name=db_parameters["name"] + ) + ) + + +@pytest.mark.skipif( + True, + reason=""" +Negative test case. 
+""", +) +def test_fetch_before_execute(conn_testaccount): + """SNOW-13574: Fetch before execute.""" + cursor = conn_testaccount.cursor() + with pytest.raises(errors.DataError): + cursor.fetchone() + + +def test_close_twice(conn_testaccount): + conn_testaccount.close() + conn_testaccount.close() + + +@pytest.mark.parametrize("result_format", ("arrow", "json")) +def test_fetch_out_of_range_timestamp_value(conn, result_format): + with conn() as cnx: + cur = cnx.cursor() + cur.execute( + f"alter session set python_connector_query_result_format='{result_format}'" + ) + cur.execute("select '12345-01-02'::timestamp_ntz") + with pytest.raises(errors.InterfaceError): + cur.fetchone() + + +@pytest.mark.skipolddriver +def test_null_in_non_null(conn): + table_name = random_string(5, "null_in_non_null") + error_msg = "NULL result in a non-nullable column" + with conn() as cnx: + cur = cnx.cursor() + cur.execute(f"create temp table {table_name}(bar char not null)") + with pytest.raises(errors.IntegrityError, match=error_msg): + cur.execute(f"insert into {table_name} values (null)") + + +@pytest.mark.parametrize("sql", (None, ""), ids=["None", "empty"]) +def test_empty_execution(conn, sql): + """Checks whether executing an empty string, or nothing behaves as expected.""" + with conn() as cnx: + with cnx.cursor() as cur: + if sql is not None: + cur.execute(sql) + assert cur._result is None + with pytest.raises( + TypeError, match="'NoneType' object is not( an)? itera(tor|ble)" + ): + cur.fetchone() + with pytest.raises( + TypeError, match="'NoneType' object is not( an)? itera(tor|ble)" + ): + cur.fetchall() + + +@pytest.mark.parametrize( + "reuse_results", (False, pytest.param(True, marks=pytest.mark.skipolddriver)) +) +def test_reset_fetch(conn, reuse_results): + """Tests behavior after resetting the cursor.""" + with conn(reuse_results=reuse_results) as cnx: + with cnx.cursor() as cur: + cur.execute("select 1") + cur.reset() + if reuse_results: + assert cur.fetchone() == (1,) + else: + assert cur.fetchone() is None + assert len(cur.fetchall()) == 0 + + +def test_rownumber(conn): + """Checks whether rownumber is returned as expected.""" + with conn() as cnx: + with cnx.cursor() as cur: + assert cur.execute("select * from values (1), (2)") + assert cur.rownumber is None + assert cur.fetchone() == (1,) + assert cur.rownumber == 0 + assert cur.fetchone() == (2,) + assert cur.rownumber == 1 + + +def test_values_set(conn): + """Checks whether a bunch of properties start as Nones, but get set to something else when a query was executed.""" + properties = [ + "timestamp_output_format", + "timestamp_ltz_output_format", + "timestamp_tz_output_format", + "timestamp_ntz_output_format", + "date_output_format", + "timezone", + "time_output_format", + "binary_output_format", + ] + with conn() as cnx: + with cnx.cursor() as cur: + for property in properties: + assert getattr(cur, property) is None + assert cur.execute("select 1").fetchone() == (1,) + # The default values might change in future, so let's just check that they aren't None anymore + for property in properties: + assert getattr(cur, property) is not None + + +def test_execute_helper_params_error(conn_testaccount): + """Tests whether calling _execute_helper with a non-dict statement params is handled correctly.""" + with conn_testaccount.cursor() as cur: + with pytest.raises( + ProgrammingError, + match=r"The data type of statement params is invalid. 
It must be dict.$", + ): + cur._execute_helper("select %()s", statement_params="1") + + +def test_desc_rewrite(conn, caplog): + """Tests whether describe queries are rewritten as expected and this action is logged.""" + with conn() as cnx: + with cnx.cursor() as cur: + table_name = random_string(5, "test_desc_rewrite_") + try: + cur.execute(f"create or replace table {table_name} (a int)") + caplog.set_level(logging.DEBUG, "snowflake.connector") + cur.execute(f"desc {table_name}") + assert ( + "snowflake.connector.cursor", + 20, + "query was rewritten: org=desc {table_name}, new=describe table {table_name}".format( + table_name=table_name + ), + ) in caplog.record_tuples + finally: + cur.execute(f"drop table {table_name}") + + +@pytest.mark.skipolddriver +@pytest.mark.parametrize("result_format", [False, None, "json"]) +def test_execute_helper_cannot_use_arrow(conn_cnx, caplog, result_format): + """Tests whether cannot use arrow is handled correctly inside of _execute_helper.""" + with conn_cnx() as cnx: + with cnx.cursor() as cur: + with mock.patch( + "snowflake.connector.cursor.CAN_USE_ARROW_RESULT_FORMAT", False + ): + if result_format is False: + result_format = None + else: + result_format = { + PARAMETER_PYTHON_CONNECTOR_QUERY_RESULT_FORMAT: result_format + } + caplog.set_level(logging.DEBUG, "snowflake.connector") + cur.execute("select 1", _statement_params=result_format) + assert ( + "snowflake.connector.cursor", + logging.DEBUG, + "Cannot use arrow result format, fallback to json format", + ) in caplog.record_tuples + assert cur.fetchone() == (1,) + + +@pytest.mark.skipolddriver +def test_execute_helper_cannot_use_arrow_exception(conn_cnx): + """Like test_execute_helper_cannot_use_arrow but when we are trying to force arrow an Exception should be raised.""" + with conn_cnx() as cnx: + with cnx.cursor() as cur: + with mock.patch( + "snowflake.connector.cursor.CAN_USE_ARROW_RESULT_FORMAT", False + ): + with pytest.raises( + ProgrammingError, + match="The result set in Apache Arrow format is not supported for the platform.", + ): + cur.execute( + "select 1", + _statement_params={ + PARAMETER_PYTHON_CONNECTOR_QUERY_RESULT_FORMAT: "arrow" + }, + ) + + +@pytest.mark.skipolddriver +def test_check_can_use_arrow_resultset(conn_cnx, caplog): + """Tests check_can_use_arrow_resultset has no effect when we can use arrow.""" + with conn_cnx() as cnx: + with cnx.cursor() as cur: + with mock.patch( + "snowflake.connector.cursor.CAN_USE_ARROW_RESULT_FORMAT", True + ): + caplog.set_level(logging.DEBUG, "snowflake.connector") + cur.check_can_use_arrow_resultset() + assert "Arrow" not in caplog.text + + +@pytest.mark.skipolddriver +@pytest.mark.parametrize("snowsql", [True, False]) +def test_check_cannot_use_arrow_resultset(conn_cnx, caplog, snowsql): + """Tests check_can_use_arrow_resultset expected outcomes.""" + config = {} + if snowsql: + config["application"] = "SnowSQL" + with conn_cnx(**config) as cnx: + with cnx.cursor() as cur: + with mock.patch( + "snowflake.connector.cursor.CAN_USE_ARROW_RESULT_FORMAT", False + ): + with pytest.raises( + ProgrammingError, + match="Currently SnowSQL doesn't support the result set in Apache Arrow format." 
+                    if snowsql
+                    else "The result set in Apache Arrow format is not supported for the platform.",
+                ) as pe:
+                    cur.check_can_use_arrow_resultset()
+                assert pe.value.errno == (
+                    ER_NO_PYARROW_SNOWSQL if snowsql else ER_NO_ARROW_RESULT
+                )
+
+
+@pytest.mark.skipolddriver
+def test_check_can_use_pandas(conn_cnx):
+    """Tests that check_can_use_pandas has no effect when pandas is installed."""
+    with conn_cnx() as cnx:
+        with cnx.cursor() as cur:
+            with mock.patch("snowflake.connector.cursor.installed_pandas", True):
+                cur.check_can_use_pandas()
+
+
+@pytest.mark.skipolddriver
+def test_check_cannot_use_pandas(conn_cnx):
+    """Tests check_can_use_pandas expected outcomes."""
+    with conn_cnx() as cnx:
+        with cnx.cursor() as cur:
+            with mock.patch("snowflake.connector.cursor.installed_pandas", False):
+                with pytest.raises(
+                    ProgrammingError,
+                    match=r"Optional dependency: 'pandas' is not installed, please see the "
+                    "following link for install instructions: https:.*",
+                ) as pe:
+                    cur.check_can_use_pandas()
+                assert pe.value.errno == ER_NO_PYARROW
+
+
+@pytest.mark.skipolddriver
+def test_not_supported_pandas(conn_cnx):
+    """Check that fetch_pandas functions return expected error when arrow results are not available."""
+    result_format = {PARAMETER_PYTHON_CONNECTOR_QUERY_RESULT_FORMAT: "json"}
+    with conn_cnx() as cnx:
+        with cnx.cursor() as cur:
+            cur.execute("select 1", _statement_params=result_format)
+            with mock.patch("snowflake.connector.cursor.installed_pandas", True):
+                with pytest.raises(NotSupportedError):
+                    cur.fetch_pandas_all()
+                with pytest.raises(NotSupportedError):
+                    list(cur.fetch_pandas_batches())
+
+
+def test_query_cancellation(conn_cnx):
+    """Tests whether query cancellation works."""
+    with conn_cnx() as cnx:
+        with cnx.cursor() as cur:
+            cur.execute(
+                "select max(seq8()) from table(generator(timeLimit=>30));",
+                _no_results=True,
+            )
+            sf_qid = cur.sfqid
+            cur.abort_query(sf_qid)
+
+
+def test_executemany_insert_rewrite(conn_cnx):
+    """Tests calling executemany with a non-rewritable pyformat insert query."""
+    with conn_cnx() as con:
+        with con.cursor() as cur:
+            with pytest.raises(
+                InterfaceError, match="Failed to rewrite multi-row insert"
+            ) as ie:
+                cur.executemany("insert into numbers (select 1)", [1, 2])
+            assert ie.value.errno == ER_FAILED_TO_REWRITE_MULTI_ROW_INSERT
+
+
+def test_executemany_bulk_insert_size_mismatch(conn_cnx):
+    """Tests the bulk insert error raised when parameter rows differ in length."""
+    with conn_cnx(paramstyle="qmark") as con:
+        with con.cursor() as cur:
+            with pytest.raises(
+                InterfaceError, match="Bulk data size don't match. expected: 1, got: 2"
+            ) as ie:
+                cur.executemany("insert into numbers values (?,?)", [[1], [1, 2]])
+            assert ie.value.errno == ER_FAILED_TO_REWRITE_MULTI_ROW_INSERT
+
+
+def test_fetchmany_size_error(conn_cnx):
+    """Tests retrieving a negative number of results."""
+    with conn_cnx() as con:
+        with con.cursor() as cur:
+            cur.execute("select 1")
+            with pytest.raises(
+                ProgrammingError,
+                match="The number of rows is not zero or positive number: -1",
+            ) as ie:
+                cur.fetchmany(-1)
+            assert ie.value.errno == ER_NOT_POSITIVE_SIZE
+
+
+def test_nextset(conn_cnx, caplog):
+    """Tests the no-op function nextset."""
+    caplog.set_level(logging.DEBUG, "snowflake.connector")
+    with conn_cnx() as con:
+        with con.cursor() as cur:
+            caplog.set_level(logging.DEBUG, "snowflake.connector")
+            assert cur.nextset() is None
+    assert ("snowflake.connector.cursor", logging.DEBUG, "nop") in caplog.record_tuples
+
+
+def test_scroll(conn_cnx):
+    """Tests that scroll raises a NotSupportedError."""
+    with conn_cnx() as con:
+        with con.cursor() as cur:
+            with pytest.raises(
+                NotSupportedError, match="scroll is not supported."
+            ) as nse:
+                cur.scroll(2)
+            assert nse.value.errno == SQLSTATE_FEATURE_NOT_SUPPORTED
+
+
+@pytest.mark.skipolddriver
+def test__log_telemetry_job_data(conn_cnx, caplog):
+    """Tests whether we handle a missing connection object correctly while logging a telemetry event."""
+    with conn_cnx() as con:
+        with con.cursor() as cur:
+            with mock.patch.object(cur, "_connection", None):
+                caplog.set_level(logging.DEBUG, "snowflake.connector")
+                cur._log_telemetry_job_data(
+                    TelemetryField.ARROW_FETCH_ALL, True
+                )  # dummy value
+            assert (
+                "snowflake.connector.cursor",
+                logging.WARNING,
+                "Cursor failed to log to telemetry. Connection object may be None.",
+            ) in caplog.record_tuples
+
+
+@pytest.mark.skipolddriver(reason="new feature in v2.5.0")
+@pytest.mark.parametrize(
+    "result_format,expected_chunk_type",
+    (
+        ("json", JSONResultBatch),
+        ("arrow", ArrowResultBatch),
+    ),
+)
+def test_resultbatch(
+    conn_cnx,
+    result_format,
+    expected_chunk_type,
+    capture_sf_telemetry,
+):
+    """This test checks the following things:
+    1. After executing a query can we pickle the result batches
+    2. When we get the batches, do we emit a telemetry log
+    3. Whether we can iterate through ResultBatches multiple times
+    4. Whether the results make sense
+    5. 
See whether getter functions are working + """ + rowcount = 100000 + with conn_cnx( + session_parameters={ + "python_connector_query_result_format": result_format, + } + ) as con: + with capture_sf_telemetry.patch_connection(con) as telemetry_data: + with con.cursor() as cur: + cur.execute( + f"select seq4() from table(generator(rowcount => {rowcount}));" + ) + assert cur._result_set.total_row_index() == rowcount + pre_pickle_partitions = cur.get_result_batches() + assert len(pre_pickle_partitions) > 1 + assert pre_pickle_partitions is not None + assert all( + isinstance(p, expected_chunk_type) for p in pre_pickle_partitions + ) + pickle_str = pickle.dumps(pre_pickle_partitions) + assert any( + t.message["type"] == TelemetryField.GET_PARTITIONS_USED.value + for t in telemetry_data.records + ) + post_pickle_partitions: list[ResultBatch] = pickle.loads(pickle_str) + total_rows = 0 + # Make sure the batches can be iterated over individually + for i, partition in enumerate(post_pickle_partitions): + # Tests whether the getter functions are working + if i == 0: + assert partition.compressed_size is None + assert partition.uncompressed_size is None + else: + assert partition.compressed_size is not None + assert partition.uncompressed_size is not None + for row in partition: + col1 = row[0] + assert col1 == total_rows + total_rows += 1 + assert total_rows == rowcount + total_rows = 0 + # Make sure the batches can be iterated over again + for partition in post_pickle_partitions: + for row in partition: + col1 = row[0] + assert col1 == total_rows + total_rows += 1 + assert total_rows == rowcount + + +@pytest.mark.skipolddriver(reason="new feature in v2.5.0") +@pytest.mark.parametrize( + "result_format,patch_path", + ( + ("json", "snowflake.connector.result_batch.JSONResultBatch.create_iter"), + ("arrow", "snowflake.connector.result_batch.ArrowResultBatch.create_iter"), + ), +) +def test_resultbatch_lazy_fetching_and_schemas(conn_cnx, result_format, patch_path): + """Tests whether pre-fetching results chunks fetches the right amount of them.""" + rowcount = 1000000 # We need at least 5 chunks for this test + with conn_cnx( + session_parameters={ + "python_connector_query_result_format": result_format, + } + ) as con: + with con.cursor() as cur: + # Dummy return value necessary to not iterate through every batch with + # first fetchone call + + downloads = [iter([(i,)]) for i in range(10)] + + with mock.patch( + patch_path, + side_effect=downloads, + ) as patched_download: + cur.execute( + f"select seq4() as c1, randstr(1,random()) as c2 " + f"from table(generator(rowcount => {rowcount}));" + ) + result_batches = cur.get_result_batches() + batch_schemas = [batch.schema for batch in result_batches] + for schema in batch_schemas: + # all batches should have the same schema + assert schema == [ + ResultMetadata("C1", 0, None, None, 10, 0, False), + ResultMetadata("C2", 2, None, 16777216, None, None, False), + ] + assert patched_download.call_count == 0 + assert len(result_batches) > 5 + assert result_batches[0]._local # Sanity check first chunk being local + cur.fetchone() # Trigger pre-fetching + + # While the first chunk is local we still call _download on it, which + # short circuits and just parses (for JSON batches) and then returns + # an iterator through that data, so we expect the call count to be 5. 
+ # (0 local and 1, 2, 3, 4 pre-fetched) = 5 total + start_time = time.time() + while time.time() < start_time + 1: + if patched_download.call_count == 5: + break + else: + assert patched_download.call_count == 5 + + +@pytest.mark.skipolddriver(reason="new feature in v2.5.0") +@pytest.mark.parametrize("result_format", ["json", "arrow"]) +def test_resultbatch_schema_exists_when_zero_rows(conn_cnx, result_format): + with conn_cnx( + session_parameters={"python_connector_query_result_format": result_format} + ) as con: + with con.cursor() as cur: + cur.execute( + "select seq4() as c1, randstr(1,random()) as c2 from table(generator(rowcount => 1)) where 1=0" + ) + result_batches = cur.get_result_batches() + # verify there is 1 batch and 0 rows in that batch + assert len(result_batches) == 1 + assert result_batches[0].rowcount == 0 + # verify that the schema is correct + schema = result_batches[0].schema + assert schema == [ + ResultMetadata("C1", 0, None, None, 10, 0, False), + ResultMetadata("C2", 2, None, 16777216, None, None, False), + ] + + +@pytest.mark.skipolddriver +def test_optional_telemetry(conn_cnx, capture_sf_telemetry): + """Make sure that we do not fail when _first_chunk_time is not present in cursor.""" + with conn_cnx() as con: + with con.cursor() as cur: + with capture_sf_telemetry.patch_connection(con, False) as telemetry: + cur.execute("select 1;") + cur._first_chunk_time = None + assert cur.fetchall() == [ + (1,), + ] + assert not any( + r.message.get("type", "") + == TelemetryField.TIME_CONSUME_LAST_RESULT.value + for r in telemetry.records + ) + + +@pytest.mark.parametrize("result_format", ("json", "arrow")) +@pytest.mark.parametrize("cursor_type", (SnowflakeCursor, DictCursor)) +@pytest.mark.parametrize("fetch_method", ("__next__", "fetchone")) +def test_out_of_range_year(conn_cnx, result_format, cursor_type, fetch_method): + """Tests whether the year 10000 is out of range exception is raised as expected.""" + with conn_cnx( + session_parameters={ + PARAMETER_PYTHON_CONNECTOR_QUERY_RESULT_FORMAT: result_format + } + ) as con: + with con.cursor(cursor_type) as cur: + cur.execute( + "select * from VALUES (1, TO_TIMESTAMP('9999-01-01 00:00:00')), (2, TO_TIMESTAMP('10000-01-01 00:00:00'))" + ) + iterate_obj = cur if fetch_method == "fetchone" else iter(cur) + fetch_next_fn = getattr(iterate_obj, fetch_method) + # first fetch doesn't raise error + fetch_next_fn() + with pytest.raises( + InterfaceError, + match="date value out of range" + if IS_WINDOWS + else "year 10000 is out of range", + ): + fetch_next_fn() + + +@pytest.mark.skipolddriver +def test_describe(conn_cnx): + with conn_cnx() as con: + with con.cursor() as cur: + table_name = random_string(5, "test_describe_") + # test select + description = cur.describe( + "select * from VALUES(1, 3.1415926, 'snow', TO_TIMESTAMP('2021-01-01 00:00:00'))" + ) + assert description is not None + column_types = [column[1] for column in description] + assert constants.FIELD_ID_TO_NAME[column_types[0]] == "FIXED" + assert constants.FIELD_ID_TO_NAME[column_types[1]] == "FIXED" + assert constants.FIELD_ID_TO_NAME[column_types[2]] == "TEXT" + assert "TIMESTAMP" in constants.FIELD_ID_TO_NAME[column_types[3]] + assert len(cur.fetchall()) == 0 + + # test insert + cur.execute(f"create table {table_name} (aa int)") + try: + description = cur.describe( + "insert into {name}(aa) values({value})".format( + name=table_name, value="1234" + ) + ) + assert description[0][0] == "number of rows inserted" + assert cur.rowcount is None + finally: + 
cur.execute(f"drop table if exists {table_name}") + + +@pytest.mark.skipolddriver +def test_fetch_batches_with_sessions(conn_cnx): + rowcount = 250_000 + with conn_cnx() as con: + with con.cursor() as cur: + cur.execute( + f"select seq4() as foo from table(generator(rowcount=>{rowcount}))" + ) + + num_batches = len(cur.get_result_batches()) + + with mock.patch( + "snowflake.connector.network.SnowflakeRestful._use_requests_session", + side_effect=con._rest._use_requests_session, + ) as get_session_mock: + result = cur.fetchall() + # all but one batch is downloaded using a session + assert get_session_mock.call_count == num_batches - 1 + assert len(result) == rowcount + + +@pytest.mark.skipolddriver +def test_null_connection(conn_cnx): + with conn_cnx() as con: + with con.cursor() as cur: + cur.execute_async( + "select seq4() as c from table(generator(rowcount=>50000))" + ) + con.rest.delete_session() + status = con.get_query_status(cur.sfqid) + assert status == QueryStatus.FAILED_WITH_ERROR + assert con.is_an_error(status) diff --git a/test/integ/test_cursor_binding.py b/test/integ/test_cursor_binding.py new file mode 100644 index 000000000..e93d6ba3a --- /dev/null +++ b/test/integ/test_cursor_binding.py @@ -0,0 +1,168 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +import pytest + +from snowflake.connector.errors import ProgrammingError + + +def test_binding_security(conn_cnx, db_parameters): + """SQL Injection Tests.""" + try: + with conn_cnx() as cnx: + cnx.cursor().execute( + "CREATE OR REPLACE TABLE {name} " + "(aa INT, bb STRING)".format(name=db_parameters["name"]) + ) + cnx.cursor().execute( + "INSERT INTO {name} VALUES(%s, %s)".format(name=db_parameters["name"]), + (1, "test1"), + ) + cnx.cursor().execute( + "INSERT INTO {name} VALUES(%(aa)s, %(bb)s)".format( + name=db_parameters["name"] + ), + {"aa": 2, "bb": "test2"}, + ) + for _rec in cnx.cursor().execute( + "SELECT * FROM {name} ORDER BY 1 DESC".format( + name=db_parameters["name"] + ) + ): + break + assert _rec[0] == 2, "First column" + assert _rec[1] == "test2", "Second column" + for _rec in cnx.cursor().execute( + "SELECT * FROM {name} WHERE aa=%s".format(name=db_parameters["name"]), + (1,), + ): + break + assert _rec[0] == 1, "First column" + assert _rec[1] == "test1", "Second column" + + # SQL injection safe test + # Good Example + with pytest.raises(ProgrammingError): + cnx.cursor().execute( + "SELECT * FROM {name} WHERE aa=%s".format( + name=db_parameters["name"] + ), + ("1 or aa>0",), + ) + + with pytest.raises(ProgrammingError): + cnx.cursor().execute( + "SELECT * FROM {name} WHERE aa=%(aa)s".format( + name=db_parameters["name"] + ), + {"aa": "1 or aa>0"}, + ) + + # Bad Example in application. DON'T DO THIS + c = cnx.cursor() + c.execute( + "SELECT * FROM {name} WHERE aa=%s".format(name=db_parameters["name"]) + % ("1 or aa>0",) + ) + rec = c.fetchall() + assert len(rec) == 2, "not raising error unlike the previous one." 
+ finally: + with conn_cnx() as cnx: + cnx.cursor().execute( + "drop table if exists {name}".format(name=db_parameters["name"]) + ) + + +def test_binding_list(conn_cnx, db_parameters): + """SQL binding list type for IN.""" + try: + with conn_cnx() as cnx: + cnx.cursor().execute( + "CREATE OR REPLACE TABLE {name} " + "(aa INT, bb STRING)".format(name=db_parameters["name"]) + ) + cnx.cursor().execute( + "INSERT INTO {name} VALUES(%s, %s)".format(name=db_parameters["name"]), + (1, "test1"), + ) + cnx.cursor().execute( + "INSERT INTO {name} VALUES(%(aa)s, %(bb)s)".format( + name=db_parameters["name"] + ), + {"aa": 2, "bb": "test2"}, + ) + cnx.cursor().execute( + "INSERT INTO {name} VALUES(3, 'test3')".format( + name=db_parameters["name"] + ) + ) + for _rec in cnx.cursor().execute( + """ +SELECT * FROM {name} WHERE aa IN (%s) ORDER BY 1 DESC +""".format( + name=db_parameters["name"] + ), + ([1, 3],), + ): + break + assert _rec[0] == 3, "First column" + assert _rec[1] == "test3", "Second column" + + for _rec in cnx.cursor().execute( + "SELECT * FROM {name} WHERE aa=%s".format(name=db_parameters["name"]), + (1,), + ): + break + assert _rec[0] == 1, "First column" + assert _rec[1] == "test1", "Second column" + + cnx.cursor().execute( + """ +SELECT * FROM {name} WHERE aa IN (%s) ORDER BY 1 DESC +""".format( + name=db_parameters["name"] + ), + ((1,),), + ) + + finally: + with conn_cnx() as cnx: + cnx.cursor().execute( + "drop table if exists {name}".format(name=db_parameters["name"]) + ) + + +@pytest.mark.internal +def test_unsupported_binding(negative_conn_cnx, db_parameters): + """Unsupported data binding.""" + try: + with negative_conn_cnx() as cnx: + cnx.cursor().execute( + "CREATE OR REPLACE TABLE {name} " + "(aa INT, bb STRING)".format(name=db_parameters["name"]) + ) + cnx.cursor().execute( + "INSERT INTO {name} VALUES(%s, %s)".format(name=db_parameters["name"]), + (1, "test1"), + ) + + sql = "select count(*) from {name} where aa=%s".format( + name=db_parameters["name"] + ) + + with cnx.cursor() as cur: + rec = cur.execute(sql, (1,)).fetchone() + assert rec[0] is not None, "no value is returned" + + # dict + with pytest.raises(ProgrammingError): + cnx.cursor().execute(sql, ({"value": 1},)) + finally: + with negative_conn_cnx() as cnx: + cnx.cursor().execute( + "drop table if exists {name}".format(name=db_parameters["name"]) + ) diff --git a/test/integ/test_cursor_context_manager.py b/test/integ/test_cursor_context_manager.py new file mode 100644 index 000000000..689c37293 --- /dev/null +++ b/test/integ/test_cursor_context_manager.py @@ -0,0 +1,36 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
+# + +from __future__ import annotations + +from logging import getLogger + + +def test_context_manager(conn_testaccount, db_parameters): + """Tests context Manager support in Cursor.""" + logger = getLogger(__name__) + + def tables(conn): + with conn.cursor() as cur: + cur.execute("show tables") + name_to_idx = {elem[0]: idx for idx, elem in enumerate(cur.description)} + for row in cur: + yield row[name_to_idx["name"]] + + try: + conn_testaccount.cursor().execute( + "create or replace table {} (a int)".format(db_parameters["name"]) + ) + all_tables = [ + rec + for rec in tables(conn_testaccount) + if rec == db_parameters["name"].upper() + ] + logger.info("tables: %s", all_tables) + assert len(all_tables) == 1, "number of tables" + finally: + conn_testaccount.cursor().execute( + "drop table if exists {}".format(db_parameters["name"]) + ) diff --git a/test/test_dataintegrity.py b/test/integ/test_dataintegrity.py similarity index 55% rename from test/test_dataintegrity.py rename to test/integ/test_dataintegrity.py index 4e790ed65..9a8d3bb1b 100644 --- a/test/test_dataintegrity.py +++ b/test/integ/test_dataintegrity.py @@ -1,22 +1,25 @@ #!/usr/bin/env python -O +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# -""" Script to test database capabilities and the DB-API interface - for functionality and data integrity for some of the basic data types. - - Adapted from a script taken from the MySQL python driver. +"""Script to test database capabilities and the DB-API interface. +It tests for functionality and data integrity for some of the basic data types. Adapted from a script +taken from the MySQL python driver. """ +from __future__ import annotations + import random import time from math import fabs -import pytest import pytz -from snowflake.connector.compat import PY2 -from snowflake.connector.dbapi import ( - DateFromTicks, TimestampFromTicks, TimeFromTicks) +from snowflake.connector.dbapi import DateFromTicks, TimeFromTicks, TimestampFromTicks + +from ..randomize import random_string def table_exists(conn_cnx, name): @@ -24,7 +27,7 @@ def table_exists(conn_cnx, name): with cnx.cursor() as cursor: try: cursor.execute("select * from %s where 1=0" % name) - except: + except Exception: cnx.rollback() return False else: @@ -32,61 +35,64 @@ def table_exists(conn_cnx, name): def create_table(conn_cnx, columndefs, partial_name): - table = '"dbabi_dibasic_{0}"'.format(partial_name) + table = f'"dbabi_dibasic_{partial_name}"' with conn_cnx() as cnx: cnx.cursor().execute( "CREATE OR REPLACE TABLE {table} ({columns})".format( - table=table, columns='\n'.join(columndefs))) + table=table, columns="\n".join(columndefs) + ) + ) return table def check_data_integrity(conn_cnx, columndefs, partial_name, generator): rows = random.randrange(10, 15) # floating_point_types = ('REAL','DOUBLE','DECIMAL') - floating_point_types = ('REAL', 'DOUBLE') + floating_point_types = ("REAL", "DOUBLE") table = create_table(conn_cnx, columndefs, partial_name) with conn_cnx() as cnx: with cnx.cursor() as cursor: # insert some data as specified by generator passed in - insert_statement = ( - 'INSERT INTO %s VALUES (%s)' % - (table, - ','.join(['%s'] * len(columndefs)))) - data = [[generator(i, j) for j in range(len(columndefs))] - for i in range(rows)] + insert_statement = "INSERT INTO {} VALUES ({})".format( + table, + ",".join(["%s"] * len(columndefs)), + ) + data = [ + [generator(i, j) for j in range(len(columndefs))] for i in range(rows) + ] cursor.executemany(insert_statement, data) cnx.commit() 
# verify 2 things: correct number of rows, correct values for # each row - cursor.execute('select * from {0} order by 1'.format(table)) + cursor.execute(f"select * from {table} order by 1") result_sequences = cursor.fetchall() results = [] for i in result_sequences: results.append(i) # verify the right number of rows were returned - assert len(results) == rows, ('fetchall did not return ' - 'expected number of rows') + assert len(results) == rows, ( + "fetchall did not return " "expected number of rows" + ) # verify the right values were returned # for numbers, allow a difference of .000001 - for i, (x, y) in enumerate(zip(results, sorted(data))): - if any(data_type in partial_name for data_type in - floating_point_types): - for i in range(rows): + for x, y in zip(results, sorted(data)): + if any(data_type in partial_name for data_type in floating_point_types): + for _ in range(rows): df = fabs(float(x[0]) - float(y[0])) if float(y[0]) != 0.0: df = df / float(y[0]) - assert df <= 0.00000001, \ - ("fetchall did not return correct values within " - "the expected range") + assert df <= 0.00000001, ( + "fetchall did not return correct values within " + "the expected range" + ) else: - assert list(x) == list(y), \ - "fetchall did not return correct values" + assert list(x) == list(y), "fetchall did not return correct values" - cursor.execute('drop table if exists {0}'.format(table)) + cursor.execute(f"drop table if exists {table}") def test_INT(conn_cnx): @@ -94,44 +100,45 @@ def test_INT(conn_cnx): def generator(row, col): return row * row - check_data_integrity(conn_cnx, ('col1 INT',), 'INT', generator) + check_data_integrity(conn_cnx, ("col1 INT",), "INT", generator) def test_DECIMAL(conn_cnx): # DECIMAL def generator(row, col): from decimal import Decimal + return Decimal("%d.%02d" % (row, col)) - check_data_integrity(conn_cnx, ('col1 DECIMAL(5,2)',), 'DECIMAL', generator) + check_data_integrity(conn_cnx, ("col1 DECIMAL(5,2)",), "DECIMAL", generator) def test_REAL(conn_cnx): def generator(row, col): return row * 1000.0 - check_data_integrity(conn_cnx, ('col1 REAL',), 'REAL', generator) + check_data_integrity(conn_cnx, ("col1 REAL",), "REAL", generator) def test_REAL2(conn_cnx): def generator(row, col): return row * 3.14 - check_data_integrity(conn_cnx, ('col1 REAL',), 'REAL', generator) + check_data_integrity(conn_cnx, ("col1 REAL",), "REAL", generator) def test_DOUBLE(conn_cnx): def generator(row, col): return row / 1e-99 - check_data_integrity(conn_cnx, ('col1 DOUBLE',), 'DOUBLE', generator) + check_data_integrity(conn_cnx, ("col1 DOUBLE",), "DOUBLE", generator) def test_FLOAT(conn_cnx): def generator(row, col): return row * 2.0 - check_data_integrity(conn_cnx, ('col1 FLOAT(67)',), 'FLOAT', generator) + check_data_integrity(conn_cnx, ("col1 FLOAT(67)",), "FLOAT", generator) def test_DATE(conn_cnx): @@ -140,47 +147,42 @@ def test_DATE(conn_cnx): def generator(row, col): return DateFromTicks(ticks + row * 86400 - col * 1313) - check_data_integrity(conn_cnx, ('col1 DATE',), 'DATE', generator) + check_data_integrity(conn_cnx, ("col1 DATE",), "DATE", generator) def test_STRING(conn_cnx): def generator(row, col): import string - rstr = ''.join( - [random.choice(string.ascii_letters + string.digits) for n in - range(1024)]) + + rstr = random_string(1024, choices=string.ascii_letters + string.digits) return rstr - check_data_integrity(conn_cnx, ('col2 STRING',), 'STRING', generator) + check_data_integrity(conn_cnx, ("col2 STRING",), "STRING", generator) def test_TEXT(conn_cnx): def generator(row, 
col): - rstr = ''.join([chr(i) for i in range(33, 127)] * 100); + rstr = "".join([chr(i) for i in range(33, 127)] * 100) return rstr - check_data_integrity(conn_cnx, ('col2 TEXT',), 'TEXT', generator) + check_data_integrity(conn_cnx, ("col2 TEXT",), "TEXT", generator) def test_VARCHAR(conn_cnx): def generator(row, col): import string - rstr = ''.join( - [random.choice(string.ascii_letters + string.digits) for n in - range(50)]) + + rstr = random_string(50, choices=string.ascii_letters + string.digits) return rstr - check_data_integrity(conn_cnx, ('col2 VARCHAR',), 'VARCHAR', generator) + check_data_integrity(conn_cnx, ("col2 VARCHAR",), "VARCHAR", generator) -@pytest.mark.skipif(PY2, reason=""" -Binary not supported in Python 2 connector. -""") def test_BINARY(conn_cnx): def generator(row, col): return bytes(random.getrandbits(8) for _ in range(50)) - check_data_integrity(conn_cnx, ('col1 BINARY',), 'BINARY', generator) + check_data_integrity(conn_cnx, ("col1 BINARY",), "BINARY", generator) def test_TIMESTAMPNTZ(conn_cnx): @@ -189,8 +191,7 @@ def test_TIMESTAMPNTZ(conn_cnx): def generator(row, col): return TimestampFromTicks(ticks + row * 86400 - col * 1313) - check_data_integrity(conn_cnx, ('col1 TIMESTAMPNTZ',), 'TIMESTAMPNTZ', - generator) + check_data_integrity(conn_cnx, ("col1 TIMESTAMPNTZ",), "TIMESTAMPNTZ", generator) def test_TIMESTAMPNTZ_EXPLICIT(conn_cnx): @@ -199,9 +200,12 @@ def test_TIMESTAMPNTZ_EXPLICIT(conn_cnx): def generator(row, col): return TimestampFromTicks(ticks + row * 86400 - col * 1313) - check_data_integrity(conn_cnx, ('col1 TIMESTAMP without time zone',), - 'TIMESTAMPNTZ_EXPLICIT', - generator) + check_data_integrity( + conn_cnx, + ("col1 TIMESTAMP without time zone",), + "TIMESTAMPNTZ_EXPLICIT", + generator, + ) # string that contains control characters (white spaces), etc. 
@@ -213,7 +217,7 @@ def generator(row, col): myzone = pytz.timezone("US/Pacific") ret = myzone.localize(ret) - check_data_integrity(conn_cnx, ('col1 TIMESTAMP',), 'DATETIME', generator) + check_data_integrity(conn_cnx, ("col1 TIMESTAMP",), "DATETIME", generator) def test_TIMESTAMP(conn_cnx): @@ -222,10 +226,9 @@ def test_TIMESTAMP(conn_cnx): def generator(row, col): ret = TimestampFromTicks(ticks + row * 86400 - col * 1313) myzone = pytz.timezone("US/Pacific") - return (myzone.localize(ret)) + return myzone.localize(ret) - check_data_integrity(conn_cnx, ('col1 TIMESTAMP_LTZ',), 'TIMESTAMP', - generator) + check_data_integrity(conn_cnx, ("col1 TIMESTAMP_LTZ",), "TIMESTAMP", generator) def test_TIMESTAMP_EXPLICIT(conn_cnx): @@ -234,10 +237,14 @@ def test_TIMESTAMP_EXPLICIT(conn_cnx): def generator(row, col): ret = TimestampFromTicks(ticks + row * 86400 - col * 1313) myzone = pytz.timezone("Australia/Sydney") - return (myzone.localize(ret)) + return myzone.localize(ret) - check_data_integrity(conn_cnx, ('col1 TIMESTAMP with local time zone',), - 'TIMESTAMP_EXPLICIT', generator) + check_data_integrity( + conn_cnx, + ("col1 TIMESTAMP with local time zone",), + "TIMESTAMP_EXPLICIT", + generator, + ) def test_TIMESTAMPTZ(conn_cnx): @@ -246,10 +253,9 @@ def test_TIMESTAMPTZ(conn_cnx): def generator(row, col): ret = TimestampFromTicks(ticks + row * 86400 - col * 1313) myzone = pytz.timezone("America/Vancouver") - return (myzone.localize(ret)) + return myzone.localize(ret) - check_data_integrity(conn_cnx, ('col1 TIMESTAMPTZ',), 'TIMESTAMPTZ', - generator) + check_data_integrity(conn_cnx, ("col1 TIMESTAMPTZ",), "TIMESTAMPTZ", generator) def test_TIMESTAMPTZ_EXPLICIT(conn_cnx): @@ -258,10 +264,11 @@ def test_TIMESTAMPTZ_EXPLICIT(conn_cnx): def generator(row, col): ret = TimestampFromTicks(ticks + row * 86400 - col * 1313) myzone = pytz.timezone("America/Vancouver") - return (myzone.localize(ret)) + return myzone.localize(ret) - check_data_integrity(conn_cnx, ('col1 TIMESTAMP with time zone',), - 'TIMESTAMPTZ_EXPLICIT', generator) + check_data_integrity( + conn_cnx, ("col1 TIMESTAMP with time zone",), "TIMESTAMPTZ_EXPLICIT", generator + ) def test_TIMESTAMPLTZ(conn_cnx): @@ -270,10 +277,9 @@ def test_TIMESTAMPLTZ(conn_cnx): def generator(row, col): ret = TimestampFromTicks(ticks + row * 86400 - col * 1313) myzone = pytz.timezone("America/New_York") - return (myzone.localize(ret)) + return myzone.localize(ret) - check_data_integrity(conn_cnx, ('col1 TIMESTAMPLTZ',), 'TIMESTAMPLTZ', - generator) + check_data_integrity(conn_cnx, ("col1 TIMESTAMPLTZ",), "TIMESTAMPLTZ", generator) def test_fractional_TIMESTAMP(conn_cnx): @@ -281,12 +287,14 @@ def test_fractional_TIMESTAMP(conn_cnx): def generator(row, col): ret = TimestampFromTicks( - ticks + row * 86400 - col * 1313 + row * 0.7 * col / 3.0) + ticks + row * 86400 - col * 1313 + row * 0.7 * col / 3.0 + ) myzone = pytz.timezone("Europe/Paris") - return (myzone.localize(ret)) + return myzone.localize(ret) - check_data_integrity(conn_cnx, ('col1 TIMESTAMP_LTZ',), - 'TIMESTAMP_fractional', generator) + check_data_integrity( + conn_cnx, ("col1 TIMESTAMP_LTZ",), "TIMESTAMP_fractional", generator + ) def test_TIME(conn_cnx): @@ -294,6 +302,6 @@ def test_TIME(conn_cnx): def generator(row, col): ret = TimeFromTicks(ticks + row * 86400 - col * 1313) - return (ret) + return ret - check_data_integrity(conn_cnx, ('col1 TIME',), 'TIME', generator) + check_data_integrity(conn_cnx, ("col1 TIME",), "TIME", generator) diff --git a/test/integ/test_daylight_savings.py 
b/test/integ/test_daylight_savings.py new file mode 100644 index 000000000..d3f8c3f04 --- /dev/null +++ b/test/integ/test_daylight_savings.py @@ -0,0 +1,61 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +from datetime import datetime + +import pytz + + +def _insert_timestamp(ctx, table, tz, dt): + myzone = pytz.timezone(tz) + ts = myzone.localize(dt, is_dst=True) + print("\n") + print(f"{repr(ts)}") + ctx.cursor().execute( + "INSERT INTO {table} VALUES(%s)".format( + table=table, + ), + (ts,), + ) + + result = ctx.cursor().execute(f"SELECT * FROM {table}").fetchone() + retrieved_ts = result[0] + print("#####") + print(f"Retrieved ts: {repr(retrieved_ts)}") + print(f"Retrieved and converted TS{repr(retrieved_ts.astimezone(myzone))}") + print("#####") + assert result[0] == ts + ctx.cursor().execute(f"DELETE FROM {table}") + + +def test_daylight_savings_in_TIMESTAMP_LTZ(conn_cnx, db_parameters): + with conn_cnx() as ctx: + ctx.cursor().execute( + "CREATE OR REPLACE TABLE {table} (c1 timestamp_ltz)".format( + table=db_parameters["name"], + ) + ) + try: + dt = datetime(year=2016, month=3, day=13, hour=18, minute=47, second=32) + _insert_timestamp(ctx, db_parameters["name"], "Australia/Sydney", dt) + dt = datetime(year=2016, month=3, day=13, hour=8, minute=39, second=23) + _insert_timestamp(ctx, db_parameters["name"], "Europe/Paris", dt) + dt = datetime(year=2016, month=3, day=13, hour=8, minute=39, second=23) + _insert_timestamp(ctx, db_parameters["name"], "UTC", dt) + + dt = datetime(year=2016, month=3, day=13, hour=1, minute=14, second=8) + _insert_timestamp(ctx, db_parameters["name"], "America/New_York", dt) + + dt = datetime(year=2016, month=3, day=12, hour=22, minute=32, second=4) + _insert_timestamp(ctx, db_parameters["name"], "US/Pacific", dt) + + finally: + ctx.cursor().execute( + "DROP TABLE IF EXISTS {table}".format( + table=db_parameters["name"], + ) + ) diff --git a/test/test_dbapi.py b/test/integ/test_dbapi.py similarity index 51% rename from test/test_dbapi.py rename to test/integ/test_dbapi.py index 12dfbe1d4..b8142cf06 100644 --- a/test/test_dbapi.py +++ b/test/integ/test_dbapi.py @@ -1,47 +1,42 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. # -""" -Script to test database capabilities and the DB-API interface - for functionality and data integrity. - - Adapted from a script by M-A Lemburg and taken from the MySQL python driver. +"""Script to test database capabilities and the DB-API interface for functionality and data integrity. +Adapted from a script by M-A Lemburg and taken from the MySQL python driver. 
""" -import sys +from __future__ import annotations + import time import pytest import snowflake.connector import snowflake.connector.dbapi -from snowflake.connector import dbapi -from snowflake.connector import errorcode -from snowflake.connector import errors +from snowflake.connector import dbapi, errorcode, errors from snowflake.connector.compat import BASE_EXCEPTION_CLASS -TABLE1 = 'dbapi_ddl1' -TABLE2 = 'dbapi_ddl2' +TABLE1 = "dbapi_ddl1" +TABLE2 = "dbapi_ddl2" def drop_dbapi_tables(conn_cnx): with conn_cnx() as cnx: with cnx.cursor() as cursor: for ddl in (TABLE1, TABLE2): - dropsql = 'drop table if exists {0}'.format(ddl) + dropsql = f"drop table if exists {ddl}" cursor.execute(dropsql) def executeDDL1(cursor): - cursor.execute('create or replace table {0} (name string)'.format(TABLE1)) + cursor.execute(f"create or replace table {TABLE1} (name string)") def executeDDL2(cursor): - cursor.execute('create or replace table {0} (name string)'.format(TABLE2)) + cursor.execute(f"create or replace table {TABLE2} (name string)") @pytest.fixture() @@ -56,35 +51,32 @@ def fin(): def _paraminsert(cur): executeDDL1(cur) - cur.execute("insert into {0} values ('string inserted into table')".format( - TABLE1 - )) + cur.execute(f"insert into {TABLE1} values ('string inserted into table')") assert cur.rowcount in (-1, 1) - cur.execute("insert into {0} values (%(dbapi_ddl2)s)".format(TABLE1), - {TABLE2: "Cooper's"}) + cur.execute(f"insert into {TABLE1} values (%(dbapi_ddl2)s)", {TABLE2: "Cooper's"}) assert cur.rowcount in (-1, 1) - cur.execute('select name from {0}'.format(TABLE1)) + cur.execute(f"select name from {TABLE1}") res = cur.fetchall() - assert len(res) == 2, 'cursor.fetchall returned too few rows' + assert len(res) == 2, "cursor.fetchall returned too few rows" dbapi_ddl2s = [res[0][0], res[1][0]] dbapi_ddl2s.sort() - assert dbapi_ddl2s[0] == "Cooper's", ( - 'cursor.fetchall retrieved incorrect data') - assert dbapi_ddl2s[1] == "string inserted into table", ( - 'cursor.fetchall retrieved incorrect data') + assert dbapi_ddl2s[0] == "Cooper's", "cursor.fetchall retrieved incorrect data" + assert ( + dbapi_ddl2s[1] == "string inserted into table" + ), "cursor.fetchall retrieved incorrect data" def test_connect(conn_cnx): - with conn_cnx() as cnx: + with conn_cnx(): pass def test_apilevel(): try: apilevel = snowflake.connector.apilevel - assert apilevel == '2.0', 'test_dbapi:test_apilevel' + assert apilevel == "2.0", "test_dbapi:test_apilevel" except AttributeError: raise Exception("test_apilevel: apilevel not defined") @@ -92,7 +84,7 @@ def test_apilevel(): def test_threadsafety(): try: threadsafety = snowflake.connector.threadsafety - assert threadsafety == 2, 'check value of threadsaftey is 2' + assert threadsafety == 2, "check value of threadsafety is 2" except errors.AttributeError: raise Exception("AttributeError: not defined in Snowflake.connector") @@ -100,19 +92,19 @@ def test_threadsafety(): def test_paramstyle(): try: paramstyle = snowflake.connector.paramstyle - assert paramstyle == 'pyformat' + assert paramstyle == "pyformat" except AttributeError: raise Exception("snowflake.connector.paramstyle not defined") def test_exceptions(): # required exceptions should be defined in a hierarchy - if sys.version_info[0] > 2: + try: + assert issubclass(errors._Warning, Exception) + except AttributeError: + # Compatibility for olddriver tests assert issubclass(errors.Warning, Exception) - assert issubclass(errors.Error, Exception) - else: - assert issubclass(errors.Warning, StandardError) - 
assert issubclass(errors.Error, StandardError) + assert issubclass(errors.Error, Exception) assert issubclass(errors.InterfaceError, errors.Error) assert issubclass(errors.DatabaseError, errors.Error) assert issubclass(errors.OperationalError, errors.Error) @@ -122,9 +114,13 @@ def test_exceptions(): assert issubclass(errors.NotSupportedError, errors.Error) -def test_ExceptionsAsConnectionAttributes(conn_cnx): +def test_exceptions_as_connection_attributes(conn_cnx): with conn_cnx() as con: - assert con.Warning == errors.Warning + try: + assert con.Warning == errors._Warning + except AttributeError: + # Compatibility for olddriver tests + assert con.Warning == errors.Warning assert con.Error == errors.Error assert con.InterfaceError == errors.InterfaceError assert con.DatabaseError == errors.DatabaseError @@ -137,12 +133,12 @@ def test_ExceptionsAsConnectionAttributes(conn_cnx): def test_commit(db_parameters): con = snowflake.connector.connect( - account=db_parameters['account'], - user=db_parameters['user'], - password=db_parameters['password'], - host=db_parameters['host'], - port=db_parameters['port'], - protocol=db_parameters['protocol'], + account=db_parameters["account"], + user=db_parameters["user"], + password=db_parameters["password"], + host=db_parameters["host"], + port=db_parameters["port"], + protocol=db_parameters["protocol"], ) try: # Commit must work, even if it doesn't do anything @@ -154,18 +150,22 @@ def test_rollback(conn_cnx, db_parameters): with conn_cnx() as cnx: cur = cnx.cursor() - cur.execute('create or replace table {0} (a int)'.format( - db_parameters['name'])) + cur.execute("create or replace table {} (a int)".format(db_parameters["name"])) cnx.cursor().execute("begin") - cur.execute(""" -insert into {0} (select seq8() seq + cur.execute( + """ +insert into {} (select seq8() seq from table(generator(rowCount => 10)) v) -""".format(db_parameters['name'])) +""".format( + db_parameters["name"] + ) + ) cnx.rollback() dbapi_rollback = cur.execute( - "select count(*) from {0}".format(db_parameters['name'])).fetchone() - assert dbapi_rollback[0] == 0, 'transaction not rolled back' - cur.execute('drop table {0}'.format(db_parameters['name'])) + "select count(*) from {}".format(db_parameters["name"]) + ).fetchone() + assert dbapi_rollback[0] == 0, "transaction not rolled back" + cur.execute("drop table {}".format(db_parameters["name"])) cur.close() @@ -183,38 +183,36 @@ def test_cursor_isolation(conn_local): cur1 = con.cursor() cur2 = con.cursor() executeDDL1(cur1) - cur1.execute( - "insert into {0} values ('string inserted into table')".format( - TABLE1 - )) - cur2.execute("select name from {0}".format(TABLE1)) + cur1.execute(f"insert into {TABLE1} values ('string inserted into table')") + cur2.execute(f"select name from {TABLE1}") dbapi_ddl1 = cur2.fetchall() assert len(dbapi_ddl1) == 1 assert len(dbapi_ddl1[0]) == 1 - assert dbapi_ddl1[0][0], 'string inserted into table' + assert dbapi_ddl1[0][0] == "string inserted into table" def test_description(conn_local): with conn_local() as con: cur = con.cursor() assert cur.description is None, ( - 'cursor.description should be none if there has not been any ' - 'statements executed') + "cursor.description should be None if no statements " + "have been executed" + ) executeDDL1(cur) - assert cur.description[0][0].lower() == 'status', ( - 'cursor.description returns status of insert' - ) - cur.execute('select name from %s' % TABLE1) - assert len(cur.description) == 1, ( - 
'cursor.description describes too many columns' - ) - assert len(cur.description[0]) == 7, ( - 'cursor.description[x] tuples must have 7 elements' - ) - assert cur.description[0][0].lower() == 'name', ( - 'cursor.description[x][0] must return column name' - ) + assert ( + cur.description[0][0].lower() == "status" + ), "cursor.description returns status of insert" + cur.execute("select name from %s" % TABLE1) + assert ( + len(cur.description) == 1 + ), "cursor.description describes too many columns" + assert ( + len(cur.description[0]) == 7 + ), "cursor.description[x] tuples must have 7 elements" + assert ( + cur.description[0][0].lower() == "name" + ), "cursor.description[x][0] must return column name" # No, the column type is a numeric value # assert cur.description[0][1] == dbapi.STRING, ( @@ -224,34 +222,32 @@ def test_description(conn_local): # Make sure self.description gets reset executeDDL2(cur) - assert len(cur.description) == 1, ( - 'cursor.description is not reset') + assert len(cur.description) == 1, "cursor.description is not reset" def test_rowcount(conn_local): with conn_local() as con: cur = con.cursor() assert cur.rowcount is None, ( - 'cursor.rowcount not set to None when no statement have not be ' - 'executed yet' + "cursor.rowcount not set to None when no statement has been " + "executed yet" ) executeDDL1(cur) - cur.execute(("insert into %s values " - "('string inserted into table')") % TABLE1) - cur.execute("select name from %s" % TABLE1) - assert cur.rowcount == 1, ( - 'cursor.rowcount should the number of rows returned' + cur.execute( + ("insert into %s values " "('string inserted into table')") % TABLE1 ) + cur.execute("select name from %s" % TABLE1) + assert cur.rowcount == 1, "cursor.rowcount should be the number of rows returned" def test_close(db_parameters): con = snowflake.connector.connect( - account=db_parameters['account'], - user=db_parameters['user'], - password=db_parameters['password'], - host=db_parameters['host'], - port=db_parameters['port'], - protocol=db_parameters['protocol'], + account=db_parameters["account"], + user=db_parameters["user"], + password=db_parameters["password"], + host=db_parameters["host"], + port=db_parameters["port"], + protocol=db_parameters["protocol"], ) try: cur = con.cursor() @@ -263,7 +259,6 @@ def test_close(db_parameters): # closed. 
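Aside: the disabled checks in this part of `test_close` can be expressed compactly with modern pytest. This is a sketch only, not part of the test file; it reuses `BASE_EXCEPTION_CLASS` and `errorcode` from the module's imports, and `con`/`cur` are the closed connection and cursor from the test.

```python
import pytest

# Once the behavior tracked by SNOW-13645 is implemented, the commented-out
# cursor.close() check could be written like the execute() check further down:
with pytest.raises(BASE_EXCEPTION_CLASS) as err:
    cur.close()
assert err.value.errno == errorcode.ER_CURSOR_IS_CLOSED
```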
# assert calling(con.commit()),raises(errors.Error,'con.commit')) - # disabling due to SNOW-13645 # cursor.close() should raise an Error if called after connection closed # try: @@ -275,22 +270,21 @@ def test_close(db_parameters): # assert error.errno,equal_to( # errorcode.ER_CURSOR_IS_CLOSED),'cursor.close() called twice in a row') - # calling cursor.execute after connection is closed should raise an error try: - cur.execute( - 'create or replace table {0} (name string)'.format( - TABLE1)) + cur.execute(f"create or replace table {TABLE1} (name string)") except BASE_EXCEPTION_CLASS as error: - assert error.errno == errorcode.ER_CURSOR_IS_CLOSED, ( - 'cursor.execute() called twice in a row') + assert ( + error.errno == errorcode.ER_CURSOR_IS_CLOSED + ), "cursor.execute() called twice in a row" # try to create a cursor on a closed connection try: - cursor = con.cursor() + con.cursor() except BASE_EXCEPTION_CLASS as error: - assert error.errno == errorcode.ER_CONNECTION_IS_CLOSED, ( - 'tried to create a cursor on a closed cursor') + assert ( + error.errno == errorcode.ER_CONNECTION_IS_CLOSED + ), "tried to create a cursor on a closed cursor" def test_execute(conn_local): @@ -303,31 +297,20 @@ def test_executemany(conn_local): with conn_local() as con: cur = con.cursor() executeDDL1(cur) - largs = [("Cooper's",), ("Boag's",)] - margs = [{'dbapi_ddl2': "Cooper's"}, - {'dbapi_ddl2': "Boag's"}] - - cur.executemany( - 'insert into %s values (%%(dbapi_ddl2)s)' % ( - TABLE1 - ), - margs - ) + margs = [{"dbapi_ddl2": "Cooper's"}, {"dbapi_ddl2": "Boag's"}] + + cur.executemany("insert into %s values (%%(dbapi_ddl2)s)" % (TABLE1), margs) assert cur.rowcount == 2, ( - 'insert using cursor.executemany set cursor.rowcount to ' - 'incorrect value %r' % cur.rowcount + "insert using cursor.executemany set cursor.rowcount to " + "incorrect value %r" % cur.rowcount ) - cur.execute('select name from %s' % TABLE1) + cur.execute("select name from %s" % TABLE1) res = cur.fetchall() - assert len(res) == 2, ( - 'cursor.fetchall retrieved incorrect number of rows' - ) + assert len(res) == 2, "cursor.fetchall retrieved incorrect number of rows" dbapi_ddl2s = [res[0][0], res[1][0]] dbapi_ddl2s.sort() - assert dbapi_ddl2s[0] == "Boag's", ( - 'incorrect data retrieved') - assert dbapi_ddl2s[1] == "Cooper's", ( - 'incorrect data retrieved') + assert dbapi_ddl2s[0] == "Boag's", "incorrect data retrieved" + assert dbapi_ddl2s[1] == "Cooper's", "incorrect data retrieved" def test_fetchone(conn_local): @@ -340,43 +323,35 @@ def test_fetchone(conn_local): # ) executeDDL1(cur) - cur.execute('select name from %s' % TABLE1) + cur.execute("select name from %s" % TABLE1) # assert calling( # cur.fetchone()), is_(None), # 'cursor.fetchone should return None if a query does not return any rows') # assert cur.rowcount==-1)) - cur.execute( - "insert into %s values ('Row 1'),('Row 2')" % TABLE1) - cur.execute( - 'select name from %s order by 1' % TABLE1) + cur.execute("insert into %s values ('Row 1'),('Row 2')" % TABLE1) + cur.execute("select name from %s order by 1" % TABLE1) r = cur.fetchone() - assert len(r) == 1, ( - 'cursor.fetchone should have returned 1 row') - assert r[0] == 'Row 1', ( - 'cursor.fetchone returned incorrect data') - assert cur.rowcount == 2, ( - 'curosr.rowcount should be 2') + assert len(r) == 1, "cursor.fetchone should have returned 1 row" + assert r[0] == "Row 1", "cursor.fetchone returned incorrect data" + assert cur.rowcount == 2, "cursor.rowcount should be 2" SAMPLES = [ - 'Carlton Cold', - 'Carlton Draft', - 
'Mountain Goat', - 'Redback', - 'String inserted into table', - 'XXXX' + "Carlton Cold", + "Carlton Draft", + "Mountain Goat", + "Redback", + "String inserted into table", + "XXXX", ] def _populate(): - ''' Return a list of sql commands to setup the DB for the fetch - tests. - ''' + """Returns a list of sql commands to set up the DB for the fetch tests.""" populate = [ # NOTE NO GOOD using format to bind data - "insert into {0} values ('{1}')".format( - TABLE1, s) + f"insert into {TABLE1} values ('{s}')" for s in SAMPLES ] return populate @@ -394,39 +369,35 @@ def test_fetchmany(conn_local): for sql in _populate(): cur.execute(sql) - cur.execute('select name from %s' % TABLE1) + cur.execute("select name from %s" % TABLE1) cur.arraysize = 1 r = cur.fetchmany() assert len(r) == 1, ( - 'cursor.fetchmany retrieved incorrect number of rows, ' - 'should get 1 rows, received %s' % len(r) + "cursor.fetchmany retrieved incorrect number of rows, " + "should get 1 row, received %s" % len(r) ) cur.arraysize = 10 r = cur.fetchmany(3) # Should get 3 rows assert len(r) == 3, ( - 'cursor.fetchmany retrieved incorrect number of rows, ' - 'should get 3 rows, received %s' % len(r) + "cursor.fetchmany retrieved incorrect number of rows, " + "should get 3 rows, received %s" % len(r) ) r = cur.fetchmany(4) # Should get 2 more assert len(r) == 2, ( - 'cursor.fetchmany retrieved incorrect number of rows, ' - 'should get 2 more.' + "cursor.fetchmany retrieved incorrect number of rows, " "should get 2 more." ) - r = cur.fetchmany( - 4) # Should be an empty sequence + r = cur.fetchmany(4) # Should be an empty sequence assert len(r) == 0, ( - 'cursor.fetchmany should return an empty sequence after ' - 'results are exhausted' + "cursor.fetchmany should return an empty sequence after " + "results are exhausted" ) assert cur.rowcount in (-1, 6) # Same as above, using cursor.arraysize cur.arraysize = 4 - cur.execute('select name from %s' % TABLE1) + cur.execute("select name from %s" % TABLE1) r = cur.fetchmany() # Should get 4 rows - assert len(r) == 4, ( - 'cursor.arraysize not being honoured by fetchmany' - ) + assert len(r) == 4, "cursor.arraysize not being honoured by fetchmany" r = cur.fetchmany() # Should get 2 more assert len(r) == 2 r = cur.fetchmany() # Should be an empty sequence @@ -434,34 +405,31 @@ def test_fetchmany(conn_local): assert cur.rowcount in (-1, 6) cur.arraysize = 6 - cur.execute( - 'select name from %s order by 1' % TABLE1) + cur.execute("select name from %s order by 1" % TABLE1) rows = cur.fetchmany() # Should get all rows assert cur.rowcount in (-1, 6) assert len(rows) == 6 assert len(rows) == 6 - rows = [r[0] for r in rows] + rows = [row[0] for row in rows] rows.sort() # Make sure we get the right data back out for i in range(0, 6): - assert rows[i] == SAMPLES[i], ( - 'incorrect data retrieved by cursor.fetchmany' - ) + assert rows[i] == SAMPLES[i], "incorrect data retrieved by cursor.fetchmany" rows = cur.fetchmany() # Should return an empty list assert len(rows) == 0, ( - 'cursor.fetchmany should return an empty sequence if ' - 'called after the whole result set has been fetched' + "cursor.fetchmany should return an empty sequence if " + "called after the whole result set has been fetched" ) assert cur.rowcount in (-1, 6) executeDDL2(cur) - cur.execute('select name from %s' % TABLE2) + cur.execute("select name from %s" % TABLE2) r = cur.fetchmany() # Should get empty sequence assert len(r) == 0, ( - 'cursor.fetchmany should return an empty sequence if ' - 'query retrieved no rows' + 
"cursor.fetchmany should return an empty sequence if " + "query retrieved no rows" ) assert cur.rowcount in (-1, 0) @@ -480,35 +448,28 @@ def test_fetchall(conn_local): # 'after executing a a statement that does not return rows' # ) - cur.execute( - 'select name from {0}'.format( - TABLE1)) + cur.execute(f"select name from {TABLE1}") rows = cur.fetchall() assert cur.rowcount in (-1, len(SAMPLES)) - assert len(rows) == len(SAMPLES), ( - 'cursor.fetchall did not retrieve all rows' - ) + assert len(rows) == len(SAMPLES), "cursor.fetchall did not retrieve all rows" rows = [r[0] for r in rows] rows.sort() for i in range(0, len(SAMPLES)): - assert rows[i] == SAMPLES[i], ( - 'cursor.fetchall retrieved incorrect rows' - ) + assert rows[i] == SAMPLES[i], "cursor.fetchall retrieved incorrect rows" rows = cur.fetchall() assert len(rows) == 0, ( - 'cursor.fetchall should return an empty list if called ' - 'after the whole result set has been fetched' + "cursor.fetchall should return an empty list if called " + "after the whole result set has been fetched" ) assert cur.rowcount in (-1, len(SAMPLES)) executeDDL2(cur) - cur.execute( - 'select name from %s' % TABLE2) + cur.execute("select name from %s" % TABLE2) rows = cur.fetchall() - assert cur.rowcount == 0, 'executed but no row was returned' + assert cur.rowcount == 0, "executed but no row was returned" assert len(rows) == 0, ( - 'cursor.fetchall should return an empty list if ' - 'a select query returns no rows' + "cursor.fetchall should return an empty list if " + "a select query returns no rows" ) @@ -519,84 +480,68 @@ def test_mixedfetch(conn_local): for sql in _populate(): cur.execute(sql) - cur.execute( - 'select name from %s' % TABLE1) + cur.execute("select name from %s" % TABLE1) rows1 = cur.fetchone() rows23 = cur.fetchmany(2) rows4 = cur.fetchone() rows56 = cur.fetchall() assert cur.rowcount in (-1, 6) - assert len(rows23) == 2, ( - 'fetchmany returned incorrect number of rows' - ) - assert len(rows56) == 2, ( - 'fetchall returned incorrect number of rows' - ) + assert len(rows23) == 2, "fetchmany returned incorrect number of rows" + assert len(rows56) == 2, "fetchall returned incorrect number of rows" rows = [rows1[0]] - rows.extend( - [rows23[0][0], rows23[1][0]]) + rows.extend([rows23[0][0], rows23[1][0]]) rows.append(rows4[0]) - rows.extend( - [rows56[0][0], rows56[1][0]]) + rows.extend([rows56[0][0], rows56[1][0]]) rows.sort() for i in range(0, len(SAMPLES)): - assert rows[i] == SAMPLES[i], ( - 'incorrect data returned' - ) + assert rows[i] == SAMPLES[i], "incorrect data returned" def test_arraysize(conn_cnx): with conn_cnx() as con: cur = con.cursor() - assert hasattr(cur, 'arraysize'), ( - 'cursor.arraysize must be defined' - ) + assert hasattr(cur, "arraysize"), "cursor.arraysize must be defined" -def test_setinputsizes( - conn_local): +def test_setinputsizes(conn_local): with conn_local() as con: cur = con.cursor() cur.setinputsizes((25,)) - _paraminsert( - cur) # Make sure cursor still works + _paraminsert(cur) # Make sure cursor still works -def test_setoutputsize_basic( - conn_local): +def test_setoutputsize_basic(conn_local): # Basic test is to make sure setoutputsize doesn't blow up with conn_local() as con: cur = con.cursor() cur.setoutputsize(1000) - cur.setoutputsize(2000, - 0) - _paraminsert( - cur) # Make sure the cursor still works + cur.setoutputsize(2000, 0) + _paraminsert(cur) # Make sure the cursor still works def test_description2(conn_local): try: with conn_local() as con: + # ENABLE_FIX_67159 changes the column 
size to the actual size. By default it is disabled at the moment. + expected_column_size = ( + 26 if not con.account.startswith("sfctest0") else 16777216 + ) cur = con.cursor() executeDDL1(cur) - assert len( - cur.description) == 1, ( - 'length cursor.description should be 1 after executing an insert' - ) - cur.execute( - 'select name from %s' % TABLE1) - assert len( - cur.description) == 1, ( - 'cursor.description returns too many columns' - ) - assert len( - cur.description[0]) == 7, ( - 'cursor.description[x] tuples must have 7 elements' - ) - assert cur.description[0][0].lower() == 'name', ( - 'cursor.description[x][0] must return column name' - ) + assert ( + len(cur.description) == 1 + ), "length cursor.description should be 1 after executing an insert" + cur.execute("select name from %s" % TABLE1) + assert ( + len(cur.description) == 1 + ), "cursor.description returns too many columns" + assert ( + len(cur.description[0]) == 7 + ), "cursor.description[x] tuples must have 7 elements" + assert ( + cur.description[0][0].lower() == "name" + ), "cursor.description[x][0] must return column name" # Make sure self.description gets reset executeDDL2(cur) @@ -605,49 +550,52 @@ def test_description2(conn_local): # description fields: name | type_code | display_size | internal_size | precision | scale | null_ok # name and type_code are mandatory, the other five are optional and are set to None if no meaningful values can be provided. expected = [ - ('COL0', 0, None, None, 38, 0, True), + ("COL0", 0, None, None, 38, 0, True), # number (FIXED) - ('COL1', 0, None, None, 9, 4, False), + ("COL1", 0, None, None, 9, 4, False), # decimal - ('COL2', 2, None, 16777216, None, None, False), + ("COL2", 2, None, expected_column_size, None, None, False), # string - ('COL3', 3, None, None, None, None, True), + ("COL3", 3, None, None, None, None, True), # date - ('COL4', 6, None, None, 0, 9, True), + ("COL4", 6, None, None, 0, 9, True), # timestamp - ('COL5', 5, None, None, None, None, True), + ("COL5", 5, None, None, None, None, True), # variant - ('COL6', 6, None, None, 0, 9, True), + ("COL6", 6, None, None, 0, 9, True), # timestamp_ltz - ('COL7', 7, None, None, 0, 9, True), + ("COL7", 7, None, None, 0, 9, True), # timestamp_tz - ('COL8', 8, None, None, 0, 9, True), + ("COL8", 8, None, None, 0, 9, True), # timestamp_ntz - ('COL9', 9, None, None, None, None, True), + ("COL9", 9, None, None, None, None, True), # object - ('COL10', 10, None, None, - None, None, True), + ("COL10", 10, None, None, None, None, True), # array # ('col11', 11, ... # binary - ('COL12', 12, None, None, 0, 9, True) + ("COL12", 12, None, None, 0, 9, True) # time # ('col13', 13, ... 
# boolean ] with conn_local() as cnx: cursor = cnx.cursor() - cursor.execute(""" + cursor.execute( + """ alter session set timestamp_input_format = 'YYYY-MM-DD HH24:MI:SS TZH:TZM' -""") - cursor.execute(""" +""" + ) + cursor.execute( + """ create or replace table test_description ( col0 number, col1 decimal(9,4) not null, col2 string not null default 'place-holder', col3 date, col4 timestamp_ltz, col5 variant, col6 timestamp_ltz, col7 timestamp_tz, col8 timestamp_ntz, col9 object, col10 array, col12 time) """ # col11 binary, col12 time - ) - cursor.execute(""" + ) + cursor.execute( + """ insert into test_description select column1, column2, column3, column4, column5, parse_json(column6), column7, column8, column9, parse_xml(column10), parse_json(column11), column12 from VALUES @@ -657,19 +605,16 @@ def test_description2(conn_local): '2015-06-03 12:00:03 +03:00', ' JulietteRomeo', '["xx", "yy", "zz", null, 1]', '12:34:56') -""") - cursor.execute( - "select * from test_description") +""" + ) + cursor.execute("select * from test_description") cursor.fetchone() - assert cursor.description == expected, ( - "cursor.description is incorrect") + assert cursor.description == expected, "cursor.description is incorrect" finally: with conn_local() as con: with con.cursor() as cursor: - cursor.execute( - 'drop table if exists test_description') - cursor.execute( - 'alter session set timestamp_input_format = default') + cursor.execute("drop table if exists test_description") + cursor.execute("alter session set timestamp_input_format = default") def test_closecursor(conn_cnx): @@ -686,174 +631,107 @@ def test_None(conn_local): with conn_local() as con: cur = con.cursor() executeDDL1(cur) - cur.execute( - 'insert into %s values (NULL)' % TABLE1) - cur.execute( - 'select name from %s' % TABLE1) + cur.execute("insert into %s values (NULL)" % TABLE1) + cur.execute("select name from %s" % TABLE1) r = cur.fetchall() assert len(r) == 1 assert len(r[0]) == 1 - assert r[0][0] is None, 'NULL value not returned as None' + assert r[0][0] is None, "NULL value not returned as None" def test_Date(): - d1 = snowflake.connector.dbapi.Date( - 2002, 12, 25) + d1 = snowflake.connector.dbapi.Date(2002, 12, 25) d2 = snowflake.connector.dbapi.DateFromTicks( - time.mktime(( - 2002, - 12, - 25, - 0, - 0, - 0, - 0, - 0, - 0))) + time.mktime((2002, 12, 25, 0, 0, 0, 0, 0, 0)) + ) # API doesn't specify, but it seems to be implied assert str(d1) == str(d2) def test_Time(): - t1 = snowflake.connector.dbapi.Time( - 13, 45, 30) + t1 = snowflake.connector.dbapi.Time(13, 45, 30) t2 = snowflake.connector.dbapi.TimeFromTicks( - time.mktime( - ( - 2001, 1, - 1, 13, - 45, 30, - 0, 0, - 0))) + time.mktime((2001, 1, 1, 13, 45, 30, 0, 0, 0)) + ) # API doesn't specify, but it seems to be implied assert str(t1) == str(t2) def test_Timestamp(): - t1 = snowflake.connector.dbapi.Timestamp( - 2002, - 12, - 25, 13, - 45, - 30) + t1 = snowflake.connector.dbapi.Timestamp(2002, 12, 25, 13, 45, 30) t2 = snowflake.connector.dbapi.TimestampFromTicks( - time.mktime( - ( - 2002, - 12, - 25, - 13, - 45, - 30, - 0, - 0, - 0)) + time.mktime((2002, 12, 25, 13, 45, 30, 0, 0, 0)) ) # API doesn't specify, but it seems to be implied assert str(t1) == str(t2) def test_STRING(): - assert hasattr(dbapi, 'STRING'), ( - 'dbapi.STRING must be defined' - ) + assert hasattr(dbapi, "STRING"), "dbapi.STRING must be defined" def test_BINARY(): - assert hasattr( - dbapi, - 'BINARY'), ( - 'dbapi.BINARY must be defined.' 
- ) + assert hasattr(dbapi, "BINARY"), "dbapi.BINARY must be defined." def test_NUMBER(): - assert hasattr( - dbapi, - 'NUMBER'), ( - 'dbapi.NUMBER must be defined.' - ) + assert hasattr(dbapi, "NUMBER"), "dbapi.NUMBER must be defined." def test_DATETIME(): - assert hasattr( - dbapi, - 'DATETIME'), ( - 'dbapi.DATETIME must be defined.' - ) + assert hasattr(dbapi, "DATETIME"), "dbapi.DATETIME must be defined." def test_ROWID(): - assert hasattr( - dbapi, - 'ROWID'), ( - 'dbapi.ROWID must be defined.' - ) + assert hasattr(dbapi, "ROWID"), "dbapi.ROWID must be defined." -def test_substring( - conn_local): +def test_substring(conn_local): with conn_local() as con: cur = con.cursor() - executeDDL1( - cur) - args = { - 'dbapi_ddl2': '"" \"\'\",\\"\\"\"\'\"'} - cur.execute( - 'insert into %s values (%%(dbapi_ddl2)s)' % TABLE1, - args) - cur.execute( - 'select name from %s' % TABLE1) + executeDDL1(cur) + args = {"dbapi_ddl2": '"" "\'",\\"\\""\'"'} + cur.execute("insert into %s values (%%(dbapi_ddl2)s)" % TABLE1, args) + cur.execute("select name from %s" % TABLE1) res = cur.fetchall() - dbapi_ddl2 = \ - res[ - 0][ - 0] - assert dbapi_ddl2 == args['dbapi_ddl2'], ( - 'incorrect data retrieved, got %s, should be %s' % ( - dbapi_ddl2, - args['dbapi_ddl2'])) - - -def test_escape( - conn_local): + dbapi_ddl2 = res[0][0] + assert ( + dbapi_ddl2 == args["dbapi_ddl2"] + ), "incorrect data retrieved, got {}, should be {}".format( + dbapi_ddl2, args["dbapi_ddl2"] + ) + + +def test_escape(conn_local): teststrings = [ - 'abc\ndef', - 'abc\\ndef', - 'abc\\\ndef', - 'abc\\\\ndef', - 'abc\\\\\ndef', + "abc\ndef", + "abc\\ndef", + "abc\\\ndef", + "abc\\\\ndef", + "abc\\\\\ndef", + 'abc"def', + 'abc""def', + "abc'def", + "abc''def", 'abc"def', 'abc""def', - 'abc\'def', - 'abc\'\'def', - "abc\"def", - "abc\"\"def", "abc'def", "abc''def", "abc\tdef", "abc\\tdef", "abc\\\tdef", - "\\x" + "\\x", ] with conn_local() as con: cur = con.cursor() - executeDDL1( - cur) + executeDDL1(cur) for i in teststrings: - args = { - 'dbapi_ddl2': i} - cur.execute( - 'insert into %s values (%%(dbapi_ddl2)s)' % TABLE1, - args) - cur.execute( - 'select * from %s' % TABLE1) + args = {"dbapi_ddl2": i} + cur.execute("insert into %s values (%%(dbapi_ddl2)s)" % TABLE1, args) + cur.execute("select * from %s" % TABLE1) row = cur.fetchone() - cur.execute( - 'delete from %s where name=%%s' % TABLE1, - i) - assert i == row[ - 0], ( - 'newline not properly converted, got %s, should be %s' % ( - row[0], i)) + cur.execute("delete from %s where name=%%s" % TABLE1, i) + assert ( + i == row[0] + ), f"newline not properly converted, got {row[0]}, should be {i}" diff --git a/test/integ/test_errors.py b/test/integ/test_errors.py new file mode 100644 index 000000000..aefd5d3f3 --- /dev/null +++ b/test/integ/test_errors.py @@ -0,0 +1,52 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
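Aside on the escaping tests that close out test_dbapi.py: with `paramstyle = 'pyformat'`, data travels through bind parameters and the driver handles quoting, which is why the tricky strings round-trip unchanged. A minimal sketch (hypothetical `cur` and `demo_tbl`; `%(v)s` is the standard pyformat placeholder, and `%%` would escape a literal percent in the SQL template):

```python
tricky = "abc\\ndef'\"\t"
cur.execute("insert into demo_tbl values (%(v)s)", {"v": tricky})
cur.execute("select * from demo_tbl")
assert cur.fetchone()[0] == tricky  # round-trips without manual escaping
```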
+# +from __future__ import annotations + +import traceback + +import pytest + +import snowflake.connector +from snowflake.connector import errors +from snowflake.connector.telemetry import TelemetryField + + +def test_error_classes(conn_cnx): + """Error classes are accessible from both the connector module and connection objects.""" + # class + assert snowflake.connector.ProgrammingError == errors.ProgrammingError + assert snowflake.connector.OperationalError == errors.OperationalError + + # object + with conn_cnx() as ctx: + assert ctx.ProgrammingError == errors.ProgrammingError + + +def test_error_code(conn_cnx): + """Error code is included in the exception.""" + syntax_errno = 1003 + syntax_sqlstate = "42000" + with conn_cnx() as ctx: + with pytest.raises(errors.ProgrammingError) as e: + ctx.cursor().execute("SELECT * FROOOM TEST") + assert e.value.errno == syntax_errno, "Syntax error code" + assert e.value.sqlstate == syntax_sqlstate, "Syntax SQL state" + e.match(rf"^{syntax_errno:06d} \({syntax_sqlstate}\): ") + + +@pytest.mark.skipolddriver +def test_error_telemetry(conn_cnx): + with conn_cnx() as ctx: + with pytest.raises(errors.ProgrammingError) as e: + ctx.cursor().execute("SELECT * FROOOM TEST") + telemetry_stacktrace = e.value.telemetry_traceback + assert "SELECT * FROOOM TEST" not in telemetry_stacktrace + for frame in traceback.extract_tb(e.value.__traceback__): + assert frame.line not in telemetry_stacktrace + telemetry_data = e.value.generate_telemetry_exception_data() + assert ( + "Failed to detect Syntax error" + not in telemetry_data[TelemetryField.KEY_REASON.value] + ) diff --git a/test/integ/test_execute_multi_statements.py b/test/integ/test_execute_multi_statements.py new file mode 100644 index 000000000..e883e4055 --- /dev/null +++ b/test/integ/test_execute_multi_statements.py @@ -0,0 +1,267 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved.
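Aside: the regex in test_error_code above encodes the connector's message convention, a zero-padded six-digit errno, the SQLSTATE in parentheses, then the message text. Worked out for the syntax-error constants the test uses (this shows the shape, not a captured server message):

```python
errno, sqlstate = 1003, "42000"
prefix = f"{errno:06d} ({sqlstate}): "
assert prefix == "001003 (42000): "
```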
+# + +from __future__ import annotations + +import codecs +import os +from io import BytesIO, StringIO +from unittest.mock import patch + +import pytest + +from snowflake.connector import DictCursor, ProgrammingError + +THIS_DIR = os.path.dirname(os.path.realpath(__file__)) + + +def test_execute_string(conn_cnx, db_parameters): + with conn_cnx() as cnx: + cnx.execute_string( + """ +CREATE OR REPLACE TABLE {tbl1} (c1 int, c2 string); +CREATE OR REPLACE TABLE {tbl2} (c1 int, c2 string); +INSERT INTO {tbl1} VALUES(1,'test123'); +INSERT INTO {tbl1} VALUES(2,'test234'); +INSERT INTO {tbl1} VALUES(3,'test345'); +INSERT INTO {tbl2} VALUES(101,'test123'); +INSERT INTO {tbl2} VALUES(102,'test234'); +INSERT INTO {tbl2} VALUES(103,'test345'); +""".format( + tbl1=db_parameters["name"] + "1", tbl2=db_parameters["name"] + "2" + ), + return_cursors=False, + ) + try: + with conn_cnx() as cnx: + ret = ( + cnx.cursor() + .execute( + """ +SELECT * FROM {tbl1} ORDER BY 1 +""".format( + tbl1=db_parameters["name"] + "1" + ) + ) + .fetchall() + ) + assert ret[0][0] == 1 + assert ret[2][1] == "test345" + ret = ( + cnx.cursor() + .execute( + """ +SELECT * FROM {tbl2} ORDER BY 2 +""".format( + tbl2=db_parameters["name"] + "2" + ) + ) + .fetchall() + ) + assert ret[0][0] == 101 + assert ret[2][1] == "test345" + + curs = cnx.execute_string( + """ +SELECT * FROM {tbl1} ORDER BY 1 DESC; +SELECT * FROM {tbl2} ORDER BY 1 DESC; +""".format( + tbl1=db_parameters["name"] + "1", tbl2=db_parameters["name"] + "2" + ) + ) + assert curs[0].rowcount == 3 + assert curs[1].rowcount == 3 + ret1 = curs[0].fetchone() + assert ret1[0] == 3 + ret2 = curs[1].fetchone() + assert ret2[0] == 103 + finally: + with conn_cnx() as cnx: + cnx.execute_string( + """ + DROP TABLE IF EXISTS {tbl1}; + DROP TABLE IF EXISTS {tbl2}; + """.format( + tbl1=db_parameters["name"] + "1", tbl2=db_parameters["name"] + "2" + ), + return_cursors=False, + ) + + +@pytest.mark.skipolddriver +def test_execute_string_dict_cursor(conn_cnx, db_parameters): + with conn_cnx() as cnx: + cnx.execute_string( + """ +CREATE OR REPLACE TABLE {tbl1} (C1 int, C2 string); +CREATE OR REPLACE TABLE {tbl2} (C1 int, C2 string); +INSERT INTO {tbl1} VALUES(1,'test123'); +INSERT INTO {tbl1} VALUES(2,'test234'); +INSERT INTO {tbl1} VALUES(3,'test345'); +INSERT INTO {tbl2} VALUES(101,'test123'); +INSERT INTO {tbl2} VALUES(102,'test234'); +INSERT INTO {tbl2} VALUES(103,'test345'); +""".format( + tbl1=db_parameters["name"] + "1", tbl2=db_parameters["name"] + "2" + ), + return_cursors=False, + ) + try: + with conn_cnx() as cnx: + ret = cnx.cursor(cursor_class=DictCursor).execute( + """ +SELECT * FROM {tbl1} ORDER BY 1 +""".format( + tbl1=db_parameters["name"] + "1" + ) + ) + assert ret.rowcount == 3 + assert ret._use_dict_result + ret = ret.fetchall() + assert type(ret) is list + assert type(ret[0]) is dict + assert type(ret[2]) is dict + assert ret[0]["C1"] == 1 + assert ret[2]["C2"] == "test345" + + ret = cnx.cursor(cursor_class=DictCursor).execute( + """ +SELECT * FROM {tbl2} ORDER BY 2 +""".format( + tbl2=db_parameters["name"] + "2" + ) + ) + assert ret.rowcount == 3 + ret = ret.fetchall() + assert type(ret) is list + assert type(ret[0]) is dict + assert type(ret[2]) is dict + assert ret[0]["C1"] == 101 + assert ret[2]["C2"] == "test345" + + curs = cnx.execute_string( + """ +SELECT * FROM {tbl1} ORDER BY 1 DESC; +SELECT * FROM {tbl2} ORDER BY 1 DESC; +""".format( + tbl1=db_parameters["name"] + "1", tbl2=db_parameters["name"] + "2" + ), + cursor_class=DictCursor, + ) + assert type(curs) is 
list + assert curs[0].rowcount == 3 + assert curs[1].rowcount == 3 + ret1 = curs[0].fetchone() + assert type(ret1) is dict + assert ret1["C1"] == 3 + assert ret1["C2"] == "test345" + ret2 = curs[1].fetchone() + assert type(ret2) is dict + assert ret2["C1"] == 103 + finally: + with conn_cnx() as cnx: + cnx.execute_string( + """ + DROP TABLE IF EXISTS {tbl1}; + DROP TABLE IF EXISTS {tbl2}; + """.format( + tbl1=db_parameters["name"] + "1", tbl2=db_parameters["name"] + "2" + ), + return_cursors=False, + ) + + +def test_execute_string_kwargs(conn_cnx, db_parameters): + with conn_cnx() as cnx: + with patch( + "snowflake.connector.cursor.SnowflakeCursor.execute", autospec=True + ) as mock_execute: + cnx.execute_string( + """ +CREATE OR REPLACE TABLE {tbl1} (c1 int, c2 string); +CREATE OR REPLACE TABLE {tbl2} (c1 int, c2 string); +INSERT INTO {tbl1} VALUES(1,'test123'); +INSERT INTO {tbl1} VALUES(2,'test234'); +INSERT INTO {tbl1} VALUES(3,'test345'); +INSERT INTO {tbl2} VALUES(101,'test123'); +INSERT INTO {tbl2} VALUES(102,'test234'); +INSERT INTO {tbl2} VALUES(103,'test345'); + """.format( + tbl1=db_parameters["name"] + "1", tbl2=db_parameters["name"] + "2" + ), + return_cursors=False, + _no_results=True, + ) + for call in mock_execute.call_args_list: + assert call[1].get("_no_results", False) + + +def test_execute_string_with_error(conn_cnx): + with conn_cnx() as cnx: + with pytest.raises(ProgrammingError): + cnx.execute_string( + """ +SELECT 1; +SELECT 234; +SELECT bafa; +""" + ) + + +def test_execute_stream(conn_cnx): + # file stream + expected_results = [1, 2, 3] + with codecs.open( + os.path.join(THIS_DIR, "../data", "multiple_statements.sql"), encoding="utf-8" + ) as f: + with conn_cnx() as cnx: + for idx, rec in enumerate(cnx.execute_stream(f)): + assert rec.fetchall()[0][0] == expected_results[idx] + + # text stream + expected_results = [3, 4, 5, 6] + with conn_cnx() as cnx: + for idx, rec in enumerate( + cnx.execute_stream(StringIO("SELECT 3; SELECT 4; SELECT 5;\nSELECT 6;")) + ): + assert rec.fetchall()[0][0] == expected_results[idx] + + +def test_execute_stream_with_error(conn_cnx): + # file stream + expected_results = [1, 2, 3] + with open(os.path.join(THIS_DIR, "../data", "multiple_statements.sql")) as f: + with conn_cnx() as cnx: + for idx, rec in enumerate(cnx.execute_stream(f)): + assert rec.fetchall()[0][0] == expected_results[idx] + + # read a file including a syntax error in the middle + with codecs.open( + os.path.join(THIS_DIR, "../data", "multiple_statements_negative.sql"), + encoding="utf-8", + ) as f: + with conn_cnx() as cnx: + gen = cnx.execute_stream(f) + rec = next(gen).fetchall() + assert rec[0][0] == 987 # the first statement succeeds + with pytest.raises(ProgrammingError): + next(gen) # the second statement fails + + # binary stream containing ASCII data + with conn_cnx() as cnx: + with pytest.raises(TypeError): + gen = cnx.execute_stream( + BytesIO(b"SELECT 3; SELECT 4; SELECT 5;\nSELECT 6;") + ) + next(gen) + + +@pytest.mark.skipolddriver +def test_execute_string_empty_lines(conn_cnx, db_parameters): + """Tests whether execute_string can filter out empty lines.""" + with conn_cnx() as cnx: + cursors = cnx.execute_string("select 1;\n\n") + assert len(cursors) == 1 + assert [c.fetchall() for c in cursors] == [[(1,)]] diff --git a/test/integ/test_key_pair_authentication.py b/test/integ/test_key_pair_authentication.py new file mode 100644 index 000000000..3f4610644 --- /dev/null +++ b/test/integ/test_key_pair_authentication.py @@ -0,0 +1,287 @@ +#!/usr/bin/env python +# 
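Aside, for readers skimming these tests: `execute_string` returns a list of cursors, one per statement, while `execute_stream` lazily yields cursors from a text file-like object (binary streams are rejected with a TypeError, as test_execute_stream_with_error shows). A usage sketch, assuming an already-open connection `cnx`:

```python
from io import StringIO

cursors = cnx.execute_string("select 1; select 2;")
assert [c.fetchall() for c in cursors] == [[(1,)], [(2,)]]

for cur in cnx.execute_stream(StringIO("select 3; select 4;")):
    print(cur.fetchall())
```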
+# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +import uuid +from datetime import datetime, timedelta +from os import path + +import jwt +import pytest +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric import dsa, rsa + +import snowflake.connector + + +@pytest.mark.skipolddriver +@pytest.mark.parametrize( + "input_account,expected_account", + [ + ("s3testaccount.global", "S3TESTACCOUNT.GLOBAL"), + ("acct-with-dashes", "ACCT-WITH-DASHES"), + ("testaccount.extra", "TESTACCOUNT"), + ("testaccount-user.global", "TESTACCOUNT"), + ("normalaccount", "NORMALACCOUNT"), + ], +) +def test_get_token_from_private_key(input_account, expected_account): + test_user = "python_test_keypair_user_" + str(uuid.uuid4()).replace("-", "_") + current_dir = path.dirname(path.realpath(__file__)) + private_key_file_path = path.join( + current_dir, "..", "data", "rsa_keys", "rsa_key_encrypted.p8" + ) + private_key_password = "test" + public_key_fingerprint = snowflake.connector.auth.get_public_key_fingerprint( + private_key_file_path, private_key_password + ) + # generate the jwt token + jwt_token = snowflake.connector.auth.get_token_from_private_key( + test_user, input_account, private_key_file_path, private_key_password + ) + # decode the token to get its fields (iss, sub, issue time, expiration time) + decoded_token = jwt.decode(jwt_token, options={"verify_signature": False}) + # Assert "sub" field matches {corrected account}.{user} + assert expected_account + "." + test_user.upper() == decoded_token.get("sub") + # Assert "iss" field matches {corrected account}.{user}.{public key fingerprint} + assert ( + expected_account + + "." + + test_user.upper() + + "." + + public_key_fingerprint.upper() + == decoded_token.get("iss").upper() + ) + # The token should be valid for 24 hours. Assert that its expiration time falls inside that window: more than 1360 minutes (~22.7 hours, leaving some slack) and less than 1441 minutes (~24 hours) from now. 
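Aside, a quick sanity check of the bounds asserted next (not part of the test file): 1360 minutes is about 22.7 hours and 1441 minutes is just over 24 hours, so a freshly issued 24-hour token must land strictly between the two.

```python
from datetime import timedelta

# A 24-hour lifetime sits inside the asserted window.
assert timedelta(minutes=1360) < timedelta(hours=24) < timedelta(minutes=1441)
```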
+ assert datetime.utcnow() + timedelta(minutes=1360) < datetime.fromtimestamp( + decoded_token.get("exp") + ) + assert datetime.utcnow() + timedelta(minutes=1441) > datetime.fromtimestamp( + decoded_token.get("exp") + ) + + +@pytest.mark.skipolddriver +def test_different_key_length(is_public_test, request, conn_cnx, db_parameters): + if is_public_test: + pytest.skip("This test requires ACCOUNTADMIN privilege to set the public key") + + test_user = "python_test_keypair_user_" + str(uuid.uuid4()).replace("-", "_") + + db_config = { + "protocol": db_parameters["protocol"], + "account": db_parameters["account"], + "user": test_user, + "host": db_parameters["host"], + "port": db_parameters["port"], + "database": db_parameters["database"], + "schema": db_parameters["schema"], + "timezone": "UTC", + } + + def fin(): + with conn_cnx() as cnx: + cnx.cursor().execute( + """ + use role accountadmin + """ + ) + cnx.cursor().execute( + """ + drop user if exists {user} + """.format( + user=test_user + ) + ) + + request.addfinalizer(fin) + + testcases = [2048, 4096, 8192] + + with conn_cnx() as cnx: + cursor = cnx.cursor() + cursor.execute( + """ + use role accountadmin + """ + ) + cursor.execute("create user " + test_user) + + for key_length in testcases: + private_key_der, public_key_der_encoded = generate_key_pair(key_length) + + cnx.cursor().execute( + """ + alter user {user} set rsa_public_key='{public_key}' + """.format( + user=test_user, public_key=public_key_der_encoded + ) + ) + + db_config["private_key"] = private_key_der + with snowflake.connector.connect(**db_config) as _: + pass + + +@pytest.mark.skipolddriver +def test_multiple_key_pair(is_public_test, request, conn_cnx, db_parameters): + if is_public_test: + pytest.skip("This test requires ACCOUNTADMIN privilege to set the public key") + + test_user = "python_test_keypair_user_" + str(uuid.uuid4()).replace("-", "_") + + db_config = { + "protocol": db_parameters["protocol"], + "account": db_parameters["account"], + "user": test_user, + "host": db_parameters["host"], + "port": db_parameters["port"], + "database": db_parameters["database"], + "schema": db_parameters["schema"], + "timezone": "UTC", + } + + def fin(): + with conn_cnx() as cnx: + cnx.cursor().execute( + """ + use role accountadmin + """ + ) + cnx.cursor().execute( + """ + drop user if exists {user} + """.format( + user=test_user + ) + ) + + request.addfinalizer(fin) + + private_key_one_der, public_key_one_der_encoded = generate_key_pair(2048) + private_key_two_der, public_key_two_der_encoded = generate_key_pair(2048) + + with conn_cnx() as cnx: + cnx.cursor().execute( + """ + use role accountadmin + """ + ) + cnx.cursor().execute( + """ + create user {user} + """.format( + user=test_user + ) + ) + cnx.cursor().execute( + """ + alter user {user} set rsa_public_key='{public_key}' + """.format( + user=test_user, public_key=public_key_one_der_encoded + ) + ) + + db_config["private_key"] = private_key_one_der + with snowflake.connector.connect(**db_config) as _: + pass + + # assert an exception, since a different key pair is used + db_config["private_key"] = private_key_two_der + # although a password is specified, + # key pair authentication should be used, and it should fail since we don't fall back + db_config["password"] = "fake_password" + with pytest.raises(snowflake.connector.errors.DatabaseError) as exec_info: + snowflake.connector.connect(**db_config) + + assert exec_info.value.errno == 250001 + assert exec_info.value.sqlstate == "08001" + assert "JWT token is invalid" in 
exec_info.value.msg + + with conn_cnx() as cnx: + cnx.cursor().execute( + """ + use role accountadmin + """ + ) + cnx.cursor().execute( + """ + alter user {user} set rsa_public_key_2='{public_key}' + """.format( + user=test_user, public_key=public_key_two_der_encoded + ) + ) + + with snowflake.connector.connect(**db_config) as _: + pass + + +def test_bad_private_key(db_parameters): + db_config = { + "protocol": db_parameters["protocol"], + "account": db_parameters["account"], + "user": db_parameters["user"], + "host": db_parameters["host"], + "port": db_parameters["port"], + "database": db_parameters["database"], + "schema": db_parameters["schema"], + "timezone": "UTC", + } + + dsa_private_key = dsa.generate_private_key(key_size=2048, backend=default_backend()) + dsa_private_key_der = dsa_private_key.private_bytes( + encoding=serialization.Encoding.DER, + format=serialization.PrivateFormat.PKCS8, + encryption_algorithm=serialization.NoEncryption(), + ) + + encrypted_rsa_private_key_der = rsa.generate_private_key( + key_size=2048, public_exponent=65537, backend=default_backend() + ).private_bytes( + encoding=serialization.Encoding.DER, + format=serialization.PrivateFormat.PKCS8, + encryption_algorithm=serialization.BestAvailableEncryption(b"abcd"), + ) + + bad_private_key_test_cases = [ + "abcd", + 1234, + b"abcd", + dsa_private_key_der, + encrypted_rsa_private_key_der, + ] + + for private_key in bad_private_key_test_cases: + db_config["private_key"] = private_key + with pytest.raises(snowflake.connector.errors.ProgrammingError) as exec_info: + snowflake.connector.connect(**db_config) + assert exec_info.value.errno == 251008 + + +def generate_key_pair(key_length): + private_key = rsa.generate_private_key( + backend=default_backend(), public_exponent=65537, key_size=key_length + ) + + private_key_der = private_key.private_bytes( + encoding=serialization.Encoding.DER, + format=serialization.PrivateFormat.PKCS8, + encryption_algorithm=serialization.NoEncryption(), + ) + + public_key_pem = ( + private_key.public_key() + .public_bytes( + serialization.Encoding.PEM, serialization.PublicFormat.SubjectPublicKeyInfo + ) + .decode("utf-8") + ) + + # strip off header + public_key_der_encoded = "".join(public_key_pem.split("\n")[1:-2]) + + return private_key_der, public_key_der_encoded diff --git a/test/integ/test_large_put.py b/test/integ/test_large_put.py new file mode 100644 index 000000000..90549e068 --- /dev/null +++ b/test/integ/test_large_put.py @@ -0,0 +1,93 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
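Aside on `generate_key_pair` above: slicing the PEM lines works because a PEM body is just base64-encoded DER between the BEGIN/END armor lines. An equivalent route, sketched with the `cryptography` APIs the file already imports:

```python
import base64

from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa

key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
pub_der = key.public_key().public_bytes(
    serialization.Encoding.DER,
    serialization.PublicFormat.SubjectPublicKeyInfo,
)
# Same value as joining the PEM body lines with header/footer stripped.
pub_b64 = base64.b64encode(pub_der).decode("ascii")
```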
+# + +from __future__ import annotations + +import os +from unittest.mock import patch + +import pytest + +from snowflake.connector.file_transfer_agent import SnowflakeFileTransferAgent + +from ..generate_test_files import generate_k_lines_of_n_files + + +@pytest.mark.skipolddriver +@pytest.mark.aws +def test_put_copy_large_files(tmpdir, conn_cnx, db_parameters): + """[s3] Puts and Copies into large files.""" + # generates N files + number_of_files = 2 + number_of_lines = 200000 + tmp_dir = generate_k_lines_of_n_files( + number_of_lines, number_of_files, tmp_dir=str(tmpdir.mkdir("data")) + ) + + files = os.path.join(tmp_dir, "file*") + with conn_cnx() as cnx: + cnx.cursor().execute( + f""" +create table {db_parameters['name']} ( +aa int, +dt date, +ts timestamp, +tsltz timestamp_ltz, +tsntz timestamp_ntz, +tstz timestamp_tz, +pct float, +ratio number(6,2)) +""" + ) + try: + with conn_cnx() as cnx: + files = files.replace("\\", "\\\\") + + def mocked_file_agent(*args, **kwargs): + newkwargs = kwargs.copy() + newkwargs.update(multipart_threshold=10000) + agent = SnowflakeFileTransferAgent(*args, **newkwargs) + mocked_file_agent.agent = agent + return agent + + with patch( + "snowflake.connector.cursor.SnowflakeFileTransferAgent", + side_effect=mocked_file_agent, + ): + cnx.cursor().execute( + f"put 'file://{files}' @%{db_parameters['name']}", + ) + assert mocked_file_agent.agent._multipart_threshold == 10000 + + c = cnx.cursor() + try: + c.execute("copy into {}".format(db_parameters["name"])) + cnt = 0 + for _ in c: + cnt += 1 + assert cnt == number_of_files, "Number of PUT files" + finally: + c.close() + + c = cnx.cursor() + try: + c.execute( + "select count(*) from {name}".format(name=db_parameters["name"]) + ) + cnt = 0 + for rec in c: + cnt += rec[0] + assert cnt == number_of_files * number_of_lines, "Number of rows" + finally: + c.close() + finally: + with conn_cnx( + user=db_parameters["user"], + account=db_parameters["account"], + password=db_parameters["password"], + ) as cnx: + cnx.cursor().execute( + "drop table if exists {table}".format(table=db_parameters["name"]) + ) diff --git a/test/integ/test_large_result_set.py b/test/integ/test_large_result_set.py new file mode 100644 index 000000000..c3a3adfbf --- /dev/null +++ b/test/integ/test_large_result_set.py @@ -0,0 +1,167 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
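Aside: the `mocked_file_agent` pattern in test_put_copy_large_files is worth naming. Patching the class reference used by the caller with `side_effect=` lets the test construct the real object, inject an extra kwarg, and stash the instance for later assertions, while keeping a direct handle to the real class to avoid recursing into the patch. A generic sketch with stand-in names (nothing here is connector API):

```python
from unittest.mock import patch


class Agent:  # stand-in for the real class under test
    def __init__(self, *args, **kwargs):
        self.kwargs = kwargs


_real_agent = Agent  # keep a handle to the real class before patching


def factory(*args, **kwargs):
    obj = _real_agent(*args, **{**kwargs, "threshold": 10000})  # inject a kwarg
    factory.instance = obj  # stash the instance for later assertions
    return obj


with patch(f"{__name__}.Agent", side_effect=factory):
    Agent(x=1)  # the code under test would call this via the patched name

assert factory.instance.kwargs == {"x": 1, "threshold": 10000}
```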
+# + +from __future__ import annotations + +from unittest.mock import Mock + +import pytest + +from snowflake.connector.telemetry import TelemetryField + +NUMBER_OF_ROWS = 50000 + +PREFETCH_THREADS = [8, 3, 1] + + +@pytest.fixture() +def ingest_data(request, conn_cnx, db_parameters): + with conn_cnx( + user=db_parameters["user"], + account=db_parameters["account"], + password=db_parameters["password"], + ) as cnx: + cnx.cursor().execute( + """ + create or replace table {name} ( + c0 int, + c1 int, + c2 int, + c3 int, + c4 int, + c5 int, + c6 int, + c7 int, + c8 int, + c9 int) + """.format( + name=db_parameters["name"] + ) + ) + cnx.cursor().execute( + """ + insert into {name} + select random(100), + random(100), + random(100), + random(100), + random(100), + random(100), + random(100), + random(100), + random(100), + random(100) + from table(generator(rowCount=>{number_of_rows})) + """.format( + name=db_parameters["name"], number_of_rows=NUMBER_OF_ROWS + ) + ) + first_val = ( + cnx.cursor() + .execute( + "select c0 from {name} order by 1 limit 1".format( + name=db_parameters["name"] + ) + ) + .fetchone()[0] + ) + last_val = ( + cnx.cursor() + .execute( + "select c9 from {name} order by 1 desc limit 1".format( + name=db_parameters["name"] + ) + ) + .fetchone()[0] + ) + + def fin(): + with conn_cnx( + user=db_parameters["user"], + account=db_parameters["account"], + password=db_parameters["password"], + ) as cnx: + cnx.cursor().execute( + "drop table if exists {name}".format(name=db_parameters["name"]) + ) + + request.addfinalizer(fin) + return first_val, last_val + + +@pytest.mark.aws +@pytest.mark.parametrize("num_threads", PREFETCH_THREADS) +def test_query_large_result_set_n_threads( + conn_cnx, db_parameters, ingest_data, num_threads +): + sql = "select * from {name} order by 1".format(name=db_parameters["name"]) + with conn_cnx( + user=db_parameters["user"], + account=db_parameters["account"], + password=db_parameters["password"], + client_prefetch_threads=num_threads, + ) as cnx: + assert cnx.client_prefetch_threads == num_threads + results = [] + for rec in cnx.cursor().execute(sql): + results.append(rec) + num_rows = len(results) + assert NUMBER_OF_ROWS == num_rows + assert results[0][0] == ingest_data[0] + assert results[num_rows - 1][8] == ingest_data[1] + + +@pytest.mark.aws +@pytest.mark.skipolddriver +def test_query_large_result_set(conn_cnx, db_parameters, ingest_data): + """[s3] Gets a large result set.""" + sql = "select * from {name} order by 1".format(name=db_parameters["name"]) + with conn_cnx() as cnx: + telemetry_data = [] + add_log_mock = Mock() + add_log_mock.side_effect = lambda datum: telemetry_data.append(datum) + cnx._telemetry.add_log_to_batch = add_log_mock + + result2 = [] + for rec in cnx.cursor().execute(sql): + result2.append(rec) + + num_rows = len(result2) + assert result2[0][0] == ingest_data[0] + assert result2[num_rows - 1][8] == ingest_data[1] + + result999 = [] + for rec in cnx.cursor().execute(sql): + result999.append(rec) + + num_rows = len(result999) + assert result999[0][0] == ingest_data[0] + assert result999[num_rows - 1][8] == ingest_data[1] + + assert len(result2) == len( + result999 + ), "result lengths differ between result2 and result999" + for i, (x, y) in enumerate(zip(result2, result999)): + assert x == y, f"element {i}" + + # verify that the expected telemetry metrics were logged + expected = [ + TelemetryField.TIME_CONSUME_FIRST_RESULT, + TelemetryField.TIME_CONSUME_LAST_RESULT, + # NOTE: Arrow doesn't parse chunks the way JSON does, so the parsing 
metric + # only appears for JSON result sets and is not asserted here + # TelemetryField.TIME_PARSING_CHUNKS, + TelemetryField.TIME_DOWNLOADING_CHUNKS, + ] + for field in expected: + assert ( + sum( + 1 if x.message["type"] == field.value else 0 for x in telemetry_data + ) + == 2 + ), ( + "Expected two telemetry logs (one per query) " + "for log type {}".format(field.value) + ) diff --git a/test/integ/test_load_unload.py b/test/integ/test_load_unload.py new file mode 100644 index 000000000..e494da22b --- /dev/null +++ b/test/integ/test_load_unload.py @@ -0,0 +1,490 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +import os +import pathlib +from getpass import getuser +from logging import getLogger +from os import path + +import pytest + +try: + from parameters import CONNECTION_PARAMETERS_ADMIN +except ImportError: + CONNECTION_PARAMETERS_ADMIN = {} + +THIS_DIR = path.dirname(path.realpath(__file__)) + +logger = getLogger(__name__) + + +@pytest.fixture() +def test_data(request, conn_cnx, db_parameters): + def connection(): + """Abstracting away connection creation.""" + return conn_cnx() + + return create_test_data(request, db_parameters, connection) + + +@pytest.fixture() +def s3_test_data(request, conn_cnx, db_parameters): + def connection(): + """Abstracting away connection creation.""" + return conn_cnx( + user=db_parameters["user"], + account=db_parameters["account"], + password=db_parameters["password"], + ) + + return create_test_data(request, db_parameters, connection) + + +def create_test_data(request, db_parameters, connection): + assert "AWS_ACCESS_KEY_ID" in os.environ, "AWS_ACCESS_KEY_ID is missing" + assert "AWS_SECRET_ACCESS_KEY" in os.environ, "AWS_SECRET_ACCESS_KEY is missing" + + unique_name = db_parameters["name"] + database_name = f"{unique_name}_db" + warehouse_name = f"{unique_name}_wh" + + def fin(): + with connection() as cnx: + with cnx.cursor() as cur: + cur.execute(f"drop database {database_name}") + cur.execute(f"drop warehouse {warehouse_name}") + + request.addfinalizer(fin) + + class TestData: + def __init__(self): + self.test_data_dir = (pathlib.Path(__file__).parent / "data").absolute() + self.AWS_ACCESS_KEY_ID = "'{}'".format(os.environ["AWS_ACCESS_KEY_ID"]) + self.AWS_SECRET_ACCESS_KEY = "'{}'".format( + os.environ["AWS_SECRET_ACCESS_KEY"] + ) + self.stage_name = f"{unique_name}_stage" + self.warehouse_name = warehouse_name + self.database_name = database_name + self.connection = connection + self.user_bucket = os.getenv( + "SF_AWS_USER_BUCKET", f"sfc-dev1-regression/{getuser()}/reg" + ) + + ret = TestData() + + with connection() as cnx: + with cnx.cursor() as cur: + cur.execute("use role sysadmin") + cur.execute( + """ +create or replace warehouse {} +warehouse_size = 'small' warehouse_type='standard' +auto_suspend=1800 +""".format( + warehouse_name + ) + ) + cur.execute( + """ +create or replace database {} +""".format( + database_name + ) + ) + cur.execute( + """ +create or replace schema pytesting_schema +""" + ) + cur.execute( + """ +create or replace file format VSV type = 'CSV' +field_delimiter='|' error_on_column_count_mismatch=false + """ + ) + return ret + + +@pytest.mark.skipif( + not CONNECTION_PARAMETERS_ADMIN, reason="Snowflake admin account is not accessible." 
+) +def test_load_s3(test_data): + with test_data.connection() as cnx: + with cnx.cursor() as cur: + cur.execute(f"use warehouse {test_data.warehouse_name}") + cur.execute(f"use schema {test_data.database_name}.pytesting_schema") + cur.execute( + """ +create or replace table tweets(created_at timestamp, +id number, id_str string, text string, source string, +in_reply_to_status_id number, in_reply_to_status_id_str string, +in_reply_to_user_id number, in_reply_to_user_id_str string, +in_reply_to_screen_name string, user__id number, user__id_str string, +user__name string, user__screen_name string, user__location string, +user__description string, user__url string, +user__entities__description__urls string, user__protected string, +user__followers_count number, user__friends_count number, +user__listed_count number, user__created_at timestamp, +user__favourites_count number, user__utc_offset number, +user__time_zone string, user__geo_enabled string, user__verified string, +user__statuses_count number, user__lang string, +user__contributors_enabled string, user__is_translator string, +user__profile_background_color string, +user__profile_background_image_url string, +user__profile_background_image_url_https string, +user__profile_background_tile string, user__profile_image_url string, +user__profile_image_url_https string, user__profile_link_color string, +user__profile_sidebar_border_color string, +user__profile_sidebar_fill_color string, user__profile_text_color string, +user__profile_use_background_image string, user__default_profile string, +user__default_profile_image string, user__following string, +user__follow_request_sent string, user__notifications string, geo string, +coordinates string, place string, contributors string, retweet_count number, +favorite_count number, entities__hashtags string, entities__symbols string, +entities__urls string, entities__user_mentions string, favorited string, +retweeted string, lang string) +""" + ) + cur.execute("ls @%tweets") + assert cur.rowcount == 0, ( + "table newly created should not have any files in its " "staging area" + ) + cur.execute( + """ +copy into tweets from s3://sfc-dev1-data/twitter/O1k/tweets/ +credentials=(AWS_KEY_ID={aws_access_key_id} +AWS_SECRET_KEY={aws_secret_access_key}) +file_format=(skip_header=1 null_if=('') field_optionally_enclosed_by='"') +""".format( + aws_access_key_id=test_data.AWS_ACCESS_KEY_ID, + aws_secret_access_key=test_data.AWS_SECRET_ACCESS_KEY, + ) + ) + assert cur.rowcount == 1, "copy into tweets did not set rowcount to 1" + results = cur.fetchall() + assert ( + results[0][0] == "s3://sfc-dev1-data/twitter/O1k/tweets/1.csv.gz" + ), "copy did not load the expected file" + cur.execute("drop table tweets") + + +@pytest.mark.skipif( + not CONNECTION_PARAMETERS_ADMIN, reason="Snowflake admin account is not accessible." 
+) +def test_put_local_file(test_data): + with test_data.connection() as cnx: + with cnx.cursor() as cur: + cur.execute("alter session set DISABLE_PUT_AND_GET_ON_EXTERNAL_STAGE=false") + cur.execute(f"use warehouse {test_data.warehouse_name}") + cur.execute(f"""use schema {test_data.database_name}.pytesting_schema""") + cur.execute( + """ +create or replace table pytest_putget_t1 (c1 STRING, c2 STRING, c3 STRING, +c4 STRING, c5 STRING, c6 STRING, c7 STRING, c8 STRING, c9 STRING) +stage_file_format = (field_delimiter = '|' error_on_column_count_mismatch=false) +stage_copy_options = (purge=false) +stage_location = (url = 's3://sfc-dev1-regression/jenkins/{stage_name}' +credentials = ( +AWS_KEY_ID={aws_access_key_id} +AWS_SECRET_KEY={aws_secret_access_key})) +""".format( + aws_access_key_id=test_data.AWS_ACCESS_KEY_ID, + aws_secret_access_key=test_data.AWS_SECRET_ACCESS_KEY, + stage_name=test_data.stage_name, + ) + ) + cur.execute( + """put file://{}/ExecPlatform/Database/data/orders_10*.csv @%pytest_putget_t1""".format( + str(test_data.test_data_dir) + ) + ) + cur.execute("ls @%pytest_putget_t1") + _ = cur.fetchall() + assert cur.rowcount == 2, "ls @%pytest_putget_t1 did not return 2 rows" + cur.execute("copy into pytest_putget_t1") + results = cur.fetchall() + assert len(results) == 2, "2 files were not copied" + assert results[0][1] == "LOADED", "file 1 was not loaded after copy" + assert results[1][1] == "LOADED", "file 2 was not loaded after copy" + + cur.execute("select count(*) from pytest_putget_t1") + results = cur.fetchall() + assert results[0][0] == 73, "73 rows not loaded into pytest_putget_t1" + cur.execute("rm @%pytest_putget_t1") + results = cur.fetchall() + assert len(results) == 2, "two files were not removed" + cur.execute( + "select STATUS from information_schema.load_history where table_name='PYTEST_PUTGET_T1'" + ) + results = cur.fetchall() + assert results[0][0] == "LOADED", "history does not show file to be loaded" + cur.execute("drop table pytest_putget_t1") + + +@pytest.mark.skipif( + not CONNECTION_PARAMETERS_ADMIN, reason="Snowflake admin account is not accessible." 
+)
+def test_put_load_from_user_stage(test_data):
+    with test_data.connection() as cnx:
+        with cnx.cursor() as cur:
+            cur.execute("alter session set DISABLE_PUT_AND_GET_ON_EXTERNAL_STAGE=false")
+            cur.execute(
+                """
+use warehouse {}
+""".format(
+                    test_data.warehouse_name
+                )
+            )
+            cur.execute(
+                """
+use schema {}.pytesting_schema
+""".format(
+                    test_data.database_name
+                )
+            )
+            cur.execute(
+                """
+create or replace stage {stage_name}
+url='s3://{user_bucket}/{stage_name}'
+credentials = (
+AWS_KEY_ID={aws_access_key_id}
+AWS_SECRET_KEY={aws_secret_access_key})
+""".format(
+                    aws_access_key_id=test_data.AWS_ACCESS_KEY_ID,
+                    aws_secret_access_key=test_data.AWS_SECRET_ACCESS_KEY,
+                    user_bucket=test_data.user_bucket,
+                    stage_name=test_data.stage_name,
+                )
+            )
+            cur.execute(
+                """
+create or replace table pytest_putget_t2 (c1 STRING, c2 STRING, c3 STRING,
+c4 STRING, c5 STRING, c6 STRING, c7 STRING, c8 STRING, c9 STRING)
+"""
+            )
+            cur.execute(
+                """put file://{}/ExecPlatform/Database/data/orders_10*.csv @{}""".format(
+                    test_data.test_data_dir, test_data.stage_name
+                )
+            )
+            # two files should have been put in the staging area
+            results = cur.fetchall()
+            assert len(results) == 2
+
+            cur.execute("ls @%pytest_putget_t2")
+            results = cur.fetchall()
+            assert len(results) == 0, "no files should have been loaded yet"
+
+            # copy
+            cur.execute(
+                """
+copy into pytest_putget_t2 from @{stage_name}
+file_format = (field_delimiter = '|' error_on_column_count_mismatch=false)
+purge=true
+""".format(
+                    stage_name=test_data.stage_name
+                )
+            )
+            results = sorted(cur.fetchall())
+            assert len(results) == 2, "copy failed to load two files from the stage"
+            assert results[0][
+                0
+            ] == "s3://{user_bucket}/{stage_name}/orders_100.csv.gz".format(
+                user_bucket=test_data.user_bucket,
+                stage_name=test_data.stage_name,
+            ), "copy did not load file orders_100"
+
+            assert results[1][
+                0
+            ] == "s3://{user_bucket}/{stage_name}/orders_101.csv.gz".format(
+                user_bucket=test_data.user_bucket,
+                stage_name=test_data.stage_name,
+            ), "copy did not load file orders_101"
+
+            # should be empty (purged)
+            cur.execute(f"ls @{test_data.stage_name}")
+            results = cur.fetchall()
+            assert len(results) == 0, "copied files not purged"
+            cur.execute("drop table pytest_putget_t2")
+            cur.execute(f"drop stage {test_data.stage_name}")
+
+
+@pytest.mark.aws
+@pytest.mark.skipif(
+    not CONNECTION_PARAMETERS_ADMIN, reason="Snowflake admin account is not accessible."
+)
+def test_unload(db_parameters, s3_test_data):
+    with s3_test_data.connection() as cnx:
+        with cnx.cursor() as cur:
+            cur.execute(f"""use warehouse {s3_test_data.warehouse_name}""")
+            cur.execute(f"""use schema {s3_test_data.database_name}.pytesting_schema""")
+            cur.execute(
+                """
+create or replace stage {stage_name}
+url='s3://{user_bucket}/{stage_name}/unload/'
+credentials = (
+AWS_KEY_ID={aws_access_key_id}
+AWS_SECRET_KEY={aws_secret_access_key})
+""".format(
+                    aws_access_key_id=s3_test_data.AWS_ACCESS_KEY_ID,
+                    aws_secret_access_key=s3_test_data.AWS_SECRET_ACCESS_KEY,
+                    user_bucket=s3_test_data.user_bucket,
+                    stage_name=s3_test_data.stage_name,
+                )
+            )
+
+            cur.execute(
+                """
+CREATE OR REPLACE TABLE pytest_t3 (c1 STRING, c2 STRING, c3 STRING,
+c4 STRING, c5 STRING, c6 STRING, c7 STRING, c8 STRING, c9 STRING)
+stage_file_format = (format_name = 'vsv' field_delimiter = '|'
+error_on_column_count_mismatch=false)
+"""
+            )
+            cur.execute(
+                """
+alter stage {stage_name} set file_format = (format_name = 'VSV' )
+""".format(
+                    stage_name=s3_test_data.stage_name
+                )
+            )
+
+            # make sure it's clean
+            cur.execute(f"rm @{s3_test_data.stage_name}")
+
+            # put local file
+            cur.execute(
+                "put file://{}/ExecPlatform/Database/data/orders_10*.csv @%pytest_t3".format(
+                    s3_test_data.test_data_dir
+                )
+            )
+
+            # copy into table
+            cur.execute(
+                """
+copy into pytest_t3
+file_format = (field_delimiter = '|' error_on_column_count_mismatch=false)
+purge=true
+"""
+            )
+            # unload from table
+            cur.execute(
+                """
+copy into @{stage_name}/pytest_t3/data_
+from pytest_t3 file_format=(format_name='VSV' compression='gzip')
+max_file_size=10000000
+""".format(
+                    stage_name=s3_test_data.stage_name
+                )
+            )
+
+            # load the data back to another table
+            cur.execute(
+                """
+CREATE OR REPLACE TABLE pytest_t3_copy
+(c1 STRING, c2 STRING, c3 STRING, c4 STRING, c5 STRING,
+c6 STRING, c7 STRING, c8 STRING, c9 STRING)
+stage_file_format = (format_name = 'VSV' )
+"""
+            )
+
+            cur.execute(
+                """
+copy into pytest_t3_copy
+from @{stage_name}/pytest_t3/data_ return_failed_only=true
+""".format(
+                    stage_name=s3_test_data.stage_name
+                )
+            )
+
+            # check they are equal: the symmetric difference should be empty
+            cur.execute(
+                """
+(select * from pytest_t3 minus select * from pytest_t3_copy)
+union
+(select * from pytest_t3_copy minus select * from pytest_t3)
+"""
+            )
+            assert cur.rowcount == 0, "unloaded/reloaded data were not the same"
+            # clean stage
+            cur.execute(
+                "rm @{stage_name}/pytest_t3/data_".format(
+                    stage_name=s3_test_data.stage_name
+                )
+            )
+            assert cur.rowcount == 1, "only one file was expected to be removed"
+
+            # unload with deflate
+            cur.execute(
+                """
+copy into @{stage_name}/pytest_t3/data_
+from pytest_t3 file_format=(format_name='VSV' compression='deflate')
+max_file_size=10000000
+""".format(
+                    stage_name=s3_test_data.stage_name
+                )
+            )
+            results = cur.fetchall()
+            assert results[0][0] == 73, "73 rows were expected to be loaded"
+
+            # create a table to load the data back into
+            cur.execute(
+                """
+CREATE OR REPLACE TABLE pytest_t3_copy
+(c1 STRING, c2 STRING, c3 STRING, c4 STRING, c5 STRING,
+c6 STRING, c7 STRING, c8 STRING, c9 STRING)
+stage_file_format = (format_name = 'VSV'
+compression='deflate')
+"""
+            )
+            results = cur.fetchall()
+            assert results[0][0] == "Table PYTEST_T3_COPY successfully created."
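+            # The stage file format is switched to deflate next so that the
+            # copy back into pytest_t3_copy can read the deflate-compressed files.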
+ + cur.execute( + """ +alter stage {stage_name} set file_format = (format_name = 'VSV' + compression='deflate')""".format( + stage_name=s3_test_data.stage_name + ) + ) + + cur.execute( + """ +copy into pytest_t3_copy from @{stage_name}/pytest_t3/data_ +return_failed_only=true +""".format( + stage_name=s3_test_data.stage_name + ) + ) + results = cur.fetchall() + assert results[0][2] == "LOADED" + assert results[0][4] == 73 + # check to make sure they are equal + cur.execute( + """ +(select * from pytest_t3 minus select * from pytest_t3_copy) union +(select * from pytest_t3_copy minus select * from pytest_t3)""" + ) + assert cur.rowcount == 0, "unloaded/reloaded data were not the same" + cur.execute( + "rm @{stage_name}/pytest_t3/data_".format( + stage_name=s3_test_data.stage_name + ) + ) + assert cur.rowcount == 1, "only one file was expected to be removed" + + # clean stage + cur.execute( + "rm @{stage_name}/pytest_t3/data_".format( + stage_name=s3_test_data.stage_name + ) + ) + + cur.execute("drop table pytest_t3_copy") + cur.execute(f"drop stage {s3_test_data.stage_name}") diff --git a/test/integ/test_network.py b/test/integ/test_network.py new file mode 100644 index 000000000..547ee288b --- /dev/null +++ b/test/integ/test_network.py @@ -0,0 +1,38 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +from logging import getLogger + +from snowflake.connector import errorcode, errors +from snowflake.connector.network import SnowflakeRestful + +logger = getLogger(__name__) + + +def test_no_auth(db_parameters): + """SNOW-13588: No auth Rest API test.""" + rest = SnowflakeRestful(host=db_parameters["host"], port=db_parameters["port"]) + try: + # no auth + # show warehouse + rest.request( + url="/queries", + body={ + "sequenceId": 10000, + "sqlText": "show warehouses", + "parameters": { + "ui_mode": True, + }, + }, + method="post", + client="rest", + ) + raise Exception("Must fail with auth error") + except errors.Error as e: + assert e.errno == errorcode.ER_CONNECTION_IS_CLOSED + finally: + rest.close() diff --git a/test/integ/test_numpy_binding.py b/test/integ/test_numpy_binding.py new file mode 100644 index 000000000..28adf8d10 --- /dev/null +++ b/test/integ/test_numpy_binding.py @@ -0,0 +1,195 @@ +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
+# + +from __future__ import annotations + +import datetime +import time + +import numpy as np + + +def test_numpy_datatype_binding(conn_cnx, db_parameters): + """Tests numpy data type bindings.""" + epoch_time = time.time() + current_datetime = datetime.datetime.fromtimestamp(epoch_time) + current_datetime64 = np.datetime64(current_datetime) + all_data = [ + { + "tz": "America/Los_Angeles", + "float": "1.79769313486e+308", + "numpy_bool": np.True_, + "epoch_time": epoch_time, + "current_time": current_datetime64, + "specific_date": np.datetime64("2005-02-25T03:30"), + "expected_specific_date": np.datetime64("2005-02-25T03:30").astype( + datetime.datetime + ), + }, + { + "tz": "Asia/Tokyo", + "float": "-1.79769313486e+308", + "numpy_bool": np.False_, + "epoch_time": epoch_time, + "current_time": current_datetime64, + "specific_date": np.datetime64("1970-12-31T05:00:00"), + "expected_specific_date": np.datetime64("1970-12-31T05:00:00").astype( + datetime.datetime + ), + }, + { + "tz": "America/New_York", + "float": "-1.79769313486e+308", + "numpy_bool": np.True_, + "epoch_time": epoch_time, + "current_time": current_datetime64, + "specific_date": np.datetime64("1969-12-31T05:00:00"), + "expected_specific_date": np.datetime64("1969-12-31T05:00:00").astype( + datetime.datetime + ), + }, + { + "tz": "UTC", + "float": "-1.79769313486e+308", + "numpy_bool": np.False_, + "epoch_time": epoch_time, + "current_time": current_datetime64, + "specific_date": np.datetime64("1968-11-12T07:00:00.123"), + "expected_specific_date": np.datetime64("1968-11-12T07:00:00.123").astype( + datetime.datetime + ), + }, + ] + try: + with conn_cnx(numpy=True) as cnx: + cnx.cursor().execute( + """ +CREATE OR REPLACE TABLE {name} ( + c1 integer, -- int8 + c2 integer, -- int16 + c3 integer, -- int32 + c4 integer, -- int64 + c5 float, -- float16 + c6 float, -- float32 + c7 float, -- float64 + c8 timestamp_ntz, -- datetime64 + c9 date, -- datetime64 + c10 timestamp_ltz, -- datetime64, + c11 timestamp_tz, -- datetime64 + c12 boolean) -- numpy.bool_ + """.format( + name=db_parameters["name"] + ) + ) + for data in all_data: + cnx.cursor().execute( + """ +ALTER SESSION SET timezone='{tz}'""".format( + tz=data["tz"] + ) + ) + cnx.cursor().execute( + """ +INSERT INTO {name}( + c1, + c2, + c3, + c4, + c5, + c6, + c7, + c8, + c9, + c10, + c11, + c12 +) +VALUES( + %s, + %s, + %s, + %s, + %s, + %s, + %s, + %s, + %s, + %s, + %s, + %s)""".format( + name=db_parameters["name"] + ), + ( + np.iinfo(np.int8).max, + np.iinfo(np.int16).max, + np.iinfo(np.int32).max, + np.iinfo(np.int64).max, + np.finfo(np.float16).max, + np.finfo(np.float32).max, + np.float64(data["float"]), + data["current_time"], + data["current_time"], + data["current_time"], + data["specific_date"], + data["numpy_bool"], + ), + ) + rec = ( + cnx.cursor() + .execute( + """ +SELECT + c1, + c2, + c3, + c4, + c5, + c6, + c7, + c8, + c9, + c10, + c11, + c12 + FROM {name}""".format( + name=db_parameters["name"] + ) + ) + .fetchone() + ) + assert np.int8(rec[0]) == np.iinfo(np.int8).max + assert np.int16(rec[1]) == np.iinfo(np.int16).max + assert np.int32(rec[2]) == np.iinfo(np.int32).max + assert np.int64(rec[3]) == np.iinfo(np.int64).max + assert np.float16(rec[4]) == np.finfo(np.float16).max + assert np.float32(rec[5]) == np.finfo(np.float32).max + assert rec[6] == np.float64(data["float"]) + assert rec[7] == data["current_time"] + assert str(rec[8]) == str(data["current_time"])[0:10] + assert rec[9] == datetime.datetime.fromtimestamp( + epoch_time, rec[9].tzinfo + ) + assert 
rec[10] == data["expected_specific_date"].replace( + tzinfo=rec[10].tzinfo + ) + assert ( + isinstance(rec[11], bool) + and rec[11] == data["numpy_bool"] + and np.bool_(rec[11]) == data["numpy_bool"] + ) + cnx.cursor().execute( + """ +DELETE FROM {name}""".format( + name=db_parameters["name"] + ) + ) + finally: + with conn_cnx() as cnx: + cnx.cursor().execute( + """ + DROP TABLE IF EXISTS {name} + """.format( + name=db_parameters["name"] + ) + ) diff --git a/test/integ/test_pickle_timestamp_tz.py b/test/integ/test_pickle_timestamp_tz.py new file mode 100644 index 000000000..5ba90a877 --- /dev/null +++ b/test/integ/test_pickle_timestamp_tz.py @@ -0,0 +1,27 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +import os +import pickle + + +def test_pickle_timestamp_tz(tmpdir, conn_cnx): + """Ensures the timestamp_tz result is pickle-able.""" + tmp_dir = str(tmpdir.mkdir("pickles")) + output = os.path.join(tmp_dir, "tz.pickle") + expected_tz = None + with conn_cnx() as con: + for rec in con.cursor().execute( + "select '2019-08-11 01:02:03.123 -03:00'::TIMESTAMP_TZ" + ): + expected_tz = rec[0] + with open(output, "wb") as f: + pickle.dump(expected_tz, f) + + with open(output, "rb") as f: + read_tz = pickle.load(f) + assert expected_tz == read_tz diff --git a/test/integ/test_put_get.py b/test/integ/test_put_get.py new file mode 100644 index 000000000..249bab01d --- /dev/null +++ b/test/integ/test_put_get.py @@ -0,0 +1,669 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# +from __future__ import annotations + +import filecmp +import os +import pathlib +from getpass import getuser +from io import BytesIO +from logging import getLogger +from os import path +from typing import TYPE_CHECKING, Callable, NamedTuple +from unittest import mock + +import pytest + +from snowflake.connector import OperationalError + +from ..generate_test_files import generate_k_lines_of_n_files +from ..integ_helpers import put +from ..randomize import random_string + +if TYPE_CHECKING: + from snowflake.connector import SnowflakeConnection + +try: + from ..parameters import CONNECTION_PARAMETERS_ADMIN +except ImportError: + CONNECTION_PARAMETERS_ADMIN = {} + +THIS_DIR = path.dirname(path.realpath(__file__)) + +logger = getLogger(__name__) + + +class _TestData(NamedTuple): + test_data_dir: pathlib.Path + AWS_ACCESS_KEY_ID: str + AWS_SECRET_ACCESS_KEY: str + stage_name: str + warehouse_name: str + database_name: str + user_bucket: str + connection: Callable[..., SnowflakeConnection] + + +@pytest.fixture() +def test_data(request, conn_cnx: Callable[..., SnowflakeConnection]) -> _TestData: + return create_test_data(request, conn_cnx) + + +def create_test_data( + request, connection: Callable[..., SnowflakeConnection] +) -> _TestData: + assert "AWS_ACCESS_KEY_ID" in os.environ + assert "AWS_SECRET_ACCESS_KEY" in os.environ + + unique_name = random_string(5, "create_test_data_") + warehouse_name = f"{unique_name}_wh" + database_name = f"{unique_name}_db" + + def fin(): + with connection() as cnx: + with cnx.cursor() as cur: + cur.execute(f"drop database {database_name}") + cur.execute(f"drop warehouse {warehouse_name}") + + request.addfinalizer(fin) + + ret = _TestData( + test_data_dir=pathlib.Path(__file__).absolute().parent.parent / "data", + AWS_ACCESS_KEY_ID=f"'{os.environ['AWS_ACCESS_KEY_ID']}'", + AWS_SECRET_ACCESS_KEY=f"'{os.environ['AWS_SECRET_ACCESS_KEY']}'", + 
stage_name=f"{unique_name}_stage", + warehouse_name=warehouse_name, + database_name=database_name, + user_bucket=os.getenv( + "SF_AWS_USER_BUCKET", f"sfc-dev1-regression/{getuser()}/reg" + ), + connection=connection, + ) + + with connection() as cnx: + with cnx.cursor() as cur: + cur.execute("use role sysadmin") + cur.execute( + f""" +create or replace warehouse {warehouse_name} +warehouse_size = 'small' +warehouse_type='standard' +auto_suspend=1800 +""" + ) + cur.execute( + f""" +create or replace database {database_name} +""" + ) + cur.execute( + """ +create or replace schema pytesting_schema +""" + ) + cur.execute( + """ +create or replace file format VSV type = 'CSV' +field_delimiter='|' error_on_column_count_mismatch=false +""" + ) + + return ret + + +@pytest.mark.skipif( + not CONNECTION_PARAMETERS_ADMIN, reason="Snowflake admin account is not accessible." +) +def test_load_s3(test_data): + with test_data.connection() as cnx: + with cnx.cursor() as cur: + cur.execute(f"use warehouse {test_data.warehouse_name}") + cur.execute(f"use schema {test_data.database_name}.pytesting_schema") + cur.execute( + """ +create or replace table tweets(created_at timestamp, +id number, id_str string, text string, source string, +in_reply_to_status_id number, in_reply_to_status_id_str string, +in_reply_to_user_id number, in_reply_to_user_id_str string, +in_reply_to_screen_name string, user__id number, user__id_str string, +user__name string, user__screen_name string, user__location string, +user__description string, user__url string, +user__entities__description__urls string, user__protected string, +user__followers_count number, user__friends_count number, +user__listed_count number, user__created_at timestamp, +user__favourites_count number, user__utc_offset number, +user__time_zone string, user__geo_enabled string, user__verified string, +user__statuses_count number, user__lang string, +user__contributors_enabled string, user__is_translator string, +user__profile_background_color string, +user__profile_background_image_url string, +user__profile_background_image_url_https string, +user__profile_background_tile string, user__profile_image_url string, +user__profile_image_url_https string, user__profile_link_color string, +user__profile_sidebar_border_color string, +user__profile_sidebar_fill_color string, user__profile_text_color string, +user__profile_use_background_image string, user__default_profile string, +user__default_profile_image string, user__following string, +user__follow_request_sent string, user__notifications string, geo string, +coordinates string, place string, contributors string, +retweet_count number, +favorite_count number, entities__hashtags string, entities__symbols string, +entities__urls string, entities__user_mentions string, favorited string, +retweeted string, lang string)""" + ) + cur.execute("ls @%tweets") + assert cur.rowcount == 0, ( + "table newly created should not have any " "files in its staging area" + ) + cur.execute( + f""" +copy into tweets from s3://sfc-dev1-data/twitter/O1k/tweets/ +credentials=( +AWS_KEY_ID={test_data.AWS_ACCESS_KEY_ID} +AWS_SECRET_KEY={test_data.AWS_SECRET_ACCESS_KEY}) +file_format=( + skip_header=1 null_if=('') + field_optionally_enclosed_by='"' +) +""" + ) + assert cur.rowcount == 1, "copy into tweets did not set rowcount to 1" + results = cur.fetchall() + assert results[0][0] == ("s3://sfc-dev1-data/twitter/O1k/tweets/1.csv.gz") + cur.execute("drop table tweets") + + +@pytest.mark.aws +@pytest.mark.skipif( + not 
CONNECTION_PARAMETERS_ADMIN, reason="Snowflake admin account is not accessible."
+)
+def test_put_local_file(test_data):
+    with test_data.connection() as cnx:
+        with cnx.cursor() as cur:
+            cur.execute(f"use warehouse {test_data.warehouse_name}")
+            cur.execute("alter session set DISABLE_PUT_AND_GET_ON_EXTERNAL_STAGE=false")
+            cur.execute(f"use schema {test_data.database_name}.pytesting_schema")
+            cur.execute(
+                f"""
+create or replace table pytest_putget_t1 (
+c1 STRING, c2 STRING, c3 STRING,
+c4 STRING, c5 STRING, c6 STRING, c7 STRING, c8 STRING, c9 STRING)
+stage_file_format = (
+    field_delimiter = '|'
+    error_on_column_count_mismatch=false)
+    stage_copy_options = (purge=false)
+    stage_location = (
+        url = 's3://{test_data.user_bucket}/{test_data.stage_name}'
+        credentials = (
+        AWS_KEY_ID={test_data.AWS_ACCESS_KEY_ID}
+        AWS_SECRET_KEY={test_data.AWS_SECRET_ACCESS_KEY})
+)
+"""
+            )
+            cur.execute(
+                f"""
+put file://{test_data.test_data_dir}/ExecPlatform/Database/data/orders_10*.csv @%pytest_putget_t1
+"""
+            )
+            assert cur.is_file_transfer
+            cur.execute("ls @%pytest_putget_t1").fetchall()
+            assert not cur.is_file_transfer
+            assert cur.rowcount == 2, "ls @%pytest_putget_t1 did not return 2 rows"
+            cur.execute("copy into pytest_putget_t1")
+            results = cur.fetchall()
+            assert len(results) == 2, "2 files were not copied"
+            assert results[0][1] == "LOADED", "file 1 was not loaded after copy"
+            assert results[1][1] == "LOADED", "file 2 was not loaded after copy"
+
+            cur.execute("select count(*) from pytest_putget_t1")
+            results = cur.fetchall()
+            assert results[0][0] == 73, "73 rows not loaded into pytest_putget_t1"
+            cur.execute("rm @%pytest_putget_t1")
+            results = cur.fetchall()
+            assert len(results) == 2, "two files were not removed"
+            cur.execute(
+                "select STATUS from information_schema.load_history where table_name='PYTEST_PUTGET_T1'"
+            )
+            results = cur.fetchall()
+            assert results[0][0] == "LOADED", "history does not show the file as loaded"
+            cur.execute("drop table pytest_putget_t1")
+
+
+@pytest.mark.flaky(reruns=3)
+@pytest.mark.skipif(
+    not CONNECTION_PARAMETERS_ADMIN, reason="Snowflake admin account is not accessible."
+)
+def test_put_load_from_user_stage(test_data):
+    with test_data.connection() as cnx:
+        with cnx.cursor() as cur:
+            cur.execute("alter session set DISABLE_PUT_AND_GET_ON_EXTERNAL_STAGE=false")
+            cur.execute(f"use warehouse {test_data.warehouse_name}")
+            cur.execute(f"use schema {test_data.database_name}.pytesting_schema")
+            cur.execute(
+                f"""
+create or replace stage {test_data.stage_name}
+url='s3://{test_data.user_bucket}/{test_data.stage_name}'
+credentials = (
+AWS_KEY_ID={test_data.AWS_ACCESS_KEY_ID}
+AWS_SECRET_KEY={test_data.AWS_SECRET_ACCESS_KEY})
+"""
+            )
+            cur.execute(
+                """
+create or replace table pytest_putget_t2 (c1 STRING, c2 STRING, c3 STRING,
+    c4 STRING, c5 STRING, c6 STRING, c7 STRING, c8 STRING, c9 STRING)
+"""
+            )
+            cur.execute(
+                f"""
+put file://{test_data.test_data_dir}/ExecPlatform/Database/data/orders_10*.csv
+@{test_data.stage_name}
+"""
+            )
+            # two files should have been put in the staging area
+            results = cur.fetchall()
+            assert len(results) == 2
+
+            cur.execute("ls @%pytest_putget_t2")
+            results = cur.fetchall()
+            assert len(results) == 0, "no files should have been loaded yet"
+
+            # copy
+            cur.execute(
+                f"""
+copy into pytest_putget_t2 from @{test_data.stage_name}
+file_format = (field_delimiter = '|' error_on_column_count_mismatch=false)
+purge=true
+"""
+            )
+            results = sorted(cur.fetchall())
+            assert len(results) == 2, "copy failed to load two files from the stage"
+            assert results[0][0] == (
+                f"s3://{test_data.user_bucket}/{test_data.stage_name}/orders_100.csv.gz"
+            ), "copy did not load file orders_100"
+
+            assert results[1][0] == (
+                f"s3://{test_data.user_bucket}/{test_data.stage_name}/orders_101.csv.gz"
+            ), "copy did not load file orders_101"
+
+            # should be empty (purged)
+            cur.execute(f"ls @{test_data.stage_name}")
+            results = cur.fetchall()
+            assert len(results) == 0, "copied files not purged"
+            cur.execute("drop table pytest_putget_t2")
+            cur.execute(f"drop stage {test_data.stage_name}")
+
+
+@pytest.mark.aws
+@pytest.mark.skipif(
+    not CONNECTION_PARAMETERS_ADMIN, reason="Snowflake admin account is not accessible."
+)
+def test_unload(test_data):
+    with test_data.connection() as cnx:
+        with cnx.cursor() as cur:
+            cur.execute(f"use warehouse {test_data.warehouse_name}")
+            cur.execute(f"use schema {test_data.database_name}.pytesting_schema")
+            cur.execute(
+                f"""
+create or replace stage {test_data.stage_name}
+url='s3://{test_data.user_bucket}/{test_data.stage_name}/pytest_put_unload/unload/'
+credentials = (
+AWS_KEY_ID={test_data.AWS_ACCESS_KEY_ID}
+AWS_SECRET_KEY={test_data.AWS_SECRET_ACCESS_KEY})
+"""
+            )
+
+            cur.execute(
+                """
+CREATE OR REPLACE TABLE pytest_t3 (
+c1 STRING, c2 STRING, c3 STRING, c4 STRING, c5 STRING,
+c6 STRING, c7 STRING, c8 STRING, c9 STRING)
+stage_file_format = (format_name = 'vsv' field_delimiter = '|'
+    error_on_column_count_mismatch=false)"""
+            )
+            cur.execute(
+                f"alter stage {test_data.stage_name} set file_format = ( format_name = 'VSV' )"
+            )
+
+            # make sure it's clean
+            cur.execute(f"rm @{test_data.stage_name}")
+
+            # put local file
+            cur.execute(
+                f"put file://{test_data.test_data_dir}/ExecPlatform/Database/data/orders_10*.csv @%pytest_t3"
+            )
+
+            # copy into table
+            cur.execute(
+                """
+copy into pytest_t3
+file_format = (field_delimiter = '|' error_on_column_count_mismatch=false)
+purge=true"""
+            )
+            # unload from table
+            cur.execute(
+                f"""
+copy into @{test_data.stage_name}/data_
+from pytest_t3 file_format=(format_name='VSV' compression='gzip')
+max_file_size=10000000"""
+            )
+
+            # load the data back to another table
+            cur.execute(
+                """
+CREATE OR REPLACE TABLE pytest_t3_copy (
+c1 STRING, c2 STRING, c3 STRING, c4 STRING, c5 STRING,
+c6 STRING, c7 STRING, c8 STRING, c9 STRING)
+stage_file_format = (format_name = 'VSV' )"""
+            )
+            cur.execute(
+                f"""
+copy into pytest_t3_copy
+from @{test_data.stage_name}/data_ return_failed_only=true
+"""
+            )
+
+            # check they are equal: the symmetric difference should be empty
+            cur.execute(
+                """
+(select * from pytest_t3 minus select * from pytest_t3_copy)
+union
+(select * from pytest_t3_copy minus select * from pytest_t3)
+"""
+            )
+            assert cur.rowcount == 0, "unloaded/reloaded data were not the same"
+            # clean stage
+            cur.execute(f"rm @{test_data.stage_name}/data_")
+            assert cur.rowcount == 1, "only one file was expected to be removed"
+
+            # unload with deflate
+            cur.execute(
+                f"""
+copy into @{test_data.stage_name}/data_
+from pytest_t3 file_format=(format_name='VSV' compression='deflate')
+max_file_size=10000000
+"""
+            )
+            results = cur.fetchall()
+            assert results[0][0] == 73, "73 rows were expected to be loaded"
+
+            # create a table to load the data back into
+            cur.execute(
+                """
+CREATE OR REPLACE TABLE pytest_t3_copy
+(c1 STRING, c2 STRING, c3 STRING, c4 STRING, c5 STRING, c6 STRING,
+c7 STRING, c8 STRING, c9 STRING)
+stage_file_format = (
+format_name = 'VSV'
+compression='deflate')"""
+            )
+            results = cur.fetchall()
+            assert (
+                results[0][0]
+                == "Table PYTEST_T3_COPY successfully created."
+            ), "table not created successfully"
+
+            cur.execute(
+                f"""
+alter stage {test_data.stage_name} set file_format = (
+format_name = 'VSV'
+compression='deflate')
+"""
+            )
+
+            cur.execute(
+                f"""
+copy into pytest_t3_copy from @{test_data.stage_name}/data_
+return_failed_only=true
+"""
+            )
+            results = cur.fetchall()
+            assert results[0][2] == "LOADED", "rows were not loaded successfully"
+            assert results[0][4] == 73, "not all 73 rows were loaded successfully"
+            # check they are equal: the symmetric difference should be empty
+            cur.execute(
+                """
+(select * from pytest_t3 minus select * from pytest_t3_copy)
+union
+(select * from pytest_t3_copy minus select * from pytest_t3)
+"""
+            )
+            assert
cur.rowcount == 0, "unloaded/reloaded data were not the same" + cur.execute(f"rm @{test_data.stage_name}/data_") + assert cur.rowcount == 1, "only one file was expected to be removed" + + # clean stage + cur.execute(f"rm @{test_data.stage_name}/data_") + + cur.execute("drop table pytest_t3_copy") + cur.execute(f"drop stage {test_data.stage_name}") + cur.close() + + +@pytest.mark.aws +@pytest.mark.skipif( + not CONNECTION_PARAMETERS_ADMIN, reason="Snowflake admin account is not accessible." +) +@pytest.mark.parametrize( + "from_path", [True, pytest.param(False, marks=pytest.mark.skipolddriver)] +) +def test_put_with_auto_compress_false( + tmp_path: pathlib.Path, + conn_cnx, + from_path, +): + """Tests PUT command with auto_compress=False.""" + tmp_dir = tmp_path / "data" + tmp_dir.mkdir() + test_data = tmp_dir / "data.txt" + with test_data.open("w") as f: + f.write("test1,test2") + f.write("test3,test4") + + with conn_cnx() as cnx: + cnx.cursor().execute("RM @~/test_put_uncompress_file") + try: + file_stream = None if from_path else test_data.open("rb") + with cnx.cursor() as cur: + put( + cur, + str(test_data), + "~/test_put_uncompress_file", + from_path, + sql_options="auto_compress=FALSE", + file_stream=file_stream, + ) + + ret = cnx.cursor().execute("LS @~/test_put_uncompress_file").fetchone() + assert "test_put_uncompress_file/data.txt" in ret[0] + assert "data.txt.gz" not in ret[0] + finally: + cnx.cursor().execute("RM @~/test_put_uncompress_file") + if file_stream: + file_stream.close() + + +@pytest.mark.skipif( + not CONNECTION_PARAMETERS_ADMIN, reason="Snowflake admin account is not accessible." +) +@pytest.mark.parametrize( + "from_path", [True, pytest.param(False, marks=pytest.mark.skipolddriver)] +) +def test_put_overwrite(tmp_path: pathlib.Path, from_path, conn_cnx): + """Tests whether _force_put_overwrite and overwrite=true works as intended.""" + tmp_dir = tmp_path / "data" + tmp_dir.mkdir() + test_data = tmp_dir / "data.txt" + with test_data.open("w") as f: + f.write("test1,test2") + f.write("test3,test4") + + with conn_cnx() as cnx: + cnx.cursor().execute("RM @~/test_put_overwrite") + try: + file_stream = None if from_path else open(test_data, "rb") + with cnx.cursor() as cur: + with mock.patch.object( + cur, "_init_result_and_meta", wraps=cur._init_result_and_meta + ) as mock_result: + put( + cur, + str(test_data), + "~/test_put_overwrite", + from_path, + file_stream=file_stream, + ) + assert mock_result.call_args[0][0]["rowset"][0][-2] == "UPLOADED" + with mock.patch.object( + cur, "_init_result_and_meta", wraps=cur._init_result_and_meta + ) as mock_result: + put( + cur, + str(test_data), + "~/test_put_overwrite", + from_path, + file_stream=file_stream, + ) + assert mock_result.call_args[0][0]["rowset"][0][-2] == "SKIPPED" + with mock.patch.object( + cur, "_init_result_and_meta", wraps=cur._init_result_and_meta + ) as mock_result: + put( + cur, + str(test_data), + "~/test_put_overwrite", + from_path, + file_stream=file_stream, + sql_options="OVERWRITE = TRUE", + ) + assert mock_result.call_args[0][0]["rowset"][0][-2] == "UPLOADED" + + ret = cnx.cursor().execute("LS @~/test_put_overwrite").fetchone() + assert "test_put_overwrite/" + os.path.basename(test_data) in ret[0] + assert test_data.name + ".gz" in ret[0] + finally: + if file_stream: + file_stream.close() + cnx.cursor().execute("RM @~/test_put_overwrite") + + +@pytest.mark.skipolddriver +def test_utf8_filename(tmp_path, conn_cnx): + test_file = tmp_path / "utf卡豆.csv" + test_file.write_text("1,2,3\n") + stage_name = 
random_string(5, "test_utf8_filename_") + with conn_cnx() as con: + with con.cursor() as cur: + cur.execute(f"create temporary stage {stage_name}") + cur.execute( + "PUT 'file://{}' @{}".format( + str(test_file).replace("\\", "/"), stage_name + ) + ).fetchall() + cur.execute(f"select $1, $2, $3 from @{stage_name}") + assert cur.fetchone() == ("1", "2", "3") + + +@pytest.mark.skipolddriver +def test_put_threshold(tmp_path, conn_cnx, is_public_test): + if is_public_test: + pytest.xfail( + reason="This feature hasn't been rolled out for public Snowflake deployments yet." + ) + file_name = "test_put_get_with_aws_token.txt.gz" + stage_name = random_string(5, "test_put_get_threshold_") + file = tmp_path / file_name + file.touch() + with conn_cnx() as cnx, cnx.cursor() as cur: + cur.execute(f"create temporary stage {stage_name}") + from snowflake.connector.file_transfer_agent import SnowflakeFileTransferAgent + + with mock.patch( + "snowflake.connector.cursor.SnowflakeFileTransferAgent", + autospec=SnowflakeFileTransferAgent, + ) as mock_agent: + cur.execute(f"put file://{file} @{stage_name} threshold=156") + assert mock_agent.call_args[1].get("multipart_threshold", -1) == 156 + + +# Snowflake on GCP does not support multipart uploads +@pytest.mark.aws +@pytest.mark.azure +@pytest.mark.skipolddriver +@pytest.mark.parametrize("use_stream", [False, True]) +def test_multipart_put(conn_cnx, tmp_path, use_stream): + """This test does a multipart upload of a smaller file and then downloads it.""" + stage_name = random_string(5, "test_multipart_put_") + chunk_size = 6967790 + # Generate about 12 MB + generate_k_lines_of_n_files(100_000, 1, tmp_dir=str(tmp_path)) + get_dir = tmp_path / "get_dir" + get_dir.mkdir() + upload_file = tmp_path / "file0" + with conn_cnx() as con: + with con.cursor() as cur: + cur.execute(f"create temporary stage {stage_name}") + real_cmd_query = con.cmd_query + + def fake_cmd_query(*a, **kw): + """Create a mock function to inject some value into the returned JSON""" + ret = real_cmd_query(*a, **kw) + ret["data"]["threshold"] = chunk_size + return ret + + with mock.patch.object(con, "cmd_query", side_effect=fake_cmd_query): + with mock.patch( + "snowflake.connector.constants.S3_CHUNK_SIZE", chunk_size + ): + if use_stream: + kw = { + "command": f"put file://file0 @{stage_name}/sub/folders/ AUTO_COMPRESS=FALSE", + "file_stream": BytesIO(upload_file.read_bytes()), + } + else: + kw = { + "command": f"put file://{upload_file} @{stage_name}/sub/folders/ AUTO_COMPRESS=FALSE", + } + cur.execute(**kw) + cur.execute( + f"get @{stage_name}/sub/folders/{upload_file.name} file://{get_dir}" + ) + downloaded_file = get_dir / upload_file.name + assert downloaded_file.exists() + assert filecmp.cmp(upload_file, downloaded_file) + + +@pytest.mark.skipolddriver +def test_put_special_file_name(tmp_path, conn_cnx): + test_file = tmp_path / "data~%23.csv" + test_file.write_text("1,2,3\n") + stage_name = random_string(5, "test_special_filename_") + with conn_cnx() as cnx: + with cnx.cursor() as cur: + cur.execute(f"create temporary stage {stage_name}") + filename_in_put = str(test_file).replace("\\", "/") + cur.execute( + f"PUT 'file://{filename_in_put}' @{stage_name}", + ).fetchall() + cur.execute(f"select $1, $2, $3 from @{stage_name}") + assert cur.fetchone() == ("1", "2", "3") + + +@pytest.mark.skipolddriver +def test_get_empty_file(tmp_path, conn_cnx): + test_file = tmp_path / "data.csv" + test_file.write_text("1,2,3\n") + stage_name = random_string(5, "test_get_empty_file_") + with conn_cnx() as 
cnx:
+        with cnx.cursor() as cur:
+            cur.execute(f"create temporary stage {stage_name}")
+            filename_in_put = str(test_file).replace("\\", "/")
+            cur.execute(
+                f"PUT 'file://{filename_in_put}' @{stage_name}",
+            )
+            empty_file = tmp_path / "foo.csv"
+            with pytest.raises(OperationalError, match=".*the file does not exist.*$"):
+                cur.execute(f"GET @{stage_name}/foo.csv file://{tmp_path}")
+            assert not empty_file.exists()
diff --git a/test/integ/test_put_get_medium.py b/test/integ/test_put_get_medium.py
new file mode 100644
index 000000000..2d685e9b5
--- /dev/null
+++ b/test/integ/test_put_get_medium.py
@@ -0,0 +1,789 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved.
+#
+
+from __future__ import annotations
+
+import datetime
+import gzip
+import os
+import sys
+import time
+from logging import getLogger
+from typing import IO, TYPE_CHECKING
+
+import pytest
+import pytz
+
+from snowflake.connector import ProgrammingError
+from snowflake.connector.cursor import DictCursor
+from snowflake.connector.file_transfer_agent import (
+    SnowflakeAzureProgressPercentage,
+    SnowflakeProgressPercentage,
+    SnowflakeS3ProgressPercentage,
+)
+
+from ..generate_test_files import generate_k_lines_of_n_files
+from ..integ_helpers import put
+from ..randomize import random_string
+
+if TYPE_CHECKING:
+    from snowflake.connector import SnowflakeConnection
+    from snowflake.connector.cursor import SnowflakeCursor
+
+try:
+    from ..parameters import CONNECTION_PARAMETERS_ADMIN
+except ImportError:
+    CONNECTION_PARAMETERS_ADMIN = {}
+
+THIS_DIR = os.path.dirname(os.path.realpath(__file__))
+logger = getLogger(__name__)
+
+
+@pytest.fixture()
+def file_src(request) -> tuple[str, int, IO[bytes]]:
+    file_name = request.param
+    data_file = os.path.join(THIS_DIR, "../data", file_name)
+    file_size = os.stat(data_file).st_size
+    stream = open(data_file, "rb")
+    yield data_file, file_size, stream
+    stream.close()
+
+
+@pytest.mark.parametrize(
+    "from_path", [True, pytest.param(False, marks=pytest.mark.skipolddriver)]
+)
+@pytest.mark.parametrize("file_src", ["put_get_1.txt"], indirect=["file_src"])
+def test_put_copy0(conn_cnx, db_parameters, from_path, file_src):
+    """Puts and Copies a file."""
+    file_path, _, file_stream = file_src
+    kwargs = {
+        "_put_callback": SnowflakeS3ProgressPercentage,
+        "_get_callback": SnowflakeS3ProgressPercentage,
+        "_put_azure_callback": SnowflakeAzureProgressPercentage,
+        "_get_azure_callback": SnowflakeAzureProgressPercentage,
+        "file_stream": file_stream,
+    }
+
+    def run(cnx: SnowflakeConnection, sql: str) -> list[tuple]:
+        sql = sql.format(name=db_parameters["name"])
+        return cnx.cursor().execute(sql).fetchall()
+
+    def run_with_cursor(
+        cnx: SnowflakeConnection, sql: str
+    ) -> tuple[SnowflakeCursor, list[tuple] | list[dict]]:
+        sql = sql.format(name=db_parameters["name"])
+        c = cnx.cursor(DictCursor)
+        return c, c.execute(sql).fetchall()
+
+    with conn_cnx() as cnx:
+        run(
+            cnx,
+            """
+create or replace table {name} (
+aa int,
+dt date,
+ts timestamp,
+tsltz timestamp_ltz,
+tsntz timestamp_ntz,
+tstz timestamp_tz,
+pct float,
+ratio number(5,2))
+""",
+        )
+
+        with cnx.cursor(DictCursor) as csr:
+            ret = put(
+                csr, file_path, f"%{db_parameters['name']}", from_path, **kwargs
+            ).fetchall()
+            assert csr.is_file_transfer, "PUT"
+            assert len(ret) == 1, "Upload one file"
+            assert ret[0]["source"] == os.path.basename(file_path), "File name"
+
+            c, ret = run_with_cursor(cnx, "copy into {name}")
+            assert not c.is_file_transfer, "COPY"
+            assert len(ret) == 1 and ret[0]["status"] == "LOADED", "Failed to
load data" + + assert ret[0]["rows_loaded"] == 3, "Failed to load 3 rows of data" + + run(cnx, "drop table if exists {name}") + + +@pytest.mark.parametrize( + "from_path", [True, pytest.param(False, marks=pytest.mark.skipolddriver)] +) +@pytest.mark.parametrize("file_src", ["gzip_sample.txt.gz"], indirect=["file_src"]) +def test_put_copy_compressed(conn_cnx, db_parameters, from_path, file_src): + """Puts and Copies compressed files.""" + file_name, file_size, file_stream = file_src + + def run(cnx: SnowflakeConnection, sql: str) -> list[dict]: + sql = sql.format(name=db_parameters["name"]) + return cnx.cursor(DictCursor).execute(sql).fetchall() + + with conn_cnx() as cnx: + run(cnx, "create or replace table {name} (value string)") + with cnx.cursor(DictCursor) as csr: + ret = put( + csr, + file_name, + f"%{db_parameters['name']}", + from_path, + file_stream=file_stream, + ).fetchall() + assert ret[0]["source"] == os.path.basename(file_name), "File name" + assert ret[0]["source_size"] == file_size, "File size" + assert ret[0]["status"] == "UPLOADED" + ret = run(cnx, "copy into {name}") + assert len(ret) == 1 and ret[0]["status"] == "LOADED", "Failed to load data" + assert ret[0]["rows_loaded"] == 1, "Failed to load 1 rows of data" + + run(cnx, "drop table if exists {name}") + + +@pytest.mark.parametrize( + "from_path", [True, pytest.param(False, marks=pytest.mark.skipolddriver)] +) +@pytest.mark.parametrize("file_src", ["bzip2_sample.txt.bz2"], indirect=["file_src"]) +@pytest.mark.skip(reason="BZ2 is not detected in this test case. Need investigation") +def test_put_copy_bz2_compressed(conn_cnx, db_parameters, from_path, file_src): + """Put and Copy bz2 compressed files.""" + file_name, _, file_stream = file_src + + def run(cnx, sql): + sql = sql.format(name=db_parameters["name"]) + return cnx.cursor().execute(sql).fetchall() + + with conn_cnx() as cnx: + run(cnx, "create or replace table {name} (value string)") + for rec in put( + cnx.cursor(), + file_name, + f"%{db_parameters['name']}", + from_path, + file_stream=file_stream, + ).fetchall(): + print(rec) + assert rec[-2] == "UPLOADED" + + for rec in run(cnx, "copy into {name}"): + print(rec) + assert rec[1] == "LOADED" + + run(cnx, "drop table if exists {name}") + + +@pytest.mark.parametrize( + "from_path", [True, pytest.param(False, marks=pytest.mark.skipolddriver)] +) +@pytest.mark.parametrize("file_src", ["brotli_sample.txt.br"], indirect=["file_src"]) +def test_put_copy_brotli_compressed(conn_cnx, db_parameters, from_path, file_src): + """Puts and Copies brotli compressed files.""" + file_name, _, file_stream = file_src + + def run(cnx, sql): + sql = sql.format(name=db_parameters["name"]) + return cnx.cursor().execute(sql).fetchall() + + with conn_cnx() as cnx: + + run(cnx, "create or replace table {name} (value string)") + for rec in put( + cnx.cursor(), + file_name, + f"%{db_parameters['name']}", + from_path, + file_stream=file_stream, + ).fetchall(): + print(rec) + assert rec[-2] == "UPLOADED" + + for rec in run(cnx, "copy into {name} file_format=(compression='BROTLI')"): + print(rec) + assert rec[1] == "LOADED" + + run(cnx, "drop table if exists {name}") + + +@pytest.mark.parametrize( + "from_path", [True, pytest.param(False, marks=pytest.mark.skipolddriver)] +) +@pytest.mark.parametrize("file_src", ["zstd_sample.txt.zst"], indirect=["file_src"]) +def test_put_copy_zstd_compressed(conn_cnx, db_parameters, from_path, file_src): + """Puts and Copies zstd compressed files.""" + file_name, _, file_stream = file_src + + def run(cnx, 
sql): + sql = sql.format(name=db_parameters["name"]) + return cnx.cursor().execute(sql).fetchall() + + with conn_cnx() as cnx: + run(cnx, "create or replace table {name} (value string)") + for rec in put( + cnx.cursor(), + file_name, + f"%{db_parameters['name']}", + from_path, + file_stream=file_stream, + ).fetchall(): + print(rec) + assert rec[-2] == "UPLOADED" + for rec in run(cnx, "copy into {name} file_format=(compression='ZSTD')"): + print(rec) + assert rec[1] == "LOADED" + + run(cnx, "drop table if exists {name}") + + +@pytest.mark.skipif( + not CONNECTION_PARAMETERS_ADMIN, reason="Snowflake admin account is not accessible." +) +@pytest.mark.parametrize( + "from_path", [True, pytest.param(False, marks=pytest.mark.skipolddriver)] +) +@pytest.mark.parametrize("file_src", ["nation.impala.parquet"], indirect=["file_src"]) +def test_put_copy_parquet_compressed(conn_cnx, db_parameters, from_path, file_src): + """Puts and Copies parquet compressed files.""" + file_name, _, file_stream = file_src + + def run(cnx, sql): + sql = sql.format(name=db_parameters["name"]) + return cnx.cursor().execute(sql).fetchall() + + with conn_cnx() as cnx: + run(cnx, "alter session set enable_parquet_filetype=true") + run( + cnx, + """ +create or replace table {name} +(value variant) +stage_file_format=(type='parquet') +""", + ) + for rec in put( + cnx.cursor(), + file_name, + f"%{db_parameters['name']}", + from_path, + file_stream=file_stream, + ).fetchall(): + print(rec) + assert rec[-2] == "UPLOADED" + assert rec[4] == "PARQUET" + assert rec[5] == "PARQUET" + + for rec in run(cnx, "copy into {name}"): + print(rec) + assert rec[1] == "LOADED" + + run(cnx, "drop table if exists {name}") + run(cnx, "alter session unset enable_parquet_filetype") + + +@pytest.mark.parametrize( + "from_path", [True, pytest.param(False, marks=pytest.mark.skipolddriver)] +) +@pytest.mark.parametrize("file_src", ["TestOrcFile.test1.orc"], indirect=["file_src"]) +def test_put_copy_orc_compressed(conn_cnx, db_parameters, from_path, file_src): + """Puts and Copies ORC compressed files.""" + file_name, _, file_stream = file_src + + def run(cnx, sql): + sql = sql.format(name=db_parameters["name"]) + return cnx.cursor().execute(sql).fetchall() + + with conn_cnx() as cnx: + run( + cnx, + """ +create or replace table {name} (value variant) stage_file_format=(type='orc') +""", + ) + for rec in put( + cnx.cursor(), + file_name, + f"%{db_parameters['name']}", + from_path, + file_stream=file_stream, + ).fetchall(): + print(rec) + assert rec[-2] == "UPLOADED" + assert rec[4] == "ORC" + assert rec[5] == "ORC" + for rec in run(cnx, "copy into {name}"): + print(rec) + assert rec[1] == "LOADED" + + run(cnx, "drop table if exists {name}") + + +@pytest.mark.skipif( + not CONNECTION_PARAMETERS_ADMIN, reason="Snowflake admin account is not accessible." 
+) +def test_copy_get(tmpdir, conn_cnx, db_parameters): + """Copies and Gets a file.""" + name_unload = db_parameters["name"] + "_unload" + tmp_dir = str(tmpdir.mkdir("copy_get_stage")) + tmp_dir_user = str(tmpdir.mkdir("user_get")) + + def run(cnx, sql): + sql = sql.format( + name_unload=name_unload, + tmpdir=tmp_dir, + tmp_dir_user=tmp_dir_user, + name=db_parameters["name"], + ) + return cnx.cursor().execute(sql).fetchall() + + with conn_cnx() as cnx: + run(cnx, "alter session set DISABLE_PUT_AND_GET_ON_EXTERNAL_STAGE=false") + run( + cnx, + """ +create or replace table {name} ( +aa int, +dt date, +ts timestamp, +tsltz timestamp_ltz, +tsntz timestamp_ntz, +tstz timestamp_tz, +pct float, +ratio number(5,2)) +""", + ) + run( + cnx, + """ +create or replace stage {name_unload} +file_format = ( +format_name = 'common.public.csv' +field_delimiter = '|' +error_on_column_count_mismatch=false); +""", + ) + current_time = datetime.datetime.utcnow() + current_time = current_time.replace(tzinfo=pytz.timezone("America/Los_Angeles")) + current_date = datetime.date.today() + other_time = current_time.replace(tzinfo=pytz.timezone("Asia/Tokyo")) + + fmt = """ +insert into {name}(aa, dt, tstz) +values(%(value)s,%(dt)s,%(tstz)s) +""".format( + name=db_parameters["name"] + ) + cnx.cursor().executemany( + fmt, + [ + {"value": 6543, "dt": current_date, "tstz": other_time}, + {"value": 1234, "dt": current_date, "tstz": other_time}, + ], + ) + + run( + cnx, + """ +copy into @{name_unload}/data_ +from {name} +file_format=( +format_name='common.public.csv' +compression='gzip') +max_file_size=10000000 +""", + ) + ret = run(cnx, "get @{name_unload}/ file://{tmp_dir_user}/") + + assert ret[0][2] == "DOWNLOADED", "Failed to download" + cnt = 0 + for _, _, _ in os.walk(tmp_dir_user): + cnt += 1 + assert cnt > 0, "No file was downloaded" + + run(cnx, "drop stage {name_unload}") + run(cnx, "drop table if exists {name}") + + +@pytest.mark.flaky(reruns=3) +def test_put_copy_many_files(tmpdir, conn_cnx, db_parameters): + """Puts and Copies many_files.""" + # generates N files + number_of_files = 100 + number_of_lines = 1000 + tmp_dir = generate_k_lines_of_n_files( + number_of_lines, number_of_files, tmp_dir=str(tmpdir.mkdir("data")) + ) + + files = os.path.join(tmp_dir, "file*") + + def run(cnx, sql): + sql = sql.format(files=files.replace("\\", "\\\\"), name=db_parameters["name"]) + return cnx.cursor().execute(sql).fetchall() + + with conn_cnx() as cnx: + run( + cnx, + """ +create or replace table {name} ( +aa int, +dt date, +ts timestamp, +tsltz timestamp_ltz, +tsntz timestamp_ntz, +tstz timestamp_tz, +pct float, +ratio number(6,2)) +""", + ) + run(cnx, "put 'file://{files}' @%{name}") + run(cnx, "copy into {name}") + rows = 0 + for rec in run(cnx, "select count(*) from {name}"): + rows += rec[0] + assert rows == number_of_files * number_of_lines, "Number of rows" + + run(cnx, "drop table if exists {name}") + + +@pytest.mark.aws +def test_put_copy_many_files_s3(tmpdir, conn_cnx, db_parameters): + """[s3] Puts and Copies many files.""" + # generates N files + number_of_files = 10 + number_of_lines = 1000 + tmp_dir = generate_k_lines_of_n_files( + number_of_lines, number_of_files, tmp_dir=str(tmpdir.mkdir("data")) + ) + + files = os.path.join(tmp_dir, "file*") + + def run(cnx, sql): + sql = sql.format(files=files.replace("\\", "\\\\"), name=db_parameters["name"]) + return cnx.cursor().execute(sql).fetchall() + + with conn_cnx() as cnx: + run( + cnx, + """ +create or replace table {name} ( +aa int, +dt date, +ts timestamp, 
+tsltz timestamp_ltz, +tsntz timestamp_ntz, +tstz timestamp_tz, +pct float, +ratio number(6,2)) +""", + ) + try: + with conn_cnx() as cnx: + run(cnx, "put 'file://{files}' @%{name}") + run(cnx, "copy into {name}") + + rows = 0 + for rec in run(cnx, "select count(*) from {name}"): + rows += rec[0] + assert rows == number_of_files * number_of_lines, "Number of rows" + finally: + with conn_cnx( + user=db_parameters["user"], + account=db_parameters["account"], + password=db_parameters["password"], + ) as cnx: + run(cnx, "drop table if exists {name}") + + +@pytest.mark.aws +@pytest.mark.azure +@pytest.mark.flaky(reruns=3) +def test_put_copy_duplicated_files_s3(tmpdir, conn_cnx, db_parameters): + """[s3] Puts and Copies duplicated files.""" + # generates N files + number_of_files = 5 + number_of_lines = 100 + tmp_dir = generate_k_lines_of_n_files( + number_of_lines, number_of_files, tmp_dir=str(tmpdir.mkdir("data")) + ) + + files = os.path.join(tmp_dir, "file*") + + def run(cnx, sql): + sql = sql.format(files=files.replace("\\", "\\\\"), name=db_parameters["name"]) + return cnx.cursor().execute(sql, _raise_put_get_error=False).fetchall() + + with conn_cnx() as cnx: + run( + cnx, + """ +create or replace table {name} ( +aa int, +dt date, +ts timestamp, +tsltz timestamp_ltz, +tsntz timestamp_ntz, +tstz timestamp_tz, +pct float, +ratio number(6,2)) +""", + ) + + try: + with conn_cnx() as cnx: + success_cnt = 0 + skipped_cnt = 0 + for rec in run(cnx, "put 'file://{files}' @%{name}"): + logger.info("rec=%s", rec) + if rec[6] == "UPLOADED": + success_cnt += 1 + elif rec[6] == "SKIPPED": + skipped_cnt += 1 + assert success_cnt == number_of_files, "uploaded files" + assert skipped_cnt == 0, "skipped files" + + deleted_cnt = 0 + run(cnx, "rm @%{name}/file0") + deleted_cnt += 1 + run(cnx, "rm @%{name}/file1") + deleted_cnt += 1 + run(cnx, "rm @%{name}/file2") + deleted_cnt += 1 + + success_cnt = 0 + skipped_cnt = 0 + for rec in run(cnx, "put 'file://{files}' @%{name}"): + logger.info("rec=%s", rec) + if rec[6] == "UPLOADED": + success_cnt += 1 + elif rec[6] == "SKIPPED": + skipped_cnt += 1 + assert success_cnt == deleted_cnt, "uploaded files in the second time" + assert ( + skipped_cnt == number_of_files - deleted_cnt + ), "skipped files in the second time" + + run(cnx, "copy into {name}") + rows = 0 + for rec in run(cnx, "select count(*) from {name}"): + rows += rec[0] + assert rows == number_of_files * number_of_lines, "Number of rows" + finally: + with conn_cnx() as cnx: + run(cnx, "drop table if exists {name}") + + +@pytest.mark.skipolddriver +@pytest.mark.aws +@pytest.mark.azure +def test_put_collision(tmpdir, conn_cnx): + """File name collision test. 
The data sets have the same file names but different contents."""
+    number_of_files = 5
+    number_of_lines = 10
+    # data set 1
+    tmp_dir = generate_k_lines_of_n_files(
+        number_of_lines,
+        number_of_files,
+        compress=True,
+        tmp_dir=str(tmpdir.mkdir("data1")),
+    )
+    files1 = os.path.join(tmp_dir, "file*")
+
+    # data set 2
+    tmp_dir = generate_k_lines_of_n_files(
+        number_of_lines,
+        number_of_files,
+        compress=True,
+        tmp_dir=str(tmpdir.mkdir("data2")),
+    )
+    files2 = os.path.join(tmp_dir, "file*")
+
+    stage_name = random_string(5, "test_put_collision_")
+    with conn_cnx() as cnx:
+        cnx.cursor().execute(f"RM @~/{stage_name}")
+        try:
+            # upload all files
+            success_cnt = 0
+            skipped_cnt = 0
+            for rec in cnx.cursor().execute(
+                "PUT 'file://{file}' @~/{stage_name}".format(
+                    file=files1.replace("\\", "\\\\"), stage_name=stage_name
+                )
+            ):
+                logger.info("rec=%s", rec)
+                if rec[6] == "UPLOADED":
+                    success_cnt += 1
+                elif rec[6] == "SKIPPED":
+                    skipped_cnt += 1
+            assert success_cnt == number_of_files
+            assert skipped_cnt == 0
+
+            # will skip uploading all files
+            success_cnt = 0
+            skipped_cnt = 0
+            for rec in cnx.cursor().execute(
+                "PUT 'file://{file}' @~/{stage_name}".format(
+                    file=files2.replace("\\", "\\\\"), stage_name=stage_name
+                )
+            ):
+                logger.info("rec=%s", rec)
+                if rec[6] == "UPLOADED":
+                    success_cnt += 1
+                elif rec[6] == "SKIPPED":
+                    skipped_cnt += 1
+            assert success_cnt == 0
+            assert skipped_cnt == number_of_files
+
+            # will overwrite all files
+            success_cnt = 0
+            skipped_cnt = 0
+            for rec in cnx.cursor().execute(
+                "PUT 'file://{file}' @~/{stage_name} OVERWRITE=true".format(
+                    file=files2.replace("\\", "\\\\"), stage_name=stage_name
+                )
+            ):
+                logger.info("rec=%s", rec)
+                if rec[6] == "UPLOADED":
+                    success_cnt += 1
+                elif rec[6] == "SKIPPED":
+                    skipped_cnt += 1
+            assert success_cnt == number_of_files
+            assert skipped_cnt == 0
+
+        finally:
+            with conn_cnx() as cnx:
+                cnx.cursor().execute(f"RM @~/{stage_name}")
+
+
+def _generate_huge_value_json(tmpdir, n=1, value_size=1):
+    fname = str(tmpdir.join("test_put_get_huge_json"))
+    f = gzip.open(fname, "wt")  # text mode so the str payloads can be written
+    for i in range(n):
+        logger.debug(f"adding value {i}")
+        f.write(f'{{"k":"{random_string(value_size)}"}}')
+    f.close()
+    return fname
+
+
+@pytest.mark.aws
+def test_put_get_large_files_s3(tmpdir, conn_cnx, db_parameters):
+    """[s3] Puts and Gets Large files."""
+    number_of_files = 3
+    number_of_lines = 200000
+    tmp_dir = generate_k_lines_of_n_files(
+        number_of_lines, number_of_files, tmp_dir=str(tmpdir.mkdir("data"))
+    )
+
+    files = os.path.join(tmp_dir, "file*")
+    output_dir = os.path.join(tmp_dir, "output_dir")
+    os.makedirs(output_dir)
+
+    class cb(SnowflakeProgressPercentage):
+        def __init__(self, filename, filesize, **_):
+            pass
+
+        def __call__(self, bytes_amount):
+            pass
+
+    def run(cnx, sql):
+        return (
+            cnx.cursor()
+            .execute(
+                sql.format(
+                    files=files.replace("\\", "\\\\"),
+                    dir=db_parameters["name"],
+                    output_dir=output_dir.replace("\\", "\\\\"),
+                ),
+                _put_callback_output_stream=sys.stdout,
+                _get_callback_output_stream=sys.stdout,
+                _get_callback=cb,
+                _put_callback=cb,
+            )
+            .fetchall()
+        )
+
+    with conn_cnx() as cnx:
+        try:
+            run(cnx, "PUT 'file://{files}' @~/{dir}")
+            # run(cnx, "PUT 'file://{files}' @~/{dir}") # retry
+            all_recs = []
+            for _ in range(100):
+                all_recs = run(cnx, "LIST @~/{dir}")
+                if len(all_recs) == number_of_files:
+                    break
+                time.sleep(1)
+            else:
+                pytest.fail(
+                    "cannot list all files.
Potentially " + "PUT command missed uploading Files: {}".format(all_recs) + ) + all_recs = run(cnx, "GET @~/{dir} 'file://{output_dir}'") + assert len(all_recs) == number_of_files + assert all([rec[2] == "DOWNLOADED" for rec in all_recs]) + finally: + run(cnx, "RM @~/{dir}") + + +@pytest.mark.aws +@pytest.mark.azure +@pytest.mark.parametrize( + "from_path", [True, pytest.param(False, marks=pytest.mark.skipolddriver)] +) +@pytest.mark.parametrize("file_src", ["put_get_1.txt"], indirect=["file_src"]) +def test_put_get_with_hint(tmpdir, conn_cnx, db_parameters, from_path, file_src): + """SNOW-15153: PUTs and GETs with hint.""" + tmp_dir = str(tmpdir.mkdir("put_get_with_hint")) + file_name, file_size, file_stream = file_src + + def run(cnx, sql, _is_put_get=None): + sql = sql.format( + local_dir=tmp_dir.replace("\\", "\\\\"), name=db_parameters["name"] + ) + return cnx.cursor().execute(sql, _is_put_get=_is_put_get).fetchone() + + with conn_cnx() as cnx: + # regular PUT case + ret = put( + cnx.cursor(), + file_name, + f"~/{db_parameters['name']}", + from_path, + file_stream=file_stream, + ).fetchone() + assert ret[0] == os.path.basename(file_name), "PUT filename" + # clean up a file + ret = run(cnx, "RM @~/{name}") + assert ret[0].endswith(os.path.basename(file_name) + ".gz"), "RM filename" + + # PUT detection failure + with pytest.raises(ProgrammingError): + put( + cnx.cursor(), + file_name, + f"~/{db_parameters['name']}", + from_path, + commented=True, + file_stream=file_stream, + ) + + # PUT with hint + ret = put( + cnx.cursor(), + file_name, + f"~/{db_parameters['name']}", + from_path, + file_stream=file_stream, + _is_put_get=True, + ).fetchone() + assert ret[0] == os.path.basename(file_name), "PUT filename" + + # GET detection failure + commented_get_sql = """ +--- test comments +GET @~/{name} file://{local_dir}""" + + with pytest.raises(ProgrammingError): + run(cnx, commented_get_sql) + + # GET with hint + ret = run(cnx, commented_get_sql, _is_put_get=True) + assert ret[0] == os.path.basename(file_name) + ".gz", "GET filename" diff --git a/test/integ/test_put_get_snow_4525.py b/test/integ/test_put_get_snow_4525.py new file mode 100644 index 000000000..7a62c7ac5 --- /dev/null +++ b/test/integ/test_put_get_snow_4525.py @@ -0,0 +1,61 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
+# + +from __future__ import annotations + +import os +import pathlib + + +def test_load_bogus_file(tmp_path: pathlib.Path, conn_cnx, db_parameters): + """SNOW-4525: Loads Bogus file and should fail.""" + with conn_cnx() as cnx: + cnx.cursor().execute( + f""" +create or replace table {db_parameters["name"]} ( +aa int, +dt date, +ts timestamp, +tsltz timestamp_ltz, +tsntz timestamp_ntz, +tstz timestamp_tz, +pct float, +ratio number(5,2)) +""" + ) + temp_file = tmp_path / "bogus_files" + with temp_file.open("wb") as random_binary_file: + random_binary_file.write(os.urandom(1024)) + cnx.cursor().execute(f"put file://{temp_file} @%{db_parameters['name']}") + + with cnx.cursor() as c: + c.execute(f"copy into {db_parameters['name']} on_error='skip_file'") + cnt = 0 + for _rec in c: + cnt += 1 + assert _rec[1] == "LOAD_FAILED" + cnx.cursor().execute(f"drop table if exists {db_parameters['name']}") + + +def test_load_bogus_json_file(tmp_path: pathlib.Path, conn_cnx, db_parameters): + """SNOW-4525: Loads Bogus JSON file and should fail.""" + with conn_cnx() as cnx: + json_table = db_parameters["name"] + "_json" + cnx.cursor().execute(f"create or replace table {json_table} (v variant)") + + temp_file = tmp_path / "bogus_json_files" + temp_file.write_bytes(os.urandom(1024)) + cnx.cursor().execute(f"put file://{temp_file} @%{json_table}") + + with cnx.cursor() as c: + c.execute( + f"copy into {json_table} on_error='skip_file' " + "file_format=(type='json')" + ) + cnt = 0 + for _rec in c: + cnt += 1 + assert _rec[1] == "LOAD_FAILED" + cnx.cursor().execute(f"drop table if exists {json_table}") diff --git a/test/integ/test_put_get_user_stage.py b/test/integ/test_put_get_user_stage.py new file mode 100644 index 000000000..8ccde079e --- /dev/null +++ b/test/integ/test_put_get_user_stage.py @@ -0,0 +1,518 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
+# + +from __future__ import annotations + +import mimetypes +import os +import time +from getpass import getuser +from logging import getLogger +from unittest.mock import patch + +import pytest + +from snowflake.connector.cursor import SnowflakeCursor + +from ..generate_test_files import generate_k_lines_of_n_files +from ..integ_helpers import put +from ..randomize import random_string + + +@pytest.mark.aws +@pytest.mark.parametrize( + "from_path", [True, pytest.param(False, marks=pytest.mark.skipolddriver)] +) +def test_put_get_small_data_via_user_stage(is_public_test, tmpdir, conn_cnx, from_path): + """[s3] Puts and Gets Small Data via User Stage.""" + if is_public_test or "AWS_ACCESS_KEY_ID" not in os.environ: + pytest.skip("This test requires to change the internal parameter") + number_of_files = 5 if from_path else 1 + number_of_lines = 1 + _put_get_user_stage( + tmpdir, + conn_cnx, + number_of_files=number_of_files, + number_of_lines=number_of_lines, + from_path=from_path, + ) + + +@pytest.mark.internal +@pytest.mark.skipolddriver +@pytest.mark.aws +@pytest.mark.parametrize( + "from_path", + [True, False], +) +@pytest.mark.parametrize( + "accelerate_config", + [True, False], +) +def test_put_get_accelerate_user_stage(tmpdir, conn_cnx, from_path, accelerate_config): + """[s3] Puts and Gets Small Data via User Stage.""" + from snowflake.connector.file_transfer_agent import SnowflakeFileTransferAgent + from snowflake.connector.s3_storage_client import SnowflakeS3RestClient + + number_of_files = 5 if from_path else 1 + number_of_lines = 1 + endpoints = [] + + def mocked_file_agent(*args, **kwargs): + agent = SnowflakeFileTransferAgent(*args, **kwargs) + mocked_file_agent.agent = agent + return agent + + original_accelerate_config = SnowflakeS3RestClient.transfer_accelerate_config + expected_cfg = accelerate_config + + def mock_s3_transfer_accelerate_config(self, *args, **kwargs) -> bool: + bret = original_accelerate_config(self, *args, **kwargs) + endpoints.append(self.endpoint) + return bret + + def mock_s3_get_bucket_config(self, *args, **kwargs) -> bool: + return expected_cfg + + with patch( + "snowflake.connector.cursor.SnowflakeFileTransferAgent", + side_effect=mocked_file_agent, + ): + with patch.multiple( + "snowflake.connector.s3_storage_client.SnowflakeS3RestClient", + _get_bucket_accelerate_config=mock_s3_get_bucket_config, + transfer_accelerate_config=mock_s3_transfer_accelerate_config, + ): + _put_get_user_stage( + tmpdir, + conn_cnx, + number_of_files=number_of_files, + number_of_lines=number_of_lines, + from_path=from_path, + ) + config_accl = mocked_file_agent.agent._use_accelerate_endpoint + if accelerate_config: + assert (config_accl is True) and all( + ele.find("s3-acc") >= 0 for ele in endpoints + ) + else: + assert (config_accl is False) and all( + ele.find("s3-acc") < 0 for ele in endpoints + ) + + +@pytest.mark.aws +@pytest.mark.parametrize( + "from_path", [True, pytest.param(False, marks=pytest.mark.skipolddriver)] +) +def test_put_get_large_data_via_user_stage( + is_public_test, + tmpdir, + conn_cnx, + from_path, +): + """[s3] Puts and Gets Large Data via User Stage.""" + if is_public_test or "AWS_ACCESS_KEY_ID" not in os.environ: + pytest.skip("This test requires to change the internal parameter") + number_of_files = 2 if from_path else 1 + number_of_lines = 200000 + _put_get_user_stage( + tmpdir, + conn_cnx, + number_of_files=number_of_files, + number_of_lines=number_of_lines, + from_path=from_path, + ) + + +@pytest.mark.aws +@pytest.mark.internal 
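+# Toggles the account-level ENABLE_STAGE_S3_PRIVATELINK_FOR_US_EAST_1
+# parameter via accountadmin, so it can only run on internal deployments.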
+@pytest.mark.parametrize( + "from_path", [True, pytest.param(False, marks=pytest.mark.skipolddriver)] +) +def test_put_small_data_use_s3_regional_url( + is_public_test, + tmpdir, + conn_cnx, + db_parameters, + from_path, +): + """[s3] Puts Small Data via User Stage using regional url.""" + if is_public_test or "AWS_ACCESS_KEY_ID" not in os.environ: + pytest.skip("This test requires to change the internal parameter") + number_of_files = 5 if from_path else 1 + number_of_lines = 1 + put_cursor = _put_get_user_stage_s3_regional_url( + tmpdir, + conn_cnx, + db_parameters, + number_of_files=number_of_files, + number_of_lines=number_of_lines, + from_path=from_path, + ) + assert put_cursor._connection._session_parameters.get( + "ENABLE_STAGE_S3_PRIVATELINK_FOR_US_EAST_1" + ) + + +def _put_get_user_stage_s3_regional_url( + tmpdir, + conn_cnx, + db_parameters, + number_of_files=1, + number_of_lines=1, + from_path=True, +) -> SnowflakeCursor | None: + with conn_cnx( + role="accountadmin", + ) as cnx: + cnx.cursor().execute( + "alter account set ENABLE_STAGE_S3_PRIVATELINK_FOR_US_EAST_1 = true;" + ) + try: + put_cursor = _put_get_user_stage( + tmpdir, + conn_cnx, + number_of_files, + number_of_lines, + from_path, + ) + finally: + with conn_cnx( + role="accountadmin", + ) as cnx: + cnx.cursor().execute( + "alter account set ENABLE_STAGE_S3_PRIVATELINK_FOR_US_EAST_1 = false;" + ) + return put_cursor + + +def _put_get_user_stage( + tmpdir, + conn_cnx, + number_of_files=1, + number_of_lines=1, + from_path=True, +) -> SnowflakeCursor | None: + put_cursor: SnowflakeCursor | None = None + # sanity check + assert "AWS_ACCESS_KEY_ID" in os.environ, "AWS_ACCESS_KEY_ID is missing" + assert "AWS_SECRET_ACCESS_KEY" in os.environ, "AWS_SECRET_ACCESS_KEY is missing" + if not from_path: + assert number_of_files == 1 + + random_str = random_string(5, "put_get_user_stage_") + tmp_dir = generate_k_lines_of_n_files( + number_of_lines, number_of_files, tmp_dir=str(tmpdir.mkdir("data")) + ) + + files = os.path.join(tmp_dir, "file*" if from_path else os.listdir(tmp_dir)[0]) + file_stream = None if from_path else open(files, "rb") + + stage_name = f"{random_str}_stage_{number_of_files}_{number_of_lines}" + with conn_cnx() as cnx: + cnx.cursor().execute( + f""" +create or replace table {random_str} ( +aa int, +dt date, +ts timestamp, +tsltz timestamp_ltz, +tsntz timestamp_ntz, +tstz timestamp_tz, +pct float, +ratio number(6,2)) +""" + ) + user_bucket = os.getenv( + "SF_AWS_USER_BUCKET", f"sfc-dev1-regression/{getuser()}/reg" + ) + cnx.cursor().execute( + f""" +create or replace stage {stage_name} +url='s3://{user_bucket}/{stage_name}-{number_of_files}-{number_of_lines}' +credentials=( + AWS_KEY_ID='{os.getenv("AWS_ACCESS_KEY_ID")}' + AWS_SECRET_KEY='{os.getenv("AWS_SECRET_ACCESS_KEY")}' +) +""" + ) + try: + with conn_cnx() as cnx: + cnx.cursor().execute( + "alter session set disable_put_and_get_on_external_stage = false" + ) + cnx.cursor().execute(f"rm @{stage_name}") + + put_cursor = cnx.cursor() + put(put_cursor, files, stage_name, from_path, file_stream=file_stream) + cnx.cursor().execute(f"copy into {random_str} from @{stage_name}") + c = cnx.cursor() + try: + c.execute(f"select count(*) from {random_str}") + rows = 0 + for rec in c: + rows += rec[0] + assert rows == number_of_files * number_of_lines, "Number of rows" + finally: + c.close() + cnx.cursor().execute(f"rm @{stage_name}") + cnx.cursor().execute(f"copy into @{stage_name} from {random_str}") + tmp_dir_user = str(tmpdir.mkdir("put_get_stage")) + 
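+            # GET everything back from the stage; the downloads should be
+            # gzip-compressed, which the mimetypes check below verifies.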
cnx.cursor().execute(f"get @{stage_name}/ file://{tmp_dir_user}/") + for _, _, files in os.walk(tmp_dir_user): + for file in files: + mimetypes.init() + _, encoding = mimetypes.guess_type(file) + assert encoding == "gzip", "exported file type" + finally: + if file_stream: + file_stream.close() + with conn_cnx() as cnx: + cnx.cursor().execute(f"rm @{stage_name}") + cnx.cursor().execute(f"drop stage if exists {stage_name}") + cnx.cursor().execute(f"drop table if exists {random_str}") + return put_cursor + + +@pytest.mark.aws +@pytest.mark.flaky(reruns=3) +def test_put_get_duplicated_data_user_stage( + is_public_test, + tmpdir, + conn_cnx, + number_of_files=5, + number_of_lines=100, +): + """[s3] Puts and Gets Duplicated Data using User Stage.""" + if is_public_test or "AWS_ACCESS_KEY_ID" not in os.environ: + pytest.skip("This test requires to change the internal parameter") + + random_str = random_string(5, "test_put_get_duplicated_data_user_stage_") + logger = getLogger(__name__) + assert "AWS_ACCESS_KEY_ID" in os.environ, "AWS_ACCESS_KEY_ID is missing" + assert "AWS_SECRET_ACCESS_KEY" in os.environ, "AWS_SECRET_ACCESS_KEY is missing" + + tmp_dir = generate_k_lines_of_n_files( + number_of_lines, number_of_files, tmp_dir=str(tmpdir.mkdir("data")) + ) + + files = os.path.join(tmp_dir, "file*") + + stage_name = f"{random_str}_stage" + with conn_cnx() as cnx: + cnx.cursor().execute( + f""" +create or replace table {random_str} ( +aa int, +dt date, +ts timestamp, +tsltz timestamp_ltz, +tsntz timestamp_ntz, +tstz timestamp_tz, +pct float, +ratio number(6,2)) +""" + ) + user_bucket = os.getenv( + "SF_AWS_USER_BUCKET", f"sfc-dev1-regression/{getuser()}/reg" + ) + cnx.cursor().execute( + f""" +create or replace stage {stage_name} +url='s3://{user_bucket}/{stage_name}-{number_of_files}-{number_of_lines}' +credentials=( + AWS_KEY_ID='{os.getenv("AWS_ACCESS_KEY_ID")}' + AWS_SECRET_KEY='{os.getenv("AWS_SECRET_ACCESS_KEY")}' +) +""" + ) + try: + with conn_cnx() as cnx: + c = cnx.cursor() + try: + for rec in c.execute(f"rm @{stage_name}"): + logger.info("rec=%s", rec) + finally: + c.close() + + success_cnt = 0 + skipped_cnt = 0 + with cnx.cursor() as c: + c.execute( + "alter session set disable_put_and_get_on_external_stage = false" + ) + for rec in c.execute(f"put file://{files} @{stage_name}"): + logger.info(f"rec={rec}") + if rec[6] == "UPLOADED": + success_cnt += 1 + elif rec[6] == "SKIPPED": + skipped_cnt += 1 + assert success_cnt == number_of_files, "uploaded files" + assert skipped_cnt == 0, "skipped files" + + logger.info(f"deleting files in {stage_name}") + + deleted_cnt = 0 + cnx.cursor().execute(f"rm @{stage_name}/file0") + deleted_cnt += 1 + cnx.cursor().execute(f"rm @{stage_name}/file1") + deleted_cnt += 1 + cnx.cursor().execute(f"rm @{stage_name}/file2") + deleted_cnt += 1 + + success_cnt = 0 + skipped_cnt = 0 + with cnx.cursor() as c: + for rec in c.execute( + f"put file://{files} @{stage_name}", + _raise_put_get_error=False, + ): + logger.info(f"rec={rec}") + if rec[6] == "UPLOADED": + success_cnt += 1 + elif rec[6] == "SKIPPED": + skipped_cnt += 1 + assert success_cnt == deleted_cnt, "uploaded files in the second time" + assert ( + skipped_cnt == number_of_files - deleted_cnt + ), "skipped files in the second time" + + time.sleep(5) + cnx.cursor().execute(f"copy into {random_str} from @{stage_name}") + with cnx.cursor() as c: + c.execute(f"select count(*) from {random_str}") + rows = 0 + for rec in c: + rows += rec[0] + assert rows == number_of_files * number_of_lines, "Number of rows" + 
cnx.cursor().execute(f"rm @{stage_name}") + cnx.cursor().execute(f"copy into @{stage_name} from {random_str}") + tmp_dir_user = str(tmpdir.mkdir("stage2")) + cnx.cursor().execute(f"get @{stage_name}/ file://{tmp_dir_user}/") + for _, _, files in os.walk(tmp_dir_user): + for file in files: + mimetypes.init() + _, encoding = mimetypes.guess_type(file) + assert encoding == "gzip", "exported file type" + + finally: + with conn_cnx() as cnx: + cnx.cursor().execute(f"drop stage if exists {stage_name}") + cnx.cursor().execute(f"drop table if exists {random_str}") + + +@pytest.mark.aws +def test_get_data_user_stage( + is_public_test, + tmpdir, + conn_cnx, +): + """SNOW-20927: Tests Get failure with 404 error.""" + stage_name = random_string(5, "test_get_data_user_stage_") + if is_public_test or "AWS_ACCESS_KEY_ID" not in os.environ: + pytest.skip("This test requires to change the internal parameter") + + default_s3bucket = os.getenv( + "SF_AWS_USER_BUCKET", f"sfc-dev1-regression/{getuser()}/reg" + ) + test_data = [ + { + "s3location": "{}/{}".format(default_s3bucket, f"{stage_name}_stage"), + "stage_name": f"{stage_name}_stage1", + "data_file_name": "data.txt", + }, + ] + for elem in test_data: + _put_list_rm_files_in_stage(tmpdir, conn_cnx, elem) + + +def _put_list_rm_files_in_stage(tmpdir, conn_cnx, elem): + s3location = elem["s3location"] + stage_name = elem["stage_name"] + data_file_name = elem["data_file_name"] + + from io import open + + from snowflake.connector.compat import UTF8 + + tmp_dir = str(tmpdir.mkdir("data")) + data_file = os.path.join(tmp_dir, data_file_name) + with open(data_file, "w", encoding=UTF8) as f: + f.write("123,456,string1\n") + f.write("789,012,string2\n") + + output_dir = str(tmpdir.mkdir("output")) + with conn_cnx() as cnx: + cnx.cursor().execute( + """ +create or replace stage {stage_name} + url='s3://{s3location}' + credentials=( + AWS_KEY_ID='{aws_key_id}' + AWS_SECRET_KEY='{aws_secret_key}' + ) +""".format( + s3location=s3location, + stage_name=stage_name, + aws_key_id=os.getenv("AWS_ACCESS_KEY_ID"), + aws_secret_key=os.getenv("AWS_SECRET_ACCESS_KEY"), + ) + ) + try: + with conn_cnx() as cnx: + cnx.cursor().execute(f"RM @{stage_name}") + cnx.cursor().execute( + "alter session set disable_put_and_get_on_external_stage = false" + ) + rec = ( + cnx.cursor() + .execute( + """ +PUT file://{file} @{stage_name} +""".format( + file=data_file, stage_name=stage_name + ) + ) + .fetchone() + ) + assert rec[0] == data_file_name + assert rec[6] == "UPLOADED" + rec = ( + cnx.cursor() + .execute( + """ +LIST @{stage_name} + """.format( + stage_name=stage_name + ) + ) + .fetchone() + ) + assert rec, "LIST should return something" + assert rec[0].startswith("s3://"), "The file location in S3" + rec = ( + cnx.cursor() + .execute( + """ +GET @{stage_name} file://{output_dir} +""".format( + stage_name=stage_name, output_dir=output_dir + ) + ) + .fetchone() + ) + assert rec[0] == data_file_name + ".gz" + assert rec[2] == "DOWNLOADED" + finally: + with conn_cnx() as cnx: + cnx.cursor().execute( + """ +RM @{stage_name} +""".format( + stage_name=stage_name + ) + ) + cnx.cursor().execute(f"drop stage if exists {stage_name}") diff --git a/test/integ/test_put_get_with_aws_token.py b/test/integ/test_put_get_with_aws_token.py new file mode 100644 index 000000000..70957e9c8 --- /dev/null +++ b/test/integ/test_put_get_with_aws_token.py @@ -0,0 +1,144 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
+# + +from __future__ import annotations + +import glob +import gzip +import os + +import pytest + +from snowflake.connector.constants import UTF8 + +try: # pragma: no cover + from snowflake.connector.vendored import requests +except ImportError: + requests = None + + +try: # pragma: no cover + from snowflake.connector.file_transfer_agent import ( + SnowflakeFileMeta, + StorageCredential, + ) + from snowflake.connector.s3_storage_client import S3Location, SnowflakeS3RestClient +except ImportError: + pass + +from ..integ_helpers import put +from ..randomize import random_string + +# Mark every test in this module as an aws test +pytestmark = pytest.mark.aws + + +@pytest.mark.parametrize( + "from_path", [True, pytest.param(False, marks=pytest.mark.skipolddriver)] +) +def test_put_get_with_aws(tmpdir, conn_cnx, from_path): + """[s3] Puts and Gets a small text using AWS S3.""" + # create a data file + fname = str(tmpdir.join("test_put_get_with_aws_token.txt.gz")) + original_contents = "123,test1\n456,test2\n" + with gzip.open(fname, "wb") as f: + f.write(original_contents.encode(UTF8)) + tmp_dir = str(tmpdir.mkdir("test_put_get_with_aws_token")) + table_name = random_string(5, "snow9144_") + + with conn_cnx() as cnx: + with cnx.cursor() as csr: + try: + csr.execute(f"create or replace table {table_name} (a int, b string)") + file_stream = None if from_path else open(fname, "rb") + put( + csr, + fname, + f"%{table_name}", + from_path, + sql_options=" auto_compress=true parallel=30", + file_stream=file_stream, + ) + rec = csr.fetchone() + assert rec[6] == "UPLOADED" + csr.execute(f"copy into {table_name}") + csr.execute(f"rm @%{table_name}") + assert csr.execute(f"ls @%{table_name}").fetchall() == [] + csr.execute( + f"copy into @%{table_name} from {table_name} " + "file_format=(type=csv compression='gzip')" + ) + csr.execute(f"get @%{table_name} file://{tmp_dir}") + rec = csr.fetchone() + assert rec[0].startswith("data_"), "A file downloaded by GET" + assert rec[1] == 36, "Return right file size" + assert rec[2] == "DOWNLOADED", "Return DOWNLOADED status" + assert rec[3] == "", "Return no error message" + finally: + csr.execute(f"drop table {table_name}") + if file_stream: + file_stream.close() + + files = glob.glob(os.path.join(tmp_dir, "data_*")) + with gzip.open(files[0], "rb") as fd: + contents = fd.read().decode(UTF8) + assert original_contents == contents, "Output is different from the original file" + + +@pytest.mark.skipolddriver +def test_put_with_invalid_token(tmpdir, conn_cnx): + """[s3] SNOW-6154: Uses invalid combination of AWS credential.""" + # create a data file + fname = str(tmpdir.join("test_put_get_with_aws_token.txt.gz")) + with gzip.open(fname, "wb") as f: + f.write("123,test1\n456,test2".encode(UTF8)) + table_name = random_string(5, "snow6154_") + + with conn_cnx() as cnx: + try: + cnx.cursor().execute( + f"create or replace table {table_name} (a int, b string)" + ) + ret = cnx.cursor()._execute_helper(f"put file://{fname} @%{table_name}") + stage_info = ret["data"]["stageInfo"] + stage_info["location"] + stage_credentials = stage_info["creds"] + creds = StorageCredential( + stage_credentials, cnx, "COMMAND WILL NOT BE USED" + ) + statinfo = os.stat(fname) + meta = SnowflakeFileMeta( + name=os.path.basename(fname), + src_file_name=fname, + src_file_size=statinfo.st_size, + stage_location_type="S3", + encryption_material=None, + dst_file_name=os.path.basename(fname), + sha256_digest="None", + ) + + client = SnowflakeS3RestClient(meta, creds, stage_info, 8388608) + 
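+            # Sanity-check the client with valid credentials first; the
+            # negative cases below remove the AWS token and corrupt the S3
+            # path to force Forbidden (403) responses.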
client.get_file_header(meta.name) # positive case + + # negative case, no aws token + token = stage_info["creds"]["AWS_TOKEN"] + del stage_info["creds"]["AWS_TOKEN"] + with pytest.raises(requests.HTTPError, match=".*Forbidden for url.*"): + client.get_file_header(meta.name) + + # negative case, wrong location + stage_info["creds"]["AWS_TOKEN"] = token + s3path = client.s3location.path + bad_path = os.path.dirname(os.path.dirname(s3path)) + "/" + _s3location = S3Location(client.s3location.bucket_name, bad_path) + client.s3location = _s3location + client.chunks = [b"this is a chunk"] + client.num_of_chunks = 1 + client.retry_count[0] = 0 + client.data_file = fname + with pytest.raises(requests.HTTPError, match=".*Forbidden for url.*"): + client.upload_chunk(0) + finally: + cnx.cursor().execute(f"drop table if exists {table_name}") diff --git a/test/integ/test_put_get_with_azure_token.py b/test/integ/test_put_get_with_azure_token.py new file mode 100644 index 000000000..a49275850 --- /dev/null +++ b/test/integ/test_put_get_with_azure_token.py @@ -0,0 +1,271 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +import glob +import gzip +import os +import sys +import time +from logging import getLogger + +import pytest + +from snowflake.connector.constants import UTF8 +from snowflake.connector.file_transfer_agent import ( + SnowflakeAzureProgressPercentage, + SnowflakeProgressPercentage, +) + +from ..generate_test_files import generate_k_lines_of_n_files +from ..integ_helpers import put +from ..randomize import random_string + +logger = getLogger(__name__) + +# Mark every test in this module as an azure and a putget test +pytestmark = pytest.mark.azure + + +@pytest.mark.parametrize( + "from_path", [True, pytest.param(False, marks=pytest.mark.skipolddriver)] +) +def test_put_get_with_azure(tmpdir, conn_cnx, from_path): + """[azure] Puts and Gets a small text using Azure.""" + # create a data file + fname = str(tmpdir.join("test_put_get_with_azure_token.txt.gz")) + original_contents = "123,test1\n456,test2\n" + with gzip.open(fname, "wb") as f: + f.write(original_contents.encode(UTF8)) + tmp_dir = str(tmpdir.mkdir("test_put_get_with_azure_token")) + table_name = random_string(5, "snow32806_") + + with conn_cnx() as cnx: + with cnx.cursor() as csr: + csr.execute(f"create or replace table {table_name} (a int, b string)") + try: + file_stream = None if from_path else open(fname, "rb") + put( + csr, + fname, + f"%{table_name}", + from_path, + sql_options=" auto_compress=true parallel=30", + _put_callback=SnowflakeAzureProgressPercentage, + _get_callback=SnowflakeAzureProgressPercentage, + file_stream=file_stream, + ) + assert csr.fetchone()[6] == "UPLOADED" + csr.execute(f"copy into {table_name}") + csr.execute(f"rm @%{table_name}") + assert csr.execute(f"ls @%{table_name}").fetchall() == [] + csr.execute( + f"copy into @%{table_name} from {table_name} " + "file_format=(type=csv compression='gzip')" + ) + csr.execute( + f"get @%{table_name} file://{tmp_dir}", + _put_callback=SnowflakeAzureProgressPercentage, + _get_callback=SnowflakeAzureProgressPercentage, + ) + rec = csr.fetchone() + assert rec[0].startswith("data_"), "A file downloaded by GET" + assert rec[1] == 36, "Return right file size" + assert rec[2] == "DOWNLOADED", "Return DOWNLOADED status" + assert rec[3] == "", "Return no error message" + finally: + if file_stream: + file_stream.close() + csr.execute(f"drop table {table_name}") + + files = 
glob.glob(os.path.join(tmp_dir, "data_*")) + with gzip.open(files[0], "rb") as fd: + contents = fd.read().decode(UTF8) + assert original_contents == contents, "Output is different from the original file" + + +def test_put_copy_many_files_azure(tmpdir, conn_cnx): + """[azure] Puts and Copies many files.""" + # generates N files + number_of_files = 10 + number_of_lines = 1000 + tmp_dir = generate_k_lines_of_n_files( + number_of_lines, number_of_files, tmp_dir=str(tmpdir.mkdir("data")) + ) + folder_name = random_string(5, "test_put_copy_many_files_azure_") + + files = os.path.join(tmp_dir, "file*") + + def run(csr, sql): + sql = sql.format(files=files, name=folder_name) + return csr.execute(sql).fetchall() + + with conn_cnx() as cnx: + with cnx.cursor() as csr: + run( + csr, + """ + create or replace table {name} ( + aa int, + dt date, + ts timestamp, + tsltz timestamp_ltz, + tsntz timestamp_ntz, + tstz timestamp_tz, + pct float, + ratio number(6,2)) + """, + ) + try: + all_recs = run(csr, "put file://{files} @%{name}") + assert all([rec[6] == "UPLOADED" for rec in all_recs]) + run(csr, "copy into {name}") + + rows = sum(rec[0] for rec in run(csr, "select count(*) from {name}")) + assert rows == number_of_files * number_of_lines, "Number of rows" + finally: + run(csr, "drop table if exists {name}") + + +def test_put_copy_duplicated_files_azure(tmpdir, conn_cnx): + """[azure] Puts and Copies duplicated files.""" + # generates N files + number_of_files = 5 + number_of_lines = 100 + tmp_dir = generate_k_lines_of_n_files( + number_of_lines, number_of_files, tmp_dir=str(tmpdir.mkdir("data")) + ) + table_name = random_string(5, "test_put_copy_duplicated_files_azure_") + + files = os.path.join(tmp_dir, "file*") + + def run(csr, sql): + sql = sql.format(files=files, name=table_name) + return csr.execute(sql, _raise_put_get_error=False).fetchall() + + with conn_cnx() as cnx: + with cnx.cursor() as csr: + run( + csr, + """ + create or replace table {name} ( + aa int, + dt date, + ts timestamp, + tsltz timestamp_ltz, + tsntz timestamp_ntz, + tstz timestamp_tz, + pct float, + ratio number(6,2)) + """, + ) + + try: + success_cnt = 0 + skipped_cnt = 0 + for rec in run(csr, "put file://{files} @%{name}"): + logger.info("rec=%s", rec) + if rec[6] == "UPLOADED": + success_cnt += 1 + elif rec[6] == "SKIPPED": + skipped_cnt += 1 + assert success_cnt == number_of_files, "uploaded files" + assert skipped_cnt == 0, "skipped files" + + deleted_cnt = 0 + run(csr, "rm @%{name}/file0") + deleted_cnt += 1 + run(csr, "rm @%{name}/file1") + deleted_cnt += 1 + run(csr, "rm @%{name}/file2") + deleted_cnt += 1 + + success_cnt = 0 + skipped_cnt = 0 + for rec in run(csr, "put file://{files} @%{name}"): + logger.info("rec=%s", rec) + if rec[6] == "UPLOADED": + success_cnt += 1 + elif rec[6] == "SKIPPED": + skipped_cnt += 1 + assert success_cnt == deleted_cnt, "uploaded files in the second time" + assert ( + skipped_cnt == number_of_files - deleted_cnt + ), "skipped files in the second time" + + run(csr, "copy into {name}") + rows = 0 + for rec in run(csr, "select count(*) from {name}"): + rows += rec[0] + assert rows == number_of_files * number_of_lines, "Number of rows" + finally: + run(csr, "drop table if exists {name}") + + +def test_put_get_large_files_azure(tmpdir, conn_cnx): + """[azure] Puts and Gets Large files.""" + number_of_files = 3 + number_of_lines = 200000 + tmp_dir = generate_k_lines_of_n_files( + number_of_lines, number_of_files, tmp_dir=str(tmpdir.mkdir("data")) + ) + + files = os.path.join(tmp_dir, "file*") 
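+    # GET later in this test downloads into output_dir; the no-op cb class
+    # below only exercises the progress-callback plumbing.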
+ output_dir = os.path.join(tmp_dir, "output_dir") + os.makedirs(output_dir) + folder_name = random_string(5, "test_put_get_large_files_azure_") + + class cb(SnowflakeProgressPercentage): + def __init__(self, filename, filesize, **_): + pass + + def __call__(self, bytes_amount): + pass + + def run(cnx, sql): + return ( + cnx.cursor() + .execute( + sql.format(files=files, dir=folder_name, output_dir=output_dir), + _put_callback_output_stream=sys.stdout, + _get_callback_output_stream=sys.stdout, + _get_callback=cb, + _put_callback=cb, + ) + .fetchall() + ) + + with conn_cnx() as cnx: + try: + all_recs = run(cnx, "PUT file://{files} @~/{dir}") + assert all([rec[6] == "UPLOADED" for rec in all_recs]) + + for _ in range(60): + for _ in range(100): + all_recs = run(cnx, "LIST @~/{dir}") + if len(all_recs) == number_of_files: + break + # you may not get the files right after PUT command + # due to the nature of Azure blob, which synchronizes + # data eventually. + time.sleep(1) + else: + # wait for another second and retry. + # this could happen if the files are partially available + # but not all. + time.sleep(1) + continue + break # success + else: + pytest.fail( + "cannot list all files. Potentially " + "PUT command missed uploading Files: {}".format(all_recs) + ) + all_recs = run(cnx, "GET @~/{dir} file://{output_dir}") + assert len(all_recs) == number_of_files + assert all([rec[2] == "DOWNLOADED" for rec in all_recs]) + finally: + run(cnx, "RM @~/{dir}") diff --git a/test/integ/test_put_get_with_gcp_account.py b/test/integ/test_put_get_with_gcp_account.py new file mode 100644 index 000000000..bcebbe844 --- /dev/null +++ b/test/integ/test_put_get_with_gcp_account.py @@ -0,0 +1,708 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +import glob +import gzip +import os +import sys +import time +from filecmp import cmp +from logging import getLogger +from unittest import mock + +import pytest + +from snowflake.connector.constants import UTF8 +from snowflake.connector.errors import ProgrammingError + +try: # pragma: no cover + from snowflake.connector.file_transfer_agent import ( + SnowflakeFileTransferAgent, + SnowflakeProgressPercentage, + ) + from snowflake.connector.gcs_storage_client import SnowflakeGCSRestClient +except ImportError: + from snowflake.connector.file_transfer_agent import ( + SnowflakeFileTransferAgent, + SnowflakeProgressPercentage, + ) + + SnowflakeGCSRestClient = None + +from ..generate_test_files import generate_k_lines_of_n_files +from ..integ_helpers import put +from ..randomize import random_string + +# We need these for our OldDriver tests. We run most up to date tests with the oldest supported driver version +try: + from snowflake.connector.vendored import requests + + vendored_request = True +except ImportError: # pragma: no cover + import requests + + vendored_request = False + +logger = getLogger(__name__) + +# Mark every test in this module as a gcp test +pytestmark = pytest.mark.gcp + + +@pytest.mark.parametrize("enable_gcs_downscoped", [True, False]) +@pytest.mark.parametrize( + "from_path", [True, pytest.param(False, marks=pytest.mark.skipolddriver)] +) +def test_put_get_with_gcp( + tmpdir, + conn_cnx, + is_public_test, + enable_gcs_downscoped, + from_path, +): + """[gcp] Puts and Gets a small text using gcp.""" + if enable_gcs_downscoped and is_public_test: + pytest.xfail( + "Server need to update with merged change. 
Expected release version: 4.41.0" + ) + # create a data file + fname = str(tmpdir.join("test_put_get_with_gcp_token.txt.gz")) + original_contents = "123,test1\n456,test2\n" + with gzip.open(fname, "wb") as f: + f.write(original_contents.encode(UTF8)) + tmp_dir = str(tmpdir.mkdir("test_put_get_with_gcp_token")) + table_name = random_string(5, "snow32806_") + + with conn_cnx() as cnx: + with cnx.cursor() as csr: + try: + csr.execute( + f"ALTER SESSION SET GCS_USE_DOWNSCOPED_CREDENTIAL = {enable_gcs_downscoped}" + ) + except ProgrammingError as e: + if enable_gcs_downscoped: + # not raise error when the parameter is not available yet, using old behavior + raise e + csr.execute(f"create or replace table {table_name} (a int, b string)") + try: + file_stream = None if from_path else open(fname, "rb") + put( + csr, + fname, + f"%{table_name}", + from_path, + sql_options=" auto_compress=true parallel=30", + file_stream=file_stream, + ) + assert csr.fetchone()[6] == "UPLOADED" + csr.execute(f"copy into {table_name}") + csr.execute(f"rm @%{table_name}") + assert csr.execute(f"ls @%{table_name}").fetchall() == [] + csr.execute( + f"copy into @%{table_name} from {table_name} " + "file_format=(type=csv compression='gzip')" + ) + csr.execute(f"get @%{table_name} file://{tmp_dir}") + rec = csr.fetchone() + assert rec[0].startswith("data_"), "A file downloaded by GET" + assert rec[1] == 36, "Return right file size" + assert rec[2] == "DOWNLOADED", "Return DOWNLOADED status" + assert rec[3] == "", "Return no error message" + finally: + if file_stream: + file_stream.close() + csr.execute(f"drop table {table_name}") + + files = glob.glob(os.path.join(tmp_dir, "data_*")) + with gzip.open(files[0], "rb") as fd: + contents = fd.read().decode(UTF8) + assert original_contents == contents, "Output is different from the original file" + + +@pytest.mark.parametrize("enable_gcs_downscoped", [True, False]) +def test_put_copy_many_files_gcp( + tmpdir, + conn_cnx, + is_public_test, + enable_gcs_downscoped, +): + """[gcp] Puts and Copies many files.""" + if enable_gcs_downscoped and is_public_test: + pytest.xfail( + "Server need to update with merged change. 
Expected release version: 4.41.0" + ) + # generates N files + number_of_files = 10 + number_of_lines = 1000 + tmp_dir = generate_k_lines_of_n_files( + number_of_lines, number_of_files, tmp_dir=str(tmpdir.mkdir("data")) + ) + table_name = random_string(5, "test_put_copy_many_files_gcp_") + + files = os.path.join(tmp_dir, "file*") + + def run(csr, sql): + sql = sql.format(files=files, name=table_name) + return csr.execute(sql).fetchall() + + with conn_cnx() as cnx: + with cnx.cursor() as csr: + try: + csr.execute( + f"ALTER SESSION SET GCS_USE_DOWNSCOPED_CREDENTIAL = {enable_gcs_downscoped}" + ) + except ProgrammingError as e: + if enable_gcs_downscoped: + # not raise error when the parameter is not available yet, using old behavior + raise e + run( + csr, + """ + create or replace table {name} ( + aa int, + dt date, + ts timestamp, + tsltz timestamp_ltz, + tsntz timestamp_ntz, + tstz timestamp_tz, + pct float, + ratio number(6,2)) + """, + ) + try: + statement = "put file://{files} @%{name}" + if enable_gcs_downscoped: + statement += " overwrite = true" + + all_recs = run(csr, statement) + assert all([rec[6] == "UPLOADED" for rec in all_recs]) + run(csr, "copy into {name}") + + rows = sum(rec[0] for rec in run(csr, "select count(*) from {name}")) + assert rows == number_of_files * number_of_lines, "Number of rows" + finally: + run(csr, "drop table if exists {name}") + + +@pytest.mark.parametrize("enable_gcs_downscoped", [True, False]) +def test_put_copy_duplicated_files_gcp( + tmpdir, + conn_cnx, + is_public_test, + enable_gcs_downscoped, +): + """[gcp] Puts and Copies duplicated files.""" + if enable_gcs_downscoped and is_public_test: + pytest.xfail( + "Server need to update with merged change. Expected release version: 4.41.0" + ) + # generates N files + number_of_files = 5 + number_of_lines = 100 + tmp_dir = generate_k_lines_of_n_files( + number_of_lines, number_of_files, tmp_dir=str(tmpdir.mkdir("data")) + ) + table_name = random_string(5, "test_put_copy_duplicated_files_gcp_") + + files = os.path.join(tmp_dir, "file*") + + def run(csr, sql): + sql = sql.format(files=files, name=table_name) + return csr.execute(sql).fetchall() + + with conn_cnx() as cnx: + with cnx.cursor() as csr: + try: + csr.execute( + f"ALTER SESSION SET GCS_USE_DOWNSCOPED_CREDENTIAL = {enable_gcs_downscoped}" + ) + except ProgrammingError as e: + if enable_gcs_downscoped: + # not raise error when the parameter is not available yet, using old behavior + raise e + run( + csr, + """ + create or replace table {name} ( + aa int, + dt date, + ts timestamp, + tsltz timestamp_ltz, + tsntz timestamp_ntz, + tstz timestamp_tz, + pct float, + ratio number(6,2)) + """, + ) + + try: + success_cnt = 0 + skipped_cnt = 0 + put_statement = "put file://{files} @%{name}" + if enable_gcs_downscoped: + put_statement += " overwrite = true" + for rec in run(csr, put_statement): + logger.info("rec=%s", rec) + if rec[6] == "UPLOADED": + success_cnt += 1 + elif rec[6] == "SKIPPED": + skipped_cnt += 1 + assert success_cnt == number_of_files, "uploaded files" + assert skipped_cnt == 0, "skipped files" + + deleted_cnt = 0 + run(csr, "rm @%{name}/file0") + deleted_cnt += 1 + run(csr, "rm @%{name}/file1") + deleted_cnt += 1 + run(csr, "rm @%{name}/file2") + deleted_cnt += 1 + + success_cnt = 0 + skipped_cnt = 0 + for rec in run(csr, put_statement): + logger.info("rec=%s", rec) + if rec[6] == "UPLOADED": + success_cnt += 1 + elif rec[6] == "SKIPPED": + skipped_cnt += 1 + assert ( + success_cnt == number_of_files + ), "uploaded files in the second 
time" + assert skipped_cnt == 0, "skipped files in the second time" + + run(csr, "copy into {name}") + rows = 0 + for rec in run(csr, "select count(*) from {name}"): + rows += rec[0] + assert rows == number_of_files * number_of_lines, "Number of rows" + finally: + run(csr, "drop table if exists {name}") + + +@pytest.mark.parametrize("enable_gcs_downscoped", [True, False]) +def test_put_get_large_files_gcp( + tmpdir, + conn_cnx, + is_public_test, + enable_gcs_downscoped, +): + """[gcp] Puts and Gets Large files.""" + if enable_gcs_downscoped and is_public_test: + pytest.xfail( + "Server need to update with merged change. Expected release version: 4.41.0" + ) + number_of_files = 3 + number_of_lines = 200000 + tmp_dir = generate_k_lines_of_n_files( + number_of_lines, number_of_files, tmp_dir=str(tmpdir.mkdir("data")) + ) + folder_name = random_string(5, "test_put_get_large_files_gcp_") + + files = os.path.join(tmp_dir, "file*") + output_dir = os.path.join(tmp_dir, "output_dir") + os.makedirs(output_dir) + + class cb(SnowflakeProgressPercentage): + def __init__(self, filename, filesize, **_): + pass + + def __call__(self, bytes_amount): + pass + + def run(cnx, sql): + return ( + cnx.cursor() + .execute( + sql.format(files=files, dir=folder_name, output_dir=output_dir), + _put_callback_output_stream=sys.stdout, + _get_callback_output_stream=sys.stdout, + _get_callback=cb, + _put_callback=cb, + ) + .fetchall() + ) + + with conn_cnx() as cnx: + try: + try: + run( + cnx, + f"ALTER SESSION SET GCS_USE_DOWNSCOPED_CREDENTIAL = {enable_gcs_downscoped}", + ) + except ProgrammingError as e: + if enable_gcs_downscoped: + # not raise error when the parameter is not available yet, using old behavior + raise e + all_recs = run(cnx, "PUT file://{files} @~/{dir}") + assert all([rec[6] == "UPLOADED" for rec in all_recs]) + + for _ in range(60): + for _ in range(100): + all_recs = run(cnx, "LIST @~/{dir}") + if len(all_recs) == number_of_files: + break + # you may not get the files right after PUT command + # due to the nature of gcs blob, which synchronizes + # data eventually. + time.sleep(1) + else: + # wait for another second and retry. + # this could happen if the files are partially available + # but not all. + time.sleep(1) + continue + break # success + else: + pytest.fail( + "cannot list all files. 
Potentially " + f"PUT command missed uploading Files: {all_recs}" + ) + all_recs = run(cnx, "GET @~/{dir} file://{output_dir}") + assert len(all_recs) == number_of_files + assert all([rec[2] == "DOWNLOADED" for rec in all_recs]) + finally: + run(cnx, "RM @~/{dir}") + + +def test_get_gcp_file_object_http_400_error(tmpdir, conn_cnx): + pytest.skip("This test needs to be totally rewritten for sdkless mode") + fname = str(tmpdir.join("test_put_get_with_gcp_token.txt.gz")) + original_contents = "123,test1\n456,test2\n" + with gzip.open(fname, "wb") as f: + f.write(original_contents.encode(UTF8)) + tmp_dir = str(tmpdir.mkdir("test_put_get_with_gcp_token")) + table_name = random_string(5, "snow32807_") + + with conn_cnx() as cnx: + with cnx.cursor() as csr: + csr.execute(f"create or replace table {table_name} (a int, b string)") + try: + from snowflake.connector.vendored.requests import get, put + + def mocked_put(*args, **kwargs): + if mocked_put.counter == 0: + mocked_put.counter += 1 + exc = requests.exceptions.HTTPError( + response=requests.Response() + ) + exc.response.status_code = 400 + raise exc + else: + return put(*args, **kwargs) + + mocked_put.counter = 0 + + def mocked_file_agent(*args, **kwargs): + agent = SnowflakeGCSRestClient(*args, **kwargs) + agent._update_presigned_url = mock.MagicMock( + wraps=agent._update_presigned_url + ) + mocked_file_agent.agent = agent + return agent + + with mock.patch( + "snowflake.connector.file_transfer_agent.SnowflakeGCSRestClient", + side_effect=mocked_file_agent, + ): + with mock.patch( + "snowflake.connector.vendored.requests.put" + if vendored_request + else "request.put", + side_effect=mocked_put, + ): + csr.execute( + f"put file://{fname} @%{table_name} auto_compress=true parallel=30" + ) + assert csr.fetchone()[6] == "UPLOADED" + csr.execute(f"copy into {table_name} purge = true") + assert csr.execute(f"ls @%{table_name}").fetchall() == [] + csr.execute( + f"copy into @%{table_name} from {table_name} " + "file_format=(type=csv compression='gzip')" + ) + + def mocked_get(*args, **kwargs): + if mocked_get.counter == 0: + mocked_get.counter += 1 + exc = requests.exceptions.HTTPError( + response=requests.Response() + ) + exc.response.status_code = 400 + raise exc + else: + return get(*args, **kwargs) + + mocked_get.counter = 0 + + with mock.patch( + "snowflake.connector.cursor.SnowflakeFileTransferAgent", + side_effect=mocked_file_agent, + ): + with mock.patch( + "snowflake.connector.vendored.requests.get" + if vendored_request + else "request.get", + side_effect=mocked_get, + ): + csr.execute(f"get @%{table_name} file://{tmp_dir}") + assert ( + mocked_file_agent.agent._update_file_metas_with_presigned_url.call_count + == 2 + ) + rec = csr.fetchone() + assert rec[0].startswith("data_"), "A file downloaded by GET" + assert rec[1] == 36, "Return right file size" + assert rec[2] == "DOWNLOADED", "Return DOWNLOADED status" + assert rec[3] == "", "Return no error message" + finally: + csr.execute(f"drop table {table_name}") + + files = glob.glob(os.path.join(tmp_dir, "data_*")) + with gzip.open(files[0], "rb") as fd: + contents = fd.read().decode(UTF8) + assert original_contents == contents, "Output is different from the original file" + + +@pytest.mark.parametrize("enable_gcs_downscoped", [True, False]) +def test_auto_compress_off_gcp( + tmpdir, + conn_cnx, + is_public_test, + enable_gcs_downscoped, +): + """[gcp] Puts and Gets a small text using gcp with no auto compression.""" + if enable_gcs_downscoped and is_public_test: + pytest.xfail( + 
"Server need to update with merged change. Expected release version: 4.41.0" + ) + fname = str( + os.path.join( + os.path.dirname(os.path.realpath(__file__)), "../data", "example.json" + ) + ) + stage_name = random_string(5, "teststage_") + with conn_cnx() as cnx: + with cnx.cursor() as cursor: + try: + cursor.execute( + f"ALTER SESSION SET GCS_USE_DOWNSCOPED_CREDENTIAL = {enable_gcs_downscoped}" + ) + except ProgrammingError as e: + if enable_gcs_downscoped: + # not raise error when the parameter is not available yet, using old behavior + raise e + try: + cursor.execute(f"create or replace stage {stage_name}") + cursor.execute(f"put file://{fname} @{stage_name} auto_compress=false") + cursor.execute(f"get @{stage_name} file://{tmpdir}") + downloaded_file = os.path.join(str(tmpdir), "example.json") + assert cmp(fname, downloaded_file) + finally: + cursor.execute(f"drop stage {stage_name}") + + +# TODO +@pytest.mark.parametrize("error_code", [401, 403, 408, 429, 500, 503]) +def test_get_gcp_file_object_http_recoverable_error_refresh_with_downscoped( + tmpdir, + conn_cnx, + error_code, + is_public_test, +): + if is_public_test: + pytest.xfail( + "Server need to update with merged change. Expected release version: 4.41.0" + ) + fname = str(tmpdir.join("test_put_get_with_gcp_token.txt.gz")) + original_contents = "123,test1\n456,test2\n" + with gzip.open(fname, "wb") as f: + f.write(original_contents.encode(UTF8)) + tmp_dir = str(tmpdir.mkdir("test_put_get_with_gcp_token")) + table_name = random_string(5, "snow32807_") + + with conn_cnx() as cnx: + with cnx.cursor() as csr: + csr.execute("ALTER SESSION SET GCS_USE_DOWNSCOPED_CREDENTIAL = TRUE") + csr.execute(f"create or replace table {table_name} (a int, b string)") + try: + from snowflake.connector.vendored.requests import get, head, put + + def mocked_put(*args, **kwargs): + if mocked_put.counter == 0: + exc = requests.exceptions.HTTPError( + response=requests.Response() + ) + exc.response.status_code = error_code + mocked_put.counter += 1 + raise exc + else: + return put(*args, **kwargs) + + mocked_put.counter = 0 + + def mocked_head(*args, **kwargs): + if mocked_head.counter == 0: + mocked_head.counter += 1 + exc = requests.exceptions.HTTPError( + response=requests.Response() + ) + exc.response.status_code = error_code + raise exc + else: + return head(*args, **kwargs) + + mocked_head.counter = 0 + + def mocked_file_agent(*args, **kwargs): + agent = SnowflakeFileTransferAgent(*args, **kwargs) + agent.renew_expired_client = mock.MagicMock( + wraps=agent.renew_expired_client + ) + mocked_file_agent.agent = agent + return agent + + with mock.patch( + "snowflake.connector.cursor.SnowflakeFileTransferAgent", + side_effect=mocked_file_agent, + ): + with mock.patch( + "snowflake.connector.vendored.requests.put" + if vendored_request + else "requests.put", + side_effect=mocked_put, + ): + with mock.patch( + "snowflake.connector.vendored.requests.head" + if vendored_request + else "requests.head", + side_effect=mocked_head, + ): + csr.execute( + f"put file://{fname} @%{table_name} auto_compress=true parallel=30" + ) + if error_code == 401: + assert ( + mocked_file_agent.agent.renew_expired_client.call_count == 2 + ) + assert csr.fetchone()[6] == "UPLOADED" + csr.execute(f"copy into {table_name}") + csr.execute(f"rm @%{table_name}") + assert csr.execute(f"ls @%{table_name}").fetchall() == [] + csr.execute( + f"copy into @%{table_name} from {table_name} " + "file_format=(type=csv compression='gzip')" + ) + + def mocked_get(*args, **kwargs): + if 
mocked_get.counter == 0:
+                        mocked_get.counter += 1
+                        exc = requests.exceptions.HTTPError(
+                            response=requests.Response()
+                        )
+                        exc.response.status_code = error_code
+                        raise exc
+                    else:
+                        return get(*args, **kwargs)
+
+                mocked_get.counter = 0
+
+                with mock.patch(
+                    "snowflake.connector.cursor.SnowflakeFileTransferAgent",
+                    side_effect=mocked_file_agent,
+                ):
+                    with mock.patch(
+                        "snowflake.connector.vendored.requests.get"
+                        if vendored_request
+                        else "requests.get",
+                        side_effect=mocked_get,
+                    ):
+                        csr.execute(f"get @%{table_name} file://{tmp_dir}")
+                        if error_code == 401:
+                            assert (
+                                mocked_file_agent.agent.renew_expired_client.call_count == 1
+                            )
+                rec = csr.fetchone()
+                assert rec[0].startswith("data_"), "A file downloaded by GET"
+                assert rec[1] == 36, "Return right file size"
+                assert rec[2] == "DOWNLOADED", "Return DOWNLOADED status"
+                assert rec[3] == "", "Return no error message"
+            finally:
+                csr.execute(f"drop table {table_name}")
+
+    files = glob.glob(os.path.join(tmp_dir, "data_*"))
+    with gzip.open(files[0], "rb") as fd:
+        contents = fd.read().decode(UTF8)
+    assert original_contents == contents, "Output is different from the original file"
+
+
+@pytest.mark.parametrize(
+    "from_path", [True, pytest.param(False, marks=pytest.mark.skipolddriver)]
+)
+def test_put_overwrite_with_downscope(
+    tmpdir,
+    conn_cnx,
+    is_public_test,
+    from_path,
+):
+    """Tests whether _force_put_overwrite and overwrite=true work as intended."""
+    if is_public_test:
+        pytest.xfail(
+            "Server need to update with merged change. Expected release version: 4.41.0"
+        )
+
+    with conn_cnx() as cnx:
+
+        tmp_dir = str(tmpdir.mkdir("data"))
+        test_data = os.path.join(tmp_dir, "data.txt")
+        with open(test_data, "w") as f:
+            f.write("test1,test2")
+            f.write("test3,test4")
+
+        cnx.cursor().execute("RM @~/test_put_overwrite")
+        try:
+            file_stream = None if from_path else open(test_data, "rb")
+            with cnx.cursor() as cur:
+                cur.execute("ALTER SESSION SET GCS_USE_DOWNSCOPED_CREDENTIAL = TRUE")
+                put(
+                    cur,
+                    test_data,
+                    "~/test_put_overwrite",
+                    from_path,
+                    file_stream=file_stream,
+                )
+                data = cur.fetchall()
+                assert data[0][6] == "UPLOADED"
+
+                put(
+                    cur,
+                    test_data,
+                    "~/test_put_overwrite",
+                    from_path,
+                    file_stream=file_stream,
+                )
+                data = cur.fetchall()
+                assert data[0][6] == "SKIPPED"
+
+                put(
+                    cur,
+                    test_data,
+                    "~/test_put_overwrite",
+                    from_path,
+                    sql_options="OVERWRITE = TRUE",
+                    file_stream=file_stream,
+                )
+                data = cur.fetchall()
+                assert data[0][6] == "UPLOADED"
+
+            ret = cnx.cursor().execute("LS @~/test_put_overwrite").fetchone()
+            assert "test_put_overwrite/data.txt" in ret[0]
+            assert "data.txt.gz" in ret[0]
+        finally:
+            if file_stream:
+                file_stream.close()
+            cnx.cursor().execute("RM @~/test_put_overwrite")
diff --git a/test/integ/test_put_windows_path.py b/test/integ/test_put_windows_path.py
new file mode 100644
index 000000000..33126e46e
--- /dev/null
+++ b/test/integ/test_put_windows_path.py
@@ -0,0 +1,36 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved.
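+#
+# Verifies that PUT accepts both a file:// URI (pathlib.Path.as_uri) and a
+# plain OS path for the local file location.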
+# + +from __future__ import annotations + +import os + + +def test_abc(conn_cnx, tmpdir, db_parameters): + """Tests PUTing a file on Windows using the URI and Windows path.""" + import pathlib + + tmp_dir = str(tmpdir.mkdir("data")) + test_data = os.path.join(tmp_dir, "data.txt") + with open(test_data, "w") as f: + f.write("test1,test2") + f.write("test3,test4") + + fileURI = pathlib.Path(test_data).as_uri() + + subdir = db_parameters["name"] + with conn_cnx( + user=db_parameters["user"], + account=db_parameters["account"], + password=db_parameters["password"], + ) as con: + rec = con.cursor().execute(f"put {fileURI} @~/{subdir}0/").fetchall() + assert rec[0][6] == "UPLOADED" + + rec = con.cursor().execute(f"put file://{test_data} @~/{subdir}1/").fetchall() + assert rec[0][6] == "UPLOADED" + + con.cursor().execute(f"rm @~/{subdir}0") + con.cursor().execute(f"rm @~/{subdir}1") diff --git a/test/integ/test_qmark.py b/test/integ/test_qmark.py new file mode 100644 index 000000000..1dc924bc5 --- /dev/null +++ b/test/integ/test_qmark.py @@ -0,0 +1,170 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +import pytest + +from snowflake.connector import errors + + +def test_qmark_paramstyle(conn_cnx, db_parameters): + """Tests that binding question marks is not supported by default.""" + try: + with conn_cnx() as cnx: + cnx.cursor().execute( + "CREATE OR REPLACE TABLE {name} " + "(aa STRING, bb STRING)".format(name=db_parameters["name"]) + ) + cnx.cursor().execute( + "INSERT INTO {name} VALUES('?', '?')".format(name=db_parameters["name"]) + ) + for rec in cnx.cursor().execute( + "SELECT * FROM {name}".format(name=db_parameters["name"]) + ): + assert rec[0] == "?", "First column value" + with pytest.raises(errors.ProgrammingError): + cnx.cursor().execute( + "INSERT INTO {name} VALUES(?,?)".format( + name=db_parameters["name"] + ) + ) + finally: + with conn_cnx() as cnx: + cnx.cursor().execute( + "DROP TABLE IF EXISTS {name}".format(name=db_parameters["name"]) + ) + + +def test_numeric_paramstyle(conn_cnx, db_parameters): + """Tests that binding numeric positional style is not supported.""" + try: + with conn_cnx() as cnx: + cnx.cursor().execute( + "CREATE OR REPLACE TABLE {name} " + "(aa STRING, bb STRING)".format(name=db_parameters["name"]) + ) + cnx.cursor().execute( + "INSERT INTO {name} VALUES(':1', ':2')".format( + name=db_parameters["name"] + ) + ) + for rec in cnx.cursor().execute( + "SELECT * FROM {name}".format(name=db_parameters["name"]) + ): + assert rec[0] == ":1", "First column value" + with pytest.raises(errors.ProgrammingError): + cnx.cursor().execute( + "INSERT INTO {name} VALUES(:1,:2)".format( + name=db_parameters["name"] + ) + ) + finally: + with conn_cnx() as cnx: + cnx.cursor().execute( + "DROP TABLE IF EXISTS {name}".format(name=db_parameters["name"]) + ) + + +@pytest.mark.internal +def test_qmark_paramstyle_enabled(negative_conn_cnx, db_parameters): + """Enable qmark binding.""" + import snowflake.connector + + snowflake.connector.paramstyle = "qmark" + try: + with negative_conn_cnx() as cnx: + cnx.cursor().execute( + "CREATE OR REPLACE TABLE {name} " + "(aa STRING, bb STRING)".format(name=db_parameters["name"]) + ) + cnx.cursor().execute( + "INSERT INTO {name} VALUES(?, ?)".format(name=db_parameters["name"]), + ("test11", "test12"), + ) + ret = ( + cnx.cursor() + .execute("select * from {name}".format(name=db_parameters["name"])) + .fetchone() + ) + assert ret[0] == "test11" + 
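+            # With paramstyle switched to qmark, both positional bindings
+            # round-trip unchanged.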
assert ret[1] == "test12" + finally: + with negative_conn_cnx() as cnx: + cnx.cursor().execute( + "DROP TABLE IF EXISTS {name}".format(name=db_parameters["name"]) + ) + snowflake.connector.paramstyle = "pyformat" + + # After changing back to pyformat, binding qmark should fail. + try: + with negative_conn_cnx() as cnx: + cnx.cursor().execute( + "CREATE OR REPLACE TABLE {name} " + "(aa STRING, bb STRING)".format(name=db_parameters["name"]) + ) + with pytest.raises(TypeError): + cnx.cursor().execute( + "INSERT INTO {name} VALUES(?, ?)".format( + name=db_parameters["name"] + ), + ("test11", "test12"), + ) + finally: + with negative_conn_cnx() as cnx: + cnx.cursor().execute( + "DROP TABLE IF EXISTS {name}".format(name=db_parameters["name"]) + ) + + +def test_binding_datetime_qmark(conn_cnx, db_parameters): + """Ensures datetime can bound.""" + import datetime + + import snowflake.connector + + snowflake.connector.paramstyle = "qmark" + try: + with conn_cnx() as cnx: + cnx.cursor().execute( + "CREATE OR REPLACE TABLE {name} " + "(aa TIMESTAMP_NTZ)".format(name=db_parameters["name"]) + ) + days = 2 + inserts = tuple((datetime.datetime(2018, 1, i + 1),) for i in range(days)) + cnx.cursor().executemany( + "INSERT INTO {name} VALUES(?)".format(name=db_parameters["name"]), + inserts, + ) + ret = ( + cnx.cursor() + .execute( + "SELECT * FROM {name} ORDER BY 1".format(name=db_parameters["name"]) + ) + .fetchall() + ) + for i in range(days): + assert ret[i][0] == inserts[i][0] + finally: + with conn_cnx() as cnx: + cnx.cursor().execute( + "DROP TABLE IF EXISTS {name}".format(name=db_parameters["name"]) + ) + + +def test_binding_none(conn_cnx): + import snowflake.connector + + original = snowflake.connector.paramstyle + snowflake.connector.paramstyle = "qmark" + + with conn_cnx() as con: + try: + table_name = "foo" + con.cursor().execute(f"CREATE TABLE {table_name}(bar text)") + con.cursor().execute(f"INSERT INTO {table_name} VALUES (?)", [None]) + finally: + con.cursor().execute(f"DROP TABLE {table_name}") + snowflake.connector.paramstyle = original diff --git a/test/integ/test_query_cancelling.py b/test/integ/test_query_cancelling.py new file mode 100644 index 000000000..2710715e7 --- /dev/null +++ b/test/integ/test_query_cancelling.py @@ -0,0 +1,158 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
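+#
+# Runs a long generator query in one session while a second session calls
+# system$cancel_all_queries() against it, both as the same user and as a
+# different user.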
+# + +from __future__ import annotations + +import logging +import time +from logging import getLogger +from threading import Lock, Thread + +import pytest + +from snowflake.connector import errors + +logger = getLogger(__name__) +logging.basicConfig(level=logging.CRITICAL) + +try: + from ..parameters import CONNECTION_PARAMETERS_ADMIN +except ImportError: + CONNECTION_PARAMETERS_ADMIN = {} + + +@pytest.fixture() +def conn_cnx_query_cancelling(request, conn_cnx): + with conn_cnx() as cnx: + cnx.cursor().execute("use role securityadmin") + cnx.cursor().execute( + "create or replace user magicuser1 password='xxx' " "default_role='PUBLIC'" + ) + cnx.cursor().execute( + "create or replace user magicuser2 password='xxx' " "default_role='PUBLIC'" + ) + + yield conn_cnx + + with conn_cnx() as cnx: + cnx.cursor().execute("use role accountadmin") + cnx.cursor().execute("drop user magicuser1") + cnx.cursor().execute("drop user magicuser2") + + +def _query_run(conn, shared, expectedCanceled=True): + """Runs a query, and wait for possible cancellation.""" + with conn(user="magicuser1", password="xxx") as cnx: + cnx.cursor().execute("use warehouse regress") + + # Collect the session_id + with cnx.cursor() as c: + c.execute("SELECT current_session()") + for rec in c: + with shared.lock: + shared.session_id = int(rec[0]) + logger.info(f"Current Session id: {shared.session_id}") + + # Run a long query and see if we're canceled + canceled = False + try: + c = cnx.cursor() + c.execute( + """ +select count(*) from table(generator(timeLimit => 10))""" + ) + except errors.ProgrammingError as e: + logger.info("FAILED TO RUN QUERY: %s", e) + canceled = e.errno == 604 + if not canceled: + logger.exception("must have been canceled") + raise + finally: + c.close() + + if canceled: + logger.info("Query failed or was canceled") + else: + logger.info("Query finished successfully") + + assert canceled == expectedCanceled + + +def _query_cancel(conn, shared, user, password, expectedCanceled): + """Tests cancelling the query running in another thread.""" + with conn(user=user, password=password) as cnx: + cnx.cursor().execute("use warehouse regress") + # .use_warehouse_database_schema(cnx) + + logger.info( + "User %s's role is: %s", + user, + cnx.cursor().execute("select current_role()").fetchone()[0], + ) + # Run the cancel query + logger.info("User %s is waiting for Session ID to be available", user) + while True: + with shared.lock: + if shared.session_id is not None: + break + logger.info("User %s is waiting for Session ID to be available", user) + time.sleep(1) + logger.info(f"Target Session id: {shared.session_id}") + try: + query = f"call system$cancel_all_queries({shared.session_id})" + logger.info("Query: %s", query) + cnx.cursor().execute(query) + assert ( + expectedCanceled + ), "You should NOT be able to " "cancel the query [{}]".format( + shared.session_id + ) + except errors.ProgrammingError as e: + logger.info("FAILED TO CANCEL THE QUERY: %s", e) + assert ( + not expectedCanceled + ), "You should be able to " "cancel the query [{}]".format( + shared.session_id + ) + + +def _test_helper(conn, expectedCanceled, cancelUser, cancelPass): + """Helper function for the actual tests. + + queryRun is always run with magicuser1/xxx. 
+    queryCancel is run with cancelUser/cancelPass.
+    """
+
+    class Shared:
+        def __init__(self):
+            self.lock = Lock()
+            self.session_id = None
+
+    shared = Shared()
+    queryRun = Thread(target=_query_run, args=(conn, shared, expectedCanceled))
+    queryRun.start()
+    queryCancel = Thread(
+        target=_query_cancel,
+        args=(conn, shared, cancelUser, cancelPass, expectedCanceled),
+    )
+    queryCancel.start()
+    queryCancel.join(5)
+    queryRun.join(20)
+
+
+@pytest.mark.skipif(
+    not CONNECTION_PARAMETERS_ADMIN, reason="Snowflake admin account is not accessible."
+)
+def test_same_user_canceling(conn_cnx_query_cancelling):
+    """Tests that the same user CAN cancel their own query."""
+    _test_helper(conn_cnx_query_cancelling, True, "magicuser1", "xxx")
+
+
+@pytest.mark.skipif(
+    not CONNECTION_PARAMETERS_ADMIN, reason="Snowflake admin account is not accessible."
+)
+def test_other_user_canceling(conn_cnx_query_cancelling):
+    """Tests that another user CANNOT cancel someone else's query."""
+    _test_helper(conn_cnx_query_cancelling, False, "magicuser2", "xxx")
diff --git a/test/test_results.py b/test/integ/test_results.py
similarity index 56%
rename from test/test_results.py
rename to test/integ/test_results.py
index 4d3478ccf..63242302a 100644
--- a/test/test_results.py
+++ b/test/integ/test_results.py
@@ -1,18 +1,17 @@
 #!/usr/bin/env python
-# -*- coding: utf-8 -*-
 #
-# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved.
+# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved.
 #
+from __future__ import annotations
+
 import pytest
 
 from snowflake.connector import ProgrammingError
 
 
 def test_results(conn_cnx):
-    """
-    Gets results for the given qid
-    """
+    """Gets results for the given qid."""
     with conn_cnx() as cnx:
         cur = cnx.cursor()
         cur.execute("select * from values(1,2),(3,4)")
@@ -24,24 +23,17 @@ def test_results(conn_cnx):
 
 
 def test_results_with_error(conn_cnx):
-    """
-    Gets results with error
-    """
+    """Gets results with error."""
     with conn_cnx() as cnx:
         cur = cnx.cursor()
-        sfqid = None
-        try:
+        with pytest.raises(ProgrammingError) as e:
             cur.execute("select blah")
-            pytest.fail("Should fail here!")
-        except ProgrammingError as e:
-            sfqid = e.sfqid
+        sfqid = e.value.sfqid
 
-        got_sfqid = None
-        try:
+        with pytest.raises(ProgrammingError) as e:
             cur.query_result(sfqid)
-            pytest.fail("Should fail here again!")
-        except ProgrammingError as e:
-            got_sfqid = e.sfqid
+        got_sfqid = e.value.sfqid
 
+    assert sfqid is not None
     assert got_sfqid is not None
     assert got_sfqid == sfqid
diff --git a/test/integ/test_reuse_cursor.py b/test/integ/test_reuse_cursor.py
new file mode 100644
index 000000000..1267672cb
--- /dev/null
+++ b/test/integ/test_reuse_cursor.py
@@ -0,0 +1,33 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved.
+#
+
+
+def test_reuse_cursor(conn_cnx, db_parameters):
+    """Ensures only the last executed command/query's result sets are returned."""
+    with conn_cnx() as cnx:
+        c = cnx.cursor()
+        c.execute(
+            "create or replace table {name}(c1 string)".format(
+                name=db_parameters["name"]
+            )
+        )
+        try:
+            c.execute(
+                "insert into {name} values('123'),('456'),('678')".format(
+                    name=db_parameters["name"]
+                )
+            )
+            c.execute("show tables")
+            c.execute("select current_date()")
+            rec = c.fetchone()
+            assert len(rec) == 1, "number of records is wrong"
+            c.execute(
+                "select * from {name} order by 1".format(name=db_parameters["name"])
+            )
+            recs = c.fetchall()
+            assert c.description[0][0] == "C1", "first column name"
+            assert len(recs) == 3, "number of records is wrong"
+        finally:
+            c.execute("drop table if exists {name}".format(name=db_parameters["name"]))
diff --git a/test/integ/test_session_parameters.py b/test/integ/test_session_parameters.py
new file mode 100644
index 000000000..247a6146e
--- /dev/null
+++ b/test/integ/test_session_parameters.py
@@ -0,0 +1,117 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved.
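+#
+# Checks that session_parameters given at connect time (e.g. TIMEZONE) take
+# effect, and that an explicit client_session_keep_alive on the connection
+# overrides the account- and user-level backend setting.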
+#
+
+
+def test_reuse_cursor(conn_cnx, db_parameters):
+    """Ensures that only the last executed command/query's result set is returned."""
+    with conn_cnx() as cnx:
+        c = cnx.cursor()
+        c.execute(
+            "create or replace table {name}(c1 string)".format(
+                name=db_parameters["name"]
+            )
+        )
+        try:
+            c.execute(
+                "insert into {name} values('123'),('456'),('678')".format(
+                    name=db_parameters["name"]
+                )
+            )
+            c.execute("show tables")
+            c.execute("select current_date()")
+            rec = c.fetchone()
+            assert len(rec) == 1, "number of records is wrong"
+            c.execute(
+                "select * from {name} order by 1".format(name=db_parameters["name"])
+            )
+            recs = c.fetchall()
+            assert c.description[0][0] == "C1", "first column name"
+            assert len(recs) == 3, "number of records is wrong"
+        finally:
+            c.execute("drop table if exists {name}".format(name=db_parameters["name"]))
diff --git a/test/integ/test_session_parameters.py b/test/integ/test_session_parameters.py
new file mode 100644
index 000000000..247a6146e
--- /dev/null
+++ b/test/integ/test_session_parameters.py
@@ -0,0 +1,117 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved.
+#
+
+from __future__ import annotations
+
+import pytest
+
+import snowflake.connector
+
+try:  # pragma: no cover
+    from parameters import CONNECTION_PARAMETERS_ADMIN
+except ImportError:
+    CONNECTION_PARAMETERS_ADMIN = {}
+
+
+def test_session_parameters(db_parameters):
+    """Sets the session parameters at connection time."""
+    connection = snowflake.connector.connect(
+        protocol=db_parameters["protocol"],
+        account=db_parameters["account"],
+        user=db_parameters["user"],
+        password=db_parameters["password"],
+        host=db_parameters["host"],
+        port=db_parameters["port"],
+        database=db_parameters["database"],
+        schema=db_parameters["schema"],
+        session_parameters={"TIMEZONE": "UTC"},
+    )
+    ret = connection.cursor().execute("show parameters like 'TIMEZONE'").fetchone()
+    assert ret[1] == "UTC"
+
+
+@pytest.mark.skipif(
+    not CONNECTION_PARAMETERS_ADMIN,
+    reason="Snowflake admin required to set up the parameter.",
+)
+def test_client_session_keep_alive(db_parameters, conn_cnx):
+    """Tests the client_session_keep_alive setting.
+
+    Ensures that the client's explicit config for the client_session_keep_alive
+    session parameter is always honored and takes precedence over the
+    user and account level backend configuration.
+ """ + admin_cnxn = snowflake.connector.connect( + protocol=db_parameters["sf_protocol"], + account=db_parameters["sf_account"], + user=db_parameters["sf_user"], + password=db_parameters["sf_password"], + host=db_parameters["sf_host"], + port=db_parameters["sf_port"], + ) + + # Ensure backend parameter is set to False + set_backend_client_session_keep_alive(db_parameters, admin_cnxn, False) + with conn_cnx(client_session_keep_alive=True) as connection: + ret = ( + connection.cursor() + .execute("show parameters like 'CLIENT_SESSION_KEEP_ALIVE'") + .fetchone() + ) + assert ret[1] == "true" + + # Set backend parameter to True + set_backend_client_session_keep_alive(db_parameters, admin_cnxn, True) + + # Set session parameter to False + with conn_cnx(client_session_keep_alive=False) as connection: + ret = ( + connection.cursor() + .execute("show parameters like 'CLIENT_SESSION_KEEP_ALIVE'") + .fetchone() + ) + assert ret[1] == "false" + + # Set session parameter to None backend parameter continues to be True + with conn_cnx(client_session_keep_alive=None) as connection: + ret = ( + connection.cursor() + .execute("show parameters like 'CLIENT_SESSION_KEEP_ALIVE'") + .fetchone() + ) + assert ret[1] == "true" + + admin_cnxn.close() + + +def create_client_connection(db_parameters: object, val: bool) -> object: + """Create connection with client session keep alive set to specific value.""" + connection = snowflake.connector.connect( + protocol=db_parameters["protocol"], + account=db_parameters["account"], + user=db_parameters["user"], + password=db_parameters["password"], + host=db_parameters["host"], + port=db_parameters["port"], + database=db_parameters["database"], + schema=db_parameters["schema"], + client_session_keep_alive=val, + ) + return connection + + +def set_backend_client_session_keep_alive( + db_parameters: object, admin_cnx: object, val: bool +) -> None: + """Set both at Account level and User level.""" + query = "alter account {} set CLIENT_SESSION_KEEP_ALIVE={}".format( + db_parameters["account"], str(val) + ) + admin_cnx.cursor().execute(query) + + query = "alter user {}.{} set CLIENT_SESSION_KEEP_ALIVE={}".format( + db_parameters["account"], db_parameters["user"], str(val) + ) + admin_cnx.cursor().execute(query) diff --git a/test/test_snowsql_timestamp_format.py b/test/integ/test_snowsql_timestamp_format.py similarity index 50% rename from test/test_snowsql_timestamp_format.py rename to test/integ/test_snowsql_timestamp_format.py index e7f83ee39..0dce84b96 100644 --- a/test/test_snowsql_timestamp_format.py +++ b/test/integ/test_snowsql_timestamp_format.py @@ -1,90 +1,122 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. # + +from __future__ import annotations + import pytest -from snowflake.connector.compat import (IS_WINDOWS, PY2, PY34_EXACT) +from snowflake.connector.compat import IS_WINDOWS from snowflake.connector.converter_snowsql import SnowflakeConverterSnowSQL @pytest.mark.skipif( - IS_WINDOWS or PY2 or PY34_EXACT, + IS_WINDOWS, reason="SnowSQL runs on Python 35+. 
" - "Windows doesn't support more than 9999 yeers") + "Windows doesn't support more than 9999 yeers", +) def test_snowsql_timestamp_format(conn_cnx): - """ - In SnowSQL, OverflowError should not happen - """ - with conn_cnx( - converter_class=SnowflakeConverterSnowSQL - ) as cnx: - cnx.cursor().execute(""" -ALTER SESSION SET + """In SnowSQL, OverflowError should not happen.""" + with conn_cnx(converter_class=SnowflakeConverterSnowSQL) as cnx: + cnx.cursor().execute( + """ +alter session set python_connector_query_result_format='JSON' +""" + ) + cnx.cursor().execute( + """ +ALTER SESSION SET TIMEZONE='America/Los_Angeles', TIMESTAMP_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM', TIMESTAMP_NTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM', TIMESTAMP_LTZ_OUTPUT_FORMAT='DY, DD MON YYYY HH24:MI:SS TZHTZM'; -""") - ret = cnx.cursor().execute(""" +""" + ) + ret = ( + cnx.cursor() + .execute( + """ SELECT '19999-09-30 12:34:56'::timestamp_ltz, '19999-09-30 12:34:56'::timestamp_ntz, '2001-09-30 12:34:56.00123400'::timestamp_ntz(8) -""").fetchone() - assert ret[0] == 'Thu, 30 Sep 19999 19:34:56 +0000' - assert ret[1] == 'Thu, 30 Sep 19999 12:34:56 ' +""" + ) + .fetchone() + ) + assert ret[0] == "Thu, 30 Sep 19999 19:34:56 +0000" + assert ret[1] == "Thu, 30 Sep 19999 12:34:56 " # The last space is included as TZHTZM is an empty value if # datatype is datetime. - assert ret[2] == 'Sun, 30 Sep 2001 12:34:56 ' + assert ret[2] == "Sun, 30 Sep 2001 12:34:56 " # NOTE timestamp_tz doesn't accept the timestamp out of range # what is the range? -@pytest.mark.skipif(PY2 or PY34_EXACT, reason="SnowSQL runs on Python35+") def test_snowsql_timestamp_negative_epoch(conn_cnx): with conn_cnx(converter_class=SnowflakeConverterSnowSQL) as cnx: - cnx.cursor().execute(""" + cnx.cursor().execute( + """ +alter session set python_connector_query_result_format='JSON' +""" + ) + cnx.cursor().execute( + """ ALTER SESSION SET TIMEZONE='America/Los_Angeles', TIMESTAMP_OUTPUT_FORMAT='YYYY-MM-DD HH24:MI:SS.FF9 TZH:TZM', TIMESTAMP_NTZ_OUTPUT_FORMAT='YYYY-MM-DD HH24:MI:SS.FF9 TZH:TZM', TIMESTAMP_LTZ_OUTPUT_FORMAT='YYYY-MM-DD HH24:MI:SS.FF9 TZH:TZM'; -""") - ret = cnx.cursor().execute(""" +""" + ) + ret = ( + cnx.cursor() + .execute( + """ SELECT - '1969-09-30 12:34:56.123456789'::timestamp_ltz(7), + '1969-09-30 12:34:56.123456789'::timestamp_ltz(7), '1969-09-30 12:34:56.123456789'::timestamp_ntz(8), '1969-09-30 12:34:56.123456789 -08:00'::timestamp_tz(8), '1969-09-30 12:34:56.123456789 -08:00'::timestamp_tz(4), '2001-09-30 12:34:56.00123400'::timestamp_ntz(8) - """).fetchone() - assert ret[0] == '1969-09-30 12:34:56.123456700 -0700' - assert ret[1] == '1969-09-30 12:34:56.123456780 ' - assert ret[2] == '1969-09-30 12:34:56.123456780 -0800' - assert ret[3] == '1969-09-30 12:34:56.123400000 -0800' + """ + ) + .fetchone() + ) + assert ret[0] == "1969-09-30 12:34:56.123456700 -0700" + assert ret[1] == "1969-09-30 12:34:56.123456780 " + assert ret[2] == "1969-09-30 12:34:56.123456780 -0800" + assert ret[3] == "1969-09-30 12:34:56.123400000 -0800" # a scale in format forces to add 0 to the end - assert ret[4] == '2001-09-30 12:34:56.001234000 ' - cnx.cursor().execute(""" + assert ret[4] == "2001-09-30 12:34:56.001234000 " + cnx.cursor().execute( + """ ALTER SESSION SET TIMEZONE='America/Los_Angeles', TIMESTAMP_OUTPUT_FORMAT='YYYY-MM-DD HH24:MI:SS.FF TZH:TZM', TIMESTAMP_NTZ_OUTPUT_FORMAT='YYYY-MM-DD HH24:MI:SS.FF TZH:TZM', TIMESTAMP_LTZ_OUTPUT_FORMAT='YYYY-MM-DD HH24:MI:SS.FF TZH:TZM'; -""") - ret = cnx.cursor().execute(""" +""" + 
) + ret = ( + cnx.cursor() + .execute( + """ SELECT '1969-09-30 12:34:56.123456789'::timestamp_ltz(7), '1969-09-30 12:34:56.123456789'::timestamp_ntz(8), '1969-09-30 12:34:56.123456789 -08:00'::timestamp_tz(8), '1969-09-30 12:34:56.123456789 -08:00'::timestamp_tz(4), '2001-09-30 12:34:56.00123400'::timestamp_ntz(8) - """).fetchone() - assert ret[0] == '1969-09-30 12:34:56.1234567 -0700' - assert ret[1] == '1969-09-30 12:34:56.12345678 ' - assert ret[2] == '1969-09-30 12:34:56.12345678 -0800' - assert ret[3] == '1969-09-30 12:34:56.1234 -0800' - assert ret[4] == '2001-09-30 12:34:56.00123400 ' + """ + ) + .fetchone() + ) + assert ret[0] == "1969-09-30 12:34:56.1234567 -0700" + assert ret[1] == "1969-09-30 12:34:56.12345678 " + assert ret[2] == "1969-09-30 12:34:56.12345678 -0800" + assert ret[3] == "1969-09-30 12:34:56.1234 -0800" + assert ret[4] == "2001-09-30 12:34:56.00123400 " diff --git a/test/test_statement_parameter_binding.py b/test/integ/test_statement_parameter_binding.py similarity index 62% rename from test/test_statement_parameter_binding.py rename to test/integ/test_statement_parameter_binding.py index 8d2c6b3bc..87bb84208 100644 --- a/test/test_statement_parameter_binding.py +++ b/test/integ/test_statement_parameter_binding.py @@ -1,35 +1,32 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. # -from datetime import datetime -import pytz +from __future__ import annotations + +from datetime import datetime import pytest +import pytz try: - from parameters import (CONNECTION_PARAMETERS_ADMIN) -except: + from parameters import CONNECTION_PARAMETERS_ADMIN +except ImportError: CONNECTION_PARAMETERS_ADMIN = {} @pytest.mark.skipif( - not CONNECTION_PARAMETERS_ADMIN, - reason="Snowflake admin account is not accessible." + not CONNECTION_PARAMETERS_ADMIN, reason="Snowflake admin account is not accessible." ) def test_binding_security(conn_cnx): - """ - Binding statement parameters - """ + """Tests binding statement parameters.""" expected_qa_mode_datetime = datetime(1967, 6, 23, 7, 0, 0, 123000, pytz.UTC) with conn_cnx() as cnx: cnx.cursor().execute("alter session set timezone='UTC'") with cnx.cursor() as cur: - cur.execute( - "show databases like 'TESTDB'") + cur.execute("show databases like 'TESTDB'") rec = cur.fetchone() assert rec[0] != expected_qa_mode_datetime @@ -37,13 +34,13 @@ def test_binding_security(conn_cnx): cur.execute( "show databases like 'TESTDB'", _statement_params={ - 'QA_MODE': True, - }) + "QA_MODE": True, + }, + ) rec = cur.fetchone() assert rec[0] == expected_qa_mode_datetime with cnx.cursor() as cur: - cur.execute( - "show databases like 'TESTDB'") + cur.execute("show databases like 'TESTDB'") rec = cur.fetchone() assert rec[0] != expected_qa_mode_datetime diff --git a/test/integ/test_transaction.py b/test/integ/test_transaction.py new file mode 100644 index 000000000..761288d9e --- /dev/null +++ b/test/integ/test_transaction.py @@ -0,0 +1,167 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
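A hedged sketch of what the SnowSQL-converter tests above rely on: with SnowflakeConverterSnowSQL the connector hands back timestamps as server-formatted strings, so a year like 19999 cannot overflow datetime. The credentials in db_config are hypothetical:

    import snowflake.connector
    from snowflake.connector.converter_snowsql import SnowflakeConverterSnowSQL

    with snowflake.connector.connect(
        converter_class=SnowflakeConverterSnowSQL, **db_config
    ) as cnx:
        cnx.cursor().execute(
            "alter session set python_connector_query_result_format='JSON'"
        )
        cnx.cursor().execute(
            "alter session set TIMESTAMP_NTZ_OUTPUT_FORMAT='YYYY-MM-DD HH24:MI:SS.FF9'"
        )
        (ts,) = (
            cnx.cursor()
            .execute("select '19999-09-30 12:34:56'::timestamp_ntz")
            .fetchone()
        )
        assert isinstance(ts, str)  # a formatted string, not a datetime object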
+#
+
+from __future__ import annotations
+
+import snowflake.connector
+
+
+def test_transaction(conn_cnx, db_parameters):
+    """Tests transaction API."""
+    with conn_cnx() as cnx:
+        cnx.cursor().execute(
+            "create table {name} (c1 int)".format(name=db_parameters["name"])
+        )
+        cnx.cursor().execute(
+            "insert into {name}(c1) "
+            "values(1234),(3456)".format(name=db_parameters["name"])
+        )
+        c = cnx.cursor()
+        c.execute("select * from {name}".format(name=db_parameters["name"]))
+        total = 0
+        for rec in c:
+            total += rec[0]
+        assert total == 4690, "total integer"
+
+        #
+        cnx.cursor().execute("begin")
+        cnx.cursor().execute(
+            "insert into {name}(c1) values(5678),(7890)".format(
+                name=db_parameters["name"]
+            )
+        )
+        c = cnx.cursor()
+        c.execute("select * from {name}".format(name=db_parameters["name"]))
+        total = 0
+        for rec in c:
+            total += rec[0]
+        assert total == 18258, "total integer"
+        cnx.rollback()
+
+        c.execute("select * from {name}".format(name=db_parameters["name"]))
+        total = 0
+        for rec in c:
+            total += rec[0]
+        assert total == 4690, "total integer"
+
+        #
+        cnx.cursor().execute("begin")
+        cnx.cursor().execute(
+            "insert into {name}(c1) values(2345),(6789)".format(
+                name=db_parameters["name"]
+            )
+        )
+        c = cnx.cursor()
+        c.execute("select * from {name}".format(name=db_parameters["name"]))
+        total = 0
+        for rec in c:
+            total += rec[0]
+        assert total == 13824, "total integer"
+        cnx.commit()
+        cnx.rollback()
+        c = cnx.cursor()
+        c.execute("select * from {name}".format(name=db_parameters["name"]))
+        total = 0
+        for rec in c:
+            total += rec[0]
+        assert total == 13824, "total integer"
+
+
+def test_connection_context_manager(request, db_parameters):
+    db_config = {
+        "protocol": db_parameters["protocol"],
+        "account": db_parameters["account"],
+        "user": db_parameters["user"],
+        "password": db_parameters["password"],
+        "host": db_parameters["host"],
+        "port": db_parameters["port"],
+        "database": db_parameters["database"],
+        "schema": db_parameters["schema"],
+        "timezone": "UTC",
+    }
+
+    def fin():
+        with snowflake.connector.connect(**db_config) as cnx:
+            cnx.cursor().execute(
+                """
+DROP TABLE IF EXISTS {name}
+""".format(
+                    name=db_parameters["name"]
+                )
+            )
+
+    request.addfinalizer(fin)
+
+    try:
+        with snowflake.connector.connect(**db_config) as cnx:
+            cnx.autocommit(False)
+            cnx.cursor().execute(
+                """
+CREATE OR REPLACE TABLE {name} (cc1 int)
+""".format(
+                    name=db_parameters["name"]
+                )
+            )
+            cnx.cursor().execute(
+                """
+INSERT INTO {name} VALUES(1),(2),(3)
+""".format(
+                    name=db_parameters["name"]
+                )
+            )
+            ret = (
+                cnx.cursor()
+                .execute(
+                    """
+SELECT SUM(cc1) FROM {name}
+""".format(
+                        name=db_parameters["name"]
+                    )
+                )
+                .fetchone()
+            )
+            assert ret[0] == 6
+            cnx.commit()
+            cnx.cursor().execute(
+                """
+INSERT INTO {name} VALUES(4),(5),(6)
+""".format(
+                    name=db_parameters["name"]
+                )
+            )
+            ret = (
+                cnx.cursor()
+                .execute(
+                    """
+SELECT SUM(cc1) FROM {name}
+""".format(
+                        name=db_parameters["name"]
+                    )
+                )
+                .fetchone()
+            )
+            assert ret[0] == 21
+            cnx.cursor().execute(
+                """
+SELECT WRONG SYNTAX QUERY
+"""
+            )
+            raise Exception("Failed to cause the syntax error")
+    except snowflake.connector.Error:
+        # the syntax error should be caught here,
+        # and the last change must have been rolled back
+        with snowflake.connector.connect(**db_config) as cnx:
+            ret = (
+                cnx.cursor()
+                .execute(
+                    """
+SELECT SUM(cc1) FROM {name}
+""".format(
+                        name=db_parameters["name"]
+                    )
+                )
+                .fetchone()
+            )
+            assert ret[0] == 6
diff --git a/test/integ_helpers.py b/test/integ_helpers.py
new file mode 
100644
index 000000000..9346fb19b
--- /dev/null
+++ b/test/integ_helpers.py
@@ -0,0 +1,47 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved.
+#
+
+from __future__ import annotations
+
+import os
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:  # pragma: no cover
+    from snowflake.connector.cursor import SnowflakeCursor
+
+
+def put(
+    csr: SnowflakeCursor,
+    file_path: str,
+    stage_path: str,
+    from_path: bool,
+    sql_options: str | None = "",
+    **kwargs,
+) -> SnowflakeCursor:
+    """Execute a PUT query with the given cursor.
+
+    Args:
+        csr: Snowflake cursor object.
+        file_path: Path to the target file on the local system; only its base name is used when from_path is False.
+        stage_path: Destination path of the file on the stage.
+        from_path: Whether the target file is uploaded by path; specify file_stream= instead when False.
+        sql_options: Optional arguments to the PUT command.
+        **kwargs: Optional arguments passed to SnowflakeCursor.execute().
+
+    Returns:
+        The cursor that executed the PUT command.
+    """
+    sql = "put 'file://{file}' @{stage} {sql_options}"
+    if from_path:
+        kwargs.pop("file_stream", None)
+    else:
+        # PUT from stream
+        file_path = os.path.basename(file_path)
+    if kwargs.pop("commented", False):
+        sql = "--- test comments\n" + sql
+    sql = sql.format(
+        file=file_path.replace("\\", "\\\\"), stage=stage_path, sql_options=sql_options
+    )
+    return csr.execute(sql, **kwargs)
diff --git a/test/lazy_var.py b/test/lazy_var.py
new file mode 100644
index 000000000..c3c21c0f7
--- /dev/null
+++ b/test/lazy_var.py
@@ -0,0 +1,30 @@
+#
+# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved.
+#
+from __future__ import annotations
+
+from typing import Callable, Generic, TypeVar
+
+T = TypeVar("T")
+
+
+class LazyVar(Generic[T]):
+    """Our implementation of a lazy variable.
+
+    Mostly used to share a variable between tests that should be calculated
+    at most once, and only if it is actually needed.
+    """
+
+    def __init__(self, generator: Callable[[], T]):
+        """Initializes a lazy variable.
+
+        Args:
+            generator: A function that takes no arguments and generates the actual variable.
+        """
+        self.value = None
+        self.generator = generator
+
+    def get(self) -> T:
+        if self.value is None:
+            self.value = self.generator()
+        return self.value
diff --git a/test/randomize.py b/test/randomize.py
new file mode 100644
index 000000000..54568801f
--- /dev/null
+++ b/test/randomize.py
@@ -0,0 +1,27 @@
+#
+# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved.
+#
+
+from __future__ import annotations
+
+import random
+import string
+from typing import Sequence
+
+
+def random_string(
+    length: int,
+    prefix: str = "",
+    suffix: str = "",
+    choices: Sequence[str] = string.ascii_lowercase,
+) -> str:
+    """Our convenience function to generate random strings for object names.
+
+    Args:
+        length: How many random characters to choose from choices.
+        prefix: Prefix to add to the random string generated.
+        suffix: Suffix to add to the random string generated.
+        choices: A sequence of characters to choose from.
+    """
+    random_part = "".join([random.choice(choices) for _ in range(length)])
+    return "".join([prefix, random_part, suffix])
diff --git a/test/test_arrow_pandas.py b/test/test_arrow_pandas.py
deleted file mode 100644
index 9414d46e2..000000000
--- a/test/test_arrow_pandas.py
+++ /dev/null
@@ -1,129 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-#
-# Copyright (c) 2012-2019 Snowflake Computing Inc. 
All right reserved. -# - -import pytest -import time -import pandas as pd -import random -from datetime import datetime - -try: - import pyarrow -except ImportError as e: - pass - -try: - from snowflake.connector.arrow_iterator import PyArrowIterator - no_arrow_iterator_ext = False -except ImportError: - no_arrow_iterator_ext = True - -sql_arrow = "alter session set query_result_format='ARROW_FORCE';" - -@pytest.mark.skipif( - no_arrow_iterator_ext, - reason="arrow_iterator extension is not built.") -def test_num_one(conn_cnx): - print('Test fetching one single dataframe') - row_count = 50000 - col_count = 2 - random_seed = get_random_seed() - sql_exec = ("select seq4() as c1, uniform(1, 10, random({})) as c2 from ".format(random_seed) + - "table(generator(rowcount=>{})) order by c1, c2".format(row_count)) - fetch_pandas(conn_cnx, sql_exec,row_count, col_count, 'one') - -@pytest.mark.skipif( - no_arrow_iterator_ext, - reason="arrow_iterator extension is not built.") -def test_num_batch(conn_cnx): - print('Test fetching dataframes in batch') - row_count = 50000 - col_count = 2 - random_seed = get_random_seed() - sql_exec = ("select seq4() as c1, uniform(1, 10, random({})) as c2 from ".format(random_seed) + - "table(generator(rowcount=>{})) order by c1, c2".format(row_count)) - fetch_pandas(conn_cnx, sql_exec, row_count, col_count, 'batch') - -@pytest.mark.skipif( - no_arrow_iterator_ext, - reason="arrow_iterator extension is not built.") -def test_empty(conn_cnx): - print('Test fetch empty dataframe') - with conn_cnx() as cnx: - cursor = cnx.cursor() - cursor.execute(sql_arrow) - cursor.execute("select seq4() from table(generator(rowcount=>1)) limit 0") - assert cursor.fetch_pandas_all() is None, 'the result is not none' - -def get_random_seed(): - random.seed(datetime.now()) - return random.randint(0, 10000) - -def fetch_pandas(conn_cnx, sql, row_count, col_count, method='one'): - """ - test parameters can be customized - @param conn_cnx: connection - @param sql: SQL command for execution - @param row_count: # of total rows combining all dataframes - @param col_count: # of columns in dataframe - @param method: - 1. If method is 'batch', we fetch dataframes in batch. - 2. 
If method is 'one', we fetch a single dataframe containing all data - """ - - assert row_count != 0, '# of rows should be larger than 0' - assert col_count != 0, '# of columns should be larger than 0' - - with conn_cnx() as cnx_row: - with conn_cnx() as cnx_table: - # fetch dataframe by fetching row by row - cursor_row = cnx_row.cursor() - cursor_row.execute(sql_arrow) - cursor_row.execute(sql) - - # build dataframe - # actually its exec time would be different from `pd.read_sql()` via sqlalchemy as most people use - # further perf test can be done separately - start_time = time.time() - df_old = pd.DataFrame(cursor_row.fetchall(), columns=['c{}'.format(i) for i in range(col_count)]) - end_time = time.time() - print('The original way took {}s'.format(end_time - start_time)) - cursor_row.close() - - # fetch dataframe with new arrow support - cursor_table = cnx_table.cursor() - cursor_table.execute(sql_arrow) - cursor_table.execute(sql) - - # build dataframe - total_rows, total_batches = 0, 0 - start_time = time.time() - if method == 'one': - df_new = cursor_table.fetch_pandas_all() - total_rows = df_new.shape[0] - else: - for df_new in cursor_table.fetch_pandas_batches(): - total_rows += df_new.shape[0] - total_batches += 1 - end_time = time.time() - print('new way (fetching {}) took {}s'.format(method, end_time - start_time)) - if method == 'batch': - print('new way has # of batches : {}'.format(total_batches)) - cursor_table.close() - assert total_rows == row_count, 'there should be {} rows, but {} rows'.format(row_count, total_rows) - - # verify the correctness - # only do it when fetch one dataframe - if method == 'one' : - assert df_old.shape == df_new.shape, 'the shape of old dataframe is {}, the shape of new dataframe is {}, \ - shapes are not equal'.format(df_old.shape, df_new.shape) - - for i in range(row_count): - col_old = df_old.iloc[i] - col_new = df_new.iloc[i] - for j, (c_old, c_new) in enumerate(zip(col_old, col_new)): - assert c_old == c_new, '{} row, {} column: old value is {}, new value is {}, \ - values are not equal'.format(i, j, c_old, c_new) \ No newline at end of file diff --git a/test/test_arrow_result.py b/test/test_arrow_result.py deleted file mode 100644 index c60361a0c..000000000 --- a/test/test_arrow_result.py +++ /dev/null @@ -1,108 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. 
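Stripped of the timing code, the deleted test_arrow_pandas.py above compares just two pandas fetch modes; cur is assumed to be an open cursor and the query is an arbitrary stand-in:

    query = "select seq4() as c1 from table(generator(rowcount=>10000)) order by c1"

    # Everything at once: a single DataFrame holding the whole result set.
    cur.execute(query)
    df_all = cur.fetch_pandas_all()

    # Incrementally: an iterator of smaller DataFrames, one per result chunk.
    cur.execute(query)  # re-execute, since each fetch mode consumes the result once
    total_rows = sum(batch.shape[0] for batch in cur.fetch_pandas_batches())
    assert total_rows == df_all.shape[0]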
-# - -import random -import pytest -from datetime import datetime -try: - from snowflake.connector.arrow_iterator import PyArrowIterator - no_arrow_iterator_ext = False -except ImportError: - no_arrow_iterator_ext = True - - -@pytest.mark.skipif( - no_arrow_iterator_ext, - reason="arrow_iterator extension is not built.") -def test_select_with_num(conn_cnx): - col_count = 2 - row_count = 50000 - random_seed = get_random_seed() - sql_text = ("select seq4() as c1, uniform(1, 10, random({})) as c2 from ".format(random_seed) + - "table(generator(rowcount=>{})) order by c1".format(row_count)) - iterate_over_test_chunk("num", conn_cnx, sql_text, row_count, col_count) - - -@pytest.mark.skipif( - no_arrow_iterator_ext, - reason="arrow_iterator extension is not built.") -def test_select_with_string(conn_cnx): - col_count = 2 - row_count = 50000 - random_seed = get_random_seed() - length = random.randint(1, 10) - sql_text = ("select seq4() as c1, randstr({}, random({})) as c2 from ".format(length, random_seed) + - "table(generator(rowcount=>50000)) order by c1") - iterate_over_test_chunk("string", conn_cnx, sql_text, row_count, col_count) - - -@pytest.mark.skipif( - no_arrow_iterator_ext, - reason="arrow_iterator extension is not built.") -def test_select_with_bool(conn_cnx): - col_count = 2 - row_count = 50000 - random_seed = get_random_seed() - sql_text = ("select seq4() as c1, as_boolean(uniform(0, 1, random({}))) as c2 from ".format(random_seed) + - "table(generator(rowcount=>{})) order by c1".format(row_count)) - iterate_over_test_chunk("bool", conn_cnx, sql_text, row_count, col_count) - - -@pytest.mark.skipif( - no_arrow_iterator_ext, - reason="arrow_iterator extension is not built.") -def test_select_with_float(conn_cnx): - col_count = 2 - row_count = 50000 - random_seed = get_random_seed() - pow_val = random.randint(0, 10) - val_len = random.randint(0, 16) - # if we assign val_len a larger value like 20, then the precision difference between c++ and python will become - # very obvious so if we meet some error in this test in the future, please check that whether it is caused by - # different precision between python and c++ - val_range = random.randint(0, 10**val_len) - - sql_text = ("select seq4() as c1, as_double(uniform({}, {}, random({})))/{} as c2 from ".format(-val_range, val_range, random_seed, 10**pow_val) + - "table(generator(rowcount=>{})) order by c1".format(row_count)) - iterate_over_test_chunk("float", conn_cnx, sql_text, row_count, col_count, eps=10**(-pow_val+1)) - - -@pytest.mark.skipif( - no_arrow_iterator_ext, - reason="arrow_iterator extension is not built.") -def test_select_with_empty_resultset(conn_cnx): - with conn_cnx() as cnx: - cursor = cnx.cursor() - cursor.execute("alter session set query_result_format='ARROW_FORCE'") - cursor.execute("select seq4() from table(generator(rowcount=>100)) limit 0") - - assert cursor.fetchone() is None - - -def get_random_seed(): - random.seed(datetime.now()) - return random.randint(0, 10000) - - -def iterate_over_test_chunk(test_name, conn_cnx, sql_text, row_count, col_count, eps=None): - with conn_cnx() as json_cnx: - with conn_cnx() as arrow_cnx: - cursor_json = json_cnx.cursor() - cursor_json.execute("alter session set query_result_format='JSON'") - cursor_json.execute(sql_text) - - cursor_arrow = arrow_cnx.cursor() - cursor_arrow.execute("alter session set query_result_format='ARROW_FORCE'") - cursor_arrow.execute(sql_text) - - for i in range(0, row_count): - json_res = cursor_json.fetchone() - arrow_res = cursor_arrow.fetchone() 
- for j in range(0, col_count): - if test_name == "float" and eps is not None: - assert abs(json_res[j] - arrow_res[j]) <= eps - else: - assert json_res[j] == arrow_res[j] diff --git a/test/test_autocommit.py b/test/test_autocommit.py deleted file mode 100644 index 5426483f9..000000000 --- a/test/test_autocommit.py +++ /dev/null @@ -1,143 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. -# - -import snowflake.connector - - -def exe0(cnx, sql): - return cnx.cursor().execute(sql) - - -def _run_autocommit_off(cnx, db_parameters): - """ - Runs autocommit off test - :param cnx: database connection context - :param db_parameters: database parameters - """ - def exe(cnx, sql): - return cnx.cursor().execute(sql.format(name=db_parameters['name'])) - - exe(cnx, """ -INSERT INTO {name} VALUES(True), (False), (False) -""") - res = exe0(cnx, """ -SELECT CURRENT_TRANSACTION() -""").fetchone() - assert res[0] is not None - res = exe(cnx, """ -SELECT COUNT(*) FROM {name} WHERE c1 -""").fetchone() - assert res[0] == 1 - res = exe(cnx, """ -SELECT COUNT(*) FROM {name} WHERE NOT c1 -""").fetchone() - assert res[0] == 2 - cnx.rollback() - res = exe0(cnx, """ -SELECT CURRENT_TRANSACTION() -""").fetchone() - assert res[0] is None - res = exe(cnx, """ -SELECT COUNT(*) FROM {name} WHERE NOT c1 -""").fetchone() - assert res[0] == 0 - exe(cnx, """ -INSERT INTO {name} VALUES(True), (False), (False) -""") - cnx.commit() - res = exe(cnx, """ -SELECT COUNT(*) FROM {name} WHERE NOT c1 -""").fetchone() - assert res[0] == 2 - cnx.rollback() - res = exe(cnx, """ -SELECT COUNT(*) FROM {name} WHERE NOT c1 -""").fetchone() - assert res[0] == 2 - - -def _run_autocommit_on(cnx, db_parameters): - """ - Run autocommit on test - :param cnx: database connection context - :param db_parameters: database parameters - """ - def exe(cnx, sql): - return cnx.cursor().execute(sql.format(name=db_parameters['name'])) - - exe(cnx, """ -INSERT INTO {name} VALUES(True), (False), (False) -""") - cnx.rollback() - res = exe(cnx, """ -SELECT COUNT(*) FROM {name} WHERE NOT c1 -""").fetchone() - assert res[0] == 4 - - -def test_autocommit_attribute(conn_cnx, db_parameters): - """ - Tests autocommit attribute - :param conn_cnx: database connection context - :param db_parameters: database parameters - """ - def exe(cnx, sql): - return cnx.cursor().execute(sql.format(name=db_parameters['name'])) - - with conn_cnx() as cnx: - exe(cnx, """ -CREATE TABLE {name} (c1 boolean) -""") - try: - cnx.autocommit(False) - _run_autocommit_off(cnx, db_parameters) - cnx.autocommit(True) - _run_autocommit_on(cnx, db_parameters) - finally: - exe(cnx, """ -DROP TABLE IF EXISTS {name} - """) - - -def test_autocommit_parameters(db_parameters): - """ - Tests autocommit parameter - :param db_parameters: database parameters - """ - def exe(cnx, sql): - return cnx.cursor().execute(sql.format(name=db_parameters['name'])) - - with snowflake.connector.connect( - user=db_parameters['user'], - password=db_parameters['password'], - host=db_parameters['host'], - port=db_parameters['port'], - account=db_parameters['account'], - protocol=db_parameters['protocol'], - schema=db_parameters['schema'], - database=db_parameters['database'], - autocommit=False, - ) as cnx: - exe(cnx, """ -CREATE TABLE {name} (c1 boolean) -""") - _run_autocommit_off(cnx, db_parameters) - - with snowflake.connector.connect( - user=db_parameters['user'], - password=db_parameters['password'], - host=db_parameters['host'], - 
port=db_parameters['port'], - account=db_parameters['account'], - protocol=db_parameters['protocol'], - schema=db_parameters['schema'], - database=db_parameters['database'], - autocommit=True, - ) as cnx: - _run_autocommit_on(cnx, db_parameters) - exe(cnx, """ -DROP TABLE IF EXISTS {name} -""") diff --git a/test/test_bindings.py b/test/test_bindings.py deleted file mode 100644 index e63a94ca2..000000000 --- a/test/test_bindings.py +++ /dev/null @@ -1,439 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. -# - -import logging -import tempfile -from os import path - -tempfile.gettempdir() - -for logger_name in ['snowflake.connector', 'botocore']: - logger = logging.getLogger(logger_name) - logger.setLevel(logging.DEBUG) - ch = logging.FileHandler( - path.join(tempfile.gettempdir(), 'python_connector.log')) - ch.setLevel(logging.DEBUG) - ch.setFormatter(logging.Formatter( - '%(asctime)s - %(threadName)s %(filename)s:%(lineno)d - %(funcName)s() - %(levelname)s - %(message)s')) - logger.addHandler(ch) - -import calendar -import time -from datetime import datetime, date, timedelta -from datetime import time as datetime_time -from decimal import Decimal -import pendulum -import pytest -import pytz - -from snowflake.connector.compat import PY2 -from snowflake.connector.converter import convert_datetime_to_epoch -from snowflake.connector.errors import ProgrammingError - -PST_TZ = "America/Los_Angeles" -JST_TZ = "Asia/Tokyo" - - -def test_invalid_binding_option(conn_cnx): - """ - Invalid paramstyle parameters - """ - with pytest.raises(ProgrammingError): - with conn_cnx(paramstyle=u'hahaha'): - pass - - # valid cases - for s in [u'format', u'pyformat', u'qmark', u'numeric']: - with conn_cnx(paramstyle=s): - pass - - -def test_binding(conn_cnx, db_parameters): - """ - Paramstyle qmark basic tests - """ - with conn_cnx(paramstyle=u'qmark') as cnx: - cnx.cursor().execute(""" -create or replace table {name} ( - c1 BOOLEAN, - c2 INTEGER, - c3 NUMBER(38,2), - c4 VARCHAR(1234), - c5 FLOAT, - c6 BINARY, - c7 BINARY, - c8 TIMESTAMP_NTZ, - c9 TIMESTAMP_NTZ, - c10 TIMESTAMP_NTZ, - c11 TIMESTAMP_NTZ, - c12 TIMESTAMP_LTZ, - c13 TIMESTAMP_LTZ, - c14 TIMESTAMP_LTZ, - c15 TIMESTAMP_LTZ, - c16 TIMESTAMP_TZ, - c17 TIMESTAMP_TZ, - c18 TIMESTAMP_TZ, - c19 TIMESTAMP_TZ, - c20 DATE, - c21 TIME, - c22 TIMESTAMP_NTZ, - c23 TIME, - c24 STRING - ) -""".format(name=db_parameters['name'])) - current_utctime = datetime.utcnow() - current_localtime = pytz.utc.localize( - current_utctime, - is_dst=False).astimezone(pytz.timezone(PST_TZ)) - current_localtime_without_tz = datetime.now() - current_localtime_with_other_tz = pytz.utc.localize( - current_localtime_without_tz, - is_dst=False).astimezone(pytz.timezone(JST_TZ)) - dt = date(2017, 12, 30) - tm = datetime_time(hour=1, minute=2, second=3, microsecond=456) - struct_time_v = time.strptime("30 Sep 01 11:20:30", "%d %b %y %H:%M:%S") - tdelta = timedelta(seconds=tm.hour * 3600 + tm.minute * 60 + tm.second, - microseconds=tm.microsecond) - try: - with conn_cnx(paramstyle=u'qmark', timezone=PST_TZ) as cnx: - cnx.cursor().execute(""" -insert into {name} values( -?,?,?, ?,?,?, ?,?,?, ?,?,?, ?,?,?, ?,?,?, ?,?,?, ?,?,?) 
-""".format(name=db_parameters['name']), ( - True, - 1, - Decimal("1.2"), - 'str1', - 1.2, - # Py2 has bytes in str type, so Python Connector - bytes(b'abc') if not PY2 else bytearray(b'abc'), - bytearray(b'def'), - current_utctime, - current_localtime, - current_localtime_without_tz, - current_localtime_with_other_tz, - (u"TIMESTAMP_LTZ", current_utctime), - (u"TIMESTAMP_LTZ", current_localtime), - (u"TIMESTAMP_LTZ", current_localtime_without_tz), - (u"TIMESTAMP_LTZ", current_localtime_with_other_tz), - (u"TIMESTAMP_TZ", current_utctime), - (u"TIMESTAMP_TZ", current_localtime), - (u"TIMESTAMP_TZ", current_localtime_without_tz), - (u"TIMESTAMP_TZ", current_localtime_with_other_tz), - dt, - tm, - (u"TIMESTAMP_NTZ", struct_time_v), - (u"TIME", tdelta), - (u"TEXT", None) - )) - ret = cnx.cursor().execute(""" -select * from {name} where c1=? and c2=? -""".format(name=db_parameters['name']), ( - True, - 1 - )).fetchone() - assert ret[0], "BOOLEAN" - assert ret[2] == Decimal("1.2"), "NUMBER" - assert ret[4] == 1.2, "FLOAT" - assert ret[5] == b'abc' - assert ret[6] == b'def' - assert ret[7] == current_utctime - assert convert_datetime_to_epoch( - ret[8]) == convert_datetime_to_epoch(current_localtime) - assert convert_datetime_to_epoch( - ret[9]) == convert_datetime_to_epoch( - current_localtime_without_tz) - assert convert_datetime_to_epoch( - ret[10]) == convert_datetime_to_epoch( - current_localtime_with_other_tz) - assert convert_datetime_to_epoch( - ret[11]) == convert_datetime_to_epoch(current_utctime) - assert convert_datetime_to_epoch( - ret[12]) == convert_datetime_to_epoch(current_localtime) - assert convert_datetime_to_epoch( - ret[13]) == convert_datetime_to_epoch( - current_localtime_without_tz) - assert convert_datetime_to_epoch( - ret[14]) == convert_datetime_to_epoch( - current_localtime_with_other_tz) - assert convert_datetime_to_epoch( - ret[15]) == convert_datetime_to_epoch(current_utctime) - assert convert_datetime_to_epoch( - ret[16]) == convert_datetime_to_epoch(current_localtime) - assert convert_datetime_to_epoch( - ret[17]) == convert_datetime_to_epoch( - current_localtime_without_tz) - assert convert_datetime_to_epoch( - ret[18]) == convert_datetime_to_epoch( - current_localtime_with_other_tz) - assert ret[19] == dt - assert ret[20] == tm - assert convert_datetime_to_epoch( - ret[21]) == calendar.timegm(struct_time_v) - assert timedelta(seconds=ret[22].hour * 3600 + ret[22].minute * 60 + - ret[22].second, - microseconds=ret[22].microsecond) == tdelta - assert ret[23] is None - finally: - with conn_cnx() as cnx: - cnx.cursor().execute(""" -drop table if exists {name} -""".format(name=db_parameters['name'])) - - -def test_pendulum_binding(conn_cnx, db_parameters): - pendulum_test = pendulum.now() - try: - with conn_cnx() as cnx: - cnx.cursor().execute(""" - create or replace table {name} ( - c1 timestamp - ) - """.format(name=db_parameters['name'])) - c = cnx.cursor() - fmt = "insert into {name}(c1) values(%(v1)s)".format( - name=db_parameters['name'] - ) - c.execute(fmt, {'v1': pendulum_test}) - assert len(cnx.cursor().execute( - "select count(*) from {name}".format( - name=db_parameters['name'])).fetchall()) == 1 - with conn_cnx(paramstyle=u'qmark') as cnx: - cnx.cursor().execute(""" - create or replace table {name} (c1 timestamp, c2 timestamp) - """.format(name=db_parameters['name'])) - with conn_cnx(paramstyle=u'qmark') as cnx: - cnx.cursor().execute(""" - insert into {name} values(?, ?) 
- """.format(name=db_parameters['name']), (pendulum_test, pendulum_test)) - ret = cnx.cursor().execute(""" - select * from {name} - """.format(name=db_parameters['name'])).fetchone() - assert convert_datetime_to_epoch( - ret[0]) == convert_datetime_to_epoch(pendulum_test) - finally: - with conn_cnx() as cnx: - cnx.cursor().execute(""" - drop table if exists {name} - """.format(name=db_parameters['name'])) - - -def test_binding_with_numeric(conn_cnx, db_parameters): - """ - Paramstyle numeric tests. Both qmark and numeric leverages server side - bindings. - """ - with conn_cnx(paramstyle=u'numeric') as cnx: - cnx.cursor().execute(""" -create or replace table {name} (c1 integer, c2 string) -""".format(name=db_parameters['name'])) - - try: - with conn_cnx(paramstyle=u'numeric') as cnx: - cnx.cursor().execute(""" -insert into {name}(c1, c2) values(:2, :1) - """.format(name=db_parameters['name']), ( - u'str1', - 123 - )) - cnx.cursor().execute(""" -insert into {name}(c1, c2) values(:2, :1) - """.format(name=db_parameters['name']), ( - u'str2', - 456 - )) - # numeric and qmark can be used in the same session - rec = cnx.cursor().execute(""" -select * from {name} where c1=? -""".format(name=db_parameters['name']), (123,)).fetchall() - assert len(rec) == 1 - assert rec[0][0] == 123 - assert rec[0][1] == u'str1' - finally: - with conn_cnx() as cnx: - cnx.cursor().execute(""" -drop table if exists {name} -""".format(name=db_parameters['name'])) - - -def test_binding_timestamps(conn_cnx, db_parameters): - """ - Binding datetime object with TIMESTAMP_LTZ. The value is bound - as TIMESTAMP_NTZ, but since it is converted to UTC in the backend, - the returned value must be - """ - with conn_cnx() as cnx: - cnx.cursor().execute(""" -create or replace table {name} ( - c1 integer, - c2 timestamp_ltz) -""".format(name=db_parameters['name'])) - - try: - with conn_cnx(paramstyle=u'numeric', timezone=PST_TZ) as cnx: - current_localtime = datetime.now() - cnx.cursor().execute(""" -insert into {name}(c1, c2) values(:1, :2) - """.format(name=db_parameters['name']), ( - 123, - ("TIMESTAMP_LTZ", current_localtime) - )) - rec = cnx.cursor().execute(""" -select * from {name} where c1=? - """.format(name=db_parameters['name']), (123,)).fetchall() - assert len(rec) == 1 - assert rec[0][0] == 123 - assert convert_datetime_to_epoch(rec[0][1]) == \ - convert_datetime_to_epoch(current_localtime) - finally: - with conn_cnx() as cnx: - cnx.cursor().execute(""" -drop table if exists {name} -""".format(name=db_parameters['name'])) - - -def test_binding_bulk_insert(conn_cnx, db_parameters): - """ - Bulk insert test. - """ - with conn_cnx() as cnx: - cnx.cursor().execute(""" -create or replace table {name} ( - c1 integer, - c2 string -) -""".format(name=db_parameters['name'])) - try: - with conn_cnx(paramstyle=u'qmark') as cnx: - # short list - c = cnx.cursor() - fmt = 'insert into {name}(c1,c2) values(?,?)'.format( - name=db_parameters['name']) - c.executemany(fmt, [ - (1, 'test1'), - (2, 'test2'), - (3, 'test3'), - (4, 'test4'), - ]) - assert c.rowcount == 4 - - # large list - num_rows = 100000 - c = cnx.cursor() - c.executemany(fmt, [ - (idx, 'test{}'.format(idx)) for idx in range(num_rows) - ]) - assert c.rowcount == num_rows - - finally: - with conn_cnx() as cnx: - cnx.cursor().execute(""" -drop table if exists {name} -""".format(name=db_parameters['name'])) - - -def test_binding_bulk_update(conn_cnx, db_parameters): - """ - Bulk update test. 
- - NOTE: UPDATE,MERGE and DELETE are not supported for actual bulk operation - but executemany accepts the multiple rows and iterate DMLs - """ - with conn_cnx() as cnx: - cnx.cursor().execute(""" -create or replace table {name} ( - c1 integer, - c2 string -) -""".format(name=db_parameters['name'])) - try: - with conn_cnx(paramstyle=u'qmark') as cnx: - # short list - c = cnx.cursor() - fmt = 'insert into {name}(c1,c2) values(?,?)'.format( - name=db_parameters['name']) - c.executemany(fmt, [ - (1, 'test1'), - (2, 'test2'), - (3, 'test3'), - (4, 'test4'), - ]) - assert c.rowcount == 4 - - fmt = "update {name} set c2=:2 where c1=:1".format( - name=db_parameters['name']) - c.executemany(fmt, [ - (1, 'test5'), - (2, 'test6'), - ]) - assert c.rowcount == 2 - - fmt = "select * from {name} where c1=?".format( - name=db_parameters['name'] - ) - rec = cnx.cursor().execute(fmt, (1,)).fetchall() - assert rec[0][0] == 1 - assert rec[0][1] == 'test5' - - finally: - with conn_cnx() as cnx: - cnx.cursor().execute(""" -drop table if exists {name} -""".format(name=db_parameters['name'])) - - -@pytest.mark.skipif(not PY2, reason="Long type test") -def test_binding_long_value(conn_cnx, db_parameters): - """ - Test binding a long value. The problem was it was bound as '1L' and raised - a SQL compilation error. - """ - try: - with conn_cnx() as cnx: - cnx.cursor().execute(""" -create or replace table {name} ( - c1 integer -) -""".format(name=db_parameters['name'])) - c = cnx.cursor() - fmt = "insert into {name}(c1) values(%(v1)s)".format( - name=db_parameters['name'] - ) - c.execute(fmt, {'v1': long(1)}) - assert len(cnx.cursor().execute( - "select count(*) from {name}".format( - name=db_parameters['name'])).fetchall()) == 1 - finally: - with conn_cnx() as cnx: - cnx.cursor().execute(""" -drop table if exists {name} -""".format(name=db_parameters['name'])) - - -def test_binding_identifier(conn_cnx, db_parameters): - """ - Binding a table name - """ - try: - with conn_cnx(paramstyle=u'qmark') as cnx: - data = u'test' - cnx.cursor().execute(""" -create or replace table identifier(?) (c1 string) -""", (db_parameters['name'],)) - with conn_cnx(paramstyle=u'qmark') as cnx: - cnx.cursor().execute(""" -insert into identifier(?) values(?) -""", (db_parameters['name'], data)) - ret = cnx.cursor().execute(""" -select * from identifier(?) -""", (db_parameters['name'],)).fetchall() - assert len(ret) == 1 - assert ret[0][0] == data - finally: - with conn_cnx(paramstyle=u'qmark') as cnx: - cnx.cursor().execute(""" -drop table if exists identifier(?) -""", (db_parameters['name'],)) diff --git a/test/test_boolean.py b/test/test_boolean.py deleted file mode 100644 index 099bab082..000000000 --- a/test/test_boolean.py +++ /dev/null @@ -1,49 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. 
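A minimal sketch of the server-side qmark binding that the deleted test_bindings.py above covered; the connection settings in db_config and the table name are hypothetical:

    import snowflake.connector

    with snowflake.connector.connect(paramstyle="qmark", **db_config) as cnx:
        c = cnx.cursor()
        c.execute("create or replace temporary table demo_tbl (c1 integer, c2 string)")
        c.executemany(
            "insert into demo_tbl(c1, c2) values(?, ?)",
            [(1, "test1"), (2, "test2"), (3, "test3")],
        )
        assert c.rowcount == 3  # executemany reports the total number of inserted rows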
-# - - -def test_binding_fetching_boolean(conn_cnx, db_parameters): - try: - with conn_cnx() as cnx: - cnx.cursor().execute(""" -create or replace table {name} (c1 boolean, c2 integer) -""".format(name=db_parameters['name'])) - - with conn_cnx() as cnx: - cnx.cursor().execute(""" -insert into {name} values(%s,%s), (%s,%s), (%s,%s) -""".format(name=db_parameters['name']), (True, 1, False, 2, True, 3)) - results = cnx.cursor().execute(""" -select * from {name} order by 1""".format( - name=db_parameters['name'])).fetchall() - assert not results[0][0] - assert results[1][0] - assert results[2][0] - results = cnx.cursor().execute(""" -select c1 from {name} where c2=2 -""".format(name=db_parameters['name'])).fetchall() - assert not results[0][0] - - # SNOW-15905: boolean support - results = cnx.cursor().execute(""" -SELECT CASE WHEN (null LIKE trim(null)) THEN null ELSE null END -""".format(name=db_parameters['name'])).fetchall() - assert not results[0][0] - - finally: - with conn_cnx() as cnx: - cnx.cursor().execute(""" -drop table if exists {name} -""".format(name=db_parameters['name'])) - - -def test_boolean_from_compiler(conn_cnx): - with conn_cnx() as cnx: - ret = cnx.cursor().execute("SELECT true").fetchone() - assert ret[0] - - ret = cnx.cursor().execute("SELECT false").fetchone() - assert not ret[0] diff --git a/test/test_concurrent_create_objects.py b/test/test_concurrent_create_objects.py deleted file mode 100644 index 734e7bce5..000000000 --- a/test/test_concurrent_create_objects.py +++ /dev/null @@ -1,114 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. -# - -from logging import getLogger -from multiprocessing.pool import ThreadPool - -import pytest - -try: - from parameters import (CONNECTION_PARAMETERS_ADMIN) -except: - CONNECTION_PARAMETERS_ADMIN ={} - -logger = getLogger(__name__) - -from snowflake.connector import ProgrammingError -from snowflake.connector.compat import TO_UNICODE - - -@pytest.mark.skipif( - not CONNECTION_PARAMETERS_ADMIN, - reason="Snowflake admin account is not accessible." 
-) -def test_snow5871(conn_cnx, db_parameters): - _test_snow5871(conn_cnx, db_parameters, - number_of_threads=5, - rt_max_outgoing_rate=60, rt_max_burst_size=5, - rt_max_borrowing_limt=1000, rt_reset_period=10000) - - _test_snow5871(conn_cnx, db_parameters, - number_of_threads=40, - rt_max_outgoing_rate=60, rt_max_burst_size=1, - rt_max_borrowing_limt=200, rt_reset_period=1000) - - -def _create_a_table(meta): - cnx = meta['cnx'] - name = meta['name'] - try: - cnx.cursor().execute(""" -create table {0} (aa int) - """.format(name)) - # print("Success #" + meta['idx']) - return {'success': True} - except ProgrammingError: - logger.exception('Failed to create a table') - return {'success': False} - - -def _test_snow5871(conn_cnx, - db_parameters, - number_of_threads=10, - rt_max_outgoing_rate=60, - rt_max_burst_size=1, - rt_max_borrowing_limt=1000, - rt_reset_period=10000): - """ - SNOW-5871: rate limiting for creation of non-recycable objects - """ - logger.debug(( - 'number_of_threads = %s, rt_max_outgoing_rate = %s, ' - 'rt_max_burst_size = %s, rt_max_borrowing_limt = %s, ' - 'rt_reset_period = %s'), - number_of_threads, rt_max_outgoing_rate, rt_max_burst_size, - rt_max_borrowing_limt, rt_reset_period) - with conn_cnx(user=db_parameters['sf_user'], - password=db_parameters['sf_password'], - account=db_parameters['sf_account']) as cnx: - cnx.cursor().execute(""" -alter system set - RT_MAX_OUTGOING_RATE={0}, - RT_MAX_BURST_SIZE={1}, - RT_MAX_BORROWING_LIMIT={2}, - RT_RESET_PERIOD={3}""".format( - rt_max_outgoing_rate, rt_max_burst_size, rt_max_borrowing_limt, rt_reset_period)) - - try: - with conn_cnx() as cnx: - cnx.cursor().execute( - "create or replace database {name}_db".format( - name=db_parameters['name'])) - meta = [] - for i in range(number_of_threads): - meta.append({'idx': TO_UNICODE(i + 1), - 'cnx': cnx, - 'name': db_parameters[ - 'name'] + 'tbl_5871_' + TO_UNICODE( - i + 1)}) - pool = ThreadPool(processes=number_of_threads) - results = pool.map(_create_a_table, meta) - success = 0 - for r in results: - success += 1 if r['success'] else 0 - - # at least one should be success - assert success >= 1, 'success queries' - finally: - with conn_cnx() as cnx: - cnx.cursor().execute( - "drop database if exists {name}_db".format( - name=db_parameters['name'])) - - with conn_cnx(user=db_parameters['sf_user'], - password=db_parameters['sf_password'], - account=db_parameters['sf_account']) as cnx: - cnx.cursor().execute(""" -alter system set - RT_MAX_OUTGOING_RATE=default, - RT_MAX_BURST_SIZE=default, - RT_RESET_PERIOD=default, - RT_MAX_BORROWING_LIMIT=default""") diff --git a/test/test_concurrent_insert.py b/test/test_concurrent_insert.py deleted file mode 100644 index d07b8f4f7..000000000 --- a/test/test_concurrent_insert.py +++ /dev/null @@ -1,181 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. 
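The concurrency tests in this area share one fan-out/collect shape; the worker below is a stand-in that needs no Snowflake connection:

    from multiprocessing.pool import ThreadPool

    def _worker(meta):
        # in the real tests each worker runs one statement on its own connection
        return {"idx": meta["idx"], "success": True}

    metas = [{"idx": i} for i in range(8)]
    with ThreadPool(processes=8) as pool:
        results = pool.map(_worker, metas)
    assert sum(1 for r in results if r["success"]) >= 1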
-# -""" -Concurrent test module -""" -from logging import getLogger -from multiprocessing.pool import ThreadPool - -import pytest - -try: - from parameters import (CONNECTION_PARAMETERS_ADMIN) -except: - CONNECTION_PARAMETERS_ADMIN = {} - -logger = getLogger(__name__) -import snowflake.connector -from snowflake.connector.compat import TO_UNICODE -from snowflake.connector.errors import ProgrammingError - - -def _concurrent_insert(meta): - """ - Concurrent insert method - """ - cnx = snowflake.connector.connect( - user=meta['user'], - password=meta['password'], - host=meta['host'], - port=meta['port'], - account=meta['account'], - database=meta['database'], - schema=meta['schema'], - timezone='UTC', - protocol='http' - # tracing = logging.DEBUG, - ) - try: - cnx.cursor().execute("use warehouse {0}".format(meta['warehouse'])) - table = meta['table'] - sql = "insert into {name} values(%(c1)s, %(c2)s)".format(name=table) - logger.debug(sql) - cnx.cursor().execute(sql, { - 'c1': meta['idx'], - 'c2': 'test string ' + meta['idx'], - }) - meta['success'] = True - logger.debug("Succeeded process #%s", meta['idx']) - except: - logger.exception('failed to insert into a table [%s]', table) - meta['success'] = False - finally: - cnx.close() - return meta - - -@pytest.mark.skipif( - not CONNECTION_PARAMETERS_ADMIN, - reason="The user needs a privilege of create warehouse." -) -def test_concurrent_insert(conn_cnx, db_parameters): - """ - Concurrent insert tests. Inserts block on the one that's running. - """ - number_of_threads = 22 # change this to increase the concurrency - expected_success_runs = number_of_threads - 1 - cnx_array = [] - - try: - with conn_cnx() as cnx: - cnx.cursor().execute(""" -create or replace warehouse {0} -warehouse_type=standard -warehouse_size=small -""".format(db_parameters['name_wh'])) - sql = """ -create or replace table {name} (c1 integer, c2 string) -""".format(name=db_parameters['name']) - cnx.cursor().execute(sql) - for i in range(number_of_threads): - cnx_array.append({ - 'host': db_parameters['host'], - 'port': db_parameters['port'], - 'user': db_parameters['user'], - 'password': db_parameters['password'], - 'account': db_parameters['account'], - 'database': db_parameters['database'], - 'schema': db_parameters['schema'], - 'table': db_parameters['name'], - 'idx': TO_UNICODE(i), - 'warehouse': db_parameters['name_wh'] - }) - - pool = ThreadPool(processes=number_of_threads) - results = pool.map( - _concurrent_insert, - cnx_array) - success = 0 - for record in results: - success += 1 if record['success'] else 0 - - # 21 threads or more - assert success >= expected_success_runs, "Number of success run" - - c = cnx.cursor() - sql = "select * from {name} order by 1".format( - name=db_parameters['name']) - c.execute(sql) - for rec in c: - logger.debug(rec) - c.close() - - finally: - with conn_cnx() as cnx: - cnx.cursor().execute( - "drop table if exists {0}".format(db_parameters['name'])) - cnx.cursor().execute( - "drop warehouse if exists {0}".format(db_parameters['name_wh'])) - - -def _concurrent_insert_using_connection(meta): - connection = meta['connection'] - idx = meta['idx'] - name = meta['name'] - try: - connection.cursor().execute( - "INSERT INTO {name} VALUES(%s, %s)".format( - name=name), - (idx, 'test string{0}'.format(idx))) - except ProgrammingError as e: - if e.errno != 619: # SQL Execution Canceled - raise - - -@pytest.mark.skipif( - not CONNECTION_PARAMETERS_ADMIN, - reason="The user needs a privilege of create warehouse." 
-) -def test_concurrent_insert_using_connection(conn_cnx, db_parameters): - """ - Concurrent insert tests using the same connection - """ - try: - with conn_cnx() as cnx: - cnx.cursor().execute(""" -create or replace warehouse {0} -warehouse_type=standard -warehouse_size=small -""".format(db_parameters['name_wh'])) - cnx.cursor().execute(""" -CREATE OR REPLACE TABLE {name} (c1 INTEGER, c2 STRING) -""".format( - name=db_parameters['name'])) - number_of_threads = 5 - metas = [] - for i in range(number_of_threads): - metas.append({ - 'connection': cnx, - 'idx': i, - 'name': db_parameters['name'], - }) - pool = ThreadPool(processes=number_of_threads) - pool.map(_concurrent_insert_using_connection, metas) - cnt = 0 - for _ in cnx.cursor().execute( - "SELECT * FROM {name} ORDER BY 1".format( - name=db_parameters['name'])): - cnt += 1 - assert cnt <= number_of_threads, \ - "Number of records should be less than the number of threads" - assert cnt > 0, \ - "Number of records should be one or more number of threads" - finally: - with conn_cnx() as cnx: - cnx.cursor().execute( - "drop table if exists {0}".format(db_parameters['name'])) - cnx.cursor().execute( - "drop warehouse if exists {0}".format(db_parameters['name_wh'])) diff --git a/test/test_connection.py b/test/test_connection.py deleted file mode 100644 index c98aa8f6a..000000000 --- a/test/test_connection.py +++ /dev/null @@ -1,637 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. -# - -import os - -import pytest -import threading -import queue - -import snowflake.connector -from snowflake.connector import ( - DatabaseError, - ProgrammingError, OperationalError) -from snowflake.connector.errors import (ForbiddenError) - -try: - from parameters import (CONNECTION_PARAMETERS_ADMIN) -except: - CONNECTION_PARAMETERS_ADMIN = {} - -from snowflake.connector.description import CLIENT_NAME -from snowflake.connector.network import APPLICATION_SNOWSQL -from snowflake.connector.connection import SnowflakeConnection - - -def test_basic(conn_testaccount): - """ - Basic Connection test - """ - assert conn_testaccount, 'invalid cnx' - conn_testaccount._set_current_objects() - - -def test_connection_without_schema(db_parameters): - """ - Basic Connection test without schema - """ - cnx = snowflake.connector.connect( - user=db_parameters['user'], - password=db_parameters['password'], - host=db_parameters['host'], - port=db_parameters['port'], - account=db_parameters['account'], - database=db_parameters['database'], - protocol=db_parameters['protocol'], - timezone='UTC', - ) - assert cnx, 'invalid cnx' - cnx.close() - - -def test_connection_without_database_schema(db_parameters): - """ - Basic Connection test without database and schema - """ - cnx = snowflake.connector.connect( - user=db_parameters['user'], - password=db_parameters['password'], - host=db_parameters['host'], - port=db_parameters['port'], - account=db_parameters['account'], - protocol=db_parameters['protocol'], - timezone='UTC', - ) - assert cnx, 'invalid cnx' - cnx.close() - - -def test_connection_without_database2(db_parameters): - """ - Basic Connection test without database - """ - cnx = snowflake.connector.connect( - user=db_parameters['user'], - password=db_parameters['password'], - host=db_parameters['host'], - port=db_parameters['port'], - account=db_parameters['account'], - schema=db_parameters['schema'], - protocol=db_parameters['protocol'], - timezone='UTC', - ) - assert cnx, 'invalid cnx' - cnx.close() 
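These connection tests all reduce to the same open-verify-close shape; a hedged distillation with hypothetical db_config credentials:

    import snowflake.connector

    cnx = snowflake.connector.connect(timezone="UTC", **db_config)
    try:
        assert cnx.cursor().execute("select 1").fetchone()[0] == 1
    finally:
        cnx.close()  # always release the session, even if the assertion fails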
- - -def test_with_config(db_parameters): - """ - Creates a connection with the config parameter - """ - config = { - 'user': db_parameters['user'], - 'password': db_parameters['password'], - 'host': db_parameters['host'], - 'port': db_parameters['port'], - 'account': db_parameters['account'], - 'schema': db_parameters['schema'], - 'database': db_parameters['database'], - 'protocol': db_parameters['protocol'], - 'timezone': 'UTC', - } - cnx = snowflake.connector.connect(**config) - try: - assert cnx, 'invalid cnx' - assert not cnx.client_session_keep_alive # default is False - finally: - cnx.close() - - -def test_keep_alive_true(db_parameters): - """ - Creates a connection with client_session_keep_alive parameter. - """ - config = { - 'user': db_parameters['user'], - 'password': db_parameters['password'], - 'host': db_parameters['host'], - 'port': db_parameters['port'], - 'account': db_parameters['account'], - 'schema': db_parameters['schema'], - 'database': db_parameters['database'], - 'protocol': db_parameters['protocol'], - 'timezone': 'UTC', - 'client_session_keep_alive': True - } - cnx = snowflake.connector.connect(**config) - try: - assert cnx.client_session_keep_alive - finally: - cnx.close() - - -def test_keep_alive_heartbeat_frequency(db_parameters): - """ - Creates a connection with client_session_keep_alive_heartbeat_frequency - parameter. - """ - config = { - 'user': db_parameters['user'], - 'password': db_parameters['password'], - 'host': db_parameters['host'], - 'port': db_parameters['port'], - 'account': db_parameters['account'], - 'schema': db_parameters['schema'], - 'database': db_parameters['database'], - 'protocol': db_parameters['protocol'], - 'timezone': 'UTC', - 'client_session_keep_alive': True, - 'client_session_keep_alive_heartbeat_frequency': 1000, - } - cnx = snowflake.connector.connect(**config) - try: - assert cnx.client_session_keep_alive_heartbeat_frequency == 1000 - finally: - cnx.close() - - -def test_keep_alive_heartbeat_frequency_min(db_parameters): - """ - Creates a connection with client_session_keep_alive_heartbeat_frequency - parameter and set the minimum frequency - """ - config = { - 'user': db_parameters['user'], - 'password': db_parameters['password'], - 'host': db_parameters['host'], - 'port': db_parameters['port'], - 'account': db_parameters['account'], - 'schema': db_parameters['schema'], - 'database': db_parameters['database'], - 'protocol': db_parameters['protocol'], - 'timezone': 'UTC', - 'client_session_keep_alive': True, - 'client_session_keep_alive_heartbeat_frequency': 10, - } - cnx = snowflake.connector.connect(**config) - try: - # The min value of client_session_keep_alive_heartbeat_frequency - # is 1/16 of master token validity, so 14400 / 4 /4 => 900 - assert cnx.client_session_keep_alive_heartbeat_frequency == 900 - finally: - cnx.close() - - -def test_bad_db(db_parameters): - """ - Attempts to use a bad DB - """ - cnx = snowflake.connector.connect( - user=db_parameters['user'], - password=db_parameters['password'], - host=db_parameters['host'], - port=db_parameters['port'], - account=db_parameters['account'], - protocol=db_parameters['protocol'], - database='baddb', - ) - assert cnx, 'invald cnx' - cnx.close() - - -def test_bogus(db_parameters): - """ - Attempts to login with invalid user name and password - NOTE: this takes long time. 
- """ - with pytest.raises(DatabaseError): - snowflake.connector.connect( - protocol='http', - user='bogus', - password='bogus', - host=db_parameters['host'], - port=db_parameters['port'], - account=db_parameters['account'], - ) - - with pytest.raises(DatabaseError): - snowflake.connector.connect( - protocol='http', - user='bogus', - password='bogus', - account='testaccount123', - host=db_parameters['host'], - port=db_parameters['port'], - login_timeout=5, - insecure_mode=True - ) - - with pytest.raises(DatabaseError): - snowflake.connector.connect( - protocol='http', - user='snowman', - password='', - account='testaccount123', - host=db_parameters['host'], - port=db_parameters['port'], - login_timeout=5, - ) - - with pytest.raises(ProgrammingError): - snowflake.connector.connect( - protocol='http', - user='', - password='password', - account='testaccount123', - host=db_parameters['host'], - port=db_parameters['port'], - login_timeout=5, - ) - - -def test_invalid_application(db_parameters): - """ - Invalid application - """ - with pytest.raises(snowflake.connector.Error): - snowflake.connector.connect( - protocol=db_parameters['protocol'], - user=db_parameters['user'], - password=db_parameters['password'], - application='%%%') - - -def test_valid_application(db_parameters): - """ - Valid app name - """ - application = 'Special_Client' - cnx = snowflake.connector.connect( - user=db_parameters['user'], - password=db_parameters['password'], - host=db_parameters['host'], - port=db_parameters['port'], - account=db_parameters['account'], - application=application, - protocol=db_parameters['protocol'], - ) - assert cnx.application == application, "Must be valid application" - cnx.close() - - -def test_invalid_default_parameters(db_parameters): - """ - Invalid database, schema, warehouse and role name - """ - cnx = snowflake.connector.connect( - user=db_parameters['user'], - password=db_parameters['password'], - host=db_parameters['host'], - port=db_parameters['port'], - account=db_parameters['account'], - protocol=db_parameters['protocol'], - database='neverexists', - schema='neverexists', - warehouse='neverexits', - ) - assert cnx, "Must be success" - - with pytest.raises(snowflake.connector.DatabaseError): - # must not success - snowflake.connector.connect( - user=db_parameters['user'], - password=db_parameters['password'], - host=db_parameters['host'], - port=db_parameters['port'], - account=db_parameters['account'], - protocol=db_parameters['protocol'], - database='neverexists', - schema='neverexists', - validate_default_parameters=True, - ) - - with pytest.raises(snowflake.connector.DatabaseError): - # must not success - snowflake.connector.connect( - user=db_parameters['user'], - password=db_parameters['password'], - host=db_parameters['host'], - port=db_parameters['port'], - account=db_parameters['account'], - protocol=db_parameters['protocol'], - database=db_parameters['database'], - schema='neverexists', - validate_default_parameters=True, - ) - - with pytest.raises(snowflake.connector.DatabaseError): - # must not success - snowflake.connector.connect( - user=db_parameters['user'], - password=db_parameters['password'], - host=db_parameters['host'], - port=db_parameters['port'], - account=db_parameters['account'], - protocol=db_parameters['protocol'], - database=db_parameters['database'], - schema=db_parameters['schema'], - warehouse='neverexists', - validate_default_parameters=True, - ) - - # Invalid role name is already validated - with pytest.raises(snowflake.connector.DatabaseError): 
- # must not success - snowflake.connector.connect( - user=db_parameters['user'], - password=db_parameters['password'], - host=db_parameters['host'], - port=db_parameters['port'], - account=db_parameters['account'], - protocol=db_parameters['protocol'], - database=db_parameters['database'], - schema=db_parameters['schema'], - role='neverexists', - ) - - -@pytest.mark.skipif( - not CONNECTION_PARAMETERS_ADMIN, - reason="The user needs a privilege of create warehouse." -) -def test_drop_create_user(conn_cnx, db_parameters): - """ - Drops and creates user - """ - - with conn_cnx() as cnx: - def exe(sql): - return cnx.cursor().execute(sql) - - exe('use role accountadmin') - exe('drop user if exists snowdog') - exe("create user if not exists snowdog identified by 'testdoc'") - exe("use {0}".format(db_parameters['database'])) - exe("create or replace role snowdog_role") - exe("grant role snowdog_role to user snowdog") - exe("grant all on database {0} to role snowdog_role".format( - db_parameters['database'])) - exe("grant all on schema {0} to role snowdog_role".format( - db_parameters['schema'])) - - with conn_cnx(user='snowdog', password='testdoc') as cnx2: - def exe(sql): - return cnx2.cursor().execute(sql) - - exe('use role snowdog_role') - exe(u"use {0}".format(db_parameters['database'])) - exe(u"use schema {0}".format(db_parameters['schema'])) - exe('create or replace table friends(name varchar(100))') - exe('drop table friends') - with conn_cnx() as cnx: - def exe(sql): - return cnx.cursor().execute(sql) - - exe('use role accountadmin') - exe( - 'revoke all on database {0} from role snowdog_role'.format( - db_parameters['database'])) - exe('drop role snowdog_role') - exe('drop user if exists snowdog') - - -@pytest.mark.timeout(15) -def test_invalid_account_timeout(): - with pytest.raises(ForbiddenError): - snowflake.connector.connect( - account='bogus', - user='test', - password='test', - login_timeout=5 - ) - - -@pytest.mark.timeout(15) -def test_invalid_port(db_parameters): - with pytest.raises(OperationalError): - snowflake.connector.connect( - protocol='http', - account='testaccount', - user=db_parameters['user'], - password=db_parameters['password'], - host=db_parameters['host'], - port=12345, - login_timeout=5, - ) - - -@pytest.mark.timeout(15) -def test_invalid_proxy(db_parameters): - with pytest.raises(OperationalError): - snowflake.connector.connect( - protocol='http', - account='testaccount', - user=db_parameters['user'], - password=db_parameters['password'], - host=db_parameters['host'], - port=db_parameters['port'], - login_timeout=5, - proxy_host='localhost', - proxy_port='3333' - ) - # NOTE environment variable is set if the proxy parameter is specified. - del os.environ['HTTP_PROXY'] - del os.environ['HTTPS_PROXY'] - - -@pytest.mark.timeout(15) -def test_eu_connection(tmpdir): - """ - If region is specified to eu-central-1, the URL will become - https://testaccount1234.eu-central-1.snowflakecomputing.com/ - NOTE: region is deprecated. 
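The block of invalid-default-parameter tests above captures the connector's two modes: by default a nonexistent database, schema, or warehouse still yields a usable connection (the server merely warns), while validate_default_parameters=True turns the mismatch into a DatabaseError at connect time. A sketch with placeholder credentials:

```python
import snowflake.connector
from snowflake.connector import DatabaseError

params = dict(user='<user>', password='<password>', account='<account>')

# Default behavior: connecting succeeds even though the database is bogus.
cnx = snowflake.connector.connect(database='neverexists', **params)
cnx.close()

# Opt-in validation: the same bad default now fails fast.
try:
    snowflake.connector.connect(
        database='neverexists', validate_default_parameters=True, **params)
except DatabaseError as err:
    print('rejected at connect time: {0}'.format(err))
```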
- """ - import os - os.environ["SF_OCSP_RESPONSE_CACHE_SERVER_ENABLED"] = "true" - with pytest.raises(ForbiddenError): - # must reach Snowflake - snowflake.connector.connect( - account='testaccount1234', - user='testuser', - password='testpassword', - region='eu-central-1', - login_timeout=5, - ocsp_response_cache_filename=os.path.join( - str(tmpdir), "test_ocsp_cache.txt") - ) - - -#@pytest.mark.timeout(15) -def test_us_west_connection(tmpdir): - """ - region='us-west-2' indicates no region is included in the hostname, i.e., - https://testaccount1234.snowflakecomputing.com. - NOTE: region is deprecated. - """ - with pytest.raises(ForbiddenError): - # must reach Snowflake - snowflake.connector.connect( - account='testaccount1234', - user='testuser', - password='testpassword', - region='us-west-2', - login_timeout=5, - ) - - -@pytest.mark.timeout(60) -def test_privatelink(db_parameters): - """ - Ensure the OCSP cache server URL is overridden if privatelink - connection is used. - """ - try: - os.environ['SF_OCSP_FAIL_OPEN'] = 'false' - os.environ['SF_OCSP_DO_RETRY'] = 'false' - snowflake.connector.connect( - account='testaccount', - user='testuser', - password='testpassword', - region='eu-central-1.privatelink', - login_timeout=5, - ) - pytest.fail("should not make connection") - except OperationalError: - ocsp_url = os.getenv('SF_OCSP_RESPONSE_CACHE_SERVER_URL') - assert ocsp_url is not None, "OCSP URL should not be None" - assert ocsp_url == "http://ocsp.testaccount.eu-central-1." \ - "privatelink.snowflakecomputing.com/" \ - "ocsp_response_cache.json" - - cnx = snowflake.connector.connect( - user=db_parameters['user'], - password=db_parameters['password'], - host=db_parameters['host'], - port=db_parameters['port'], - account=db_parameters['account'], - database=db_parameters['database'], - protocol=db_parameters['protocol'], - timezone='UTC', - ) - assert cnx, 'invalid cnx' - - ocsp_url = os.getenv('SF_OCSP_RESPONSE_CACHE_SERVER_URL') - assert ocsp_url is None, "OCSP URL should be None: {0}".format(ocsp_url) - del os.environ['SF_OCSP_DO_RETRY'] - del os.environ['SF_OCSP_FAIL_OPEN'] - - -def test_disable_request_pooling(db_parameters): - """ - Creates a connection with client_session_keep_alive parameter. 
- """ - config = { - 'user': db_parameters['user'], - 'password': db_parameters['password'], - 'host': db_parameters['host'], - 'port': db_parameters['port'], - 'account': db_parameters['account'], - 'schema': db_parameters['schema'], - 'database': db_parameters['database'], - 'protocol': db_parameters['protocol'], - 'timezone': 'UTC', - 'disable_request_pooling': True - } - cnx = snowflake.connector.connect(**config) - try: - assert cnx.disable_request_pooling - finally: - cnx.close() - - -def test_privatelink_ocsp_url_creation(): - hostname = "testaccount.us-east-1.privatelink.snowflakecomputing.com" - SnowflakeConnection.setup_ocsp_privatelink(APPLICATION_SNOWSQL, hostname) - - ocsp_cache_server = os.getenv("SF_OCSP_RESPONSE_CACHE_SERVER_URL", None) - assert ocsp_cache_server == \ - "http://ocsp.testaccount.us-east-1.privatelink.snowflakecomputing.com/ocsp_response_cache.json" - - del os.environ['SF_OCSP_RESPONSE_CACHE_SERVER_URL'] - - SnowflakeConnection.setup_ocsp_privatelink(CLIENT_NAME, hostname) - ocsp_cache_server = os.getenv("SF_OCSP_RESPONSE_CACHE_SERVER_URL", None) - assert ocsp_cache_server == \ - "http://ocsp.testaccount.us-east-1.privatelink.snowflakecomputing.com/ocsp_response_cache.json" - - -def test_privatelink_ocsp_url_multithreaded(): - bucket = queue.Queue() - - hostname = "testaccount.us-east-1.privatelink.snowflakecomputing.com" - expectation = "http://ocsp.testaccount.us-east-1.privatelink.snowflakecomputing.com/ocsp_response_cache.json" - thread_obj = [] - for i in range(15): - thread_obj.append(ExecPrivatelinkThread(bucket, hostname, expectation, CLIENT_NAME)) - - for i in range(15): - thread_obj[i].start() - - fail_flag = False - for i in range(15): - thread_obj[i].join() - exc = bucket.get(block=False) - if exc != 'Success' and not fail_flag: - fail_flag = True - - if fail_flag: - assert False, "OCSP URL was set incorrectly" - - if os.getenv("SF_OCSP_RESPONSE_CACHE_SERVER_URL", None) is not None: - del os.environ["SF_OCSP_RESPONSE_CACHE_SERVER_URL"] - - -def test_privatelink_ocsp_url_multithreaded_snowsql(): - bucket = queue.Queue() - - hostname = "testaccount.us-east-1.privatelink.snowflakecomputing.com" - expectation = "http://ocsp.testaccount.us-east-1.privatelink.snowflakecomputing.com/ocsp_response_cache.json" - thread_obj = [] - for i in range(15): - thread_obj.append(ExecPrivatelinkThread(bucket, hostname, expectation, APPLICATION_SNOWSQL)) - - for i in range(15): - thread_obj[i].start() - - fail_flag = False - for i in range(15): - thread_obj[i].join() - exc = bucket.get(block=False) - if exc != 'Success' and not fail_flag: - fail_flag = True - - if fail_flag: - assert False, "OCSP URL was set incorrectly" - - -class ExecPrivatelinkThread(threading.Thread): - - def __init__(self, bucket, hostname, expectation, client_name): - threading.Thread.__init__(self) - self.bucket = bucket - self.hostname = hostname - self.expectation = expectation - self.client_name = client_name - - def run(self): - SnowflakeConnection.setup_ocsp_privatelink(self.client_name, self.hostname) - ocsp_cache_server = os.getenv("SF_OCSP_RESPONSE_CACHE_SERVER_URL", None) - if ocsp_cache_server is not None and ocsp_cache_server !=\ - self.expectation: - print("Got {0} Expected {1}".format(ocsp_cache_server, self.expectation)) - self.bucket.put("Fail") - else: - self.bucket.put("Success") diff --git a/test/test_connection_manual.py b/test/test_connection_manual.py deleted file mode 100644 index 1e0b01fd6..000000000 --- a/test/test_connection_manual.py +++ /dev/null @@ -1,132 +0,0 @@ 
-#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. -# -# This test requires the SSO and Snowflake admin connection parameters. -# -# CONNECTION_PARAMETERS_SSO = { -# 'account': 'testaccount', -# 'user': 'qa@snowflakecomputing.com', -# 'protocol': 'http', -# 'host': 'testaccount.reg.snowflakecomputing.com', -# 'port': '8082', -# 'authenticator': 'externalbrowser', -# 'timezone': 'UTC', -# } -# -# CONNECTION_PARAMETERS_ADMIN = { ... Snowflake admin ... } - -import pytest - -import snowflake.connector -from snowflake.connector.auth import delete_temporary_credential_file - -try: - from parameters import (CONNECTION_PARAMETERS_SSO) -except: - CONNECTION_PARAMETERS_SSO = {} - -try: - from parameters import (CONNECTION_PARAMETERS_ADMIN) -except: - CONNECTION_PARAMETERS_ADMIN = {} - - -@pytest.fixture -def token_validity_test_values(request): - with snowflake.connector.connect(**CONNECTION_PARAMETERS_ADMIN) as cnx: - cnx.cursor().execute(""" -ALTER SYSTEM SET - MASTER_TOKEN_VALIDITY=60, - SESSION_TOKEN_VALIDITY=5, - ID_TOKEN_VALIDITY=60 -""") - - def fin(): - with snowflake.connector.connect(**CONNECTION_PARAMETERS_ADMIN) as cnx: - cnx.cursor().execute(""" -ALTER SYSTEM SET - MASTER_TOKEN_VALIDITY=default, - SESSION_TOKEN_VALIDITY=default, - ID_TOKEN_VALIDITY=default -""") - - request.addfinalizer(fin) - return None - - -@pytest.mark.skipif( - not (CONNECTION_PARAMETERS_SSO and CONNECTION_PARAMETERS_ADMIN), - reason="SSO and ADMIN connection parameters must be provided." -) -def test_connect_externalbrowser(token_validity_test_values): - """ - SSO Id Token Cache tests. This is disabled by default. - In order to run this test, remove the above pytest.mark.skip annotation - and run it. It will popup a windows once but the rest connections - should not create popups. - """ - - delete_temporary_credential_file(True) # delete secure storage - delete_temporary_credential_file(False) # delete file cache - CONNECTION_PARAMETERS_SSO['session_parameters'] = \ - { - "CLIENT_USE_SECURE_STORAGE_FOR_TEMPORARY_CREDENTAIL": True, - } - - # change database and schema to non-default one - print("[INFO] 1st connection gets id token and stores in the cache file. " - "This popup a browser to SSO login") - cnx = snowflake.connector.connect(**CONNECTION_PARAMETERS_SSO) - assert cnx.database == 'TESTDB' - assert cnx.schema == 'PUBLIC' - assert cnx.role == 'SYSADMIN' - assert cnx.warehouse == 'REGRESS' - ret = cnx.cursor().execute( - "select current_database(), current_schema(), " - "current_role(), current_warehouse()").fetchall() - assert ret[0][0] == 'TESTDB' - assert ret[0][1] == 'PUBLIC' - assert ret[0][2] == 'SYSADMIN' - assert ret[0][3] == 'REGRESS' - cnx.close() - - print("[INFO] 2nd connection reads the cache file and uses the id token. " - "This should not popups a browser.") - CONNECTION_PARAMETERS_SSO['database'] = 'testdb' - CONNECTION_PARAMETERS_SSO['schema'] = 'testschema' - cnx = snowflake.connector.connect(**CONNECTION_PARAMETERS_SSO) - print("[INFO] Running a 10 seconds query. If the session expires in 10 " - "seconds, the query should renew the token in the middle, " - "and the current objects should be refreshed.") - cnx.cursor().execute("select seq8() from table(generator(timelimit=>10))") - assert cnx.database == 'TESTDB' - assert cnx.schema == 'TESTSCHEMA' - assert cnx.role == 'SYSADMIN' - assert cnx.warehouse == 'REGRESS' - - print("[INFO] Running a 1 seconds query. 
") - cnx.cursor().execute("select seq8() from table(generator(timelimit=>1))") - assert cnx.database == 'TESTDB' - assert cnx.schema == 'TESTSCHEMA' - assert cnx.role == 'SYSADMIN' - assert cnx.warehouse == 'REGRESS' - - print("[INFO] Running a 90 seconds query. This pops up a browser in the " - "middle of the query.") - cnx.cursor().execute("select seq8() from table(generator(timelimit=>90))") - assert cnx.database == 'TESTDB' - assert cnx.schema == 'TESTSCHEMA' - assert cnx.role == 'SYSADMIN' - assert cnx.warehouse == 'REGRESS' - - # change database and schema again to ensure they are overridden - CONNECTION_PARAMETERS_SSO['database'] = 'testdb' - CONNECTION_PARAMETERS_SSO['schema'] = 'testschema' - cnx = snowflake.connector.connect(**CONNECTION_PARAMETERS_SSO) - assert cnx.database == 'TESTDB' - assert cnx.schema == 'TESTSCHEMA' - assert cnx.role == 'SYSADMIN' - assert cnx.warehouse == 'REGRESS' - cnx.close() diff --git a/test/test_converter.py b/test/test_converter.py deleted file mode 100644 index 2cc8d25a5..000000000 --- a/test/test_converter.py +++ /dev/null @@ -1,494 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. -# - -from datetime import timedelta, time - -import pytest -import pytz - -from snowflake.connector.compat import PY2, PY34_EXACT, IS_WINDOWS -from snowflake.connector.converter import ( - ZERO_EPOCH, - _generate_tzinfo_from_tzoffset) -from snowflake.connector.converter_snowsql import (SnowflakeConverterSnowSQL) - - -def _compose_tz(dt, tzinfo): - ret = ZERO_EPOCH + timedelta(seconds=float(dt)) - ret += tzinfo.utcoffset(ret) - return ret.replace(tzinfo=tzinfo) - - -def _compose_ntz(dt): - return ZERO_EPOCH + timedelta(seconds=float(dt)) - - -def _compose_ltz(dt, tz): - ret = ZERO_EPOCH + timedelta(seconds=float(dt)) - return pytz.utc.localize(ret).astimezone( - pytz.timezone(tz)) - - -def test_fetch_timestamps(conn_cnx): - PST_TZ = "America/Los_Angeles" - - tzdiff = 1860 - 1440 # -07:00 - tzinfo = _generate_tzinfo_from_tzoffset(tzdiff) - - # TIMESTAMP_TZ - r0 = _compose_tz('1325568896.123456', tzinfo) - r1 = _compose_tz('1325568896.123456', tzinfo) - r2 = _compose_tz('1325568896.123456', tzinfo) - r3 = _compose_tz('1325568896.123456', tzinfo) - r4 = _compose_tz('1325568896.12345', tzinfo) - r5 = _compose_tz('1325568896.1234', tzinfo) - r6 = _compose_tz('1325568896.123', tzinfo) - r7 = _compose_tz('1325568896.12', tzinfo) - r8 = _compose_tz('1325568896.1', tzinfo) - r9 = _compose_tz('1325568896', tzinfo) - - # TIMESTAMP_NTZ - r10 = _compose_ntz('1325568896.123456') - r11 = _compose_ntz('1325568896.123456') - r12 = _compose_ntz('1325568896.123456') - r13 = _compose_ntz('1325568896.123456') - r14 = _compose_ntz('1325568896.12345') - r15 = _compose_ntz('1325568896.1234') - r16 = _compose_ntz('1325568896.123') - r17 = _compose_ntz('1325568896.12') - r18 = _compose_ntz('1325568896.1') - r19 = _compose_ntz('1325568896') - - # TIMESTAMP_LTZ - r20 = _compose_ltz('1325568896.123456', PST_TZ) - r21 = _compose_ltz('1325568896.123456', PST_TZ) - r22 = _compose_ltz('1325568896.123456', PST_TZ) - r23 = _compose_ltz('1325568896.123456', PST_TZ) - r24 = _compose_ltz('1325568896.12345', PST_TZ) - r25 = _compose_ltz('1325568896.1234', PST_TZ) - r26 = _compose_ltz('1325568896.123', PST_TZ) - r27 = _compose_ltz('1325568896.12', PST_TZ) - r28 = _compose_ltz('1325568896.1', PST_TZ) - r29 = _compose_ltz('1325568896', PST_TZ) - - # TIME - r30 = time(5, 7, 8, 123456) - r31 = time(5, 7, 8, 123456) - r32 = time(5, 7, 
8, 123456) - r33 = time(5, 7, 8, 123456) - r34 = time(5, 7, 8, 123450) - r35 = time(5, 7, 8, 123400) - r36 = time(5, 7, 8, 123000) - r37 = time(5, 7, 8, 120000) - r38 = time(5, 7, 8, 100000) - r39 = time(5, 7, 8) - - with conn_cnx() as cnx: - cur = cnx.cursor() - cur.execute(""" -ALTER SESSION SET TIMEZONE='{tz}'; -""".format(tz=PST_TZ)) - cur.execute(""" -SELECT - '2012-01-03 12:34:56.123456789+07:00'::timestamp_tz(9), - '2012-01-03 12:34:56.12345678+07:00'::timestamp_tz(8), - '2012-01-03 12:34:56.1234567+07:00'::timestamp_tz(7), - '2012-01-03 12:34:56.123456+07:00'::timestamp_tz(6), - '2012-01-03 12:34:56.12345+07:00'::timestamp_tz(5), - '2012-01-03 12:34:56.1234+07:00'::timestamp_tz(4), - '2012-01-03 12:34:56.123+07:00'::timestamp_tz(3), - '2012-01-03 12:34:56.12+07:00'::timestamp_tz(2), - '2012-01-03 12:34:56.1+07:00'::timestamp_tz(1), - '2012-01-03 12:34:56+07:00'::timestamp_tz(0), - '2012-01-03 05:34:56.123456789'::timestamp_ntz(9), - '2012-01-03 05:34:56.12345678'::timestamp_ntz(8), - '2012-01-03 05:34:56.1234567'::timestamp_ntz(7), - '2012-01-03 05:34:56.123456'::timestamp_ntz(6), - '2012-01-03 05:34:56.12345'::timestamp_ntz(5), - '2012-01-03 05:34:56.1234'::timestamp_ntz(4), - '2012-01-03 05:34:56.123'::timestamp_ntz(3), - '2012-01-03 05:34:56.12'::timestamp_ntz(2), - '2012-01-03 05:34:56.1'::timestamp_ntz(1), - '2012-01-03 05:34:56'::timestamp_ntz(0), - '2012-01-02 21:34:56.123456789'::timestamp_ltz(9), - '2012-01-02 21:34:56.12345678'::timestamp_ltz(8), - '2012-01-02 21:34:56.1234567'::timestamp_ltz(7), - '2012-01-02 21:34:56.123456'::timestamp_ltz(6), - '2012-01-02 21:34:56.12345'::timestamp_ltz(5), - '2012-01-02 21:34:56.1234'::timestamp_ltz(4), - '2012-01-02 21:34:56.123'::timestamp_ltz(3), - '2012-01-02 21:34:56.12'::timestamp_ltz(2), - '2012-01-02 21:34:56.1'::timestamp_ltz(1), - '2012-01-02 21:34:56'::timestamp_ltz(0), - '05:07:08.123456789'::time(9), - '05:07:08.12345678'::time(8), - '05:07:08.1234567'::time(7), - '05:07:08.123456'::time(6), - '05:07:08.12345'::time(5), - '05:07:08.1234'::time(4), - '05:07:08.123'::time(3), - '05:07:08.12'::time(2), - '05:07:08.1'::time(1), - '05:07:08'::time(0) -""") - ret = cur.fetchone() - assert ret[0] == r0 - assert ret[1] == r1 - assert ret[2] == r2 - assert ret[3] == r3 - assert ret[4] == r4 - assert ret[5] == r5 - assert ret[6] == r6 - assert ret[7] == r7 - assert ret[8] == r8 - assert ret[9] == r9 - assert ret[10] == r10 - assert ret[11] == r11 - assert ret[12] == r12 - assert ret[13] == r13 - assert ret[14] == r14 - assert ret[15] == r15 - assert ret[16] == r16 - assert ret[17] == r17 - assert ret[18] == r18 - assert ret[19] == r19 - assert ret[20] == r20 - assert ret[21] == r21 - assert ret[22] == r22 - assert ret[23] == r23 - assert ret[24] == r24 - assert ret[25] == r25 - assert ret[26] == r26 - assert ret[27] == r27 - assert ret[28] == r28 - assert ret[29] == r29 - assert ret[30] == r30 - assert ret[31] == r31 - assert ret[32] == r32 - assert ret[33] == r33 - assert ret[34] == r34 - assert ret[35] == r35 - assert ret[36] == r36 - assert ret[37] == r37 - assert ret[38] == r38 - assert ret[39] == r39 - - -def test_fetch_timestamps_snowsql(conn_cnx): - PST_TZ = "America/Los_Angeles" - - converter_class = SnowflakeConverterSnowSQL - sql = """ -SELECT - '2012-01-03 12:34:56.123456789+07:00'::timestamp_tz(9), - '2012-01-03 12:34:56.12345678+07:00'::timestamp_tz(8), - '2012-01-03 12:34:56.1234567+07:00'::timestamp_tz(7), - '2012-01-03 12:34:56.123456+07:00'::timestamp_tz(6), - '2012-01-03 12:34:56.12345+07:00'::timestamp_tz(5), - 
'2012-01-03 12:34:56.1234+07:00'::timestamp_tz(4), - '2012-01-03 12:34:56.123+07:00'::timestamp_tz(3), - '2012-01-03 12:34:56.12+07:00'::timestamp_tz(2), - '2012-01-03 12:34:56.1+07:00'::timestamp_tz(1), - '2012-01-03 12:34:56+07:00'::timestamp_tz(0), - '2012-01-03 05:34:56.123456789'::timestamp_ntz(9), - '2012-01-03 05:34:56.12345678'::timestamp_ntz(8), - '2012-01-03 05:34:56.1234567'::timestamp_ntz(7), - '2012-01-03 05:34:56.123456'::timestamp_ntz(6), - '2012-01-03 05:34:56.12345'::timestamp_ntz(5), - '2012-01-03 05:34:56.1234'::timestamp_ntz(4), - '2012-01-03 05:34:56.123'::timestamp_ntz(3), - '2012-01-03 05:34:56.12'::timestamp_ntz(2), - '2012-01-03 05:34:56.1'::timestamp_ntz(1), - '2012-01-03 05:34:56'::timestamp_ntz(0), - '2012-01-02 21:34:56.123456789'::timestamp_ltz(9), - '2012-01-02 21:34:56.12345678'::timestamp_ltz(8), - '2012-01-02 21:34:56.1234567'::timestamp_ltz(7), - '2012-01-02 21:34:56.123456'::timestamp_ltz(6), - '2012-01-02 21:34:56.12345'::timestamp_ltz(5), - '2012-01-02 21:34:56.1234'::timestamp_ltz(4), - '2012-01-02 21:34:56.123'::timestamp_ltz(3), - '2012-01-02 21:34:56.12'::timestamp_ltz(2), - '2012-01-02 21:34:56.1'::timestamp_ltz(1), - '2012-01-02 21:34:56'::timestamp_ltz(0), - '05:07:08.123456789'::time(9), - '05:07:08.12345678'::time(8), - '05:07:08.1234567'::time(7), - '05:07:08.123456'::time(6), - '05:07:08.12345'::time(5), - '05:07:08.1234'::time(4), - '05:07:08.123'::time(3), - '05:07:08.12'::time(2), - '05:07:08.1'::time(1), - '05:07:08'::time(0) -""" - with conn_cnx(converter_class=converter_class) as cnx: - cur = cnx.cursor() - cur.execute(""" -ALTER SESSION SET TIMEZONE='{tz}'; -""".format(tz=PST_TZ)) - cur.execute(""" -ALTER SESSION SET - TIMESTAMP_OUTPUT_FORMAT='YYYY-MM-DD HH24:MI:SS.FF9 TZH:TZM', - TIMESTAMP_NTZ_OUTPUT_FORMAT='YYYY-MM-DD HH24:MI:SS.FF9 TZH:TZM', - TIME_OUTPUT_FORMAT='HH24:MI:SS.FF9'; - """) - cur.execute(sql) - ret = cur.fetchone() - assert ret[0] == '2012-01-03 12:34:56.123456789 +0700' - assert ret[1] == '2012-01-03 12:34:56.123456780 +0700' - assert ret[2] == '2012-01-03 12:34:56.123456700 +0700' - assert ret[3] == '2012-01-03 12:34:56.123456000 +0700' - assert ret[4] == '2012-01-03 12:34:56.123450000 +0700' - assert ret[5] == '2012-01-03 12:34:56.123400000 +0700' - assert ret[6] == '2012-01-03 12:34:56.123000000 +0700' - assert ret[7] == '2012-01-03 12:34:56.120000000 +0700' - assert ret[8] == '2012-01-03 12:34:56.100000000 +0700' - assert ret[9] == '2012-01-03 12:34:56.000000000 +0700' - assert ret[10] == '2012-01-03 05:34:56.123456789 ' - assert ret[11] == '2012-01-03 05:34:56.123456780 ' - assert ret[12] == '2012-01-03 05:34:56.123456700 ' - assert ret[13] == '2012-01-03 05:34:56.123456000 ' - assert ret[14] == '2012-01-03 05:34:56.123450000 ' - assert ret[15] == '2012-01-03 05:34:56.123400000 ' - assert ret[16] == '2012-01-03 05:34:56.123000000 ' - assert ret[17] == '2012-01-03 05:34:56.120000000 ' - assert ret[18] == '2012-01-03 05:34:56.100000000 ' - assert ret[19] == '2012-01-03 05:34:56.000000000 ' - assert ret[20] == '2012-01-02 21:34:56.123456789 -0800' - assert ret[21] == '2012-01-02 21:34:56.123456780 -0800' - assert ret[22] == '2012-01-02 21:34:56.123456700 -0800' - assert ret[23] == '2012-01-02 21:34:56.123456000 -0800' - assert ret[24] == '2012-01-02 21:34:56.123450000 -0800' - assert ret[25] == '2012-01-02 21:34:56.123400000 -0800' - assert ret[26] == '2012-01-02 21:34:56.123000000 -0800' - assert ret[27] == '2012-01-02 21:34:56.120000000 -0800' - assert ret[28] == '2012-01-02 21:34:56.100000000 -0800' - assert 
ret[29] == '2012-01-02 21:34:56.000000000 -0800' - assert ret[30] == '05:07:08.123456789' - assert ret[31] == '05:07:08.123456780' - assert ret[32] == '05:07:08.123456700' - assert ret[33] == '05:07:08.123456000' - assert ret[34] == '05:07:08.123450000' - assert ret[35] == '05:07:08.123400000' - assert ret[36] == '05:07:08.123000000' - assert ret[37] == '05:07:08.120000000' - assert ret[38] == '05:07:08.100000000' - assert ret[39] == '05:07:08.000000000' - - cur.execute(""" -ALTER SESSION SET - TIMESTAMP_OUTPUT_FORMAT='YYYY-MM-DD HH24:MI:SS.FF6 TZH:TZM', - TIMESTAMP_NTZ_OUTPUT_FORMAT='YYYY-MM-DD HH24:MI:SS.FF6 TZH:TZM', - TIME_OUTPUT_FORMAT='HH24:MI:SS.FF6'; - """) - cur.execute(sql) - ret = cur.fetchone() - assert ret[0] == '2012-01-03 12:34:56.123456 +0700' - assert ret[1] == '2012-01-03 12:34:56.123456 +0700' - assert ret[2] == '2012-01-03 12:34:56.123456 +0700' - assert ret[3] == '2012-01-03 12:34:56.123456 +0700' - assert ret[4] == '2012-01-03 12:34:56.123450 +0700' - assert ret[5] == '2012-01-03 12:34:56.123400 +0700' - assert ret[6] == '2012-01-03 12:34:56.123000 +0700' - assert ret[7] == '2012-01-03 12:34:56.120000 +0700' - assert ret[8] == '2012-01-03 12:34:56.100000 +0700' - assert ret[9] == '2012-01-03 12:34:56.000000 +0700' - assert ret[10] == '2012-01-03 05:34:56.123456 ' - assert ret[11] == '2012-01-03 05:34:56.123456 ' - assert ret[12] == '2012-01-03 05:34:56.123456 ' - assert ret[13] == '2012-01-03 05:34:56.123456 ' - assert ret[14] == '2012-01-03 05:34:56.123450 ' - assert ret[15] == '2012-01-03 05:34:56.123400 ' - assert ret[16] == '2012-01-03 05:34:56.123000 ' - assert ret[17] == '2012-01-03 05:34:56.120000 ' - assert ret[18] == '2012-01-03 05:34:56.100000 ' - assert ret[19] == '2012-01-03 05:34:56.000000 ' - assert ret[20] == '2012-01-02 21:34:56.123456 -0800' - assert ret[21] == '2012-01-02 21:34:56.123456 -0800' - assert ret[22] == '2012-01-02 21:34:56.123456 -0800' - assert ret[23] == '2012-01-02 21:34:56.123456 -0800' - assert ret[24] == '2012-01-02 21:34:56.123450 -0800' - assert ret[25] == '2012-01-02 21:34:56.123400 -0800' - assert ret[26] == '2012-01-02 21:34:56.123000 -0800' - assert ret[27] == '2012-01-02 21:34:56.120000 -0800' - assert ret[28] == '2012-01-02 21:34:56.100000 -0800' - assert ret[29] == '2012-01-02 21:34:56.000000 -0800' - assert ret[30] == '05:07:08.123456' - assert ret[31] == '05:07:08.123456' - assert ret[32] == '05:07:08.123456' - assert ret[33] == '05:07:08.123456' - assert ret[34] == '05:07:08.123450' - assert ret[35] == '05:07:08.123400' - assert ret[36] == '05:07:08.123000' - assert ret[37] == '05:07:08.120000' - assert ret[38] == '05:07:08.100000' - assert ret[39] == '05:07:08.000000' - - -def test_fetch_timestamps_negative_epoch(conn_cnx): - """ - Negative epoch - """ - r0 = _compose_ntz('-602594703.876544') - r1 = _compose_ntz('1325594096.123456') - with conn_cnx() as cnx: - cur = cnx.cursor() - cur.execute(""" -SELECT - '1950-11-27 12:34:56.123456'::timestamp_ntz(6), - '2012-01-03 12:34:56.123456'::timestamp_ntz(6) -""") - ret = cur.fetchone() - assert ret[0] == r0 - assert ret[1] == r1 - - -def test_date_0001_9999(conn_cnx): - """ - Test 0001 and 9999 for all platforms - """ - with conn_cnx( - converter_class=SnowflakeConverterSnowSQL, - support_negative_year=True) as cnx: - cnx.cursor().execute(""" -ALTER SESSION SET - DATE_OUTPUT_FORMAT='YYYY-MM-DD' -""") - cur = cnx.cursor() - cur.execute(""" -SELECT - DATE_FROM_PARTS(1900, 1, 1), - DATE_FROM_PARTS(2500, 2, 3), - DATE_FROM_PARTS(1, 10, 31), - DATE_FROM_PARTS(9999, 3, 20) - ; 
-""") - ret = cur.fetchone() - assert ret[0] == '1900-01-01' - assert ret[1] == '2500-02-03' - assert ret[2] == '0001-10-31' - assert ret[3] == '9999-03-20' - - -@pytest.mark.skipif(PY2 or IS_WINDOWS, reason="year out of range error") -def test_five_or_more_digit_year_date_converter(conn_cnx): - """ - Past and future dates - """ - with conn_cnx( - converter_class=SnowflakeConverterSnowSQL, - support_negative_year=True) as cnx: - cnx.cursor().execute(""" -ALTER SESSION SET - DATE_OUTPUT_FORMAT='YYYY-MM-DD' -""") - cur = cnx.cursor() - cur.execute(""" -SELECT - DATE_FROM_PARTS(10000, 1, 1), - DATE_FROM_PARTS(-0001, 2, 5), - DATE_FROM_PARTS(56789, 3, 4), - DATE_FROM_PARTS(198765, 4, 3), - DATE_FROM_PARTS(-234567, 5, 2) - ; -""") - ret = cur.fetchone() - assert ret[0] == '10000-01-01' - assert ret[1] == '-0001-02-05' - assert ret[2] == '56789-03-04' - assert ret[3] == '198765-04-03' - assert ret[4] == '-234567-05-02' - - cnx.cursor().execute(""" -ALTER SESSION SET - DATE_OUTPUT_FORMAT='YY-MM-DD' -""") - cur = cnx.cursor() - cur.execute(""" -SELECT - DATE_FROM_PARTS(10000, 1, 1), - DATE_FROM_PARTS(-0001, 2, 5), - DATE_FROM_PARTS(56789, 3, 4), - DATE_FROM_PARTS(198765, 4, 3), - DATE_FROM_PARTS(-234567, 5, 2) - ; -""") - ret = cur.fetchone() - assert ret[0] == '00-01-01' - assert ret[1] == '-01-02-05' - assert ret[2] == '89-03-04' - assert ret[3] == '65-04-03' - assert ret[4] == '-67-05-02' - - -def test_franction_followed_by_year_format(conn_cnx): - """ - Both year and franctions are included but fraction shows up followed by - year. - """ - with conn_cnx(converter_class=SnowflakeConverterSnowSQL) as cnx: - cnx.cursor().execute(""" -ALTER SESSION SET - TIMESTAMP_OUTPUT_FORMAT='HH24:MI:SS.FF6 MON DD, YYYY', - TIMESTAMP_NTZ_OUTPUT_FORMAT='HH24:MI:SS.FF6 MON DD, YYYY' -""") - for rec in cnx.cursor().execute(""" -SELECT - '2012-01-03 05:34:56.123456'::TIMESTAMP_NTZ(6) -"""): - assert rec[0] == '05:34:56.123456 Jan 03, 2012' - - -def test_fetch_fraction_timestamp(conn_cnx): - """ - Additional fetch timestamp tests. Mainly used for SnowSQL - which converts to string representations. 
- """ - PST_TZ = "America/Los_Angeles" - - converter_class = SnowflakeConverterSnowSQL - sql = """ -SELECT - '1900-01-01T05:00:00.000Z'::timestamp_tz(7), - '1900-01-01T05:00:00.000'::timestamp_ntz(7), - '1900-01-01T05:00:01.000Z'::timestamp_tz(7), - '1900-01-01T05:00:01.000'::timestamp_ntz(7), - '1900-01-01T05:00:01.012Z'::timestamp_tz(7), - '1900-01-01T05:00:01.012'::timestamp_ntz(7), - '1900-01-01T05:00:00.012Z'::timestamp_tz(7), - '1900-01-01T05:00:00.012'::timestamp_ntz(7), - '2100-01-01T05:00:00.012Z'::timestamp_tz(7), - '2100-01-01T05:00:00.012'::timestamp_ntz(7), - '1970-01-01T00:00:00Z'::timestamp_tz(7), - '1970-01-01T00:00:00'::timestamp_ntz(7) -""" - with conn_cnx(converter_class=converter_class) as cnx: - cur = cnx.cursor() - cur.execute(""" -ALTER SESSION SET TIMEZONE='{tz}'; -""".format(tz=PST_TZ)) - cur.execute(""" -ALTER SESSION SET - TIMESTAMP_OUTPUT_FORMAT='YYYY-MM-DD HH24:MI:SS.FF9 TZH:TZM', - TIMESTAMP_NTZ_OUTPUT_FORMAT='YYYY-MM-DD HH24:MI:SS.FF9', - TIME_OUTPUT_FORMAT='HH24:MI:SS.FF9'; - """) - cur.execute(sql) - ret = cur.fetchone() - assert ret[0] == '1900-01-01 05:00:00.000000000 +0000' - assert ret[1] == '1900-01-01 05:00:00.000000000' - assert ret[2] == '1900-01-01 05:00:01.000000000 +0000' - assert ret[3] == '1900-01-01 05:00:01.000000000' - assert ret[4] == '1900-01-01 05:00:01.012000000 +0000' - if not PY2 and not PY34_EXACT: - assert ret[5] == '1900-01-01 05:00:01.012000000' - assert ret[6] == '1900-01-01 05:00:00.012000000 +0000' - if not PY2 and not PY34_EXACT: - assert ret[7] == '1900-01-01 05:00:00.012000000' - assert ret[8] == '2100-01-01 05:00:00.012000000 +0000' - assert ret[9] == '2100-01-01 05:00:00.012000000' - assert ret[10] == '1970-01-01 00:00:00.000000000 +0000' - assert ret[11] == '1970-01-01 00:00:00.000000000' diff --git a/test/test_converter_more_timestamp.py b/test/test_converter_more_timestamp.py deleted file mode 100644 index 6795479bd..000000000 --- a/test/test_converter_more_timestamp.py +++ /dev/null @@ -1,156 +0,0 @@ -from datetime import timedelta, datetime - -import pytz -from dateutil.parser import parse - -from snowflake.connector.converter import ( - _generate_tzinfo_from_tzoffset, - ZERO_EPOCH) - - -def test_fetch_various_timestamps(conn_cnx): - """ - More coverage of timestamp - Currently TIMESTAMP_LTZ is not tested. 
- """ - PST_TZ = "America/Los_Angeles" - epoch_times = [ - '1325568896', - '-2208943503', - '0', - '-1' - ] - timezones = [ - '+07:00', - '+00:00', - '-01:00', - '-09:00' - ] - fractions = '123456789' - data_types = ['TIMESTAMP_TZ', 'TIMESTAMP_NTZ'] - - data = [] - for dt in data_types: - for et in epoch_times: - if dt == 'TIMESTAMP_TZ': - for tz in timezones: - tzdiff = (int(tz[1:3]) * 60 + int(tz[4:6])) * ( - -1 if tz[0] == '-' else 1) - tzinfo = _generate_tzinfo_from_tzoffset(tzdiff) - try: - ts = datetime.fromtimestamp(float(et), tz=tzinfo) - except OSError: - ts = ZERO_EPOCH + timedelta(seconds=float(et)) - if pytz.utc != tzinfo: - ts += tzinfo.utcoffset(ts) - ts = ts.replace(tzinfo=tzinfo) - data.append({ - 'scale': 0, - 'dt': dt, - 'inp': ts.strftime( - '%Y-%m-%d %H:%M:%S{tz}'.format(tz=tz)), - 'out': ts - }) - for idx in range(len(fractions)): - scale = idx + 1 - if idx + 1 != 6: # SNOW-28597 - try: - ts0 = datetime.fromtimestamp(float(et), - tz=tzinfo) - except OSError: - ts0 = ZERO_EPOCH + timedelta(seconds=float(et)) - if pytz.utc != tzinfo: - ts0 += tzinfo.utcoffset(ts0) - ts0 = ts0.replace(tzinfo=tzinfo) - ts0_str = ts0.strftime( - '%Y-%m-%d %H:%M:%S.{ff}{tz}'.format( - ff=fractions[:idx + 1], tz=tz)) - ts1 = parse(ts0_str) - data.append({ - 'scale': scale, - 'dt': dt, - 'inp': ts0_str, - 'out': ts1 - }) - elif dt == 'TIMESTAMP_LTZ': - # WIP. this test work in edge case - tzinfo = pytz.timezone(PST_TZ) - ts0 = datetime.fromtimestamp(float(et)) - ts0 = pytz.utc.localize(ts0).astimezone(tzinfo) - ts0_str = ts0.strftime('%Y-%m-%d %H:%M:%S') - ts1 = ts0 - data.append({ - 'scale': 0, - 'dt': dt, - 'inp': ts0_str, - 'out': ts1 - }) - for idx in range(len(fractions)): - ts0 = datetime.fromtimestamp(float(et)) - ts0 = pytz.utc.localize(ts0).astimezone(tzinfo) - ts0_str = ts0.strftime( - '%Y-%m-%d %H:%M:%S.{ff}'.format( - ff=fractions[:idx + 1] - )) - ts1 = ts0 + timedelta(seconds=float( - '0.{0}'.format(fractions[:idx + 1]))) - data.append({ - 'scale': idx + 1, - 'dt': dt, - 'inp': ts0_str, - 'out': ts1 - }) - else: - # TIMESTAMP_NTZ - try: - ts0 = datetime.fromtimestamp(float(et)) - except OSError: - ts0 = ZERO_EPOCH + timedelta(seconds=(float(et))) - ts0_str = ts0.strftime('%Y-%m-%d %H:%M:%S') - ts1 = parse(ts0_str) - data.append({ - 'scale': 0, - 'dt': dt, - 'inp': ts0_str, - 'out': ts1 - }) - for idx in range(len(fractions)): - try: - ts0 = datetime.fromtimestamp(float(et)) - except OSError: - ts0 = ZERO_EPOCH + timedelta(seconds=(float(et))) - ts0_str = ts0.strftime( - '%Y-%m-%d %H:%M:%S.{ff}'.format( - ff=fractions[:idx + 1])) - ts1 = parse(ts0_str) - data.append({ - 'scale': idx + 1, - 'dt': dt, - 'inp': ts0_str, - 'out': ts1 - }) - sql = "SELECT " - for d in data: - sql += "'{inp}'::{dt}({scale}), ".format( - inp=d['inp'], - dt=d['dt'], - scale=d['scale'] - ) - sql += "1" - with conn_cnx() as cnx: - cur = cnx.cursor() - cur.execute(""" -ALTER SESSION SET TIMEZONE='{tz}'; -""".format(tz=PST_TZ)) - rec = cur.execute(sql).fetchone() - for idx, d in enumerate(data): - comp, lower, higher = _in_range(d['out'], rec[idx]) - assert comp, 'data: {d}: target={target}, lower={lower}, higher={' \ - 'higher}'.format( - d=d, target=rec[idx], lower=lower, higher=higher) - - -def _in_range(reference, target): - lower = reference - timedelta(microseconds=1) - higher = reference + timedelta(microseconds=1) - return lower <= target <= higher, lower, higher diff --git a/test/test_converter_null.py b/test/test_converter_null.py deleted file mode 100644 index 4f4fa613a..000000000 --- 
a/test/test_converter_null.py +++ /dev/null @@ -1,59 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. -# - -import re -from datetime import datetime, timedelta - -import six - -import snowflake.connector -from snowflake.connector.converter import ZERO_EPOCH -from snowflake.connector.converter_null import SnowflakeNoConverterToPython - -NUMERIC_VALUES = re.compile(r'-?[\d.]*\d$') - - -def test_converter_no_converter_to_python(db_parameters): - """ - Test a converter that doesn't translate the Snowflake internal data - representation to the Python native types. - """ - con = snowflake.connector.connect( - user=db_parameters['user'], - password=db_parameters['password'], - host=db_parameters['host'], - port=db_parameters['port'], - account=db_parameters['account'], - database=db_parameters['database'], - schema=db_parameters['schema'], - protocol=db_parameters['protocol'], - timezone='UTC', - converter_class=SnowflakeNoConverterToPython, - ) - - ret = con.cursor().execute(""" -select current_timestamp(), - 1::NUMBER, - 2.0::FLOAT, - 'test1' -""").fetchone() - assert isinstance(ret[0], six.text_type) - assert NUMERIC_VALUES.match(ret[0]) - assert isinstance(ret[1], six.text_type) - assert NUMERIC_VALUES.match(ret[1]) - con.cursor().execute("create or replace table testtb(c1 timestamp_ntz(6))") - try: - current_time = datetime.utcnow() - # binding value should have no impact - con.cursor().execute( - "insert into testtb(c1) values(%s)", - (current_time,)) - ret = con.cursor().execute( - "select * from testtb" - ).fetchone()[0] - assert ZERO_EPOCH + timedelta(seconds=(float(ret))) == current_time - finally: - con.cursor().execute("drop table if exists testtb") diff --git a/test/test_cursor.py b/test/test_cursor.py deleted file mode 100644 index 2b2fef4d8..000000000 --- a/test/test_cursor.py +++ /dev/null @@ -1,845 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. 
-# -import decimal -import json -import os -import time -from datetime import datetime - -import pytest -import pytz - -import snowflake.connector -from snowflake.connector import (constants, errorcode, errors) -from snowflake.connector.compat import (BASE_EXCEPTION_CLASS, PY2, IS_WINDOWS) - - -def _drop_warehouse(conn, db_parameters): - conn.cursor().execute("drop warehouse if exists {0}".format( - db_parameters['name_wh'] - )) - - -@pytest.fixture() -def conn(request, conn_cnx, db_parameters): - def fin(): - with conn_cnx() as cnx: - cnx.cursor().execute( - 'use {db}.{schema}'.format( - db=db_parameters['database'], - schema=db_parameters['schema'])) - cnx.cursor().execute("drop table {name}".format( - name=db_parameters['name'])) - - request.addfinalizer(fin) - - with conn_cnx() as cnx: - cnx.cursor().execute(""" -create table {name} ( -aa int, -dt date, -tm time, -ts timestamp, -tsltz timestamp_ltz, -tsntz timestamp_ntz, -tstz timestamp_tz, -pct float, -ratio number(5,2), -b binary) -""".format(name=db_parameters['name'])) - - return conn_cnx - - -def _check_results(cursor, results): - assert cursor.sfqid, 'Snowflake query id is None' - assert cursor.rowcount == 3, 'the number of records' - assert results[0] == 65432, 'the first result was wrong' - assert results[1] == 98765, 'the second result was wrong' - assert results[2] == 123456, 'the third result was wrong' - - -def test_insert_select(conn, db_parameters): - """ - Inserts and selects integer data - """ - with conn() as cnx: - c = cnx.cursor() - try: - c.execute( - "insert into {name}(aa) values(123456)," - "(98765),(65432)".format(name=db_parameters['name'])) - cnt = 0 - for rec in c: - cnt += int(rec[0]) - assert cnt == 3, 'wrong number of records were inserted' - assert c.rowcount == 3, 'wrong number of records were inserted' - finally: - c.close() - - try: - c = cnx.cursor() - c.execute("select aa from {name} order by aa".format( - name=db_parameters['name'])) - results = [] - for rec in c: - results.append(rec[0]) - _check_results(c, results) - finally: - c.close() - - with cnx.cursor(snowflake.connector.DictCursor) as c: - c.execute("select aa from {name} order by aa".format( - name=db_parameters['name'])) - results = [] - for rec in c: - results.append(rec['AA']) - _check_results(c, results) - - -def test_insert_and_select_by_separate_connection( - conn, db_parameters): - """ - Insert a record and select it by a separate connection. 
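test_insert_select above also exercises snowflake.connector.DictCursor, under which rows come back as dicts keyed by upper-cased column name rather than as tuples. The pattern in isolation (credentials are placeholders):

```python
import snowflake.connector

cnx = snowflake.connector.connect(
    user='<user>', password='<password>', account='<account>')  # placeholders
with cnx.cursor(snowflake.connector.DictCursor) as cur:
    cur.execute("SELECT 1 AS aa, 'x' AS bb")
    row = cur.fetchone()
    assert row['AA'] == 1 and row['BB'] == 'x'
cnx.close()
```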
- """ - with conn() as cnx: - result = cnx.cursor().execute( - "insert into {name}(aa) values({value})".format( - name=db_parameters['name'], value='1234')) - cnt = 0 - for rec in result: - cnt += int(rec[0]) - assert cnt == 1, 'wrong number of records were inserted' - assert result.rowcount == 1, 'wrong number of records were inserted' - - cnx2 = snowflake.connector.connect( - user=db_parameters['user'], - password=db_parameters['password'], - host=db_parameters['host'], - port=db_parameters['port'], - account=db_parameters['account'], - database=db_parameters['database'], - schema=db_parameters['schema'], - protocol=db_parameters['protocol'], - timezone='UTC', - ) - try: - c = cnx2.cursor() - c.execute("select aa from {name}".format(name=db_parameters['name'])) - results = [] - for rec in c: - results.append(rec[0]) - c.close() - assert results[0] == 1234, 'the first result was wrong' - assert result.rowcount == 1, 'wrong number of records were selected' - finally: - cnx2.close() - - -def _total_milliseconds_from_timedelta(td): - """ - Returns the total number of milliseconds contained in the duration object. - """ - return (td.microseconds + ( - td.seconds + td.days * 24 * 3600) * 10 ** 6) // 10 ** 3 - - -def _total_seconds_from_timedelta(td): - """ - Returns the total number of seconds contained in the duration object. - """ - return _total_milliseconds_from_timedelta(td) // 10 ** 3 - - -def test_insert_timestamp_select(conn, db_parameters): - """ - Insert and get timestamp, timestamp with tz, date, and time. - - Currently the session parameter TIMEZONE is ignored - """ - PST_TZ = "America/Los_Angeles" - JST_TZ = "Asia/Tokyo" - current_timestamp = datetime.utcnow() - current_timestamp = current_timestamp.replace(tzinfo=pytz.timezone(PST_TZ)) - current_date = current_timestamp.date() - current_time = current_timestamp.time() - - other_timestamp = current_timestamp.replace(tzinfo=pytz.timezone(JST_TZ)) - - with conn() as cnx: - cnx.cursor().execute("alter session set TIMEZONE=%s", (PST_TZ,)) - c = cnx.cursor() - try: - fmt = ("insert into {name}(aa, tsltz, tstz, tsntz, dt, tm) " - "values(%(value)s,%(tsltz)s, %(tstz)s, %(tsntz)s, " - "%(dt)s, %(tm)s)") - c.execute(fmt.format(name=db_parameters['name']), { - 'value': 1234, - 'tsltz': current_timestamp, - 'tstz': other_timestamp, - 'tsntz': current_timestamp, - 'dt': current_date, - 'tm': current_time - }) - cnt = 0 - for rec in c: - cnt += int(rec[0]) - assert cnt == 1, 'wrong number of records were inserted' - assert c.rowcount == 1, 'wrong number of records were selected' - finally: - c.close() - - cnx2 = snowflake.connector.connect( - user=db_parameters['user'], - password=db_parameters['password'], - host=db_parameters['host'], - port=db_parameters['port'], - account=db_parameters['account'], - database=db_parameters['database'], - schema=db_parameters['schema'], - protocol=db_parameters['protocol'], - timezone='UTC', - ) - try: - c = cnx2.cursor() - c.execute("select aa, tsltz, tstz, tsntz, dt, tm from {name}".format( - name=db_parameters['name'])) - - result_numeric_value = [] - result_timestamp_value = [] - result_other_timestamp_value = [] - result_ntz_timestamp_value = [] - result_date_value = [] - result_time_value = [] - - for (aa, ts, tstz, tsntz, dt, tm) in c: - result_numeric_value.append(aa) - result_timestamp_value.append(ts) - result_other_timestamp_value.append(tstz) - result_ntz_timestamp_value.append(tsntz) - result_date_value.append(dt) - result_time_value.append(tm) - c.close() - assert result_numeric_value[0] == 
1234, \ - 'the integer result was wrong' - - td_diff = _total_milliseconds_from_timedelta( - current_timestamp - result_timestamp_value[0]) - assert td_diff == 0, 'the timestamp result was wrong' - - td_diff = _total_milliseconds_from_timedelta( - other_timestamp - result_other_timestamp_value[0]) - assert td_diff == 0, 'the other timestamp result was wrong' - - td_diff = _total_milliseconds_from_timedelta( - current_timestamp.replace(tzinfo=None) - - result_ntz_timestamp_value[0]) - assert td_diff == 0, 'the other timestamp result was wrong' - - assert current_date == result_date_value[0], \ - 'the date result was wrong' - - assert current_time == result_time_value[0], \ - 'the time result was wrong' - - desc = c.description - assert len(desc) == 6, 'invalid number of column meta data' - assert desc[0][0].upper() == 'AA', 'invalid column name' - assert desc[1][0].upper() == 'TSLTZ', 'invalid column name' - assert desc[2][0].upper() == 'TSTZ', 'invalid column name' - assert desc[3][0].upper() == 'TSNTZ', 'invalid column name' - assert desc[4][0].upper() == 'DT', 'invalid column name' - assert desc[5][0].upper() == 'TM', 'invalid column name' - assert constants.FIELD_ID_TO_NAME[desc[0][1]] == 'FIXED', \ - 'invalid column name: {0}'.format( - constants.FIELD_ID_TO_NAME[desc[0][1]]) - assert constants.FIELD_ID_TO_NAME[desc[1][1]] == 'TIMESTAMP_LTZ', \ - 'invalid column name' - assert constants.FIELD_ID_TO_NAME[desc[2][1]] == 'TIMESTAMP_TZ', \ - 'invalid column name' - assert constants.FIELD_ID_TO_NAME[desc[3][1]] == 'TIMESTAMP_NTZ', \ - 'invalid column name' - assert constants.FIELD_ID_TO_NAME[desc[4][1]] == 'DATE', \ - 'invalid column name' - assert constants.FIELD_ID_TO_NAME[desc[5][1]] == 'TIME', \ - 'invalid column name' - finally: - cnx2.close() - - -def test_insert_timestamp_ltz(conn, db_parameters): - """ - Inserts and retrieve timestamp ltz - """ - tzstr = 'America/New_York' - # sync with the session parameter - with conn() as cnx: - cnx.cursor().execute( - "alter session set timezone='{tzstr}'".format(tzstr=tzstr)) - - current_time = datetime.now() - current_time = current_time.replace(tzinfo=pytz.timezone(tzstr)) - - c = cnx.cursor() - try: - fmt = "insert into {name}(aa, tsltz) values(%(value)s,%(ts)s)" - c.execute(fmt.format(name=db_parameters['name']), { - 'value': 8765, - 'ts': current_time, - }) - cnt = 0 - for rec in c: - cnt += int(rec[0]) - assert cnt == 1, 'wrong number of records were inserted' - finally: - c.close() - - try: - c = cnx.cursor() - c.execute("select aa,tsltz from {name}".format( - name=db_parameters['name'])) - result_numeric_value = [] - result_timestamp_value = [] - for (aa, ts) in c: - result_numeric_value.append(aa) - result_timestamp_value.append(ts) - - td_diff = _total_milliseconds_from_timedelta( - current_time - result_timestamp_value[0]) - - assert td_diff == 0, 'the first result was wrong' - finally: - c.close() - - -def test_struct_time(conn, db_parameters): - """ - Binds struct_time object for updating timestamp - """ - tzstr = 'America/New_York' - os.environ['TZ'] = tzstr - if not IS_WINDOWS: - time.tzset() - test_time = time.strptime("30 Sep 01 11:20:30", "%d %b %y %H:%M:%S") - - with conn() as cnx: - c = cnx.cursor() - try: - fmt = "insert into {name}(aa, tsltz) values(%(value)s,%(ts)s)" - c.execute(fmt.format(name=db_parameters['name']), { - 'value': 87654, - 'ts': test_time, - }) - cnt = 0 - for rec in c: - cnt += int(rec[0]) - finally: - c.close() - os.environ['TZ'] = 'UTC' - if not IS_WINDOWS: - time.tzset() - assert cnt == 1, 'wrong 
number of records were inserted' - - try: - result = cnx.cursor().execute( - "select aa, tsltz from {name}".format( - name=db_parameters['name'])) - for (aa, tsltz) in result: - pass - - tsltz -= tsltz.tzinfo.utcoffset(tsltz) - - assert test_time.tm_year == tsltz.year, "Year didn't match" - assert test_time.tm_mon == tsltz.month, "Month didn't match" - assert test_time.tm_mday == tsltz.day, "Day didn't match" - assert test_time.tm_hour == tsltz.hour, "Hour didn't match" - assert test_time.tm_min == tsltz.minute, "Minute didn't match" - assert test_time.tm_sec == tsltz.second, "Second didn't match" - finally: - os.environ['TZ'] = 'UTC' - if not IS_WINDOWS: - time.tzset() - - -@pytest.mark.skipif(PY2, reason=""" -Binary not supported in Python 2 connector. -""") -def test_insert_binary_select(conn, db_parameters): - """ - Insert and get a binary value. - """ - value = b'\x00\xFF\xA1\xB2\xC3' - - with conn() as cnx: - c = cnx.cursor() - try: - fmt = ("insert into {name}(b) values(%(b)s)") - c.execute(fmt.format(name=db_parameters['name']), {'b': value}) - count = sum(int(rec[0]) for rec in c) - assert count == 1, 'wrong number of records were inserted' - assert c.rowcount == 1, 'wrong number of records were selected' - finally: - c.close() - - cnx2 = snowflake.connector.connect( - user=db_parameters['user'], - password=db_parameters['password'], - host=db_parameters['host'], - port=db_parameters['port'], - account=db_parameters['account'], - database=db_parameters['database'], - schema=db_parameters['schema'], - protocol=db_parameters['protocol'], - ) - try: - c = cnx2.cursor() - c.execute("select b from {name}".format(name=db_parameters['name'])) - - results = [b for (b,) in c] - assert value == results[0], 'the binary result was wrong' - - desc = c.description - assert len(desc) == 1, 'invalid number of column meta data' - assert desc[0][0].upper() == 'B', 'invalid column name' - assert constants.FIELD_ID_TO_NAME[desc[0][1]] == 'BINARY', \ - 'invalid column name' - finally: - cnx2.close() - - -def test_insert_binary_select_with_bytearray(conn, db_parameters): - """ - Insert and get a binary value using the bytearray type. 
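The binary round-trip tests above lean on the same client-side binding used throughout this file: bytes and bytearray parameters are sent as Snowflake BINARY, through either positional %s or named %(name)s placeholders. In miniature (credentials and the table t with BINARY column b are placeholders):

```python
import snowflake.connector

cnx = snowflake.connector.connect(
    user='<user>', password='<password>', account='<account>')  # placeholders
value = b'\x00\xFF\xA1\xB2\xC3'

# Named (pyformat) style, as in the deleted test ...
cnx.cursor().execute("INSERT INTO t(b) VALUES(%(b)s)", {'b': value})
# ... and the equivalent positional style, here with a bytearray.
cnx.cursor().execute("INSERT INTO t(b) VALUES(%s)", (bytearray(value),))
cnx.close()
```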
- """ - value = bytearray(b'\x00\xFF\xA1\xB2\xC3') - - with conn() as cnx: - c = cnx.cursor() - try: - fmt = ("insert into {name}(b) values(%(b)s)") - c.execute(fmt.format(name=db_parameters['name']), {'b': value}) - count = sum(int(rec[0]) for rec in c) - assert count == 1, 'wrong number of records were inserted' - assert c.rowcount == 1, 'wrong number of records were selected' - finally: - c.close() - - cnx2 = snowflake.connector.connect( - user=db_parameters['user'], - password=db_parameters['password'], - host=db_parameters['host'], - port=db_parameters['port'], - account=db_parameters['account'], - database=db_parameters['database'], - schema=db_parameters['schema'], - protocol=db_parameters['protocol'], - ) - try: - c = cnx2.cursor() - c.execute("select b from {name}".format(name=db_parameters['name'])) - - results = [b for (b,) in c] - assert bytes(value) == results[0], 'the binary result was wrong' - - desc = c.description - assert len(desc) == 1, 'invalid number of column meta data' - assert desc[0][0].upper() == 'B', 'invalid column name' - assert constants.FIELD_ID_TO_NAME[desc[0][1]] == 'BINARY', \ - 'invalid column name' - finally: - cnx2.close() - - -def test_variant(conn, db_parameters): - """Variant including JSON object - """ - - name_variant = db_parameters['name'] + "_variant" - with conn() as cnx: - cnx.cursor().execute(""" -create table {name} ( -created_at timestamp, data variant) -""".format(name=name_variant)) - - try: - with conn() as cnx: - current_time = datetime.now() - c = cnx.cursor() - try: - fmt = ("insert into {name}(created_at, data) " - "select column1, parse_json(column2) " - "from values(%(created_at)s, %(data)s)") - c.execute(fmt.format(name=name_variant), { - 'created_at': current_time, - 'data': ('{"SESSION-PARAMETERS":{' - '"TIMEZONE":"UTC", "SPECIAL_FLAG":true}}') - }) - cnt = 0 - for rec in c: - cnt += int(rec[0]) - assert cnt == 1, 'wrong number of records were inserted' - assert c.rowcount == 1, \ - 'wrong number of records were inserted' - finally: - c.close() - - result = cnx.cursor().execute( - "select created_at, data from {name}".format( - name=name_variant)) - _, data = result.fetchone() - data = json.loads(data) - assert data['SESSION-PARAMETERS']['SPECIAL_FLAG'], \ - ("JSON data should be parsed properly. " - "Invalid JSON data") - finally: - with conn() as cnx: - cnx.cursor().execute( - "drop table {name}".format(name=name_variant)) - - -def test_callproc(conn_cnx): - """Callproc. nop as of now - """ - with conn_cnx() as cnx: - with pytest.raises(errors.NotSupportedError): - cnx.cursor().callproc("whatever the stored procedure") - - -def test_invalid_bind_data_type(conn_cnx): - """Invalid bind data type - """ - with conn_cnx() as cnx: - with pytest.raises(errors.ProgrammingError): - cnx.cursor().execute( - "select 1 from dual where 1=%s", ([1, 2, 3],)) - - -def test_timeout_query(conn_cnx): - """Timeout - """ - with conn_cnx() as cnx: - cnx.cursor().execute("select 1") - c = cnx.cursor() - try: - c.execute( - 'select seq8() as c1 ' - 'from table(generator(timeLimit => 60))', - timeout=5) - raise Exception("Must be canceled") - except BASE_EXCEPTION_CLASS as err: - assert isinstance(err, errors.ProgrammingError), \ - "Programming Error Exception" - assert err.errno == 604, "Invalid error code" - finally: - c.close() - - -def test_executemany(conn, db_parameters): - """Executes many statements. Client binding is supported by either - dictor list data types. 
- - NOTE the binding data type is dict and tuple, respectively - """ - with conn() as cnx: - c = cnx.cursor() - fmt = 'insert into {name}(aa) values(%(value)s)'.format( - name=db_parameters['name']) - c.executemany(fmt, [ - {'value': '1234'}, - {'value': '234'}, - {'value': '34'}, - {'value': '4'}, - ]) - cnt = 0 - for rec in c: - cnt += int(rec[0]) - assert cnt == 4, 'number of records' - assert c.rowcount == 4, 'wrong number of records were inserted' - c.close() - - c = cnx.cursor() - fmt = 'insert into {name}(aa) values(%s)'.format( - name=db_parameters['name']) - c.executemany(fmt, [ - (12345,), - (1234,), - (234,), - (34,), - (4,), - ]) - rec = c.fetchone() - assert rec[0] == 5, 'number of records' - assert c.rowcount == 5, 'wrong number of records were inserted' - c.close() - - -def test_closed_cursor(conn, db_parameters): - """ - Attempt to use the closed cursor. It should raise errors - - NOTE the binding data type is scalar - """ - with conn() as cnx: - c = cnx.cursor() - fmt = 'insert into {name}(aa) values(%s)'.format( - name=db_parameters['name']) - c.executemany(fmt, [ - 12345, - 1234, - 234, - 34, - 4, - ]) - rec = c.fetchone() - assert rec[0] == 5, 'number of records' - assert c.rowcount == 5, 'number of records' - c.close() - - fmt = 'select aa from {name}'.format(name=db_parameters['name']) - try: - c.execute(fmt) - raise Exception('should fail as the cursor was closed.') - except snowflake.connector.Error as err: - assert err.errno == errorcode.ER_CURSOR_IS_CLOSED - - -def test_fetchmany(conn, db_parameters): - """ - Fetches many - """ - with conn() as cnx: - c = cnx.cursor() - fmt = 'insert into {name}(aa) values(%(value)s)'.format( - name=db_parameters['name']) - c.executemany(fmt, [ - {'value': '3456789'}, - {'value': '234567'}, - {'value': '1234'}, - {'value': '234'}, - {'value': '34'}, - {'value': '4'}, - ]) - cnt = 0 - for rec in c: - cnt += int(rec[0]) - assert cnt == 6, 'number of records' - assert c.rowcount == 6, 'number of records' - c.close() - - c = cnx.cursor() - fmt = 'select aa from {name} order by aa desc'.format( - name=db_parameters['name']) - c.execute(fmt) - - rows = c.fetchmany(2) - assert len(rows) == 2, 'The number of records' - assert rows[1][0] == 234567, 'The second record' - - rows = c.fetchmany(1) - assert len(rows) == 1, 'The number of records' - assert rows[0][0] == 1234, 'The first record' - - rows = c.fetchmany(5) - assert len(rows) == 3, 'The number of records' - assert rows[-1][0] == 4, 'The last record' - - rows = c.fetchmany(15) - assert len(rows) == 0, 'The number of records' - - c.close() - - -def test_process_params(conn, db_parameters): - """Binds variables for insert and other queries - """ - with conn() as cnx: - c = cnx.cursor() - fmt = 'insert into {name}(aa) values(%(value)s)'.format( - name=db_parameters['name']) - c.executemany(fmt, [ - {'value': '3456789'}, - {'value': '234567'}, - {'value': '1234'}, - {'value': '234'}, - {'value': '34'}, - {'value': '4'}, - ]) - cnt = 0 - for rec in c: - cnt += int(rec[0]) - c.close() - assert cnt == 6, 'number of records' - - fmt = 'select count(aa) from {name} where aa > %(value)s'.format( - name=db_parameters['name']) - - c = cnx.cursor() - c.execute(fmt, {'value': 1233}) - for (cnt,) in c: - pass - assert cnt == 3, 'the number of records' - c.close() - - fmt = 'select count(aa) from {name} where aa > %s'.format( - name=db_parameters['name']) - c = cnx.cursor() - c.execute(fmt, (1234,)) - for (cnt,) in c: - pass - assert cnt == 2, 'the number of records' - c.close() - - -def 
test_real_decimal(conn, db_parameters): - """Uses Real and Decimal type - """ - with conn() as cnx: - c = cnx.cursor() - fmt = ('insert into {name}(aa, pct, ratio) ' - 'values(%s,%s,%s)').format( - name=db_parameters['name']) - c.execute(fmt, (9876, 12.3, decimal.Decimal('23.4'))) - for (cnt,) in c: - pass - assert cnt == 1, 'the number of records' - c.close() - - c = cnx.cursor() - fmt = 'select aa, pct, ratio from {name}'.format( - name=db_parameters['name']) - c.execute(fmt) - for (aa, pct, ratio) in c: - pass - assert aa == 9876, 'the integer value' - assert pct == 12.3, 'the float value' - assert ratio == decimal.Decimal('23.4'), 'the decimal value' - c.close() - - with cnx.cursor(snowflake.connector.DictCursor) as c: - fmt = 'select aa, pct, ratio from {name}'.format( - name=db_parameters['name']) - c.execute(fmt) - rec = c.fetchone() - assert rec['AA'] == 9876, 'the integer value' - assert rec['PCT'] == 12.3, 'the float value' - assert rec['RATIO'] == decimal.Decimal('23.4'), 'the decimal value' - - -def test_none_errorhandler(conn_testaccount): - """ - None errorhandler for Cursor - """ - c = conn_testaccount.cursor() - with pytest.raises(errors.ProgrammingError): - c.errorhandler = None - - -def test_nope_errorhandler(conn_testaccount): - """ - NOOP errorhandler for Cursor - """ - - def user_errorhandler(connection, cursor, errorclass, errorvalue): - pass - - c = conn_testaccount.cursor() - c.errorhandler = user_errorhandler - c.execute("select * foooooo never_exists_table") - c.execute("select * barrrrr never_exists_table") - c.execute("select * daaaaaa never_exists_table") - assert c.messages[0][0] == errors.ProgrammingError, \ - 'One error was recorded' - assert len(c.messages) == 1, 'should be one error' - - -def test_binding_negative(negative_conn_cnx, db_parameters): - """ - Negative binding tests - """ - with negative_conn_cnx() as cnx: - with pytest.raises(TypeError): - cnx.cursor().execute( - "INSERT INTO {name}(aa) VALUES(%s)".format( - name=db_parameters['name']), (1, 2, 3)) - with pytest.raises(errors.ProgrammingError): - cnx.cursor().execute( - "INSERT INTO {name}(aa) VALUES(%s)".format( - name=db_parameters['name']), ()) - with pytest.raises(errors.ProgrammingError): - cnx.cursor().execute( - "INSERT INTO {name}(aa) VALUES(%s)".format( - name=db_parameters['name']), (['a'],)) - - -def test_execute_after_close(conn_testaccount): - """ - SNOW-13588: raises an error if executing after the connection is closed - """ - cursor = conn_testaccount.cursor() - conn_testaccount.close() - with pytest.raises(errors.Error): - cursor.execute('show tables') - - -def test_multi_table_insert(conn, db_parameters): - try: - with conn() as cnx: - cur = cnx.cursor() - cur.execute(""" - INSERT INTO {name}(aa) VALUES(1234),(9876),(2345) - """.format(name=db_parameters['name'])) - assert cur.rowcount == 3, 'the number of records' - - cur.execute(""" -CREATE OR REPLACE TABLE {name}_foo (aa_foo int) - """.format(name=db_parameters['name'])) - - cur.execute(""" -CREATE OR REPLACE TABLE {name}_bar (aa_bar int) - """.format(name=db_parameters['name'])) - - cur.execute(""" -INSERT ALL - INTO {name}_foo(aa_foo) VALUES(aa) - INTO {name}_bar(aa_bar) VALUES(aa) - SELECT aa FROM {name} - """.format(name=db_parameters['name'])) - assert cur.rowcount == 6 - finally: - with conn() as cnx: - cnx.cursor().execute(""" -DROP TABLE IF EXISTS {name}_foo -""".format(name=db_parameters['name'])) - cnx.cursor().execute(""" -DROP TABLE IF EXISTS {name}_bar -""".format(name=db_parameters['name'])) - - 
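Aside: the deleted tests above (`test_executemany` through `test_multi_table_insert`) all go through the connector's client-side binding. A condensed sketch of the two binding styles they exercise follows; the connection parameters and the `demo_tbl` table name are illustrative placeholders, not values from these tests.

```
# Minimal sketch of pyformat (named) and format (positional) binding
# with executemany(); credentials are placeholders.
import snowflake.connector

cnx = snowflake.connector.connect(
    user='<user>', password='<password>', account='<account>')
try:
    cur = cnx.cursor()
    cur.execute("create or replace temporary table demo_tbl (aa int)")
    # pyformat style: named parameters bound from a list of dicts
    cur.executemany(
        "insert into demo_tbl(aa) values(%(value)s)",
        [{'value': 1}, {'value': 2}, {'value': 3}])
    # format style: positional parameters bound from a list of tuples
    cur.executemany(
        "insert into demo_tbl(aa) values(%s)", [(4,), (5,)])
    # as in test_executemany, rowcount aggregates the whole batch
    assert cur.rowcount == 2
finally:
    cnx.close()
```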
-@pytest.mark.skipif(True, reason=""" -Negative test case. -""") -def test_fetch_before_execute(conn_testaccount): - """ - SNOW-13574: fetch before execute - """ - cursor = conn_testaccount.cursor() - with pytest.raises(errors.DataError): - cursor.fetchone() - - -def test_close_twice(conn_testaccount): - conn_testaccount.close() - conn_testaccount.close() - - -def test_fetch_out_of_range_timestamp_value(conn): - with conn() as cnx: - cur = cnx.cursor() - cur.execute(""" -select '12345-01-02'::timestamp_ntz -""") - with pytest.raises(errors.InterfaceError): - cur.fetchone() diff --git a/test/test_cursor_binding.py b/test/test_cursor_binding.py deleted file mode 100644 index b5141a29b..000000000 --- a/test/test_cursor_binding.py +++ /dev/null @@ -1,144 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. -# - -import pytest -from snowflake.connector.errors import (ProgrammingError) - - -def test_binding_security(conn_cnx, db_parameters): - """ - SQL Injection Tests - """ - try: - with conn_cnx() as cnx: - cnx.cursor().execute( - "CREATE OR REPLACE TABLE {name} " - "(aa INT, bb STRING)".format( - name=db_parameters['name'])) - cnx.cursor().execute( - "INSERT INTO {name} VALUES(%s, %s)".format( - name=db_parameters['name']), - (1, 'test1')) - cnx.cursor().execute( - "INSERT INTO {name} VALUES(%(aa)s, %(bb)s)".format( - name=db_parameters['name']), - {'aa': 2, 'bb': 'test2'}) - for rec in cnx.cursor().execute( - "SELECT * FROM {name} ORDER BY 1 DESC".format( - name=db_parameters['name'])): - break - assert rec[0] == 2, 'First column' - assert rec[1] == 'test2', 'Second column' - for rec in cnx.cursor().execute( - "SELECT * FROM {name} WHERE aa=%s".format( - name=db_parameters['name']), (1,)): - break - assert rec[0] == 1, 'First column' - assert rec[1] == 'test1', 'Second column' - - # SQL injection safe test - # Good Example - with pytest.raises(ProgrammingError): - cnx.cursor().execute( - "SELECT * FROM {name} WHERE aa=%s".format( - name=db_parameters['name']), - ("1 or aa>0",)) - - with pytest.raises(ProgrammingError): - cnx.cursor().execute( - "SELECT * FROM {name} WHERE aa=%(aa)s".format( - name=db_parameters['name']), - {"aa": "1 or aa>0"}) - - # Bad Example in application. DON'T DO THIS - c = cnx.cursor() - c.execute("SELECT * FROM {name} WHERE aa=%s".format( - name=db_parameters['name']) % ("1 or aa>0",)) - rec = c.fetchall() - assert len(rec) == 2, "not raising error unlike the previous one." 
- finally: - with conn_cnx() as cnx: - cnx.cursor().execute( - "drop table if exists {name}".format( - name=db_parameters['name'])) - - -def test_binding_list(conn_cnx, db_parameters): - """ - SQL binding list type for IN - """ - try: - with conn_cnx() as cnx: - cnx.cursor().execute( - "CREATE OR REPLACE TABLE {name} " - "(aa INT, bb STRING)".format( - name=db_parameters['name'])) - cnx.cursor().execute( - "INSERT INTO {name} VALUES(%s, %s)".format( - name=db_parameters['name']), - (1, 'test1')) - cnx.cursor().execute( - "INSERT INTO {name} VALUES(%(aa)s, %(bb)s)".format( - name=db_parameters['name']), - {'aa': 2, 'bb': 'test2'}) - cnx.cursor().execute( - "INSERT INTO {name} VALUES(3, 'test3')".format( - name=db_parameters['name'])) - for rec in cnx.cursor().execute(""" -SELECT * FROM {name} WHERE aa IN (%s) ORDER BY 1 DESC -""".format(name=db_parameters['name']), ([1, 3],)): - break - assert rec[0] == 3, 'First column' - assert rec[1] == 'test3', 'Second column' - - for rec in cnx.cursor().execute( - "SELECT * FROM {name} WHERE aa=%s".format( - name=db_parameters['name']), (1,)): - break - assert rec[0] == 1, 'First column' - assert rec[1] == 'test1', 'Second column' - - rec = cnx.cursor().execute(""" -SELECT * FROM {name} WHERE aa IN (%s) ORDER BY 1 DESC -""".format(name=db_parameters['name']), ((1,),)) - - finally: - with conn_cnx() as cnx: - cnx.cursor().execute( - "drop table if exists {name}".format( - name=db_parameters['name'])) - - -def test_unsupported_binding(negative_conn_cnx, db_parameters): - """ - Unsupported data binding - """ - try: - with negative_conn_cnx() as cnx: - cnx.cursor().execute( - "CREATE OR REPLACE TABLE {name} " - "(aa INT, bb STRING)".format( - name=db_parameters['name'])) - cnx.cursor().execute( - "INSERT INTO {name} VALUES(%s, %s)".format( - name=db_parameters['name']), - (1, 'test1')) - - sql = 'select count(*) from {name} where aa=%s'.format( - name=db_parameters['name']) - - with cnx.cursor() as cur: - rec = cur.execute(sql, (1,)).fetchone() - assert rec[0] is not None, 'no value is returned' - - # dict - with pytest.raises(ProgrammingError): - cnx.cursor().execute(sql, ({'value': 1},)) - finally: - with negative_conn_cnx() as cnx: - cnx.cursor().execute( - "drop table if exists {name}".format( - name=db_parameters['name'])) diff --git a/test/test_cursor_context_manager.py b/test/test_cursor_context_manager.py deleted file mode 100644 index f3a056456..000000000 --- a/test/test_cursor_context_manager.py +++ /dev/null @@ -1,32 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. 
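Aside: a self-contained sketch of the contrast `test_binding_security` above draws between server-side binding and naive string interpolation. The credentials and the `inj_tbl` table name are placeholders; the error behavior shown is the one the test asserts.

```
import snowflake.connector
from snowflake.connector.errors import ProgrammingError

cnx = snowflake.connector.connect(
    user='<user>', password='<password>', account='<account>')
cur = cnx.cursor()
cur.execute("create or replace temporary table inj_tbl (aa int, bb string)")
cur.execute("insert into inj_tbl values(%s, %s)", (1, 'test1'))

malicious = "1 or aa>0"
# Safe: the value travels as a bound parameter; the server sees a
# mistyped literal and raises ProgrammingError instead of evaluating
# the injected predicate.
try:
    cur.execute("select * from inj_tbl where aa=%s", (malicious,))
except ProgrammingError:
    pass
# Unsafe: %-interpolation splices the input into the SQL text, turning
# the filter into "aa=1 or aa>0". DON'T DO THIS.
cur.execute("select * from inj_tbl where aa=%s" % malicious)
print(cur.fetchall())  # rows the caller never intended to expose
cnx.close()
```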
-# -from logging import getLogger - - -def test_context_manager(conn_testaccount, db_parameters): - """ - Context Manager support in Cursor - """ - logger = getLogger(__name__) - - def tables(conn): - with conn.cursor() as cur: - cur.execute("show tables") - name_to_idx = {elem[0]: idx for idx, elem in - enumerate(cur.description)} - for row in cur: - yield row[name_to_idx['name']] - - try: - conn_testaccount.cursor().execute( - 'create or replace table {0} (a int)'.format(db_parameters['name'])) - all_tables = [rec for rec in tables(conn_testaccount) \ - if rec == db_parameters['name'].upper()] - logger.info('tables: %s', all_tables) - assert len(all_tables) == 1, u'number of tables' - finally: - conn_testaccount.cursor().execute( - 'drop table if exists {0}'.format(db_parameters['name'])) diff --git a/test/test_daylight_savings.py b/test/test_daylight_savings.py deleted file mode 100644 index 376a1adf6..000000000 --- a/test/test_daylight_savings.py +++ /dev/null @@ -1,67 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. -# -from datetime import (datetime) - -import pytz - - -def _insert_timestamp(ctx, table, tz, dt): - myzone = pytz.timezone(tz) - ts = myzone.localize(dt, is_dst=True) - print("\n") - print('{0}'.format(repr(ts))) - ctx.cursor().execute("INSERT INTO {table} VALUES(%s)".format( - table=table, - ), (ts,)) - - result = ctx.cursor().execute("SELECT * FROM {table}".format( - table=table)).fetchone() - retrieved_ts = result[0] - print("#####") - print('Retrieved ts: {0}'.format( - repr(retrieved_ts))) - print('Retrieved and converted TS{0}'.format( - repr(retrieved_ts.astimezone(myzone)))) - print("#####") - assert result[0] == ts - ctx.cursor().execute("DELETE FROM {table}".format( - table=table)) - - -def test_daylight_savings_in_TIMESTAMP_LTZ(conn_cnx, db_parameters): - with conn_cnx() as ctx: - ctx.cursor().execute( - "CREATE OR REPLACE TABLE {table} (c1 timestamp_ltz)".format( - table=db_parameters['name'], - )) - try: - dt = datetime( - year=2016, month=3, day=13, hour=18, minute=47, second=32) - _insert_timestamp(ctx, db_parameters['name'], 'Australia/Sydney', - dt) - dt = datetime( - year=2016, month=3, day=13, hour=8, minute=39, second=23) - _insert_timestamp(ctx, db_parameters['name'], 'Europe/Paris', - dt) - dt = datetime( - year=2016, month=3, day=13, hour=8, minute=39, second=23) - _insert_timestamp(ctx, db_parameters['name'], 'UTC', - dt) - - dt = datetime( - year=2016, month=3, day=13, hour=1, minute=14, second=8) - _insert_timestamp(ctx, db_parameters['name'], 'America/New_York', - dt) - - dt = datetime( - year=2016, month=3, day=12, hour=22, minute=32, second=4) - _insert_timestamp(ctx, db_parameters['name'], 'US/Pacific', dt) - - finally: - ctx.cursor().execute( - "DROP TABLE IF EXISTS {table}".format( - table=db_parameters['name'], - )) diff --git a/test/test_errors.py b/test/test_errors.py deleted file mode 100644 index 58ba3fd13..000000000 --- a/test/test_errors.py +++ /dev/null @@ -1,34 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. 
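Aside: the daylight-savings test above reduces to the round trip below, which binds a timezone-aware datetime into a TIMESTAMP_LTZ column and checks that the instant survives. Credentials and the `ltz_tbl` table name are placeholders.

```
from datetime import datetime

import pytz
import snowflake.connector

cnx = snowflake.connector.connect(
    user='<user>', password='<password>', account='<account>')
cur = cnx.cursor()
cur.execute("create or replace temporary table ltz_tbl (c1 timestamp_ltz)")
# localize a wall-clock time near the 2016 Sydney DST transition,
# then bind the aware datetime directly
ts = pytz.timezone('Australia/Sydney').localize(
    datetime(2016, 3, 13, 18, 47, 32), is_dst=True)
cur.execute("insert into ltz_tbl values(%s)", (ts,))
fetched = cur.execute("select c1 from ltz_tbl").fetchone()[0]
# TIMESTAMP_LTZ preserves the instant, so equality holds even if the
# session renders the value in a different zone
assert fetched == ts
cnx.close()
```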
-# - - -import snowflake.connector -from snowflake.connector import errors - - -def test_error_classes(conn_cnx): - u""" - Error classes in Connector module, object - """ - # class - assert snowflake.connector.ProgrammingError == errors.ProgrammingError - assert snowflake.connector.OperationalError == errors.OperationalError - - # object - with conn_cnx() as ctx: - assert ctx.ProgrammingError == errors.ProgrammingError - - -def test_error_code(conn_cnx): - u""" - Error code is included in the exception - """ - with conn_cnx() as ctx: - try: - ctx.cursor().execute(u"SELECT * FROOOM TEST") - raise Exception('Failed to detect Syntax error') - except errors.ProgrammingError as e: - assert e.errno == 1003, u"Syntax error code" diff --git a/test/test_execute_multi_statements.py b/test/test_execute_multi_statements.py deleted file mode 100644 index b85ae7564..000000000 --- a/test/test_execute_multi_statements.py +++ /dev/null @@ -1,161 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. -# - -import codecs -import os -from six import PY2 -from io import StringIO, BytesIO - -if PY2: - from mock import patch -else: - from unittest.mock import patch -import pytest - -from snowflake.connector import ProgrammingError -from snowflake.connector.compat import PY2 - -THIS_DIR = os.path.dirname(os.path.realpath(__file__)) - - -def test_execute_string(conn_cnx, db_parameters): - with conn_cnx() as cnx: - cnx.execute_string(""" -CREATE OR REPLACE TABLE {tbl1} (c1 int, c2 string); -CREATE OR REPLACE TABLE {tbl2} (c1 int, c2 string); -INSERT INTO {tbl1} VALUES(1,'test123'); -INSERT INTO {tbl1} VALUES(2,'test234'); -INSERT INTO {tbl1} VALUES(3,'test345'); -INSERT INTO {tbl2} VALUES(101,'test123'); -INSERT INTO {tbl2} VALUES(102,'test234'); -INSERT INTO {tbl2} VALUES(103,'test345'); -""".format( - tbl1=db_parameters['name'] + '1', - tbl2=db_parameters['name'] + '2'), return_cursors=False) - try: - with conn_cnx() as cnx: - ret = cnx.cursor().execute(""" -SELECT * FROM {tbl1} ORDER BY 1 -""".format( - tbl1=db_parameters['name'] + '1' - )).fetchall() - assert ret[0][0] == 1 - assert ret[2][1] == 'test345' - ret = cnx.cursor().execute(""" -SELECT * FROM {tbl2} ORDER BY 2 -""".format( - tbl2=db_parameters['name'] + '2' - )).fetchall() - assert ret[0][0] == 101 - assert ret[2][1] == 'test345' - - curs = cnx.execute_string(""" -SELECT * FROM {tbl1} ORDER BY 1 DESC; -SELECT * FROM {tbl2} ORDER BY 1 DESC; -""".format( - tbl1=db_parameters['name'] + '1', - tbl2=db_parameters['name'] + '2' - )) - assert curs[0].rowcount == 3 - assert curs[1].rowcount == 3 - ret1 = curs[0].fetchone() - assert ret1[0] == 3 - ret2 = curs[1].fetchone() - assert ret2[0] == 103 - finally: - with conn_cnx() as cnx: - cnx.execute_string(""" - DROP TABLE IF EXISTS {tbl1}; - DROP TABLE IF EXISTS {tbl2}; - """.format( - tbl1=db_parameters['name'] + '1', - tbl2=db_parameters['name'] + '2'), return_cursors=False) - - -def test_execute_string_kwargs(conn_cnx, db_parameters): - with conn_cnx() as cnx: - with patch('snowflake.connector.cursor.SnowflakeCursor.execute', autospec=True) as mock_execute: - cnx.execute_string(""" -CREATE OR REPLACE TABLE {tbl1} (c1 int, c2 string); -CREATE OR REPLACE TABLE {tbl2} (c1 int, c2 string); -INSERT INTO {tbl1} VALUES(1,'test123'); -INSERT INTO {tbl1} VALUES(2,'test234'); -INSERT INTO {tbl1} VALUES(3,'test345'); -INSERT INTO {tbl2} VALUES(101,'test123'); -INSERT INTO {tbl2} VALUES(102,'test234'); -INSERT INTO {tbl2} VALUES(103,'test345'); - 
""".format( - tbl1=db_parameters['name'] + '1', - tbl2=db_parameters['name'] + '2'), return_cursors=False, _no_results=True) - for call in mock_execute.call_args_list: - assert call[1].get('_no_results', False) - - -def test_execute_string_with_error(conn_cnx): - with conn_cnx() as cnx: - with pytest.raises(ProgrammingError): - cnx.execute_string(""" -SELECT 1; -SELECT 234; -SELECT bafa; -""") - - -def test_execute_stream(conn_cnx): - # file stream - expected_results = [1, 2, 3] - with codecs.open(os.path.join( - THIS_DIR, 'data', 'multiple_statements.sql'), - encoding='utf-8') as f: - with conn_cnx() as cnx: - for idx, rec in enumerate(cnx.execute_stream(f)): - assert rec.fetchall()[0][0] == expected_results[idx] - - # text stream - expected_results = [3, 4, 5, 6] - with conn_cnx() as cnx: - for idx, rec in enumerate(cnx.execute_stream( - StringIO(u"SELECT 3; SELECT 4; SELECT 5;\nSELECT 6;"))): - assert rec.fetchall()[0][0] == expected_results[idx] - - -def test_execute_stream_with_error(conn_cnx): - # file stream - if PY2: - # Python2 converts data into binary data - # codecs.open() must be used - with open(os.path.join( - THIS_DIR, 'data', 'multiple_statements.sql')) as f: - with conn_cnx() as cnx: - gen = cnx.execute_stream(f) - with pytest.raises(TypeError): - next(gen) - else: - # Python 3 converts data into Unicode data - expected_results = [1, 2, 3] - with open(os.path.join( - THIS_DIR, 'data', 'multiple_statements.sql')) as f: - with conn_cnx() as cnx: - for idx, rec in enumerate(cnx.execute_stream(f)): - assert rec.fetchall()[0][0] == expected_results[idx] - - # read a file including syntax error in the middle - with codecs.open(os.path.join( - THIS_DIR, 'data', - 'multiple_statements_negative.sql'), encoding='utf-8') as f: - with conn_cnx() as cnx: - gen = cnx.execute_stream(f) - rec = next(gen).fetchall() - assert rec[0][0] == 987 # the first statement succeeds - with pytest.raises(ProgrammingError): - next(gen) # the second statement fails - - # binary stream including Ascii data - with conn_cnx() as cnx: - with pytest.raises(TypeError): - gen = cnx.execute_stream( - BytesIO(b"SELECT 3; SELECT 4; SELECT 5;\nSELECT 6;")) - next(gen) diff --git a/test/test_incident.py b/test/test_incident.py deleted file mode 100644 index eca7889f4..000000000 --- a/test/test_incident.py +++ /dev/null @@ -1,130 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2019 Snowflake Computing Inc. All right reserved. 
-# - -import warnings -from uuid import uuid4 - -import pytest -from pytest import fail - -from snowflake.connector.compat import PY2 -if PY2: - from mock import patch -else: - from unittest.mock import patch - -from snowflake.connector import converter, ProgrammingError - -from snowflake.connector.incident import Incident -from traceback import format_exc - -# NOTE the incident throttling feature is working and will stop returning new -# incident ids, so do not assert them, or don't add many more incidents to be -# reported - - -def test_incident_creation(): - error_message = "This is an exception" - error_stack_trace = "this is\n\twhat happened" - driver = "unit-testing" - driver_version = "0.0.0" - os = "unit testinux" - os_version = "1.0.0" - incident = Incident(None, - None, - driver, - driver_version, - error_message, - error_stack_trace, - os, - os_version) - print(incident) - assert (incident.errorMessage == error_message) - assert (incident.errorStackTrace == error_stack_trace) - assert (incident.driver == driver) - assert (incident.driverVersion == driver_version) - assert (incident.os == os) - assert (incident.osVersion == os_version) - - -def test_default_values(): - incident = Incident("ji", "ri", "dr", "dv", "em", "est\n\test2") - print(incident) - assert (incident.jobId == "ji") - assert (incident.requestId == "ri") - assert (incident.driver == "dr") - assert (incident.driverVersion == "dv") - assert (incident.errorMessage == "em") - assert (incident.errorStackTrace == "est\n\test2") - assert incident.driver - assert incident.driverVersion - assert incident.os - assert incident.osVersion - - -@pytest.mark.internal -def test_create_incident_from_exception(negative_conn_cnx): - with negative_conn_cnx() as con: - try: - raise ValueError("This is a test") - except Exception as e: - em = str(e) - est = format_exc() - incident = Incident(None, None, "unit test", "99.99.99", em, est) - new_incident_id = con.incident.report_incident(incident) - if new_incident_id is None: - warnings.warn( - UserWarning("incident reported in 'test_create_incident_from_exception' was ignored")) - - -@pytest.mark.internal -def test_report_automatic_incident(negative_conn_cnx): - def helper(number): - if number == 0: - raise RuntimeWarning("I'm done") - else: - helper(number - 1) - - with negative_conn_cnx() as con: - try: - helper(5) - except RuntimeWarning: - new_incident_id = con.incident.report_incident(job_id=uuid4(), request_id=uuid4()) - if new_incident_id is None: - warnings.warn( - UserWarning("incident reported in 'test_report_automatic_incident' was ignored")) - - -@pytest.mark.internal -@pytest.mark.parametrize('app_name', ['asd', 'mark']) -def test_reporting_values(app_name, db_parameters): - import snowflake.connector - original_paramstyle = snowflake.connector.paramstyle - snowflake.connector.paramstyle = 'qmark' - original_blacklist = snowflake.connector.incident.CLS_BLACKLIST - snowflake.connector.incident.CLS_BLACKLIST = frozenset() - converter.PYTHON_TO_SNOWFLAKE_TYPE[u'nonetype'] = None - db_parameters['internal_application_name'] = app_name - con = None - try: - con = snowflake.connector.connect(**db_parameters) - con.cursor().execute("alter session set SUPPRESS_INCIDENT_DUMPS=true") - cursor = con.cursor() - with patch.object(con.rest, 'request') as incident_report: - cursor.execute("INSERT INTO foo VALUES (?)", [None]) - fail("Shouldn't reach ths statement") - except ProgrammingError: - pass # ignore, should be thrown - finally: - converter.PYTHON_TO_SNOWFLAKE_TYPE[u'nonetype'] = 
u'ANY' - snowflake.connector.paramstyle = original_paramstyle - snowflake.connector.incident.CLS_BLACKLIST = original_blacklist - for tag in incident_report.call_args[0][1][u'Tags']: - if tag[u'Name'] == u'driver': - assert tag[u'Value'] == app_name - if con is not None: - con.close() - diff --git a/test/test_key_pair_authentication.py b/test/test_key_pair_authentication.py deleted file mode 100644 index 322351d74..000000000 --- a/test/test_key_pair_authentication.py +++ /dev/null @@ -1,190 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. -# - -import os - -import pytest -from cryptography.hazmat.backends import default_backend -from cryptography.hazmat.primitives import serialization -from cryptography.hazmat.primitives.asymmetric import dsa -from cryptography.hazmat.primitives.asymmetric import rsa - -import snowflake.connector - -NO_ACCOUNTADMIN_PRIV = os.getenv('TRAVIS') == 'true' or \ - os.getenv('APPVEYOR') == 'True' or \ - os.getenv('sf_account') == 'testaccount5' - - -@pytest.mark.skipif( - NO_ACCOUNTADMIN_PRIV, - reason="Change user's public key requires accountadmin privilege" -) -def test_different_key_length(request, conn_cnx, db_parameters): - db_config = { - 'protocol': db_parameters['protocol'], - 'account': db_parameters['account'], - 'user': db_parameters['user'], - 'host': db_parameters['host'], - 'port': db_parameters['port'], - 'database': db_parameters['database'], - 'schema': db_parameters['schema'], - 'timezone': 'UTC', - } - - def fin(): - with conn_cnx() as cnx: - cnx.cursor().execute(""" - use role accountadmin - """) - cnx.cursor().execute(""" - alter user {user} unset rsa_public_key - """.format(user=db_parameters['user'])) - - request.addfinalizer(fin) - - testcases = [2048, 4096, 8192] - - for key_length in testcases: - private_key_der, public_key_der_encoded = generate_key_pair(key_length) - - with conn_cnx() as cnx: - cnx.cursor().execute(""" - use role accountadmin - """) - cnx.cursor().execute(""" - alter user {user} set rsa_public_key='{public_key}' - """.format(user=db_parameters['user'], public_key=public_key_der_encoded)) - - db_config['private_key'] = private_key_der - snowflake.connector.connect(**db_config) - - -@pytest.mark.skipif( - NO_ACCOUNTADMIN_PRIV, - reason="Change user's public key requires accountadmin privilege" -) -def test_multiple_key_pair(request, conn_cnx, db_parameters): - db_config = { - 'protocol': db_parameters['protocol'], - 'account': db_parameters['account'], - 'user': db_parameters['user'], - 'host': db_parameters['host'], - 'port': db_parameters['port'], - 'database': db_parameters['database'], - 'schema': db_parameters['schema'], - 'timezone': 'UTC', - } - - def fin(): - with conn_cnx() as cnx: - cnx.cursor().execute(""" - use role accountadmin - """) - cnx.cursor().execute(""" - alter user {user} unset rsa_public_key - """.format(user=db_parameters['user'])) - cnx.cursor().execute(""" - alter user {user} unset rsa_public_key_2 - """.format(user=db_parameters['user'])) - - request.addfinalizer(fin) - - private_key_one_der, public_key_one_der_encoded = generate_key_pair(2048) - private_key_two_der, public_key_two_der_encoded = generate_key_pair(2048) - - with conn_cnx() as cnx: - cnx.cursor().execute(""" - use role accountadmin - """) - cnx.cursor().execute(""" - alter user {user} set rsa_public_key='{public_key}' - """.format(user=db_parameters['user'], - public_key=public_key_one_der_encoded)) - - db_config['private_key'] = 
private_key_one_der - snowflake.connector.connect(**db_config) - - # assert exception since different key pair is used - db_config['private_key'] = private_key_two_der - # although specifying password, - # key pair authentication should used and it should fail since we don't do fall back - db_config['password'] = 'fake_password' - with pytest.raises(snowflake.connector.errors.DatabaseError) as exec_info: - snowflake.connector.connect(**db_config) - - assert (exec_info.value.errno == 250001) - assert (exec_info.value.sqlstate == '08001') - assert ("JWT token is invalid" in exec_info.value.msg) - - with conn_cnx() as cnx: - cnx.cursor().execute(""" - use role accountadmin - """) - cnx.cursor().execute(""" - alter user {user} set rsa_public_key_2='{public_key}' - """.format(user=db_parameters['user'], - public_key=public_key_two_der_encoded)) - snowflake.connector.connect(**db_config) - - -def test_bad_private_key(db_parameters): - db_config = { - 'protocol': db_parameters['protocol'], - 'account': db_parameters['account'], - 'user': db_parameters['user'], - 'host': db_parameters['host'], - 'port': db_parameters['port'], - 'database': db_parameters['database'], - 'schema': db_parameters['schema'], - 'timezone': 'UTC', - } - - dsa_private_key = dsa.generate_private_key(key_size=2048, - backend=default_backend()) - dsa_private_key_der = dsa_private_key.private_bytes( - encoding=serialization.Encoding.DER, - format=serialization.PrivateFormat.PKCS8, - encryption_algorithm=serialization.NoEncryption()) - - encrypted_rsa_private_key_der = rsa.generate_private_key(key_size=2048, - public_exponent=65537, - backend=default_backend()) \ - .private_bytes(encoding=serialization.Encoding.DER, - format=serialization.PrivateFormat.PKCS8, - encryption_algorithm=serialization.BestAvailableEncryption( - b'abcd')) - - bad_private_key_test_cases = ["abcd", 1234, b'abcd', dsa_private_key_der, - encrypted_rsa_private_key_der] - - for private_key in bad_private_key_test_cases: - db_config['private_key'] = private_key - with pytest.raises( - snowflake.connector.errors.ProgrammingError) as exec_info: - snowflake.connector.connect(**db_config) - assert (exec_info.value.errno == 251008) - - -def generate_key_pair(key_length): - private_key = rsa.generate_private_key(backend=default_backend(), - public_exponent=65537, - key_size=key_length) - - private_key_der = private_key.private_bytes( - encoding=serialization.Encoding.DER, - format=serialization.PrivateFormat.PKCS8, - encryption_algorithm=serialization.NoEncryption()) - - public_key_pem = private_key.public_key().public_bytes( - serialization.Encoding.PEM, - serialization.PublicFormat.SubjectPublicKeyInfo) \ - .decode("utf-8") - - # strip off header - public_key_der_encoded = ''.join(public_key_pem.split('\n')[1:-2]) - - return private_key_der, public_key_der_encoded diff --git a/test/test_large_put.py b/test/test_large_put.py deleted file mode 100644 index 59d803587..000000000 --- a/test/test_large_put.py +++ /dev/null @@ -1,70 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. 
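Aside: the key-pair tests above center on the `generate_key_pair` helper; condensed, the end-to-end flow looks like the sketch below. Account and user names are placeholders, and the `ALTER USER` step requires a sufficiently privileged role, as the skipif markers note.

```
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa

import snowflake.connector

private_key = rsa.generate_private_key(
    backend=default_backend(), public_exponent=65537, key_size=2048)
# DER-encoded PKCS#8 private key, passed to connect(private_key=...)
private_key_der = private_key.private_bytes(
    encoding=serialization.Encoding.DER,
    format=serialization.PrivateFormat.PKCS8,
    encryption_algorithm=serialization.NoEncryption())
public_key_pem = private_key.public_key().public_bytes(
    serialization.Encoding.PEM,
    serialization.PublicFormat.SubjectPublicKeyInfo).decode('utf-8')
# strip the PEM header/footer, as generate_key_pair() above does
public_key_body = ''.join(public_key_pem.split('\n')[1:-2])

# after an administrator runs:
#   alter user <user> set rsa_public_key='<public_key_body>';
cnx = snowflake.connector.connect(
    account='<account>', user='<user>', private_key=private_key_der)
```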
-# - -import os - - -def test_put_copy_large_files(tmpdir, conn_cnx, db_parameters, test_files): - """ - [s3] Put and Copy large files - """ - # generates N files - number_of_files = 2 - number_of_lines = 200000 - tmp_dir = test_files(tmpdir, number_of_lines, number_of_files) - - files = os.path.join(tmp_dir, 'file*') - with conn_cnx( - user=db_parameters['s3_user'], - account=db_parameters['s3_account'], - password=db_parameters['s3_password']) as cnx: - cnx.cursor().execute(""" -create table {name} ( -aa int, -dt date, -ts timestamp, -tsltz timestamp_ltz, -tsntz timestamp_ntz, -tstz timestamp_tz, -pct float, -ratio number(6,2)) -""".format(name=db_parameters['name'])) - try: - with conn_cnx( - user=db_parameters['s3_user'], - account=db_parameters['s3_account'], - password=db_parameters['password']) as cnx: - files = files.replace('\\', '\\\\') - cnx.cursor().execute("put 'file://{file}' @%{name}".format( - file=files, name=db_parameters['name'])) - c = cnx.cursor() - try: - c.execute("copy into {0}".format(db_parameters['name'])) - cnt = 0 - for _ in c: - cnt += 1 - assert cnt == number_of_files, 'Number of PUT files' - finally: - c.close() - - c = cnx.cursor() - try: - c.execute("select count(*) from {name}".format( - name=db_parameters['name'])) - cnt = 0 - for rec in c: - cnt += rec[0] - assert cnt == number_of_files * number_of_lines, \ - "Number of rows" - finally: - c.close() - finally: - with conn_cnx( - user=db_parameters['s3_user'], - account=db_parameters['s3_account'], - password=db_parameters['s3_password']) as cnx: - cnx.cursor().execute("drop table if exists {table}".format( - table=db_parameters['name'])) diff --git a/test/test_large_result_set.py b/test/test_large_result_set.py deleted file mode 100644 index 3c0725260..000000000 --- a/test/test_large_result_set.py +++ /dev/null @@ -1,152 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. 
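Aside: the PUT-then-COPY pattern `test_put_copy_large_files` drives is roughly the following, minus the generated fixture files and row-count bookkeeping. The local file path, `big_tbl` table name, and credentials are placeholders.

```
import snowflake.connector

cnx = snowflake.connector.connect(
    user='<user>', password='<password>', account='<account>')
cur = cnx.cursor()
cur.execute("create or replace table big_tbl (aa int, pct float)")
try:
    # upload matching local files to the table's internal stage
    # (compressed automatically)
    cur.execute("put 'file:///tmp/data/file*' @%big_tbl")
    # load everything staged for the table; COPY reports one row per file
    cur.execute("copy into big_tbl")
    print(len(cur.fetchall()), "files loaded")
finally:
    cur.execute("drop table if exists big_tbl")
    cnx.close()
```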
-# -import pytest - -from snowflake.connector.compat import PY2 -from snowflake.connector.telemetry import TelemetryField - -if PY2: - from mock import Mock -else: - from unittest.mock import Mock - -NUMBER_OF_ROWS = 50000 - -PREFETCH_THREADS = [8, 3, 1] - - -@pytest.fixture() -def ingest_data(request, conn_cnx, db_parameters): - with conn_cnx( - user=db_parameters['s3_user'], - account=db_parameters['s3_account'], - password=db_parameters['s3_password']) as cnx: - cnx.cursor().execute(""" - create or replace table {name} ( - c0 int, - c1 int, - c2 int, - c3 int, - c4 int, - c5 int, - c6 int, - c7 int, - c8 int, - c9 int) - """.format(name=db_parameters['name'])) - cnx.cursor().execute(""" - insert into {name} - select random(100), - random(100), - random(100), - random(100), - random(100), - random(100), - random(100), - random(100), - random(100), - random(100) - from table(generator(rowCount=>{number_of_rows})) - """.format(name=db_parameters['name'], number_of_rows=NUMBER_OF_ROWS)) - first_val = cnx.cursor().execute( - "select c0 from {name} order by 1 limit 1".format( - name=db_parameters['name'])).fetchone()[0] - last_val = cnx.cursor().execute( - "select c9 from {name} order by 1 desc limit 1".format( - name=db_parameters['name'])).fetchone()[0] - - def fin(): - with conn_cnx( - user=db_parameters['s3_user'], - account=db_parameters['s3_account'], - password=db_parameters['s3_password']) as cnx: - cnx.cursor().execute("drop table if exists {name}".format( - name=db_parameters['name'])) - - request.addfinalizer(fin) - return first_val, last_val - - -@pytest.mark.parametrize('num_threads', PREFETCH_THREADS) -def test_query_large_result_set_n_threads( - conn_cnx, db_parameters, ingest_data, num_threads): - sql = "select * from {name} order by 1".format(name=db_parameters['name']) - with conn_cnx( - user=db_parameters['s3_user'], - account=db_parameters['s3_account'], - password=db_parameters['s3_password'], - client_prefetch_threads=num_threads) as cnx: - assert cnx.client_prefetch_threads == num_threads - results = [] - for rec in cnx.cursor().execute(sql): - results.append(rec) - num_rows = len(results) - assert NUMBER_OF_ROWS == num_rows - assert results[0][0] == ingest_data[0] - assert results[num_rows - 1][8] == ingest_data[1] - - -def test_query_large_result_set(conn_cnx, db_parameters, ingest_data): - """ - [s3] Get Large Result set - """ - sql = "select * from {name} order by 1".format(name=db_parameters['name']) - with conn_cnx( - user=db_parameters['s3_user'], - account=db_parameters['s3_account'], - password=db_parameters['s3_password']) as cnx: - telemetry_data = [] - add_log_mock = Mock() - add_log_mock.side_effect = lambda datum: telemetry_data.append( - datum) - cnx._telemetry.add_log_to_batch = add_log_mock - - # large result set fetch in the default mode - result1 = [] - for rec in cnx.cursor().execute(sql): - result1.append(rec) - - num_rows = len(result1) - assert result1[0][0] == ingest_data[0] - assert result1[num_rows - 1][8] == ingest_data[1] - - # large result set fetch in ijson mode - result2 = [] - for rec in cnx.cursor().execute(sql, _use_ijson=True): - result2.append(rec) - - num_rows = len(result2) - assert result2[0][0] == ingest_data[0] - assert result2[num_rows - 1][8] == ingest_data[1] - - result999 = [] - for rec in cnx.cursor().execute(sql): - result999.append(rec) - - num_rows = len(result999) - assert result999[0][0] == ingest_data[0] - assert result999[num_rows - 1][8] == ingest_data[1] - - assert len(result1) == len(result999), ( - "result length 
is different: result1, and result999") - for i, (x, y) in enumerate(zip(result1, result999)): - assert x == y, "element {0}".format(i) - - assert len(result2) == len(result999), ( - "result length is different: result2, and result999") - for i, (x, y) in enumerate(zip(result2, result999)): - assert x == y, "element {0}".format(i) - - # verify that the expected telemetry metrics were logged - expected = [TelemetryField.TIME_CONSUME_FIRST_RESULT, - TelemetryField.TIME_CONSUME_LAST_RESULT, - TelemetryField.TIME_PARSING_CHUNKS, - TelemetryField.TIME_DOWNLOADING_CHUNKS] - for field in expected: - assert sum([1 if x.message['type'] == field else 0 for x in - telemetry_data]) == 3, \ - "Expected three telemetry logs (one per query) " \ - "for log type {0}".format(field) diff --git a/test/test_load_unload.py b/test/test_load_unload.py deleted file mode 100644 index d51caa594..000000000 --- a/test/test_load_unload.py +++ /dev/null @@ -1,416 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. -# - -import os -from os import path -from getpass import getuser -from logging import getLogger - -import pytest - -try: - from parameters import (CONNECTION_PARAMETERS_ADMIN) -except: - CONNECTION_PARAMETERS_ADMIN = {} - -THIS_DIR = path.dirname(path.realpath(__file__)) - -logger = getLogger(__name__) - - -@pytest.fixture() -def test_data(request, conn_cnx, db_parameters): - assert u'AWS_ACCESS_KEY_ID' in os.environ, u'AWS_ACCESS_KEY_ID is missing' - assert u'AWS_SECRET_ACCESS_KEY' in os.environ, \ - u'AWS_SECRET_ACCESS_KEY is missing' - - unique_name = db_parameters['name'] - database_name = "{0}_db".format(unique_name) - warehouse_name = "{0}_wh".format(unique_name) - - def fin(): - with conn_cnx() as cnx: - with cnx.cursor() as cur: - cur.execute("drop database {0}".format(database_name)) - cur.execute("drop warehouse {0}".format(warehouse_name)) - - request.addfinalizer(fin) - - class TestData(object): - def __init__(self): - self.AWS_ACCESS_KEY_ID = "'{0}'".format( - os.environ[u'AWS_ACCESS_KEY_ID']) - self.AWS_SECRET_ACCESS_KEY = "'{0}'".format( - os.environ[u'AWS_SECRET_ACCESS_KEY']) - self.SF_PROJECT_ROOT = os.getenv('SF_PROJECT_ROOT') - if self.SF_PROJECT_ROOT is None: - self.SF_PROJECT_ROOT = path.realpath( - path.join(THIS_DIR, '..', '..', '..', '..', )) - self.stage_name = "{0}_stage".format(unique_name) - self.warehouse_name = warehouse_name - self.database_name = database_name - self.user_bucket = os.getenv( - 'SF_AWS_USER_BUCKET', - "sfc-dev1-regression/{0}/reg".format(getuser())) - - ret = TestData() - with conn_cnx() as cnx: - with cnx.cursor() as cur: - cur.execute("use role sysadmin") - cur.execute(""" -create or replace warehouse {0} -warehouse_size = 'small' warehouse_type='standard' -auto_suspend=1800 -""".format(warehouse_name)) - cur.execute(""" -create or replace database {0} -""".format(database_name)) - cur.execute(""" -create or replace schema pytesting_schema -""") - cur.execute(""" -create or replace file format VSV type = 'CSV' -field_delimiter='|' error_on_column_count_mismatch=false - """) - return ret - - -@pytest.mark.skipif( - not CONNECTION_PARAMETERS_ADMIN, - reason="Snowflake admin account is not accessible." 
-) -def test_load_s3(test_data, conn_cnx): - with conn_cnx() as cnx: - with cnx.cursor() as cur: - cur.execute("use warehouse {0}".format(test_data.warehouse_name)) - cur.execute(""" -use schema {0}.pytesting_schema -""".format(test_data.database_name)) - cur.execute(""" -create or replace table tweets(created_at timestamp, -id number, id_str string, text string, source string, -in_reply_to_status_id number, in_reply_to_status_id_str string, -in_reply_to_user_id number, in_reply_to_user_id_str string, -in_reply_to_screen_name string, user__id number, user__id_str string, -user__name string, user__screen_name string, user__location string, -user__description string, user__url string, -user__entities__description__urls string, user__protected string, -user__followers_count number, user__friends_count number, -user__listed_count number, user__created_at timestamp, -user__favourites_count number, user__utc_offset number, -user__time_zone string, user__geo_enabled string, user__verified string, -user__statuses_count number, user__lang string, -user__contributors_enabled string, user__is_translator string, -user__profile_background_color string, -user__profile_background_image_url string, -user__profile_background_image_url_https string, -user__profile_background_tile string, user__profile_image_url string, -user__profile_image_url_https string, user__profile_link_color string, -user__profile_sidebar_border_color string, -user__profile_sidebar_fill_color string, user__profile_text_color string, -user__profile_use_background_image string, user__default_profile string, -user__default_profile_image string, user__following string, -user__follow_request_sent string, user__notifications string, geo string, -coordinates string, place string, contributors string, retweet_count number, -favorite_count number, entities__hashtags string, entities__symbols string, -entities__urls string, entities__user_mentions string, favorited string, -retweeted string, lang string) -""") - cur.execute("ls @%tweets") - assert cur.rowcount == 0, \ - ('table newly created should not have any files in its ' - 'staging area') - cur.execute(""" -copy into tweets from s3://sfc-dev1-data/twitter/O1k/tweets/ -credentials=(AWS_KEY_ID={aws_access_key_id} -AWS_SECRET_KEY={aws_secret_access_key}) -file_format=(skip_header=1 null_if=('') field_optionally_enclosed_by='"') -""".format( - aws_access_key_id=test_data.AWS_ACCESS_KEY_ID, - aws_secret_access_key=test_data.AWS_SECRET_ACCESS_KEY) - ) - assert cur.rowcount == 1, \ - 'copy into tweets did not set rowcount to 1' - results = cur.fetchall() - assert results[0][0] == \ - "s3://sfc-dev1-data/twitter/O1k/tweets/1.csv.gz", \ - 'ls @%tweets failed' - cur.execute("drop table tweets") - - -@pytest.mark.skipif( - not CONNECTION_PARAMETERS_ADMIN, - reason="Snowflake admin account is not accessible." 
-) -def test_put_local_file(conn_cnx, test_data): - with conn_cnx() as cnx: - with cnx.cursor() as cur: - cur.execute("alter session set DISABLE_PUT_AND_GET_ON_EXTERNAL_STAGE=false") - cur.execute("use warehouse {0}".format(test_data.warehouse_name)) - cur.execute( - """use schema {0}.pytesting_schema""".format( - test_data.database_name)) - cur.execute(""" -create or replace table pytest_putget_t1 (c1 STRING, c2 STRING, c3 STRING, -c4 STRING, c5 STRING, c6 STRING, c7 STRING, c8 STRING, c9 STRING) -stage_file_format = (field_delimiter = '|' error_on_column_count_mismatch=false) -stage_copy_options = (purge=false) -stage_location = (url = 's3://sfc-dev1-regression/jenkins/{stage_name}' -credentials = ( -AWS_KEY_ID={aws_access_key_id} -AWS_SECRET_KEY={aws_secret_access_key})) -""".format(aws_access_key_id=test_data.AWS_ACCESS_KEY_ID, - aws_secret_access_key=test_data.AWS_SECRET_ACCESS_KEY, - database=test_data.database_name, - stage_name=test_data.stage_name, )) - cur.execute(""" -put file://{0}/ExecPlatform/Database/data/orders_10*.csv @%pytest_putget_t1 -""".format(test_data.SF_PROJECT_ROOT)) - cur.execute("ls @%pytest_putget_t1") - _ = cur.fetchall() - assert cur.rowcount == 2, \ - 'ls @%pytest_putget_t1 did not return 2 rows' - cur.execute("copy into pytest_putget_t1") - results = cur.fetchall() - assert len(results) == 2, '2 files were not copied' - assert results[0][1] == 'LOADED', \ - 'file 1 was not loaded after copy' - assert results[1][1] == 'LOADED', \ - 'file 2 was not loaded after copy' - - cur.execute("select count(*) from pytest_putget_t1") - results = cur.fetchall() - assert results[0][0] == 73, \ - '73 rows not loaded into putest_putget_t1' - cur.execute("rm @%pytest_putget_t1") - results = cur.fetchall() - assert len(results) == 2, 'two files were not removed' - cur.execute( - "select STATUS from information_schema.load_history where table_name='PYTEST_PUTGET_T1'") - results = cur.fetchall() - assert results[0][0] == 'LOADED', \ - 'history does not show file to be loaded' - cur.execute("drop table pytest_putget_t1") - - -@pytest.mark.skipif( - not CONNECTION_PARAMETERS_ADMIN, - reason="Snowflake admin account is not accessible." 
-) -def test_put_load_from_user_stage(conn_cnx, test_data): - with conn_cnx() as cnx: - with cnx.cursor() as cur: - cur.execute( - "alter session set DISABLE_PUT_AND_GET_ON_EXTERNAL_STAGE=false") - cur.execute(""" -use warehouse {0} -""".format(test_data.warehouse_name)) - cur.execute(""" -use schema {0}.pytesting_schema -""".format(test_data.database_name)) - cur.execute(""" -create or replace stage {stage_name} -url='s3://{user_bucket}/{stage_name}' -credentials = ( -AWS_KEY_ID={aws_access_key_id} -AWS_SECRET_KEY={aws_secret_access_key}) -""".format(aws_access_key_id=test_data.AWS_ACCESS_KEY_ID, - aws_secret_access_key=test_data.AWS_SECRET_ACCESS_KEY, - user_bucket=test_data.user_bucket, - stage_name=test_data.stage_name, )) - cur.execute(""" -create or replace table pytest_putget_t2 (c1 STRING, c2 STRING, c3 STRING, -c4 STRING, c5 STRING, c6 STRING, c7 STRING, c8 STRING, c9 STRING) -""") - cur.execute(""" -put file://{sf_project_root}/ExecPlatform/Database/data/orders_10*.csv -@{stage_name} -""".format( - sf_project_root=test_data.SF_PROJECT_ROOT, - stage_name=test_data.stage_name - )) - # two files should have been put in the staging are - results = cur.fetchall() - assert len(results) == 2 - - cur.execute("ls @%pytest_putget_t2") - results = cur.fetchall() - assert len(results) == 0, \ - 'no files should have been loaded yet' - - # copy - cur.execute(""" -copy into pytest_putget_t2 from @{stage_name} -file_format = (field_delimiter = '|' error_on_column_count_mismatch=false) -purge=true -""".format( - stage_name=test_data.stage_name)) - results = sorted(cur.fetchall()) - assert len(results) == 2, \ - 'copy failed to load two files from the stage' - assert results[0][0] == \ - "s3://{user_bucket}/{stage_name}/orders_100.csv.gz".format( - user_bucket=test_data.user_bucket, - stage_name=test_data.stage_name, - ), 'copy did not load file orders_100' - - assert results[1][0] == \ - "s3://{user_bucket}/{stage_name}/orders_101.csv.gz".format( - user_bucket=test_data.user_bucket, - stage_name=test_data.stage_name, - ), 'copy did not load file orders_101' - - # should be empty (purged) - cur.execute( - "ls @{stage_name}".format(stage_name=test_data.stage_name)) - results = cur.fetchall() - assert len(results) == 0, 'copied files not purged' - cur.execute("drop table pytest_putget_t2") - cur.execute("drop stage {stage_name}".format( - stage_name=test_data.stage_name)) - - -@pytest.mark.skipif( - not CONNECTION_PARAMETERS_ADMIN, - reason="Snowflake admin account is not accessible." 
-) -def test_unload(conn_cnx, test_data): - with conn_cnx() as cnx: - with cnx.cursor() as cur: - cur.execute( - """use warehouse {0}""".format(test_data.warehouse_name)) - cur.execute( - """use schema {0}.pytesting_schema""".format( - test_data.database_name)) - cur.execute(""" -create or replace stage {stage_name} -url='s3://{user_bucket}/{stage_name}/unload/' -credentials = ( -AWS_KEY_ID={aws_access_key_id} -AWS_SECRET_KEY={aws_secret_access_key}) -""".format( - aws_access_key_id=test_data.AWS_ACCESS_KEY_ID, - aws_secret_access_key=test_data.AWS_SECRET_ACCESS_KEY, - user_bucket=test_data.user_bucket, - stage_name=test_data.stage_name, - )) - - cur.execute(""" -CREATE OR REPLACE TABLE pytest_t3 (c1 STRING, c2 STRING, c3 STRING, -c4 STRING, c5 STRING, c6 STRING, c7 STRING, c8 STRING, c9 STRING) -stage_file_format = (format_name = 'vsv' field_delimiter = '|' -error_on_column_count_mismatch=false) -""") - cur.execute(""" -alter stage {stage_name} set file_format = (format_name = 'VSV' ) -""".format(stage_name=test_data.stage_name)) - - # make sure its clean - cur.execute( - "rm @{stage_name}".format(stage_name=test_data.stage_name)) - - # put local file - cur.execute(""" -put file://{0}/ExecPlatform/Database/data/orders_10*.csv @%pytest_t3 - """.format(test_data.SF_PROJECT_ROOT)) - - # copy into table - cur.execute(""" -copy into pytest_t3 -file_format = (field_delimiter = '|' error_on_column_count_mismatch=false) -purge=true -""") - # unload from table - cur.execute(""" -copy into @{stage_name}/pytest_t3/data_ -from pytest_t3 file_format=(format_name='VSV' compression='gzip') -max_file_size=10000000 -""".format(stage_name=test_data.stage_name)) - - # load the data back to another table - cur.execute(""" -CREATE OR REPLACE TABLE pytest_t3_copy -(c1 STRING, c2 STRING, c3 STRING, c4 STRING, c5 STRING, -c6 STRING, c7 STRING, c8 STRING, c9 STRING) -stage_file_format = (format_name = 'VSV' ) -""") - - cur.execute(""" -copy into pytest_t3_copy -from @{stage_name}/pytest_t3/data_ return_failed_only=true -""".format( - stage_name=test_data.stage_name)) - - # check to make sure they are equal - cur.execute(""" -(select * from pytest_t3 minus select * from pytest_t3_copy) -union -(select * from pytest_t3_copy minus select * from pytest_t3) -""" - ) - assert cur.rowcount == 0, \ - 'unloaded/reloaded data were not the same' - # clean stage - cur.execute("rm @{stage_name}/pytest_t3/data_".format( - stage_name=test_data.stage_name)) - assert cur.rowcount == 1, \ - 'only one file was expected to be removed' - - # unload with deflate - cur.execute(""" -copy into @{stage_name}/pytest_t3/data_ -from pytest_t3 file_format=(format_name='VSV' compression='deflate') -max_file_size=10000000 -""".format(stage_name=test_data.stage_name)) - results = cur.fetchall() - assert results[0][0] == 73, '73 rows were expected to be loaded' - - # create a table to unload data into - cur.execute(""" -CREATE OR REPLACE TABLE pytest_t3_copy -(c1 STRING, c2 STRING, c3 STRING, c4 STRING, c5 STRING, c6 STRING, -c7 STRING, c8 STRING, c9 STRING) -stage_file_format = (format_name = 'VSV' -compression='deflate') -""") - results = cur.fetchall() - assert results[0][0] == \ - "Table PYTEST_T3_COPY successfully created." 
- - cur.execute(""" -alter stage {stage_name} set file_format = (format_name = 'VSV' - compression='deflate')""".format(stage_name=test_data.stage_name) - ) - - cur.execute(""" -copy into pytest_t3_copy from @{stage_name}/pytest_t3/data_ -return_failed_only=true -""".format( - stage_name=test_data.stage_name) - ) - results = cur.fetchall() - assert results[0][2] == "LOADED" - assert results[0][4] == 73 - # check to make sure they are equal - cur.execute(""" -(select * from pytest_t3 minus select * from pytest_t3_copy) union -(select * from pytest_t3_copy minus select * from pytest_t3)""" - ) - assert cur.rowcount == 0, \ - 'unloaded/reloaded data were not the same' - cur.execute("rm @{stage_name}/pytest_t3/data_".format( - stage_name=test_data.stage_name)) - assert cur.rowcount == 1, \ - 'only one file was expected to be removed' - - # clean stage - cur.execute("rm @{stage_name}/pytest_t3/data_".format( - stage_name=test_data.stage_name)) - - cur.execute("drop table pytest_t3_copy") - cur.execute( - "drop stage {stage_name}".format( - stage_name=test_data.stage_name)) diff --git a/test/test_network.py b/test/test_network.py deleted file mode 100644 index 29860739d..000000000 --- a/test/test_network.py +++ /dev/null @@ -1,40 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. -# -from logging import getLogger - -logger = getLogger(__name__) - -from snowflake.connector.network import SnowflakeRestful -from snowflake.connector import errors -from snowflake.connector import errorcode - - -def test_no_auth(db_parameters): - """ - SNOW-13588: No auth Rest API test - """ - rest = SnowflakeRestful( - host=db_parameters['host'], - port=db_parameters['port']) - try: - # no auth - # show warehouse - rest.request( - url='/queries', - body={ - 'sequenceId': 10000, - 'sqlText': 'show warehouses', - 'parameters': { - 'ui_mode': True, - }, - }, - method='post', - client='rest') - raise Exception("Must fail with auth error") - except errors.Error as e: - assert e.errno == errorcode.ER_CONNECTION_IS_CLOSED - finally: - rest.close() diff --git a/test/test_numpy_binding.py b/test/test_numpy_binding.py deleted file mode 100644 index 5f251f8a5..000000000 --- a/test/test_numpy_binding.py +++ /dev/null @@ -1,142 +0,0 @@ -import time -import datetime -import numpy as np - - -def test_numpy_datatype_binding(conn_cnx, db_parameters): - """ - Tests numpy data type binding - """ - epoch_time = time.time() - current_datetime = datetime.datetime.fromtimestamp(epoch_time) - current_datetime64 = np.datetime64(current_datetime) - all_data = [{ - 'tz': 'America/Los_Angeles', - 'float': '1.79769313486e+308', - 'numpy_bool': np.True_, - 'epoch_time': epoch_time, - 'current_time': current_datetime64, - 'specific_date': np.datetime64('2005-02-25T03:30'), - 'expected_specific_date': np.datetime64('2005-02-25T03:30').astype(datetime.datetime) - }, { - 'tz': 'Asia/Tokyo', - 'float': '-1.79769313486e+308', - 'numpy_bool': np.False_, - 'epoch_time': epoch_time, - 'current_time': current_datetime64, - 'specific_date': np.datetime64('1970-12-31T05:00:00'), - 'expected_specific_date': np.datetime64('1970-12-31T05:00:00').astype(datetime.datetime) - }, { - 'tz': 'America/New_York', - 'float': '-1.79769313486e+308', - 'numpy_bool': np.True_, - 'epoch_time': epoch_time, - 'current_time': current_datetime64, - 'specific_date': np.datetime64('1969-12-31T05:00:00'), - 'expected_specific_date': np.datetime64('1969-12-31T05:00:00').astype(datetime.datetime) - }, { - 
'tz': 'UTC', - 'float': '-1.79769313486e+308', - 'numpy_bool': np.False_, - 'epoch_time': epoch_time, - 'current_time': current_datetime64, - 'specific_date': np.datetime64('1968-11-12T07:00:00.123'), - 'expected_specific_date': np.datetime64('1968-11-12T07:00:00.123').astype(datetime.datetime) - }] - try: - with conn_cnx(numpy=True) as cnx: - cnx.cursor().execute(""" -CREATE OR REPLACE TABLE {name} ( - c1 integer, -- int8 - c2 integer, -- int16 - c3 integer, -- int32 - c4 integer, -- int64 - c5 float, -- float16 - c6 float, -- float32 - c7 float, -- float64 - c8 timestamp_ntz, -- datetime64 - c9 date, -- datetime64 - c10 timestamp_ltz, -- datetime64, - c11 timestamp_tz, -- datetime64 - c12 boolean) -- numpy.bool_ - """.format(name=db_parameters['name'])) - for data in all_data: - cnx.cursor().execute(""" -ALTER SESSION SET timezone='{tz}'""".format(tz=data['tz'])) - cnx.cursor().execute(""" -INSERT INTO {name}( - c1, - c2, - c3, - c4, - c5, - c6, - c7, - c8, - c9, - c10, - c11, - c12 -) -VALUES( - %s, - %s, - %s, - %s, - %s, - %s, - %s, - %s, - %s, - %s, - %s, - %s)""".format( - name=db_parameters['name']), ( - np.iinfo(np.int8).max, - np.iinfo(np.int16).max, - np.iinfo(np.int32).max, - np.iinfo(np.int64).max, - np.finfo(np.float16).max, - np.finfo(np.float32).max, - np.float64(data['float']), - data['current_time'], - data['current_time'], - data['current_time'], - data['specific_date'], - data['numpy_bool'] - )) - rec = cnx.cursor().execute(""" -SELECT - c1, - c2, - c3, - c4, - c5, - c6, - c7, - c8, - c9, - c10, - c11, - c12 - FROM {name}""".format( - name=db_parameters['name'])).fetchone() - assert np.int8(rec[0]) == np.iinfo(np.int8).max - assert np.int16(rec[1]) == np.iinfo(np.int16).max - assert np.int32(rec[2]) == np.iinfo(np.int32).max - assert np.int64(rec[3]) == np.iinfo(np.int64).max - assert np.float16(rec[4]) == np.finfo(np.float16).max - assert np.float32(rec[5]) == np.finfo(np.float32).max - assert rec[6] == np.float64(data['float']) - assert rec[7] == data['current_time'] - assert str(rec[8]) == str(data['current_time'])[0:10] - assert rec[9] == datetime.datetime.fromtimestamp(epoch_time, rec[9].tzinfo) - assert rec[10] == data['expected_specific_date'].replace(tzinfo=rec[10].tzinfo) - assert isinstance(rec[11], bool) and rec[11] == data['numpy_bool'] and np.bool_(rec[11]) == data['numpy_bool'] - cnx.cursor().execute(""" -DELETE FROM {name}""".format(name=db_parameters['name'])) - finally: - with conn_cnx() as cnx: - cnx.cursor().execute(""" - DROP TABLE IF EXISTS {name} - """.format(name=db_parameters['name'])) diff --git a/test/test_ocsp.py b/test/test_ocsp.py deleted file mode 100644 index 783853ff2..000000000 --- a/test/test_ocsp.py +++ /dev/null @@ -1,407 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. 
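Aside: the numpy binding test above exercises every numeric width and timestamp variant; the essential round trip is just this. Credentials and the `np_tbl` table name are placeholders.

```
import numpy as np
import snowflake.connector

# numpy=True makes the connector accept numpy scalars as bind values
# and return numpy types in result sets where applicable
cnx = snowflake.connector.connect(
    user='<user>', password='<password>', account='<account>', numpy=True)
cur = cnx.cursor()
cur.execute(
    "create or replace temporary table np_tbl (c1 integer, c2 timestamp_ntz)")
cur.execute(
    "insert into np_tbl values(%s, %s)",
    (np.iinfo(np.int64).max, np.datetime64('2005-02-25T03:30')))
row = cur.execute("select c1, c2 from np_tbl").fetchone()
assert np.int64(row[0]) == np.iinfo(np.int64).max
assert row[1] == np.datetime64('2005-02-25T03:30')
cnx.close()
```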
-# -import codecs -import json -import logging -import tempfile -import time -import os -import platform -from os import path -from os import environ - - -import pytest - -from snowflake.connector import OperationalError -from snowflake.connector.compat import PY2 -from snowflake.connector.errorcode import (ER_SERVER_CERTIFICATE_REVOKED, ER_INVALID_OCSP_RESPONSE_CODE) -from snowflake.connector.ocsp_snowflake import SnowflakeOCSP -from snowflake.connector.errors import RevocationCheckError - -if PY2: - from snowflake.connector.ocsp_pyasn1 import ( - SnowflakeOCSPPyasn1 as SFOCSP - ) -else: - from snowflake.connector.ocsp_asn1crypto import ( - SnowflakeOCSPAsn1Crypto as SFOCSP - ) - -from snowflake.connector.ocsp_snowflake import OCSPCache -from snowflake.connector.ssl_wrap_socket import _openssl_connect - -for logger_name in ['test', 'snowflake.connector', 'botocore']: - logger = logging.getLogger(logger_name) - logger.setLevel(logging.DEBUG) - ch = logging.FileHandler( - path.join(tempfile.gettempdir(), 'python_connector.log')) - ch.setLevel(logging.DEBUG) - ch.setFormatter(logging.Formatter( - '%(asctime)s - %(threadName)s %(filename)s:%(lineno)d - ' - '%(funcName)s() - %(levelname)s - %(message)s')) - logger.addHandler(ch) - -TARGET_HOSTS = [ - 'ocspssd.us-east-1.snowflakecomputing.com', - 'sqs.us-west-2.amazonaws.com', - 'sfcsupport.us-east-1.snowflakecomputing.com', - 'sfcsupport.eu-central-1.snowflakecomputing.com', - 'sfc-dev1-regression.s3.amazonaws.com', - 'sfctest0.snowflakecomputing.com', - 'sfc-ds2-customer-stage.s3.amazonaws.com', - 'snowflake.okta.com', - 'sfcdev1.blob.core.windows.net', - 'sfc-aus-ds1-customer-stage.s3-ap-southeast-2.amazonaws.com', -] - -THIS_DIR = path.dirname(path.realpath(__file__)) - -CACHE_ROOT_DIR = os.getenv('SF_OCSP_RESPONSE_CACHE_DIR') or \ - os.path.expanduser("~") or tempfile.gettempdir() - -if platform.system() == 'Windows': - CACHE_DIR = path.join(CACHE_ROOT_DIR, 'AppData', 'Local', 'Snowflake', - 'Caches') -elif platform.system() == 'Darwin': - CACHE_DIR = path.join(CACHE_ROOT_DIR, 'Library', 'Caches', 'Snowflake') -else: - CACHE_DIR = path.join(CACHE_ROOT_DIR, '.cache', 'snowflake') - -CACHE_LOCATION = path.join(CACHE_DIR, "ocsp_response_cache.json") - - -def test_ocsp(): - """ - OCSP tests - """ - # reset the memory cache - SnowflakeOCSP.clear_cache() - ocsp = SFOCSP() - for url in TARGET_HOSTS: - connection = _openssl_connect(url) - assert ocsp.validate(url, connection), \ - 'Failed to validate: {0}'.format(url) - - -def test_ocsp_wo_cache_server(): - """ - OCSP Tests with Cache Server Disabled - """ - SnowflakeOCSP.clear_cache() - ocsp = SFOCSP(use_ocsp_cache_server=False) - for url in TARGET_HOSTS: - connection = _openssl_connect(url) - assert ocsp.validate(url, connection),\ - 'Failed to validate: {0}'.format(url) - - -def test_ocsp_fail_open_w_single_endpoint(): - SnowflakeOCSP.clear_cache() - - if os.path.exists(CACHE_LOCATION): - os.remove(CACHE_LOCATION) - - environ["SF_OCSP_TEST_MODE"] = "true" - environ["SF_TEST_OCSP_URL"] = "http://httpbin.org/delay/10" - environ["SF_TEST_CA_OCSP_RESPONDER_CONNECTION_TIMEOUT"] = "5" - - ocsp = SFOCSP(use_ocsp_cache_server=False) - connection = _openssl_connect("snowflake.okta.com") - - try: - assert ocsp.validate("snowflake.okta.com", connection), \ - 'Failed to validate: {0}'.format("snowflake.okta.com") - finally: - del environ['SF_OCSP_TEST_MODE'] - del environ['SF_TEST_OCSP_URL'] - del environ['SF_TEST_CA_OCSP_RESPONDER_CONNECTION_TIMEOUT'] - - -def test_ocsp_fail_close_w_single_endpoint(): - 
SnowflakeOCSP.clear_cache() - - environ["SF_OCSP_TEST_MODE"] = "true" - environ["SF_TEST_OCSP_URL"] = "http://httpbin.org/delay/10" - environ["SF_TEST_CA_OCSP_RESPONDER_CONNECTION_TIMEOUT"] = "5" - - if os.path.exists(CACHE_LOCATION): - os.remove(CACHE_LOCATION) - - ocsp = SFOCSP(use_ocsp_cache_server=False, use_fail_open=False) - connection = _openssl_connect("snowflake.okta.com") - - with pytest.raises(RevocationCheckError) as ex: - ocsp.validate("snowflake.okta.com", connection) - - try: - assert ex.value.errno == ER_INVALID_OCSP_RESPONSE_CODE, "Connection should have failed" - finally: - del environ['SF_OCSP_TEST_MODE'] - del environ['SF_TEST_OCSP_URL'] - del environ['SF_TEST_CA_OCSP_RESPONDER_CONNECTION_TIMEOUT'] - - -def test_ocsp_bad_validity(): - SnowflakeOCSP.clear_cache() - - environ["SF_OCSP_TEST_MODE"] = "true" - environ["SF_TEST_OCSP_FORCE_BAD_RESPONSE_VALIDITY"] = "true" - - if os.path.exists(CACHE_LOCATION): - os.remove(CACHE_LOCATION) - - ocsp = SFOCSP(use_ocsp_cache_server=False) - connection = _openssl_connect("snowflake.okta.com") - - assert ocsp.validate("snowflake.okta.com", connection), "Connection should have passed with fail open" - del environ['SF_OCSP_TEST_MODE'] - del environ['SF_TEST_OCSP_FORCE_BAD_RESPONSE_VALIDITY'] - - -def test_ocsp_single_endpoint(): - environ['SF_OCSP_ACTIVATE_NEW_ENDPOINT'] = 'True' - SnowflakeOCSP.clear_cache() - ocsp = SFOCSP() - ocsp.OCSP_CACHE_SERVER.NEW_DEFAULT_CACHE_SERVER_BASE_URL = \ - "https://snowflake.preprod2.us-west-2-dev.external-zone.snowflakecomputing.com:8085/ocsp/" - connection = _openssl_connect("snowflake.okta.com") - assert ocsp.validate("snowflake.okta.com", connection), \ - 'Failed to validate: {0}'.format("snowflake.okta.com") - - del environ['SF_OCSP_ACTIVATE_NEW_ENDPOINT'] - - -def test_ocsp_by_post_method(): - """ - OCSP tests with the POST method - """ - # reset the memory cache - SnowflakeOCSP.clear_cache() - ocsp = SFOCSP(use_post_method=True) - for url in TARGET_HOSTS: - connection = _openssl_connect(url) - assert ocsp.validate(url, connection), \ - 'Failed to validate: {0}'.format(url) - - -def test_ocsp_with_file_cache(tmpdir): - """ - OCSP tests with the cache server and a local cache file - """ - tmp_dir = str(tmpdir.mkdir('ocsp_response_cache')) - cache_file_name = path.join(tmp_dir, 'cache_file.txt') - - # reset the memory cache - SnowflakeOCSP.clear_cache() - ocsp = SFOCSP( - ocsp_response_cache_uri='file://' + cache_file_name) - for url in TARGET_HOSTS: - connection = _openssl_connect(url) - assert ocsp.validate(url, connection), \ - 'Failed to validate: {0}'.format(url) - - -def test_ocsp_with_bogus_cache_files(tmpdir): - """ - Attempt to use bogus OCSP response data - """ - - cache_file_name, target_hosts = _store_cache_in_file(tmpdir) - - ocsp = SFOCSP( - ocsp_response_cache_uri='file://' + cache_file_name) - OCSPCache.read_ocsp_response_cache_file(ocsp, cache_file_name) - cache_data = OCSPCache.CACHE - assert cache_data, "at least one cache entry should be stored."
- - # setting bogus data - current_time = int(time.time()) - for k, v in cache_data.items(): - cache_data[k] = (current_time, b'bogus') - - # write back the cache file - OCSPCache.CACHE = cache_data - OCSPCache.write_ocsp_response_cache_file(ocsp, cache_file_name) - - # force the use of the bogus cache file; validation should still pass - SnowflakeOCSP.clear_cache() - ocsp = SFOCSP( - ocsp_response_cache_uri='file://' + cache_file_name) - for hostname in target_hosts: - connection = _openssl_connect(hostname) - assert ocsp.validate(hostname, connection), \ - 'Failed to validate: {0}'.format(hostname) - - -def test_ocsp_with_outdated_cache(tmpdir): - """ - Attempt to use an outdated OCSP response cache file - """ - cache_file_name, target_hosts = _store_cache_in_file(tmpdir) - - ocsp = SFOCSP( - ocsp_response_cache_uri='file://' + cache_file_name) - - # reading cache file - OCSPCache.read_ocsp_response_cache_file(ocsp, cache_file_name) - cache_data = OCSPCache.CACHE - assert cache_data, "at least one cache entry should be stored." - - # setting outdated data - current_time = int(time.time()) - for k, v in cache_data.items(): - cache_data[k] = (current_time - 48 * 60 * 60, v[1]) - - # write back the cache file - OCSPCache.CACHE = cache_data - OCSPCache.write_ocsp_response_cache_file(ocsp, cache_file_name) - - # force the use of the outdated cache file; its entries must not be loaded - SnowflakeOCSP.clear_cache() # reset the memory cache - SFOCSP( - ocsp_response_cache_uri='file://' + cache_file_name) - assert SnowflakeOCSP.cache_size() == 0, \ - 'must be empty. outdated cache should not be loaded' - - -def _store_cache_in_file( - tmpdir, target_hosts=None, filename=None): - if target_hosts is None: - target_hosts = TARGET_HOSTS - if filename is None: - filename = path.join(str(tmpdir), 'cache_file.txt') - - # cache OCSP response - SnowflakeOCSP.clear_cache() - ocsp = SFOCSP( - ocsp_response_cache_uri='file://' + filename, - use_ocsp_cache_server=False) - for hostname in target_hosts: - connection = _openssl_connect(hostname) - assert ocsp.validate(hostname, connection), \ - 'Failed to validate: {0}'.format(hostname) - assert path.exists(filename), "OCSP response cache file should exist" - return filename, target_hosts - - -def test_ocsp_with_invalid_cache_file(): - """ - OCSP tests with an invalid cache file - """ - SnowflakeOCSP.clear_cache() # reset the memory cache - ocsp = SFOCSP(ocsp_response_cache_uri="NEVER_EXISTS") - for url in TARGET_HOSTS[0:1]: - connection = _openssl_connect(url) - assert ocsp.validate(url, connection), \ - 'Failed to validate: {0}'.format(url) - - -def test_concurrent_ocsp_requests(tmpdir): - """ - Run OCSP revocation checks in parallel. The memory and file caches are - deleted randomly. - """ - from multiprocessing.pool import ThreadPool - - cache_file_name = path.join(str(tmpdir), 'cache_file.txt') - SnowflakeOCSP.clear_cache() # reset the memory cache - - target_hosts = TARGET_HOSTS * 5 - pool = ThreadPool(len(target_hosts)) - for hostname in target_hosts: - pool.apply_async(_validate_certs_using_ocsp, - [hostname, cache_file_name]) - pool.close() - pool.join() - - -def _validate_certs_using_ocsp(url, cache_file_name): - """ - Validate OCSP response.
The memory and file caches are deleted randomly - """ - logger = logging.getLogger('test') - import time - import random - time.sleep(random.randint(0, 3)) - if random.random() < 0.2: - logger.info('clearing up cache: OCSP_VALIDATION_CACHE') - SnowflakeOCSP.clear_cache() - if random.random() < 0.05: - logger.info('deleting a cache file: %s', cache_file_name) - SnowflakeOCSP.delete_cache_file() - - connection = _openssl_connect(url) - ocsp = SFOCSP( - ocsp_response_cache_uri='file://' + cache_file_name) - ocsp.validate(url, connection) - - -def test_ocsp_revoked_certificate(): - """ - Test a revoked certificate. - """ - revoked_cert = path.join( - THIS_DIR, 'data', 'cert_tests', 'revoked_certs.pem') - - SnowflakeOCSP.clear_cache() # reset the memory cache - ocsp = SFOCSP() - - with pytest.raises(OperationalError) as ex: - ocsp.validate_certfile(revoked_cert) - assert ex.value.errno == ER_SERVER_CERTIFICATE_REVOKED - - -def test_ocsp_incomplete_chain(): - """ - Test an incomplete certificate chain - """ - incomplete_chain_cert = path.join( - THIS_DIR, 'data', 'cert_tests', 'incomplete-chain.pem') - - SnowflakeOCSP.clear_cache() # reset the memory cache - ocsp = SFOCSP() - - with pytest.raises(OperationalError) as ex: - ocsp.validate_certfile(incomplete_chain_cert) - assert 'CA certificate is NOT found' in ex.value.msg - - -def test_ocsp_cache_merge(tmpdir): - """ - Merge two OCSP response cache files - """ - previous_cache_filename = path.join(str(tmpdir), 'cache_file1.txt') - _store_cache_in_file( - tmpdir, - target_hosts=TARGET_HOSTS[0:3], - filename=previous_cache_filename) - - current_cache_filename = path.join(str(tmpdir), 'cache_file2.txt') - _store_cache_in_file( - tmpdir, - target_hosts=TARGET_HOSTS[4:], - filename=current_cache_filename) - - latest_cache_filename = path.join(str(tmpdir), 'cache_file.txt') - - SnowflakeOCSP.clear_cache() # reset the memory cache - ocsp = SFOCSP() - OCSPCache.merge_cache( - ocsp, - previous_cache_filename, - current_cache_filename, - latest_cache_filename) - - with codecs.open(previous_cache_filename) as f: - prev = json.load(f) - with codecs.open(current_cache_filename) as f: - curr = json.load(f) - with codecs.open(latest_cache_filename) as f: - latest = json.load(f) - - assert len(latest) > len(prev) - assert len(latest) > len(curr) diff --git a/test/test_pickle_timestamp_tz.py b/test/test_pickle_timestamp_tz.py deleted file mode 100644 index bdb022ba6..000000000 --- a/test/test_pickle_timestamp_tz.py +++ /dev/null @@ -1,25 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. -# -import os -import pickle - - -def test_pickle_timestamp_tz(tmpdir, conn_cnx): - """ - Ensure the timestamp_tz result is pickle-able. - """ - tmp_dir = str(tmpdir.mkdir('pickles')) - output = os.path.join(tmp_dir, 'tz.pickle') - expected_tz = None - with conn_cnx() as con: - for rec in con.cursor().execute("select '2019-08-11 01:02:03.123 -03:00'::TIMESTAMP_TZ"): - expected_tz = rec[0] - with open(output, 'wb') as f: - pickle.dump(expected_tz, f) - - with open(output, 'rb') as f: - read_tz = pickle.load(f) - assert expected_tz == read_tz diff --git a/test/test_put_get.py b/test/test_put_get.py deleted file mode 100644 index 8d4cc79e0..000000000 --- a/test/test_put_get.py +++ /dev/null @@ -1,479 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved.
-# -import os -from os import path -from getpass import getuser -from logging import getLogger - -import pytest - -try: - from parameters import (CONNECTION_PARAMETERS_ADMIN) -except ImportError: - CONNECTION_PARAMETERS_ADMIN = {} - -THIS_DIR = path.dirname(path.realpath(__file__)) - -logger = getLogger(__name__) - - -@pytest.fixture() -def test_data(request, conn_cnx, db_parameters): - assert u'AWS_ACCESS_KEY_ID' in os.environ - assert u'AWS_SECRET_ACCESS_KEY' in os.environ - - unique_name = db_parameters['name'] - database_name = "{0}_db".format(unique_name) - warehouse_name = "{0}_wh".format(unique_name) - - def fin(): - with conn_cnx() as cnx: - with cnx.cursor() as cur: - cur.execute("drop database {0}".format(database_name)) - cur.execute("drop warehouse {0}".format(warehouse_name)) - - request.addfinalizer(fin) - - class TestData(object): - def __init__(self): - self.AWS_ACCESS_KEY_ID = "'{0}'".format( - os.environ[u'AWS_ACCESS_KEY_ID']) - self.AWS_SECRET_ACCESS_KEY = "'{0}'".format( - os.environ[u'AWS_SECRET_ACCESS_KEY']) - self.SF_PROJECT_ROOT = os.getenv('SF_PROJECT_ROOT') - if self.SF_PROJECT_ROOT is None: - self.SF_PROJECT_ROOT = path.realpath( - path.join(THIS_DIR, '..', '..', '..', '..', )) - self.stage_name = "{0}_stage".format(unique_name) - self.warehouse_name = warehouse_name - self.database_name = database_name - self.user_bucket = os.getenv( - 'SF_AWS_USER_BUCKET', - "sfc-dev1-regression/{0}/reg".format(getuser())) - - ret = TestData() - - with conn_cnx() as cnx: - with cnx.cursor() as cur: - cur.execute(""" -use role sysadmin -""") - cur.execute(""" -create or replace warehouse {0} -warehouse_size = 'small' -warehouse_type='standard' -auto_suspend=1800 -""".format(warehouse_name)) - cur.execute(""" -create or replace database {0} -""".format(database_name)) - cur.execute(""" -create or replace schema pytesting_schema -""") - cur.execute(""" -create or replace file format VSV type = 'CSV' -field_delimiter='|' error_on_column_count_mismatch=false -""") - - return ret - - -@pytest.mark.skipif( - not CONNECTION_PARAMETERS_ADMIN, - reason="Snowflake admin account is not accessible."
-) -def test_load_s3(test_data, conn_cnx): - with conn_cnx() as cnx: - with cnx.cursor() as cur: - cur.execute( - """use warehouse {0}""".format(test_data.warehouse_name)) - cur.execute("""use schema {0}.pytesting_schema""".format( - test_data.database_name)) - cur.execute(""" -create or replace table tweets(created_at timestamp, -id number, id_str string, text string, source string, -in_reply_to_status_id number, in_reply_to_status_id_str string, -in_reply_to_user_id number, in_reply_to_user_id_str string, -in_reply_to_screen_name string, user__id number, user__id_str string, -user__name string, user__screen_name string, user__location string, -user__description string, user__url string, -user__entities__description__urls string, user__protected string, -user__followers_count number, user__friends_count number, -user__listed_count number, user__created_at timestamp, -user__favourites_count number, user__utc_offset number, -user__time_zone string, user__geo_enabled string, user__verified string, -user__statuses_count number, user__lang string, -user__contributors_enabled string, user__is_translator string, -user__profile_background_color string, -user__profile_background_image_url string, -user__profile_background_image_url_https string, -user__profile_background_tile string, user__profile_image_url string, -user__profile_image_url_https string, user__profile_link_color string, -user__profile_sidebar_border_color string, -user__profile_sidebar_fill_color string, user__profile_text_color string, -user__profile_use_background_image string, user__default_profile string, -user__default_profile_image string, user__following string, -user__follow_request_sent string, user__notifications string, geo string, -coordinates string, place string, contributors string, -retweet_count number, -favorite_count number, entities__hashtags string, entities__symbols string, -entities__urls string, entities__user_mentions string, favorited string, -retweeted string, lang string)""") - cur.execute("ls @%tweets") - assert cur.rowcount == 0, \ - ('table newly created should not have any ' - 'files in its staging area') - cur.execute(""" -copy into tweets from s3://sfc-dev1-data/twitter/O1k/tweets/ -credentials=( -AWS_KEY_ID={aws_access_key_id} -AWS_SECRET_KEY={aws_secret_access_key}) -file_format=( - skip_header=1 null_if=('') - field_optionally_enclosed_by='"' -) -""".format( - aws_access_key_id=test_data.AWS_ACCESS_KEY_ID, - aws_secret_access_key=test_data.AWS_SECRET_ACCESS_KEY) - ) - assert cur.rowcount == 1, ( - 'copy into tweets did not set rowcount to 1') - results = cur.fetchall() - assert results[0][0] == ( - "s3://sfc-dev1-data/twitter/O1k/tweets/1.csv.gz") - cur.execute("drop table tweets") - - -@pytest.mark.skipif( - not CONNECTION_PARAMETERS_ADMIN, - reason="Snowflake admin account is not accessible." 
-) -def test_put_local_file(test_data, conn_cnx): - with conn_cnx() as cnx: - with cnx.cursor() as cur: - cur.execute( - """use warehouse {0}""".format(test_data.warehouse_name)) - cur.execute( - "alter session set DISABLE_PUT_AND_GET_ON_EXTERNAL_STAGE=false") - cur.execute("""use schema {0}.pytesting_schema""".format( - test_data.database_name)) - cur.execute(""" -create or replace table pytest_putget_t1 ( -c1 STRING, c2 STRING, c3 STRING, -c4 STRING, c5 STRING, c6 STRING, c7 STRING, c8 STRING, c9 STRING) -stage_file_format = ( - field_delimiter = '|' - error_on_column_count_mismatch=false) - stage_copy_options = (purge=false) - stage_location = ( - url = 's3://{user_bucket}/{stage_name}' - credentials = ( - AWS_KEY_ID={aws_access_key_id} - AWS_SECRET_KEY={aws_secret_access_key}) -) -""".format( - aws_access_key_id=test_data.AWS_ACCESS_KEY_ID, - aws_secret_access_key=test_data.AWS_SECRET_ACCESS_KEY, - user_bucket=test_data.user_bucket, - stage_name=test_data.stage_name, - )) - cur.execute(""" -put file://{0}/ExecPlatform/Database/data/orders_10*.csv @%pytest_putget_t1 -""".format(test_data.SF_PROJECT_ROOT) - ) - assert cur.is_file_transfer - cur.execute("ls @%pytest_putget_t1").fetchall() - assert not cur.is_file_transfer - assert cur.rowcount == 2, ( - 'ls @%pytest_putget_t1 did not return 2 rows') - cur.execute("copy into pytest_putget_t1") - results = cur.fetchall() - assert len(results) == 2, '2 files were not copied' - assert results[0][1] == 'LOADED', ( - 'file 1 was not loaded after copy') - assert results[1][1] == 'LOADED', ( - 'file 2 was not loaded after copy') - - cur.execute("select count(*) from pytest_putget_t1") - results = cur.fetchall() - assert results[0][0] == 73, ( - '73 rows not loaded into pytest_putget_t1') - cur.execute("rm @%pytest_putget_t1") - results = cur.fetchall() - assert len(results) == 2, 'two files were not removed' - cur.execute( - "select STATUS from information_schema.load_history where table_name='PYTEST_PUTGET_T1'") - results = cur.fetchall() - assert results[0][0] == 'LOADED', ( - 'history does not show the file as loaded') - cur.execute("drop table pytest_putget_t1") - - -@pytest.mark.skipif( - not CONNECTION_PARAMETERS_ADMIN, - reason="Snowflake admin account is not accessible."
-) -def test_put_load_from_user_stage(test_data, conn_cnx): - with conn_cnx() as cnx: - with cnx.cursor() as cur: - cur.execute( - "alter session set DISABLE_PUT_AND_GET_ON_EXTERNAL_STAGE=false") - cur.execute( - """use warehouse {0}""".format(test_data.warehouse_name)) - cur.execute("""use schema {0}.pytesting_schema""".format( - test_data.database_name)) - cur.execute(""" -create or replace stage {stage_name} -url='s3://{user_bucket}/{stage_name}' -credentials = ( -AWS_KEY_ID={aws_access_key_id} -AWS_SECRET_KEY={aws_secret_access_key}) -""".format( - aws_access_key_id=test_data.AWS_ACCESS_KEY_ID, - aws_secret_access_key=test_data.AWS_SECRET_ACCESS_KEY, - user_bucket=test_data.user_bucket, - stage_name=test_data.stage_name, - )) - cur.execute(""" -create or replace table pytest_putget_t2 (c1 STRING, c2 STRING, c3 STRING, - c4 STRING, c5 STRING, c6 STRING, c7 STRING, c8 STRING, c9 STRING) -""") - cur.execute(""" -put file://{project_root}/ExecPlatform/Database/data/orders_10*.csv -@{stage_name} -""".format( - project_root=test_data.SF_PROJECT_ROOT, - stage_name=test_data.stage_name) - ) - # two files should have been put in the staging area - results = cur.fetchall() - assert len(results) == 2 - - cur.execute("ls @%pytest_putget_t2") - results = cur.fetchall() - assert len(results) == 0, ( - 'no files should have been loaded yet') - - # copy - cur.execute(""" -copy into pytest_putget_t2 from @{stage_name} -file_format = (field_delimiter = '|' error_on_column_count_mismatch=false) -purge=true -""".format( - stage_name=test_data.stage_name - )) - results = sorted(cur.fetchall()) - assert len(results) == 2, ( - 'copy failed to load two files from the stage') - assert results[0][0] == ( - "s3://{user_bucket}/{stage_name}/orders_100.csv.gz". - format( - user_bucket=test_data.user_bucket, - stage_name=test_data.stage_name, - )), 'copy did not load file orders_100' - - assert results[1][0] == ( - "s3://{user_bucket}/{stage_name}/orders_101.csv.gz".format( - user_bucket=test_data.user_bucket, - stage_name=test_data.stage_name, - )), 'copy did not load file orders_101' - - # should be empty (purged) - cur.execute( - "ls @{stage_name}".format(stage_name=test_data.stage_name)) - results = cur.fetchall() - assert len(results) == 0, 'copied files not purged' - cur.execute("drop table pytest_putget_t2") - cur.execute( - "drop stage {stage_name}".format( - stage_name=test_data.stage_name)) - - -@pytest.mark.skipif( - not CONNECTION_PARAMETERS_ADMIN, - reason="Snowflake admin account is not accessible."
-) -def test_unload(test_data, conn_cnx): - with conn_cnx() as cnx: - with cnx.cursor() as cur: - cur.execute( - """use warehouse {0}""".format(test_data.warehouse_name)) - cur.execute("""use schema {0}.pytesting_schema""".format( - test_data.database_name)) - cur.execute(""" -create or replace stage {stage_name} -url='s3://{user_bucket}/{stage_name}/pytest_put_unload/unload/' -credentials = ( -AWS_KEY_ID={aws_access_key_id} -AWS_SECRET_KEY={aws_secret_access_key}) -""".format( - aws_access_key_id=test_data.AWS_ACCESS_KEY_ID, - aws_secret_access_key=test_data.AWS_SECRET_ACCESS_KEY, - user_bucket=test_data.user_bucket, - stage_name=test_data.stage_name, - )) - - cur.execute(""" -CREATE OR REPLACE TABLE pytest_t3 ( -c1 STRING, c2 STRING, c3 STRING, c4 STRING, c5 STRING, -c6 STRING, c7 STRING, c8 STRING, c9 STRING) -stage_file_format = (format_name = 'vsv' field_delimiter = '|' - error_on_column_count_mismatch=false)""") - cur.execute(""" -alter stage {stage_name} set file_format = ( format_name = 'VSV' ) -""".format(stage_name=test_data.stage_name)) - - # make sure it's clean - cur.execute( - "rm @{stage_name}".format(stage_name=test_data.stage_name)) - - # put local file - cur.execute(""" -put file://{0}/ExecPlatform/Database/data/orders_10*.csv -@%pytest_t3""".format(test_data.SF_PROJECT_ROOT) - ) - - # copy into table - cur.execute(""" -copy into pytest_t3 -file_format = (field_delimiter = '|' error_on_column_count_mismatch=false) -purge=true""") - # unload from table - cur.execute(""" -copy into @{stage_name}/data_ -from pytest_t3 file_format=(format_name='VSV' compression='gzip') -max_file_size=10000000""".format(stage_name=test_data.stage_name)) - - # load the data back to another table - cur.execute(""" -CREATE OR REPLACE TABLE pytest_t3_copy ( -c1 STRING, c2 STRING, c3 STRING, c4 STRING, c5 STRING, -c6 STRING, c7 STRING, c8 STRING, c9 STRING) -stage_file_format = (format_name = 'VSV' )""") - cur.execute(""" -copy into pytest_t3_copy -from @{stage_name}/data_ return_failed_only=true -""".format(stage_name=test_data.stage_name)) - - # check to make sure they are equal - cur.execute(""" -(select * from pytest_t3 minus select * from pytest_t3_copy) -union -(select * from pytest_t3_copy minus select * from pytest_t3) -""") - assert cur.rowcount == 0, ( - 'unloaded/reloaded data were not the same') - # clean stage - cur.execute( - "rm @{stage_name}/data_".format( - stage_name=test_data.stage_name)) - assert cur.rowcount == 1, ( - 'only one file was expected to be removed') - - # unload with deflate - cur.execute(""" -copy into @{stage_name}/data_ -from pytest_t3 file_format=(format_name='VSV' compression='deflate') -max_file_size=10000000 -""".format(stage_name=test_data.stage_name)) - results = cur.fetchall() - assert results[0][0] == 73, ( - '73 rows were expected to be loaded') - - # create a table to unload data into - cur.execute(""" -CREATE OR REPLACE TABLE pytest_t3_copy -(c1 STRING, c2 STRING, c3 STRING, c4 STRING, c5 STRING, c6 STRING, -c7 STRING, c8 STRING, c9 STRING) -stage_file_format = ( -format_name = 'VSV' -compression='deflate')""") - results = cur.fetchall() - assert results[0][0] == ( - "Table PYTEST_T3_COPY successfully created."), ( - "table not created successfully") - - cur.execute(""" -alter stage {stage_name} set file_format = ( -format_name = 'VSV' -compression='deflate') -""".format(stage_name=test_data.stage_name)) - - cur.execute(""" -copy into pytest_t3_copy from @{stage_name}/data_ -return_failed_only=true -""".format(stage_name=test_data.stage_name)) - results =
cur.fetchall() - assert results[0][2] == "LOADED", ( - "rows were not loaded successfully") - assert results[0][4] == 73, ( - "not all 73 rows were loaded successfully") - # check to make sure they are equal - cur.execute(""" -(select * from pytest_t3 minus select * from pytest_t3_copy) -union -(select * from pytest_t3_copy minus select * from pytest_t3) -""") - assert cur.rowcount == 0, ( - 'unloaded/reloaded data were not the same') - cur.execute( - "rm @{stage_name}/data_".format( - stage_name=test_data.stage_name)) - assert cur.rowcount == 1, ( - 'only one file was expected to be removed') - - # clean stage - cur.execute( - "rm @{stage_name}/data_".format( - stage_name=test_data.stage_name)) - - cur.execute("drop table pytest_t3_copy") - cur.execute( - "drop stage {stage_name}".format( - stage_name=test_data.stage_name)) - cur.close() - - -@pytest.mark.skipif( - not CONNECTION_PARAMETERS_ADMIN, - reason="Snowflake admin account is not accessible." -) -def test_put_with_auto_compress_false(tmpdir, db_parameters): - """ - Test PUT command with auto_compress=False - """ - import snowflake.connector - cnx = snowflake.connector.connect( - user=db_parameters['s3_user'], - password=db_parameters['s3_password'], - host=db_parameters['s3_host'], - port=db_parameters['s3_port'], - database=db_parameters['s3_database'], - account=db_parameters['s3_account'], - protocol=db_parameters['s3_protocol']) - - tmp_dir = str(tmpdir.mkdir('data')) - test_data = os.path.join(tmp_dir, 'data.txt') - with open(test_data, 'w') as f: - f.write("test1,test2") - f.write("test3,test4") - - cnx.cursor().execute("RM @~/test_put_uncompress_file") - try: - with cnx.cursor() as cur: - for rec in cur.execute(""" -PUT file://{0} @~/test_put_uncompress_file auto_compress=FALSE -""".format(test_data)): - print(rec) - - ret = cnx.cursor().execute(""" -LS @~/test_put_uncompress_file -""").fetchone() - assert "test_put_uncompress_file/data.txt" in ret[0] - assert "data.txt.gz" not in ret[0] - finally: - cnx.cursor().execute("RM @~/test_put_uncompress_file") diff --git a/test/test_put_get_medium.py b/test/test_put_get_medium.py deleted file mode 100644 index 093a75f3c..000000000 --- a/test/test_put_get_medium.py +++ /dev/null @@ -1,734 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. 
-# -import datetime -import gzip -import os -import random -import shutil -import string -import sys -import tempfile -import time -from logging import getLogger -from os import path - -import pytest -import pytz - -from snowflake.connector import ProgrammingError -from snowflake.connector.cursor import DictCursor - -try: - from parameters import (CONNECTION_PARAMETERS_ADMIN) -except ImportError: - CONNECTION_PARAMETERS_ADMIN = {} - -import logging - -for logger_name in ['test', 'snowflake.connector', 'botocore']: - logger = logging.getLogger(logger_name) - logger.setLevel(logging.DEBUG) - ch = logging.FileHandler( - path.join(tempfile.gettempdir(), 'python_connector.log')) - ch.setLevel(logging.DEBUG) - ch.setFormatter(logging.Formatter( - '%(asctime)s - %(threadName)s %(filename)s:%(lineno)d - %(funcName)s() - %(levelname)s - %(message)s')) - logger.addHandler(ch) - -THIS_DIR = os.path.dirname(os.path.realpath(__file__)) -logger = getLogger(__name__) - - -def test_put_copy0(conn_cnx, db_parameters): - """ - Put and Copy a file - """ - data_file = os.path.join(THIS_DIR, "data", "put_get_1.txt") - - def run(cnx, sql): - sql = sql.format( - file=data_file.replace('\\', '\\\\'), - name=db_parameters['name']) - return cnx.cursor().execute(sql).fetchall() - - def run_with_cursor(cnx, sql): - sql = sql.format( - file=data_file.replace('\\', '\\\\'), - name=db_parameters['name']) - c = cnx.cursor(DictCursor) - return c, c.execute(sql).fetchall() - - with conn_cnx() as cnx: - run(cnx, """ -create table {name} ( -aa int, -dt date, -ts timestamp, -tsltz timestamp_ltz, -tsntz timestamp_ntz, -tstz timestamp_tz, -pct float, -ratio number(5,2)) -""") - c, ret = run_with_cursor(cnx, "put 'file://{file}' @%{name}") - assert c.is_file_transfer, "PUT" - assert len(ret) == 1 and ret[0]['source'] == os.path.basename( - data_file), "File name" - - c, ret = run_with_cursor(cnx, "copy into {name}") - assert not c.is_file_transfer, "COPY" - assert len(ret) == 1 and ret[0]['status'] == "LOADED", \ - "Failed to load data" - - assert ret[0]['rows_loaded'] == 3, "Failed to load 3 rows of data" - - run(cnx, 'drop table if exists {name}') - - -def test_put_copy_compressed(conn_cnx, db_parameters): - """ - Put and Copy compressed files - """ - data_file = os.path.join(THIS_DIR, "data", "gzip_sample.txt.gz") - - def run(cnx, sql): - sql = sql.format( - file=data_file.replace('\\', '\\\\'), - name=db_parameters['name']) - return cnx.cursor(DictCursor).execute(sql).fetchall() - - with conn_cnx() as cnx: - run(cnx, "create or replace table {name} (value string)") - file_size = os.stat(data_file).st_size - ret = run(cnx, "put 'file://{file}' @%{name}") - assert ret[0]['source'] == os.path.basename(data_file), "File name" - assert ret[0]['source_size'] == file_size, "File size" - assert ret[0]['status'] == 'UPLOADED' - - ret = run(cnx, "copy into {name}") - assert len(ret) == 1 and ret[0]['status'] == "LOADED", \ - "Failed to load data" - assert ret[0]['rows_loaded'] == 1, "Failed to load 1 row of data" - - run(cnx, 'drop table if exists {name}') - - -@pytest.mark.skipif( - True, - reason="BZ2 is not detected in this test case.
Need investigation" -) -def test_put_copy_bz2_compressed(conn_cnx, db_parameters): - """ - Put and Copy bz2 compressed files - """ - data_file = os.path.join(THIS_DIR, "data", "bzip2_sample.txt.bz2") - - def run(cnx, sql): - sql = sql.format( - file=data_file.replace('\\', '\\\\'), - name=db_parameters['name']) - return cnx.cursor().execute(sql).fetchall() - - with conn_cnx() as cnx: - run(cnx, "create or replace table {name} (value string)") - for rec in run(cnx, "put 'file://{file}' @%{name}"): - print(rec) - assert rec[-2] == 'UPLOADED' - for rec in run(cnx, "copy into {name}"): - print(rec) - assert rec[1] == 'LOADED' - - run(cnx, 'drop table if exists {name}') - - -def test_put_copy_brotli_compressed(conn_cnx, db_parameters): - """ - Put and Copy brotli compressed files - """ - data_file = os.path.join(THIS_DIR, "data", "brotli_sample.txt.br") - - def run(cnx, sql): - sql = sql.format( - file=data_file.replace('\\', '\\\\'), - name=db_parameters['name']) - return cnx.cursor().execute(sql).fetchall() - - with conn_cnx() as cnx: - run(cnx, "create or replace table {name} (value string)") - for rec in run(cnx, "put 'file://{file}' @%{name}"): - print(rec) - assert rec[-2] == 'UPLOADED' - for rec in run( - cnx, "copy into {name} file_format=(compression='BROTLI')"): - print(rec) - assert rec[1] == 'LOADED' - - run(cnx, 'drop table if exists {name}') - - -def test_put_copy_zstd_compressed(conn_cnx, db_parameters): - """ - Put and Copy zstd compressed files - """ - data_file = os.path.join(THIS_DIR, "data", "zstd_sample.txt.zst") - - def run(cnx, sql): - sql = sql.format( - file=data_file.replace('\\', '\\\\'), - name=db_parameters['name']) - return cnx.cursor().execute(sql).fetchall() - - with conn_cnx() as cnx: - run(cnx, "create or replace table {name} (value string)") - for rec in run(cnx, "put 'file://{file}' @%{name}"): - print(rec) - assert rec[-2] == 'UPLOADED' - for rec in run( - cnx, "copy into {name} file_format=(compression='ZSTD')"): - print(rec) - assert rec[1] == 'LOADED' - - run(cnx, 'drop table if exists {name}') - - -@pytest.mark.skipif( - not CONNECTION_PARAMETERS_ADMIN, - reason="Snowflake admin account is not accessible." 
-) -def test_put_copy_parquet_compressed(conn_cnx, db_parameters): - """ - Put and Copy parquet compressed files - """ - data_file = os.path.join( - THIS_DIR, "data", "nation.impala.parquet") - - def run(cnx, sql): - sql = sql.format( - file=data_file.replace('\\', '\\\\'), - name=db_parameters['name']) - return cnx.cursor().execute(sql).fetchall() - - with conn_cnx() as cnx: - run(cnx, "alter session set enable_parquet_filetype=true") - run(cnx, """ -create or replace table {name} -(value variant) -stage_file_format=(type='parquet') -""") - for rec in run(cnx, "put 'file://{file}' @%{name}"): - print(rec) - assert rec[-2] == 'UPLOADED' - assert rec[4] == 'PARQUET' - assert rec[5] == 'PARQUET' - for rec in run(cnx, "copy into {name}"): - print(rec) - assert rec[1] == 'LOADED' - - run(cnx, 'drop table if exists {name}') - run(cnx, "alter session unset enable_parquet_filetype") - - -def test_put_copy_orc_compressed(conn_cnx, db_parameters): - """ - Put and Copy ORC compressed files - """ - data_file = os.path.join(THIS_DIR, "data", "TestOrcFile.test1.orc") - - def run(cnx, sql): - sql = sql.format( - file=data_file.replace('\\', '\\\\'), - name=db_parameters['name']) - return cnx.cursor().execute(sql).fetchall() - - with conn_cnx() as cnx: - run(cnx, """ -create or replace table {name} (value variant) stage_file_format=(type='orc') -""") - for rec in run(cnx, "put 'file://{file}' @%{name}"): - print(rec) - assert rec[-2] == 'UPLOADED' - assert rec[4] == 'ORC' - assert rec[5] == 'ORC' - for rec in run(cnx, "copy into {name}"): - print(rec) - assert rec[1] == 'LOADED' - - run(cnx, 'drop table if exists {name}') - - -@pytest.mark.skipif( - not CONNECTION_PARAMETERS_ADMIN, - reason="Snowflake admin account is not accessible." -) -def test_copy_get(tmpdir, conn_cnx, db_parameters): - """ - Copy and Get a file - """ - name_unload = db_parameters['name'] + "_unload" - tmp_dir = str(tmpdir.mkdir('copy_get_stage')) - tmp_dir_user = str(tmpdir.mkdir('user_get')) - - def run(cnx, sql): - sql = sql.format( - name_unload=name_unload, - tmpdir=tmp_dir, - tmp_dir_user=tmp_dir_user, - name=db_parameters['name']) - return cnx.cursor().execute(sql).fetchall() - - with conn_cnx() as cnx: - run(cnx, - "alter session set DISABLE_PUT_AND_GET_ON_EXTERNAL_STAGE=false") - run(cnx, """ -create or replace table {name} ( -aa int, -dt date, -ts timestamp, -tsltz timestamp_ltz, -tsntz timestamp_ntz, -tstz timestamp_tz, -pct float, -ratio number(5,2)) -""") - run(cnx, """ -create or replace stage {name_unload} -url='file://{tmpdir}/' -file_format = ( -format_name = 'common.public.csv' -field_delimiter = '|' -error_on_column_count_mismatch=false); -""") - current_time = datetime.datetime.utcnow() - current_time = current_time.replace( - tzinfo=pytz.timezone("America/Los_Angeles")) - current_date = datetime.date.today() - other_time = current_time.replace(tzinfo=pytz.timezone("Asia/Tokyo")) - - fmt = """ -insert into {name}(aa, dt, tstz) -values(%(value)s,%(dt)s,%(tstz)s) -""".format(name=db_parameters['name']) - cnx.cursor().executemany(fmt, [ - {'value': 6543, 'dt': current_date, 'tstz': other_time}, - {'value': 1234, 'dt': current_date, 'tstz': other_time}, - ]) - - run(cnx, """ -copy into @{name_unload}/data_ -from {name} -file_format=( -format_name='common.public.csv' -compression='gzip') -max_file_size=10000000 -""") - ret = run(cnx, "get @{name_unload}/ file://{tmp_dir_user}/") - - assert ret[0][2] == 'DOWNLOADED', 'Failed to download' - cnt = 0 - for _, _, _ in os.walk(tmp_dir_user): - cnt += 1 - assert cnt > 0, 'No 
file was downloaded' - - run(cnx, "drop stage {name_unload}") - run(cnx, "drop table if exists {name}") - - -def test_put_copy_many_files(tmpdir, test_files, conn_cnx, db_parameters): - """ - Put and Copy many_files - """ - # generates N files - number_of_files = 100 - number_of_lines = 1000 - tmp_dir = test_files(tmpdir, number_of_lines, number_of_files) - - files = os.path.join(tmp_dir, 'file*') - - def run(cnx, sql): - sql = sql.format( - files=files.replace('\\', '\\\\'), - name=db_parameters['name']) - return cnx.cursor().execute(sql).fetchall() - - with conn_cnx() as cnx: - run(cnx, """ -create or replace table {name} ( -aa int, -dt date, -ts timestamp, -tsltz timestamp_ltz, -tsntz timestamp_ntz, -tstz timestamp_tz, -pct float, -ratio number(6,2)) -""") - run(cnx, "put 'file://{files}' @%{name}") - run(cnx, "copy into {name}") - rows = 0 - for rec in run(cnx, "select count(*) from {name}"): - rows += rec[0] - assert rows == number_of_files * number_of_lines, 'Number of rows' - - run(cnx, "drop table if exists {name}") - - -def test_put_copy_many_files_s3(tmpdir, test_files, conn_cnx, db_parameters): - """ - [s3] Put and Copy many files - """ - # generates N files - number_of_files = 10 - number_of_lines = 1000 - tmp_dir = test_files(tmpdir, number_of_lines, number_of_files) - - files = os.path.join(tmp_dir, 'file*') - - def run(cnx, sql): - sql = sql.format( - files=files.replace('\\', '\\\\'), - name=db_parameters['name']) - return cnx.cursor().execute(sql).fetchall() - - with conn_cnx( - user=db_parameters['s3_user'], - account=db_parameters['s3_account'], - password=db_parameters['s3_password']) as cnx: - run(cnx, """ -create or replace table {name} ( -aa int, -dt date, -ts timestamp, -tsltz timestamp_ltz, -tsntz timestamp_ntz, -tstz timestamp_tz, -pct float, -ratio number(6,2)) -""") - try: - with conn_cnx( - user=db_parameters['s3_user'], - account=db_parameters['s3_account'], - password=db_parameters['s3_password']) as cnx: - run(cnx, "put 'file://{files}' @%{name}") - run(cnx, "copy into {name}") - - rows = 0 - for rec in run(cnx, "select count(*) from {name}"): - rows += rec[0] - assert rows == number_of_files * number_of_lines, \ - 'Number of rows' - finally: - with conn_cnx( - user=db_parameters['s3_user'], - account=db_parameters['s3_account'], - password=db_parameters['s3_password']) as cnx: - run(cnx, "drop table if exists {name}") - - -def test_put_copy_duplicated_files_s3(tmpdir, test_files, conn_cnx, - db_parameters): - """ - [s3] Put and Copy duplicated files - """ - # generates N files - number_of_files = 5 - number_of_lines = 100 - tmp_dir = test_files(tmpdir, number_of_lines, number_of_files) - - files = os.path.join(tmp_dir, 'file*') - - def run(cnx, sql): - sql = sql.format( - files=files.replace('\\', '\\\\'), - name=db_parameters['name']) - return cnx.cursor().execute(sql).fetchall() - - with conn_cnx( - user=db_parameters['s3_user'], - account=db_parameters['s3_account'], - password=db_parameters['s3_password']) as cnx: - run(cnx, """ -create or replace table {name} ( -aa int, -dt date, -ts timestamp, -tsltz timestamp_ltz, -tsntz timestamp_ntz, -tstz timestamp_tz, -pct float, -ratio number(6,2)) -""") - - try: - with conn_cnx( - user=db_parameters['s3_user'], - account=db_parameters['s3_account'], - password=db_parameters['s3_password']) as cnx: - success_cnt = 0 - skipped_cnt = 0 - for rec in run(cnx, "put 'file://{files}' @%{name}"): - logger.info('rec=%s', rec) - if rec[6] == 'UPLOADED': - success_cnt += 1 - elif rec[6] == 'SKIPPED': - skipped_cnt += 1 - 
assert success_cnt == number_of_files, 'uploaded files' - assert skipped_cnt == 0, 'skipped files' - - deleted_cnt = 0 - run(cnx, "rm @%{name}/file0") - deleted_cnt += 1 - run(cnx, "rm @%{name}/file1") - deleted_cnt += 1 - run(cnx, "rm @%{name}/file2") - deleted_cnt += 1 - - success_cnt = 0 - skipped_cnt = 0 - for rec in run(cnx, "put 'file://{files}' @%{name}"): - logger.info('rec=%s', rec) - if rec[6] == 'UPLOADED': - success_cnt += 1 - elif rec[6] == 'SKIPPED': - skipped_cnt += 1 - assert success_cnt == deleted_cnt, \ - 'uploaded files in the second time' - assert skipped_cnt == number_of_files - deleted_cnt, \ - 'skipped files in the second time' - - run(cnx, "copy into {name}") - rows = 0 - for rec in run(cnx, "select count(*) from {name}"): - rows += rec[0] - assert rows == number_of_files * number_of_lines, \ - 'Number of rows' - finally: - with conn_cnx( - user=db_parameters['s3_user'], - account=db_parameters['s3_account'], - password=db_parameters['s3_password']) as cnx: - run(cnx, "drop table if exists {name}") - - -def test_put_collision(tmpdir, test_files, conn_cnx, db_parameters): - """ - File name collision - TODO: this should be updated once non gz file support is in - """ - # generates N files - number_of_files = 5 - number_of_lines = 10 - tmp_dir = test_files(tmpdir, number_of_lines, number_of_files, - compress=True) - files = os.path.join(tmp_dir, 'file*') - shutil.copy(os.path.join(tmp_dir, 'file0.gz'), - os.path.join(tmp_dir, 'file0')) - stage_name = "test_put_collision/{0}".format(db_parameters['name']) - with conn_cnx( - user=db_parameters['s3_user'], - account=db_parameters['s3_account'], - password=db_parameters['s3_password']) as cnx: - cnx.cursor().execute("RM @~/{0}".format(stage_name)) - try: - success_cnt = 0 - skipped_cnt = 0 - for rec in cnx.cursor().execute( - "PUT 'file://{file}' @~/{stage_name}".format( - file=files.replace('\\', '\\\\'), - stage_name=stage_name)): - logger.info('rec=%s', rec) - if rec[6] == 'UPLOADED': - success_cnt += 1 - elif rec[6] == 'SKIPPED': - skipped_cnt += 1 - assert success_cnt == number_of_files + 1 - finally: - with conn_cnx( - user=db_parameters['s3_user'], - account=db_parameters['s3_account'], - password=db_parameters['s3_password']) as cnx: - cnx.cursor().execute("RM @~/{0}".format(stage_name)) - - -def _generate_huge_value_json(tmpdir, n=1, value_size=1): - fname = str(tmpdir.join('test_put_get_huge_json')) - f = gzip.open(fname, 'wb') - for i in range(n): - logger.debug("adding a value in {0}".format(i)) - f.write('{{"k":"{0}"}}'.format( - ''.join( - random.choice(string.ascii_uppercase + string.digits) for _ in - range(value_size))).encode('utf-8')) - f.close() - return fname - - -def _huge_value_json_upload(tmpdir, conn_cnx, db_parameters): - """ - (WIP) Huge json value data - """ - with conn_cnx() as cnx: - json_table = db_parameters['name'] + "_json" - cnx.cursor().execute( - "create or replace table {table} (v variant)".format( - table=json_table)) - - rows = 2 - size = 2000 - tmp_file = _generate_huge_value_json(tmpdir, n=rows, value_size=size) - try: - c = cnx.cursor() - try: - c.execute( - "put 'file://{tmp_file}' @%{name}".format( - tmp_file=tmp_file.replace('\\', '\\\\'), - name=json_table)) - colmap = {} - for index, item in enumerate(c.description): - colmap[item[0]] = index - for rec in c: - source = rec[colmap['source']] - logger.debug(source) - finally: - c.close() - - c = cnx.cursor() - try: - c.execute( - "copy into {name} on_error='skip_file' file_format=(type='json')".format( - name=json_table)) - cnt = 0 - rec = [] -
for rec in c: - logger.debug(rec) - cnt += 1 - assert rec[1] == 'LOAD_FAILED', \ - "Loading huge value json should fail" - assert cnt == 1, 'Number of PUT files' - finally: - c.close() - - c = cnx.cursor() - try: - c.execute( - "select count(*) from {name}".format(name=json_table)) - cnt = -1 - for rec in c: - cnt = rec[0] - assert cnt == 0, "Number of copied rows" - finally: - c.close() - - cnx.cursor().execute( - "drop table if exists {table}".format(table=json_table)) - finally: - os.unlink(tmp_file) - - -@pytest.mark.skipif( - os.getenv('TRAVIS') == 'true' or os.getenv('APPVEYOR'), - reason="Flaky tests. Need further investigation" -) -def test_put_get_large_files_s3(tmpdir, test_files, conn_cnx, db_parameters): - """ - [s3] Put and Get Large files - """ - number_of_files = 3 - number_of_lines = 200000 - tmp_dir = test_files(tmpdir, number_of_lines, number_of_files) - - files = os.path.join(tmp_dir, 'file*') - output_dir = os.path.join(tmp_dir, 'output_dir') - os.makedirs(output_dir) - - class cb(object): - def __init__(self, filename, filesize, **_): - pass - - def __call__(self, bytes_amount): - pass - - def run(cnx, sql): - return cnx.cursor().execute( - sql.format( - files=files.replace('\\', '\\\\'), - dir=db_parameters['name'], - output_dir=output_dir.replace('\\', '\\\\')), - _put_callback_output_stream=sys.stdout, - _get_callback_output_stream=sys.stdout, - _get_callback=cb, - _put_callback=cb).fetchall() - - with conn_cnx( - user=db_parameters['s3_user'], - account=db_parameters['s3_account'], - password=db_parameters['s3_password']) as cnx: - try: - run(cnx, "PUT 'file://{files}' @~/{dir}") - # run(cnx, "PUT 'file://{files}' @~/{dir}") # retry - all_recs = [] - for _ in range(100): - all_recs = run(cnx, "LIST @~/{dir}") - if len(all_recs) == number_of_files: - break - time.sleep(1) - else: - pytest.fail( - 'cannot list all files. 
Potentially ' - 'PUT command missed uploading Files: {0}'.format(all_recs)) - all_recs = run(cnx, "GET @~/{dir} 'file://{output_dir}'") - assert len(all_recs) == number_of_files - assert all([rec[2] == 'DOWNLOADED' for rec in all_recs]) - finally: - run(cnx, "RM @~/{dir}") - - -def test_put_get_with_hint(tmpdir, conn_cnx, db_parameters): - """ - SNOW-15153: PUT and GET with hint - """ - tmp_dir = str(tmpdir.mkdir('put_get_with_hint')) - data_file = os.path.join(THIS_DIR, "data", "put_get_1.txt") - - def run(cnx, sql, _is_put_get=None): - return cnx.cursor().execute( - sql.format( - local_dir=tmp_dir.replace('\\', '\\\\'), - file=data_file.replace('\\', '\\\\'), - name=db_parameters['name']), _is_put_get=_is_put_get).fetchone() - - with conn_cnx() as cnx: - # regular PUT case - ret = run(cnx, "PUT 'file://{file}' @~/{name}") - assert ret[0] == 'put_get_1.txt', 'PUT filename' - - # clean up a file - ret = run(cnx, "RM @~/{name}") - assert ret[0].endswith('put_get_1.txt.gz'), 'RM filename' - - # PUT detection failure - with pytest.raises(ProgrammingError): - run(cnx, """ --- test comments -PUT 'file://{file}' @~/{name}""") - - # PUT with hint - ret = run(cnx, """ ---- test comments -PUT 'file://{file}' @~/{name}""", _is_put_get=True) - assert ret[0] == 'put_get_1.txt', 'PUT filename' - - # GET detection failure - with pytest.raises(ProgrammingError): - run(cnx, """ ---- test comments -GET @~/{name} file://{local_dir}""") - - # GET with hint - ret = run(cnx, """ ---- test comments -GET @~/{name} 'file://{local_dir}'""", _is_put_get=True) - assert ret[0] == 'put_get_1.txt.gz', "GET filename" diff --git a/test/test_put_get_snow_4525.py b/test/test_put_get_snow_4525.py deleted file mode 100644 index 3ca99b10d..000000000 --- a/test/test_put_get_snow_4525.py +++ /dev/null @@ -1,72 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. 
-# - -import os - - -def test_load_bogus_file(tmpdir, conn_cnx, db_parameters): - """ - SNOW-4525: Load Bogus file and should fail - """ - with conn_cnx() as cnx: - cnx.cursor().execute(""" -create table {name} ( -aa int, -dt date, -ts timestamp, -tsltz timestamp_ltz, -tsntz timestamp_ntz, -tstz timestamp_tz, -pct float, -ratio number(5,2)) -""".format(name=db_parameters['name'])) - temp_file = str(tmpdir.join('bogus_files')) - with open(temp_file, 'wb') as random_binary_file: - random_binary_file.write(os.urandom(1024)) - cnx.cursor().execute( - "put file://{file} @%{name}".format(file=temp_file, - name=db_parameters['name'])) - - with cnx.cursor() as c: - c.execute( - "copy into {name} on_error='skip_file'".format( - name=db_parameters['name'])) - cnt = 0 - for rec in c: - cnt += 1 - assert rec[1] == "LOAD_FAILED" - cnx.cursor().execute( - "drop table if exists {name}".format(name=db_parameters['name'])) - - -def test_load_bogus_json_file(tmpdir, conn_cnx, db_parameters): - """ - SNOW-4525: Load Bogus JSON file and should fail - """ - - with conn_cnx() as cnx: - json_table = db_parameters['name'] + "_json" - cnx.cursor().execute( - "create table {name} (v variant)".format(name=json_table)) - - temp_file = str(tmpdir.join('bogus_json_files')) - with open(temp_file, 'wb') as random_binary_file: - random_binary_file.write(os.urandom(1024)) - cnx.cursor().execute( - "put file://{file} @%{name}".format(file=temp_file, - name=json_table)) - - with cnx.cursor() as c: - c.execute( - "copy into {name} on_error='skip_file' " - "file_format=(type='json')".format( - name=json_table)) - cnt = 0 - for rec in c: - cnt += 1 - assert rec[1] == "LOAD_FAILED" - cnx.cursor().execute( - "drop table if exists {name}".format(name=json_table)) diff --git a/test/test_put_get_user_stage.py b/test/test_put_get_user_stage.py deleted file mode 100644 index e2fb8a537..000000000 --- a/test/test_put_get_user_stage.py +++ /dev/null @@ -1,399 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. -# -import mimetypes -import os -import time -from getpass import getuser -from logging import getLogger - -import pytest - - -@pytest.mark.skipif( - 'AWS_ACCESS_KEY_ID' not in os.environ, - reason="Snowflake admin account is not accessible." -) -def test_put_get_small_data_via_user_stage( - tmpdir, test_files, conn_cnx, db_parameters): - """ - [s3] Put and Get Small Data via User Stage - """ - _put_get_user_stage(tmpdir, test_files, conn_cnx, db_parameters, - number_of_files=5, number_of_lines=10) - - -@pytest.mark.skipif( - 'AWS_ACCESS_KEY_ID' not in os.environ, - reason="Snowflake admin account is not accessible." 
-) -def test_put_get_large_data_via_user_stage(tmpdir, test_files, conn_cnx, - db_parameters): - """ - [s3] Put and Get Large Data via User Stage - """ - _put_get_user_stage(tmpdir, test_files, conn_cnx, db_parameters, - number_of_files=2, - number_of_lines=200000) - - -def _put_get_user_stage(tmpdir, test_files, conn_cnx, db_parameters, - number_of_files=1, - number_of_lines=1): - # sanity check - assert 'AWS_ACCESS_KEY_ID' in os.environ, 'AWS_ACCESS_KEY_ID is missing' - assert 'AWS_SECRET_ACCESS_KEY' in os.environ, \ - 'AWS_SECRET_ACCESS_KEY is missing' - - tmp_dir = test_files(tmpdir, number_of_lines, number_of_files) - - files = os.path.join(tmp_dir, 'file*') - - stage_name = db_parameters['name'] + '_stage_{0}_{1}'.format( - number_of_files, - number_of_lines) - with conn_cnx( - user=db_parameters['s3_user'], - account=db_parameters['s3_account'], - password=db_parameters['s3_password']) as cnx: - cnx.cursor().execute(""" -create or replace table {name} ( -aa int, -dt date, -ts timestamp, -tsltz timestamp_ltz, -tsntz timestamp_ntz, -tstz timestamp_tz, -pct float, -ratio number(6,2)) -""".format(name=db_parameters['name'])) - user_bucket = os.getenv('SF_AWS_USER_BUCKET', - "sfc-dev1-regression/{0}/reg".format( - getuser())) - cnx.cursor().execute(""" -create or replace stage {stage_name} -url='s3://{user_bucket}/{stage_name}-{number_of_files}-{number_of_lines}' -credentials=( - AWS_KEY_ID='{aws_key_id}' - AWS_SECRET_KEY='{aws_secret_key}' -) -""".format(stage_name=stage_name, user_bucket=user_bucket, - aws_key_id=os.getenv('AWS_ACCESS_KEY_ID'), - aws_secret_key=os.getenv('AWS_SECRET_ACCESS_KEY'), - number_of_files=number_of_files, - number_of_lines=number_of_lines)) - try: - with conn_cnx( - user=db_parameters['s3_user'], - account=db_parameters['s3_account'], - password=db_parameters['s3_password']) as cnx: - cnx.cursor().execute( - "alter session set disable_put_and_get_on_external_stage = false") - cnx.cursor().execute( - "rm @{stage_name}".format(stage_name=stage_name)) - cnx.cursor().execute( - "put file://{file} @{stage_name}".format( - file=files, - stage_name=stage_name)) - cnx.cursor().execute( - "copy into {name} from @{stage_name}".format( - name=db_parameters['name'], stage_name=stage_name)) - c = cnx.cursor() - try: - c.execute( - "select count(*) from {name}".format( - name=db_parameters['name'])) - rows = 0 - for rec in c: - rows += rec[0] - assert rows == number_of_files * number_of_lines, \ - 'Number of rows' - finally: - c.close() - cnx.cursor().execute( - "rm @{stage_name}".format(stage_name=stage_name)) - cnx.cursor().execute( - "copy into @{stage_name} from {name}".format( - name=db_parameters['name'], stage_name=stage_name)) - tmp_dir_user = str(tmpdir.mkdir('put_get_stage')) - cnx.cursor().execute( - "get @{stage_name}/ file://{tmp_dir_user}/".format( - stage_name=stage_name, - tmp_dir_user=tmp_dir_user)) - for root, _, files in os.walk(tmp_dir_user): - for file in files: - mimetypes.init() - _, encoding = mimetypes.guess_type(file) - assert encoding == 'gzip', "exported file type" - finally: - with conn_cnx( - user=db_parameters['s3_user'], - account=db_parameters['s3_account'], - password=db_parameters['s3_password']) as cnx: - cnx.cursor().execute( - "rm @{stage_name}".format(stage_name=stage_name)) - cnx.cursor().execute( - "drop stage if exists {stage_name}".format( - stage_name=stage_name)) - cnx.cursor().execute( - "drop table if exists {name}".format( - name=db_parameters['name'])) - - -@pytest.mark.skipif( - 'AWS_ACCESS_KEY_ID' not in os.environ, - 
reason="Snowflake admin account is not accessible." -) -def test_put_get_duplicated_data_user_stage(tmpdir, test_files, conn_cnx, - db_parameters, - number_of_files=5, - number_of_lines=100): - """ - [s3] Put and Get Duplicated Data using User Stage - """ - logger = getLogger(__name__) - assert 'AWS_ACCESS_KEY_ID' in os.environ, 'AWS_ACCESS_KEY_ID is missing' - assert 'AWS_SECRET_ACCESS_KEY' in os.environ, \ - 'AWS_SECRET_ACCESS_KEY is missing' - - tmp_dir = test_files(tmpdir, number_of_lines, number_of_files) - - files = os.path.join(tmp_dir, 'file*') - - stage_name = db_parameters['name'] + '_stage' - with conn_cnx( - user=db_parameters['s3_user'], - account=db_parameters['s3_account'], - password=db_parameters['s3_password']) as cnx: - cnx.cursor().execute(""" -create or replace table {name} ( -aa int, -dt date, -ts timestamp, -tsltz timestamp_ltz, -tsntz timestamp_ntz, -tstz timestamp_tz, -pct float, -ratio number(6,2)) -""".format(name=db_parameters['name'])) - user_bucket = os.getenv('SF_AWS_USER_BUCKET', - "sfc-dev1-regression/{0}/reg".format( - getuser())) - cnx.cursor().execute(""" -create or replace stage {stage_name} -url='s3://{user_bucket}/{stage_name}-{number_of_files}-{number_of_lines}' -credentials=( - AWS_KEY_ID='{aws_key_id}' - AWS_SECRET_KEY='{aws_secret_key}' -) -""".format(stage_name=stage_name, user_bucket=user_bucket, - aws_key_id=os.getenv('AWS_ACCESS_KEY_ID'), - aws_secret_key=os.getenv('AWS_SECRET_ACCESS_KEY'), - number_of_files=number_of_files, - number_of_lines=number_of_lines)) - try: - with conn_cnx( - user=db_parameters['s3_user'], - account=db_parameters['s3_account'], - password=db_parameters['s3_password']) as cnx: - c = cnx.cursor() - try: - for rec in c.execute( - "rm @{stage_name}".format(stage_name=stage_name)): - logger.info('rec=%s', rec) - finally: - c.close() - - success_cnt = 0 - skipped_cnt = 0 - c = cnx.cursor() - c.execute( - "alter session set disable_put_and_get_on_external_stage = false") - try: - for rec in c.execute( - "put file://{file} @{stage_name}".format( - file=files, stage_name=stage_name)): - logger.info('rec=%s', rec) - if rec[6] == 'UPLOADED': - success_cnt += 1 - elif rec[6] == 'SKIPPED': - skipped_cnt += 1 - finally: - c.close() - assert success_cnt == number_of_files, 'uploaded files' - assert skipped_cnt == 0, 'skipped files' - - logger.info('deleting files in {stage_name}'.format( - stage_name=stage_name)) - - deleted_cnt = 0 - cnx.cursor().execute( - "rm @{stage_name}/file0".format(stage_name=stage_name)) - deleted_cnt += 1 - cnx.cursor().execute( - "rm @{stage_name}/file1".format(stage_name=stage_name)) - deleted_cnt += 1 - cnx.cursor().execute( - "rm @{stage_name}/file2".format(stage_name=stage_name)) - deleted_cnt += 1 - - success_cnt = 0 - skipped_cnt = 0 - c = cnx.cursor() - try: - for rec in c.execute( - "put file://{file} @{stage_name}".format( - file=files, stage_name=stage_name)): - logger.info('rec=%s', rec) - if rec[6] == 'UPLOADED': - success_cnt += 1 - elif rec[6] == 'SKIPPED': - skipped_cnt += 1 - assert success_cnt == deleted_cnt, \ - 'uploaded files in the second time' - assert skipped_cnt == number_of_files - deleted_cnt, \ - 'skipped files in the second time' - finally: - c.close() - - time.sleep(5) - cnx.cursor().execute( - "copy into {name} from @{stage_name}".format( - name=db_parameters['name'], stage_name=stage_name)) - c = cnx.cursor() - try: - c.execute( - "select count(*) from {name}".format( - name=db_parameters['name'])) - rows = 0 - for rec in c: - rows += rec[0] - assert rows == number_of_files 
* number_of_lines, 'Number of rows' - finally: - c.close() - cnx.cursor().execute( - "rm @{stage_name}".format(stage_name=stage_name)) - cnx.cursor().execute( - "copy into @{stage_name} from {name}".format( - name=db_parameters['name'], stage_name=stage_name)) - tmp_dir_user = str(tmpdir.mkdir('stage2')) - cnx.cursor().execute( - "get @{stage_name}/ file://{tmp_dir_user}/".format( - stage_name=stage_name, - tmp_dir_user=tmp_dir_user)) - for root, _, files in os.walk(tmp_dir_user): - for file in files: - mimetypes.init() - _, encoding = mimetypes.guess_type(file) - assert encoding == 'gzip', "exported file type" - - finally: - with conn_cnx( - user=db_parameters['s3_user'], - account=db_parameters['s3_account'], - password=db_parameters['s3_password']) as cnx: - cnx.cursor().execute( - "drop stage if exists {stage_name}".format( - stage_name=stage_name)) - cnx.cursor().execute( - "drop table if exists {name}".format( - name=db_parameters['name'])) - - -@pytest.mark.skipif( - 'AWS_ACCESS_KEY_ID' not in os.environ, - reason="Snowflake admin account is not accessible." -) -def test_get_data_user_stage(tmpdir, conn_cnx, db_parameters): - """ - SNOW-20927: get failed with 404 error - """ - assert 'AWS_ACCESS_KEY_ID' in os.environ, 'AWS_ACCESS_KEY_ID is missing' - assert 'AWS_SECRET_ACCESS_KEY' in os.environ, \ - 'AWS_SECRET_ACCESS_KEY is missing' - - default_s3bucket = os.getenv('SF_AWS_USER_BUCKET', - "sfc-dev1-regression/{0}/reg".format( - getuser())) - test_data = [ - { - 's3location': - '{0}/{1}'.format( - default_s3bucket, db_parameters['name'] + '_stage'), - 'stage_name': db_parameters['name'] + '_stage1', - 'data_file_name': 'data.txt', - }, - ] - for elem in test_data: - _put_list_rm_files_in_stage(tmpdir, conn_cnx, db_parameters, elem) - - -def _put_list_rm_files_in_stage(tmpdir, conn_cnx, db_parameters, elem): - s3location = elem['s3location'] - stage_name = elem['stage_name'] - data_file_name = elem['data_file_name'] - - from io import open - from snowflake.connector.compat import (UTF8, TO_UNICODE) - tmp_dir = str(tmpdir.mkdir('data')) - data_file = os.path.join(tmp_dir, data_file_name) - with open(data_file, 'w', encoding=UTF8) as f: - f.write(TO_UNICODE("123,456,string1\n")) - f.write(TO_UNICODE("789,012,string2\n")) - - output_dir = str(tmpdir.mkdir('output')) - with conn_cnx( - user=db_parameters['s3_user'], - account=db_parameters['s3_account'], - password=db_parameters['s3_password']) as cnx: - cnx.cursor().execute(""" -create or replace stage {stage_name} - url='s3://{s3location}' - credentials=( - AWS_KEY_ID='{aws_key_id}' - AWS_SECRET_KEY='{aws_secret_key}' - ) -""".format( - s3location=s3location, - stage_name=stage_name, - aws_key_id=os.getenv('AWS_ACCESS_KEY_ID'), - aws_secret_key=os.getenv('AWS_SECRET_ACCESS_KEY') - )) - try: - with conn_cnx( - user=db_parameters['s3_user'], - account=db_parameters['s3_account'], - password=db_parameters['s3_password']) as cnx: - cnx.cursor().execute(""" -RM @{stage_name} -""".format(stage_name=stage_name)) - cnx.cursor().execute( - "alter session set disable_put_and_get_on_external_stage = false") - rec = cnx.cursor().execute(""" -PUT file://{file} @{stage_name} -""".format(file=data_file, stage_name=stage_name)).fetchone() - assert rec[0] == data_file_name - assert rec[6] == 'UPLOADED' - rec = cnx.cursor().execute(""" -LIST @{stage_name} - """.format(stage_name=stage_name, output_dir=output_dir)).fetchone() - assert rec, 'LIST should return something' - assert rec[0].startswith('s3://'), "The file location in S3" - rec = 
cnx.cursor().execute(""" -GET @{stage_name} file://{output_dir} -""".format(stage_name=stage_name, output_dir=output_dir)).fetchone() - assert rec[0] == data_file_name + '.gz' - assert rec[2] == 'DOWNLOADED' - finally: - with conn_cnx( - user=db_parameters['s3_user'], - account=db_parameters['s3_account'], - password=db_parameters['s3_password']) as cnx: - cnx.cursor().execute(""" -RM @{stage_name} -""".format(stage_name=stage_name)) - cnx.cursor().execute( - "drop stage if exists {stage_name}".format( - stage_name=stage_name)) diff --git a/test/test_put_get_with_aws_token.py b/test/test_put_get_with_aws_token.py deleted file mode 100644 index 72af4554d..000000000 --- a/test/test_put_get_with_aws_token.py +++ /dev/null @@ -1,191 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. -# - -import glob -import gzip -import os - -import boto3 -import pytest - -from snowflake.connector.constants import UTF8 -from snowflake.connector.remote_storage_util import SnowflakeRemoteStorageUtil -from snowflake.connector.s3_util import SnowflakeS3Util - -try: - from parameters import (CONNECTION_PARAMETERS_ADMIN) -except: - CONNECTION_PARAMETERS_ADMIN = {} - - -@pytest.mark.skipif( - not CONNECTION_PARAMETERS_ADMIN, - reason="Snowflake admin account is not accessible." -) -def test_put_get_with_aws(tmpdir, conn_cnx, db_parameters): - """ - [s3] Put and Get a small text using AWS S3 - """ - # create a data file - fname = str(tmpdir.join('test_put_get_with_aws_token.txt.gz')) - f = gzip.open(fname, 'wb') - original_contents = "123,test1\n456,test2\n" - f.write(original_contents.encode(UTF8)) - f.close() - tmp_dir = str(tmpdir.mkdir('test_put_get_with_aws_token')) - - with conn_cnx( - user=db_parameters['s3_user'], - account=db_parameters['s3_account'], - password=db_parameters['s3_password']) as cnx: - cnx.cursor().execute("rm @~/snow9144") - cnx.cursor().execute( - "create or replace table snow9144 (a int, b string)") - try: - with conn_cnx( - user=db_parameters['s3_user'], - account=db_parameters['s3_account'], - password=db_parameters['s3_password']) as cnx: - cnx.cursor().execute( - "put file://{0} @%snow9144 auto_compress=true parallel=30".format( - fname)) - cnx.cursor().execute("copy into snow9144") - cnx.cursor().execute( - "copy into @~/snow9144 from snow9144 " - "file_format=( format_name='common.public.csv' " - "compression='gzip')") - c = cnx.cursor() - c.execute( - "get @~/snow9144 file://{0} pattern='snow9144.*'".format( - tmp_dir)) - rec = c.fetchone() - assert rec[0].startswith('snow9144'), 'A file downloaded by GET' - assert rec[1] == 36, 'Return right file size' - assert rec[2] == u'DOWNLOADED' , 'Return DOWNLOADED status' - assert rec[3] == u'', 'Return no error message' - cnx.cursor().execute("rm @%snow9144") - cnx.cursor().execute("rm @~/snow9144") - finally: - with conn_cnx( - user=db_parameters['s3_user'], - account=db_parameters['s3_account'], - password=db_parameters['s3_password']) as cnx: - cnx.cursor().execute("drop table snow9144") - - files = glob.glob(os.path.join(tmp_dir, 'snow9144*')) - contents = '' - fd = gzip.open(files[0], 'rb') - for line in fd: - contents += line.decode(UTF8) - fd.close() - assert original_contents == contents, ( - 'Output is different from the original file') - - -@pytest.mark.skipif( - 'AWS_ACCESS_KEY_ID' not in os.environ, - reason="Snowflake admin account is not accessible." 
-) -def test_put_with_invalid_token(tmpdir, conn_cnx, db_parameters): - """ - [s3] SNOW-6154: Use invalid combination of AWS credential - """ - # create a data file - fname = str(tmpdir.join('test_put_get_with_aws_token.txt.gz')) - f = gzip.open(fname, 'wb') - f.write("123,test1\n456,test2".encode(UTF8)) - f.close() - - with conn_cnx( - user=db_parameters['s3_user'], - account=db_parameters['s3_account'], - password=db_parameters['s3_password']) as cnx: - cnx.cursor().execute( - "create or replace table snow6154 (a int, b string)") - ret = cnx.cursor()._execute_helper( - "put file://{0} @%snow6154".format(fname)) - stage_location = ret['data']['stageInfo']['location'] - stage_credentials = ret['data']['stageInfo']['creds'] - - s3location = SnowflakeS3Util.extract_bucket_name_and_path( - stage_location) - - s3path = s3location.s3path + os.path.basename(fname) + ".gz" - - # positive case - client = boto3.resource( - 's3', - aws_access_key_id=stage_credentials['AWS_ID'], - aws_secret_access_key=stage_credentials['AWS_KEY'], - aws_session_token=stage_credentials['AWS_TOKEN']) - - client.meta.client.upload_file( - fname, s3location.bucket_name, s3path) - - # negative: wrong location, attempting to put the file in the - # parent path - parent_s3path = os.path.dirname(os.path.dirname(s3path)) + '/' - - with pytest.raises(Exception): - client.meta.client.upload_file( - fname, s3location.bucket_name, parent_s3path) - - # negative: missing AWS_TOKEN - client = boto3.resource( - 's3', - aws_access_key_id=stage_credentials['AWS_ID'], - aws_secret_access_key=stage_credentials['AWS_KEY']) - with pytest.raises(Exception): - client.meta.client.upload_file( - fname, s3location.bucket_name, s3path) - - -def _s3bucket_list(self, client, s3bucket): - """ - Attempts to get the keys from the list. - - Must raise an exception - """ - s3bucket = client.Bucket(s3bucket) - return [key for key in s3bucket.objects] - - -@pytest.mark.skipif( - 'AWS_ACCESS_KEY_ID' not in os.environ, - reason="Snowflake admin account is not accessible." -) -def test_pretend_to_put_but_list(tmpdir, conn_cnx, db_parameters): - """ - [s3] SNOW-6154: pretend to PUT but LIST - """ - # create a data file - fname = str(tmpdir.join('test_put_get_with_aws_token.txt')) - f = gzip.open(fname, 'wb') - f.write("123,test1\n456,test2".encode(UTF8)) - f.close() - - with conn_cnx( - user=db_parameters['s3_user'], - account=db_parameters['s3_account'], - password=db_parameters['s3_password']) as cnx: - cnx.cursor().execute( - "create or replace table snow6154 (a int, b string)") - ret = cnx.cursor()._execute_helper( - "put file://{0} @%snow6154".format(fname)) - stage_location = ret['data']['stageInfo']['location'] - stage_credentials = ret['data']['stageInfo']['creds'] - - s3location = SnowflakeS3Util.extract_bucket_name_and_path( - stage_location) - - # listing - client = boto3.resource( - 's3', - aws_access_key_id=stage_credentials['AWS_ID'], - aws_secret_access_key=stage_credentials['AWS_KEY'], - aws_session_token=stage_credentials['AWS_TOKEN']) - with pytest.raises(Exception): - _s3bucket_list(client, s3location.bucket_name) diff --git a/test/test_put_get_with_azure_token.py b/test/test_put_get_with_azure_token.py deleted file mode 100644 index ea57a8347..000000000 --- a/test/test_put_get_with_azure_token.py +++ /dev/null @@ -1,300 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. 
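The two negative SNOW-6154 tests above reduce to one pattern: the temporary credentials in the PUT response's `stageInfo` are scoped to the stage prefix, so any S3 call outside that prefix (or without the session token) must fail. A minimal sketch of that check, assuming `creds` is the `stageInfo['creds']` dict shown above and `bucket`/`key` are hypothetical placeholders:

```
# Minimal sketch of the scoped-credential check exercised above; `creds` is
# assumed to be stageInfo['creds'] from a PUT response, and `bucket`, `key`,
# and `filename` are hypothetical placeholders.
import boto3


def try_upload(creds, bucket, key, filename):
    """Return True if the upload succeeds with the stage-scoped temporary
    credentials, False if S3 rejects it (as the negative tests expect)."""
    s3 = boto3.resource(
        's3',
        aws_access_key_id=creds['AWS_ID'],
        aws_secret_access_key=creds['AWS_KEY'],
        aws_session_token=creds.get('AWS_TOKEN'))
    try:
        s3.meta.client.upload_file(filename, bucket, key)
        return True
    except Exception:
        # boto3 surfaces denials as ClientError/S3UploadFailedError; the
        # tests above only assert that *some* exception is raised.
        return False
```

Uploading under the stage prefix should return True; uploading to the parent path, or with `AWS_TOKEN` missing, should return False.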
-# - -import glob -import gzip -import os - -import sys - -import time - -import pytest - -from snowflake.connector.constants import UTF8 - -from logging import getLogger -from snowflake.connector.azure_util import SnowflakeAzureUtil - -try: - from parameters import (CONNECTION_PARAMETERS_ADMIN) -except: - CONNECTION_PARAMETERS_ADMIN = {} - -logger = getLogger(__name__) - -@pytest.mark.skipif( - not CONNECTION_PARAMETERS_ADMIN, - reason="Snowflake admin account is not accessible." -) -def test_put_get_with_azure(tmpdir, conn_cnx, db_parameters): - """ - [azure] Put and Get a small text using Azure - """ - # create a data file - fname = str(tmpdir.join('test_put_get_with_azure_token.txt.gz')) - f = gzip.open(fname, 'wb') - original_contents = "123,test1\n456,test2\n" - f.write(original_contents.encode(UTF8)) - f.close() - tmp_dir = str(tmpdir.mkdir('test_put_get_with_azure_token')) - - - with conn_cnx( - user=db_parameters['azure_user'], - account=db_parameters['azure_account'], - password=db_parameters['azure_password']) as cnx: - cnx.cursor().execute("rm @~/snow32806") - cnx.cursor().execute( - "create or replace table snow32806 (a int, b string)") - try: - with conn_cnx( - user=db_parameters['azure_user'], - account=db_parameters['azure_account'], - password=db_parameters['azure_password']) as cnx: - with cnx.cursor() as csr: - csr.execute( - "put file://{0} @%snow32806 auto_compress=true parallel=30".format( - fname)) - csr.execute("copy into snow32806") - csr.execute( - "copy into @~/snow32806 from snow32806 " - "file_format=( format_name='common.public.csv' " - "compression='gzip')") - csr.execute( - "get @~/snow32806 file://{0} pattern='snow32806.*'".format( - tmp_dir)) - rec = csr.fetchone() - assert rec[0].startswith('snow32806'), 'A file downloaded by GET' - assert rec[1] == 36, 'Return right file size' - assert rec[2] == u'DOWNLOADED', 'Return DOWNLOADED status' - assert rec[3] == u'', 'Return no error message' - finally: - with conn_cnx( - user=db_parameters['azure_user'], - account=db_parameters['azure_account'], - password=db_parameters['azure_password']) as cnx: - cnx.cursor().execute("drop table snow32806") - cnx.cursor().execute("rm @~/snow32806") - - files = glob.glob(os.path.join(tmp_dir, 'snow32806*')) - contents = '' - fd = gzip.open(files[0], 'rb') - for line in fd: - contents += line.decode(UTF8) - fd.close() - assert original_contents == contents, ( - 'Output is different from the original file') - -@pytest.mark.skipif( - not CONNECTION_PARAMETERS_ADMIN, - reason="Snowflake admin account is not accessible." 
-) -def test_put_copy_many_files_azure(tmpdir, test_files, conn_cnx, db_parameters): - """ - [azure] Put and Copy many files - """ - # generates N files - number_of_files = 10 - number_of_lines = 1000 - tmp_dir = test_files(tmpdir, number_of_lines, number_of_files) - - files = os.path.join(tmp_dir, 'file*') - - def run(cnx, sql): - sql = sql.format( - files=files, - name=db_parameters['name']) - return cnx.cursor().execute(sql).fetchall() - - with conn_cnx( - user=db_parameters['azure_user'], - account=db_parameters['azure_account'], - password=db_parameters['azure_password']) as cnx: - run(cnx, """ -create or replace table {name} ( -aa int, -dt date, -ts timestamp, -tsltz timestamp_ltz, -tsntz timestamp_ntz, -tstz timestamp_tz, -pct float, -ratio number(6,2)) -""") - try: - with conn_cnx( - user=db_parameters['azure_user'], - account=db_parameters['azure_account'], - password=db_parameters['azure_password']) as cnx: - run(cnx, "put file://{files} @%{name}") - run(cnx, "copy into {name}") - - rows = 0 - for rec in run(cnx, "select count(*) from {name}"): - rows += rec[0] - assert rows == number_of_files * number_of_lines, \ - 'Number of rows' - finally: - with conn_cnx( - user=db_parameters['azure_user'], - account=db_parameters['azure_account'], - password=db_parameters['azure_password']) as cnx: - run(cnx, "drop table if exists {name}") - -@pytest.mark.skipif( - not CONNECTION_PARAMETERS_ADMIN, - reason="Snowflake admin account is not accessible." -) -def test_put_copy_duplicated_files_azure(tmpdir, test_files, conn_cnx, - db_parameters): - """ - [azure] Put and Copy duplicated files - """ - # generates N files - number_of_files = 5 - number_of_lines = 100 - tmp_dir = test_files(tmpdir, number_of_lines, number_of_files) - - files = os.path.join(tmp_dir, 'file*') - - def run(cnx, sql): - sql = sql.format( - files=files, - name=db_parameters['name']) - return cnx.cursor().execute(sql).fetchall() - - with conn_cnx( - user=db_parameters['azure_user'], - account=db_parameters['azure_account'], - password=db_parameters['azure_password']) as cnx: - run(cnx, """ -create or replace table {name} ( -aa int, -dt date, -ts timestamp, -tsltz timestamp_ltz, -tsntz timestamp_ntz, -tstz timestamp_tz, -pct float, -ratio number(6,2)) -""") - - try: - with conn_cnx( - user=db_parameters['azure_user'], - account=db_parameters['azure_account'], - password=db_parameters['azure_password']) as cnx: - success_cnt = 0 - skipped_cnt = 0 - for rec in run(cnx, "put file://{files} @%{name}"): - logger.info('rec=%s', rec) - if rec[6] == 'UPLOADED': - success_cnt += 1 - elif rec[6] == 'SKIPPED': - skipped_cnt += 1 - assert success_cnt == number_of_files, 'uploaded files' - assert skipped_cnt == 0, 'skipped files' - - deleted_cnt = 0 - run(cnx, "rm @%{name}/file0") - deleted_cnt += 1 - run(cnx, "rm @%{name}/file1") - deleted_cnt += 1 - run(cnx, "rm @%{name}/file2") - deleted_cnt += 1 - - success_cnt = 0 - skipped_cnt = 0 - for rec in run(cnx, "put file://{files} @%{name}"): - logger.info('rec=%s', rec) - if rec[6] == 'UPLOADED': - success_cnt += 1 - elif rec[6] == 'SKIPPED': - skipped_cnt += 1 - assert success_cnt == deleted_cnt, \ - 'uploaded files in the second time' - assert skipped_cnt == number_of_files - deleted_cnt, \ - 'skipped files in the second time' - - run(cnx, "copy into {name}") - rows = 0 - for rec in run(cnx, "select count(*) from {name}"): - rows += rec[0] - assert rows == number_of_files * number_of_lines, \ - 'Number of rows' - finally: - with conn_cnx( - user=db_parameters['azure_user'], - 
account=db_parameters['azure_account'], - password=db_parameters['azure_password']) as cnx: - run(cnx, "drop table if exists {name}") - -@pytest.mark.skipif( - not CONNECTION_PARAMETERS_ADMIN, - reason="Snowflake admin account is not accessible." -) -def test_put_get_large_files_azure(tmpdir, test_files, conn_cnx, db_parameters): - """ - [azure] Put and Get Large files - """ - number_of_files = 3 - number_of_lines = 200000 - tmp_dir = test_files(tmpdir, number_of_lines, number_of_files) - - files = os.path.join(tmp_dir, 'file*') - output_dir = os.path.join(tmp_dir, 'output_dir') - os.makedirs(output_dir) - - class cb(object): - def __init__(self, filename, filesize, **_): - pass - - def __call__(self, bytes_amount): - pass - - def run(cnx, sql): - return cnx.cursor().execute( - sql.format( - files=files, - dir=db_parameters['name'], - output_dir=output_dir), - _put_callback_output_stream=sys.stdout, - _get_callback_output_stream=sys.stdout, - _get_callback=cb, - _put_callback=cb).fetchall() - - with conn_cnx( - user=db_parameters['azure_user'], - account=db_parameters['azure_account'], - password=db_parameters['azure_password']) as cnx: - try: - run(cnx, "PUT file://{files} @~/{dir}") - - for _ in range(60): - for _ in range(100): - all_recs = run(cnx, "LIST @~/{dir}") - if len(all_recs) == number_of_files: - break - # you may not get the files right after PUT command - # due to the nature of Azure blob, which synchronizes - # data eventually. - time.sleep(1) - else: - # wait for another second and retry. - # this could happen if the files are partially available - # but not all. - time.sleep(1) - break # success - else: - pytest.fail( - 'cannot list all files. Potentially ' - 'PUT command missed uploading Files: {0}'.format(all_recs)) - all_recs = run(cnx, "GET @~/{dir} file://{output_dir}"); - assert len(all_recs) == number_of_files - assert all([rec[2] == 'DOWNLOADED' for rec in all_recs]) - finally: - run(cnx, "RM @~/{dir}") \ No newline at end of file diff --git a/test/test_put_windows_path.py b/test/test_put_windows_path.py deleted file mode 100755 index 1700230be..000000000 --- a/test/test_put_windows_path.py +++ /dev/null @@ -1,37 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. -# -import os -import pytest - -from snowflake.connector.compat import PY2 - - -@pytest.mark.skipif(PY2, reason="Python3.4 or more") -def test_abc(conn_cnx, tmpdir, db_parameters): - """ - PUT a file on Windows using the URI and Windows path - """ - import pathlib - tmp_dir = str(tmpdir.mkdir('data')) - test_data = os.path.join(tmp_dir, 'data.txt') - with open(test_data, 'w') as f: - f.write("test1,test2") - f.write("test3,test4") - - fileURI = pathlib.Path(test_data).as_uri() - - subdir = db_parameters['name'] - with conn_cnx() as con: - rec = con.cursor().execute("put {0} @~/{1}0/".format( - fileURI, subdir)).fetchall() - assert rec[0][6] == u'UPLOADED' - - rec = con.cursor().execute("put file://{0} @~/{1}1/".format( - test_data, subdir)).fetchall() - assert rec[0][6] == u'UPLOADED' - - con.cursor().execute("rm @~/{0}0".format(subdir)) - con.cursor().execute("rm @~/{0}1".format(subdir)) diff --git a/test/test_qmark.py b/test/test_qmark.py deleted file mode 100644 index 6eb1eb122..000000000 --- a/test/test_qmark.py +++ /dev/null @@ -1,155 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. 
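The Windows-path test above leans on pathlib's pure-path URI conversion, which behaves the same on any host OS; a quick self-contained illustration (the path is hypothetical):

```
# The file:// URI form accepted by PUT, derived from a hypothetical Windows
# path; PureWindowsPath.as_uri() works identically on non-Windows hosts.
import pathlib

p = pathlib.PureWindowsPath(r'C:\Users\me\data.txt')
print(p.as_uri())  # file:///C:/Users/me/data.txt
```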
-# - - -import pytest - -from snowflake.connector import errors - - -def test_qmark_paramstyle(conn_cnx, db_parameters): - """ - Binding question marks is not supported by default - """ - try: - with conn_cnx() as cnx: - cnx.cursor().execute( - "CREATE OR REPLACE TABLE {name} " - "(aa STRING, bb STRING)".format( - name=db_parameters['name'])) - cnx.cursor().execute( - "INSERT INTO {name} VALUES('?', '?')".format( - name=db_parameters['name'])) - for rec in cnx.cursor().execute( - "SELECT * FROM {name}".format(name=db_parameters['name'])): - assert rec[0] == "?", "First column value" - with pytest.raises(errors.ProgrammingError): - cnx.cursor().execute( - "INSERT INTO {name} VALUES(?,?)".format( - name=db_parameters['name'])) - finally: - with conn_cnx() as cnx: - cnx.cursor().execute( - "DROP TABLE IF EXISTS {name}".format( - name=db_parameters['name'])) - - -def test_numeric_paramstyle(conn_cnx, db_parameters): - """ - Binding numeric positional style is not supported - """ - try: - with conn_cnx() as cnx: - cnx.cursor().execute( - "CREATE OR REPLACE TABLE {name} " - "(aa STRING, bb STRING)".format( - name=db_parameters['name'])) - cnx.cursor().execute( - "INSERT INTO {name} VALUES(':1', ':2')".format( - name=db_parameters['name'])) - for rec in cnx.cursor().execute( - "SELECT * FROM {name}".format(name=db_parameters['name'])): - assert rec[0] == ":1", "First column value" - with pytest.raises(errors.ProgrammingError): - cnx.cursor().execute( - "INSERT INTO {name} VALUES(:1,:2)".format( - name=db_parameters['name'])) - finally: - with conn_cnx() as cnx: - cnx.cursor().execute( - "DROP TABLE IF EXISTS {name}".format( - name=db_parameters['name'])) - - -def test_qmark_paramstyle_enabled(negative_conn_cnx, db_parameters): - """ - Enable qmark binding - """ - import snowflake.connector - snowflake.connector.paramstyle = u'qmark' - try: - with negative_conn_cnx() as cnx: - cnx.cursor().execute( - "CREATE OR REPLACE TABLE {name} " - "(aa STRING, bb STRING)".format( - name=db_parameters['name'])) - cnx.cursor().execute( - "INSERT INTO {name} VALUES(?, ?)".format( - name=db_parameters['name']), ('test11', 'test12')) - ret = cnx.cursor().execute("select * from {name}".format( - name=db_parameters['name'])).fetchone() - assert ret[0] == 'test11' - assert ret[1] == 'test12' - finally: - with negative_conn_cnx() as cnx: - cnx.cursor().execute( - "DROP TABLE IF EXISTS {name}".format( - name=db_parameters['name'])) - snowflake.connector.paramstyle = u'pyformat' - - # After changing back to pyformat, binding qmark should fail. 
- try: - with negative_conn_cnx() as cnx: - cnx.cursor().execute( - "CREATE OR REPLACE TABLE {name} " - "(aa STRING, bb STRING)".format( - name=db_parameters['name'])) - with pytest.raises(TypeError): - cnx.cursor().execute( - "INSERT INTO {name} VALUES(?, ?)".format( - name=db_parameters['name']), ('test11', 'test12')) - finally: - with negative_conn_cnx() as cnx: - cnx.cursor().execute( - "DROP TABLE IF EXISTS {name}".format( - name=db_parameters['name'])) - - -def test_binding_datetime_qmark(conn_cnx, db_parameters): - """ - Ensures datetime can bind - """ - import datetime - import snowflake.connector - snowflake.connector.paramstyle = u'qmark' - try: - with conn_cnx() as cnx: - cnx.cursor().execute( - "CREATE OR REPLACE TABLE {name} " - "(aa TIMESTAMP_NTZ)".format( - name=db_parameters['name'])) - days = 2 - inserts = tuple( - [(datetime.datetime(2018, 1, i + 1),) for i in range(days)]) - cnx.cursor().executemany( - "INSERT INTO {name} VALUES(?)".format( - name=db_parameters['name']), - inserts) - ret = cnx.cursor().execute( - "SELECT * FROM {name} ORDER BY 1".format( - name=db_parameters['name'])).fetchall() - for i in range(days): - assert ret[i][0] == inserts[i][0] - finally: - with conn_cnx() as cnx: - cnx.cursor().execute( - "DROP TABLE IF EXISTS {name}".format( - name=db_parameters['name'])) - - -def test_binding_none(conn_cnx): - import snowflake.connector - original = snowflake.connector.paramstyle - snowflake.connector.paramstyle = 'qmark' - - with conn_cnx() as con: - try: - table_name = 'foo' - con.cursor().execute('CREATE TABLE {table}(bar text)'.format(table=table_name)) - con.cursor().execute('INSERT INTO {table} VALUES (?)'.format(table=table_name), [None]) - finally: - con.cursor().execute('DROP TABLE {table}'.format(table=table_name)) - snowflake.connector.paramstyle = original diff --git a/test/test_query_cancelling.py b/test/test_query_cancelling.py deleted file mode 100644 index 969068bdb..000000000 --- a/test/test_query_cancelling.py +++ /dev/null @@ -1,162 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. -# -import logging -import time -from logging import getLogger -from threading import Thread, Lock - -import pytest - -logger = getLogger(__name__) -logging.basicConfig(level=logging.CRITICAL) -from snowflake.connector import errors - -try: - from parameters import (CONNECTION_PARAMETERS_ADMIN) -except: - CONNECTION_PARAMETERS_ADMIN = {} - -@pytest.fixture() -def conn_cnx(request, conn_cnx): - def fin(): - with conn_cnx() as cnx: - cnx.cursor().execute("use role accountadmin") - cnx.cursor().execute("drop user magicuser1") - cnx.cursor().execute("drop user magicuser2") - - request.addfinalizer(fin) - - with conn_cnx() as cnx: - cnx.cursor().execute('use role securityadmin') - cnx.cursor().execute( - "create or replace user magicuser1 password='xxx' " - "default_role='PUBLIC'") - cnx.cursor().execute( - "create or replace user magicuser2 password='xxx' " - "default_role='PUBLIC'") - - return conn_cnx - - -def _query_run(conn, shared, expectedCanceled=True): - """ - Run a query, and wait for possible cancellation. 
- """ - with conn(user='magicuser1', password='xxx') as cnx: - cnx.cursor().execute('use warehouse regress') - - # Collect the session_id - with cnx.cursor() as c: - c.execute('SELECT current_session()') - for rec in c: - with shared.lock: - shared.session_id = int(rec[0]) - logger.info("Current Session id: {0}".format(shared.session_id)) - - # Run a long query and see if we're canceled - canceled = False - try: - c = cnx.cursor() - c.execute(""" -select count(*) from table(generator(timeLimit => 10))""") - except errors.ProgrammingError as e: - logger.info("FAILED TO RUN QUERY: %s", e) - canceled = e.errno == 604 - if not canceled: - logger.exception('must have been canceled') - raise - finally: - c.close() - - if canceled: - logger.info("Query failed or was canceled") - else: - logger.info("Query finished successfully") - - assert canceled == expectedCanceled - - -def _query_cancel(conn, shared, user, password, expectedCanceled): - """ - Cancel the query running in another thread - """ - with conn(user=user, password=password) as cnx: - cnx.cursor().execute('use warehouse regress') - # .use_warehouse_database_schema(cnx) - - logger.info("User %s's role is: %s", user, cnx.cursor().execute( - "select current_role()").fetchone()[0]) - # Run the cancel query - logger.info("User %s is waiting for Session ID to be available", - user) - while True: - with shared.lock: - if shared.session_id is not None: - break - logger.info("User %s is waiting for Session ID to be available", - user) - time.sleep(1) - logger.info("Target Session id: {0}".format(shared.session_id)) - try: - query = "call system$cancel_all_queries({0})".format( - shared.session_id) - logger.info("Query: %s", query) - cnx.cursor().execute(query) - assert expectedCanceled, ("You should NOT be able to " - "cancel the query [{0}]".format( - shared.session_id)) - except errors.ProgrammingError as e: - logger.info("FAILED TO CANCEL THE QUERY: %s", e) - assert not expectedCanceled, ( - "You should be able to " - "cancel the query [{0}]".format( - shared.session_id)) - - -def _test_helper(conn, expectedCanceled, cancelUser, cancelPass): - """ - Helper function with the actual test. - queryRun is always run with magicuser1/xxx. - queryCancel is run with cancelUser/cancelPass - """ - - class Shared(object): - def __init__(self): - self.lock = Lock() - self.session_id = None - - shared = Shared() - queryRun = Thread(target=_query_run, args=( - conn, shared, expectedCanceled)) - queryRun.start() - queryCancel = Thread(target=_query_cancel, - args=(conn, shared, cancelUser, cancelPass, - expectedCanceled)) - queryCancel.start() - queryCancel.join(5) - queryRun.join(20) - - -@pytest.mark.skipif( - not CONNECTION_PARAMETERS_ADMIN, - reason="Snowflake admin account is not accessible." -) -def test_same_user_canceling(conn_cnx): - """ - Test that the same user CAN cancel his query - """ - _test_helper(conn_cnx, True, 'magicuser1', 'xxx') - - -@pytest.mark.skipif( - not CONNECTION_PARAMETERS_ADMIN, - reason="Snowflake admin account is not accessible." -) -def test_other_user_canceling(conn_cnx): - """ - Test that the other user CAN NOT cancel his query - """ - _test_helper(conn_cnx, False, 'magicuser2', 'xxx') diff --git a/test/test_reuse_cursor.py b/test/test_reuse_cursor.py deleted file mode 100644 index cd6ea4ac5..000000000 --- a/test/test_reuse_cursor.py +++ /dev/null @@ -1,32 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. 
-#
-
-
-def test_reuse_cursor(conn_cnx, db_parameters):
-    """
-    Ensure only the last executed command/query's result sets are returned.
-    """
-    with conn_cnx() as cnx:
-        c = cnx.cursor()
-        c.execute(u"create or replace table {name}(c1 string)".format(
-            name=db_parameters['name']))
-        try:
-            c.execute(
-                u"insert into {name} values('123'),('456'),('678')".format(
-                    name=db_parameters['name']))
-            c.execute(u"show tables")
-            c.execute(u"select current_date()")
-            rec = c.fetchone()
-            assert len(rec) == 1, u"number of records is wrong"
-            c.execute(
-                u"select * from {name} order by 1".format(
-                    name=db_parameters['name']))
-            recs = c.fetchall()
-            assert c.description[0][0] == u"C1", u"first column name"
-            assert len(recs) == 3, u"number of records is wrong"
-        finally:
-            c.execute(u"drop table if exists {name}".format(
-                name=db_parameters['name']))
diff --git a/test/test_session_parameters.py b/test/test_session_parameters.py
deleted file mode 100644
index eae8a0148..000000000
--- a/test/test_session_parameters.py
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-#
-# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved.
-#
-import snowflake.connector
-
-
-def test_session_parameters(db_parameters):
-    """
-    Set session parameters at connection time
-    """
-    connection = snowflake.connector.connect(
-        protocol=db_parameters['protocol'],
-        account=db_parameters['account'],
-        user=db_parameters['user'],
-        password=db_parameters['password'],
-        host=db_parameters['host'],
-        port=db_parameters['port'],
-        database=db_parameters['database'],
-        schema=db_parameters['schema'],
-        session_parameters={
-            'TIMEZONE': 'UTC'
-        }
-    )
-    ret = connection.cursor().execute(
-        "show parameters like 'TIMEZONE'").fetchone()
-    assert ret[1] == 'UTC'
diff --git a/test/test_transaction.py b/test/test_transaction.py
deleted file mode 100644
index 39fc480b0..000000000
--- a/test/test_transaction.py
+++ /dev/null
@@ -1,122 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-#
-# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved.
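The session-parameters test above boils down to: anything passed via `session_parameters` is applied before the first query runs. A minimal sketch with hypothetical placeholder credentials:

```
# Minimal sketch of connect-time session parameters; the connection
# arguments are hypothetical placeholders.
import snowflake.connector

cnx = snowflake.connector.connect(
    account='myaccount', user='me', password='***',
    session_parameters={'TIMEZONE': 'UTC'})
# Equivalent to issuing "alter session set TIMEZONE = 'UTC'" right after
# connecting, but guaranteed to take effect before any query runs.
ret = cnx.cursor().execute("show parameters like 'TIMEZONE'").fetchone()
assert ret[1] == 'UTC'
```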
-# - -import pytest - -import snowflake.connector - - -def test_transaction(conn_cnx, db_parameters): - u""" - Transaction API - """ - with conn_cnx() as cnx: - cnx.cursor().execute(u"create table {name} (c1 int)".format( - name=db_parameters['name'])) - cnx.cursor().execute(u"insert into {name}(c1) " - u"values(1234),(3456)".format( - name=db_parameters['name'])) - c = cnx.cursor() - c.execute(u"select * from {name}".format(name=db_parameters['name'])) - total = 0 - for rec in c: - total += rec[0] - assert total == 4690, u'total integer' - - # - cnx.cursor().execute(u"begin") - cnx.cursor().execute( - u"insert into {name}(c1) values(5678),(7890)".format( - name=db_parameters['name'])) - c = cnx.cursor() - c.execute(u"select * from {name}".format(name=db_parameters['name'])) - total = 0 - for rec in c: - total += rec[0] - assert total == 18258, u'total integer' - cnx.rollback() - - c.execute(u"select * from {name}".format(name=db_parameters['name'])) - total = 0 - for rec in c: - total += rec[0] - assert total == 4690, u'total integer' - - # - cnx.cursor().execute(u"begin") - cnx.cursor().execute( - u"insert into {name}(c1) values(2345),(6789)".format( - name=db_parameters['name'])) - c = cnx.cursor() - c.execute(u"select * from {name}".format(name=db_parameters['name'])) - total = 0 - for rec in c: - total += rec[0] - assert total == 13824, u'total integer' - cnx.commit() - cnx.rollback() - c = cnx.cursor() - c.execute(u"select * from {name}".format(name=db_parameters['name'])) - total = 0 - for rec in c: - total += rec[0] - assert total == 13824, u'total integer' - - -def test_connection_context_manager(request, db_parameters): - db_config = { - 'protocol': db_parameters['protocol'], - 'account': db_parameters['account'], - 'user': db_parameters['user'], - 'password': db_parameters['password'], - 'host': db_parameters['host'], - 'port': db_parameters['port'], - 'database': db_parameters['database'], - 'schema': db_parameters['schema'], - 'timezone': 'UTC', - } - - def fin(): - with snowflake.connector.connect(**db_config) as cnx: - cnx.cursor().execute(""" -DROP TABLE IF EXISTS {name} -""".format(name=db_parameters['name'])) - - request.addfinalizer(fin) - - try: - with snowflake.connector.connect(**db_config) as cnx: - cnx.autocommit(False) - cnx.cursor().execute(""" -CREATE OR REPLACE TABLE {name} (cc1 int) -""".format(name=db_parameters['name'])) - cnx.cursor().execute(""" -INSERT INTO {name} VALUES(1),(2),(3) -""".format(name=db_parameters['name'])) - ret = cnx.cursor().execute(""" -SELECT SUM(cc1) FROM {name} -""".format(name=db_parameters['name'])).fetchone() - assert ret[0] == 6 - cnx.commit() - cnx.cursor().execute(""" -INSERT INTO {name} VALUES(4),(5),(6) -""".format(name=db_parameters['name'])) - ret = cnx.cursor().execute(""" -SELECT SUM(cc1) FROM {name} -""".format(name=db_parameters['name'])).fetchone() - assert ret[0] == 21 - cnx.cursor().execute(""" -SELECT WRONG SYNTAX QUERY -""".format(name=db_parameters['name'])) - raise Exception("Failed to cause the syntax error") - except snowflake.connector.Error as e: - # syntax error should be caught here - # and the last change must have been rollbacked - with snowflake.connector.connect(**db_config) as cnx: - ret = cnx.cursor().execute(""" -SELECT SUM(cc1) FROM {name} -""".format(name=db_parameters['name'])).fetchone() - assert ret[0] == 6 diff --git a/test/test_unit_arrow_chunk_iterator.py b/test/test_unit_arrow_chunk_iterator.py deleted file mode 100644 index 6a9730d8b..000000000 --- a/test/test_unit_arrow_chunk_iterator.py 
+++ /dev/null @@ -1,570 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. -# - -from io import BytesIO -import random -import pytest -import decimal -import datetime -import pytz -import os -import platform -from snowflake.connector.arrow_context import ArrowConverterContext -from snowflake.connector.converter import ( - _generate_tzinfo_from_tzoffset) - -try: - import tzlocal -except ImportError: - tzlocal = None - -try: - from pyarrow import RecordBatchStreamReader - from pyarrow import RecordBatchStreamWriter - from pyarrow import RecordBatch - import pyarrow -except ImportError as e: - pass - -try: - from snowflake.connector.arrow_iterator import PyArrowIterator - no_arrow_iterator_ext = False -except ImportError: - no_arrow_iterator_ext = True - - -@pytest.mark.skipif( - no_arrow_iterator_ext, - reason="arrow_iterator extension is not built.") -def test_iterate_over_string_chunk(): - random.seed(datetime.datetime.now()) - column_meta = [ - {"logicalType": "TEXT"}, - {"logicalType": "TEXT"} - ] - field_foo = pyarrow.field("column_foo", pyarrow.string(), True, column_meta[0]) - field_bar = pyarrow.field("column_bar", pyarrow.string(), True, column_meta[1]) - schema = pyarrow.schema([field_foo, field_bar]) - - def str_generator(): - return str(random.randint(-100, 100)) - - iterate_over_test_chunk([pyarrow.string(), pyarrow.string()], - column_meta, str_generator) - - -@pytest.mark.skipif( - no_arrow_iterator_ext, - reason="arrow_iterator extension is not built.") -def test_iterate_over_int64_chunk(): - random.seed(datetime.datetime.now()) - column_meta = [ - {"logicalType": "FIXED", "precision": "38", "scale": "0"}, - {"logicalType": "FIXED", "precision": "38", "scale": "0"} - ] - - def int64_generator(): - return random.randint(-9223372036854775808, 9223372036854775807) - - iterate_over_test_chunk([pyarrow.int64(), pyarrow.int64()], - column_meta, int64_generator) - -@pytest.mark.skipif( - no_arrow_iterator_ext, - reason="arrow_iterator extension is not built.") -def test_iterate_over_int32_chunk(): - random.seed(datetime.datetime.now()) - column_meta = [ - {"logicalType": "FIXED", "precision": "10", "scale": "0"}, - {"logicalType": "FIXED", "precision": "10", "scale": "0"} - ] - - def int32_generator(): - return random.randint(-2147483648, 2147483637) - - iterate_over_test_chunk([pyarrow.int32(), pyarrow.int32()], - column_meta, int32_generator) - -@pytest.mark.skipif( - no_arrow_iterator_ext, - reason="arrow_iterator extension is not built.") -def test_iterate_over_int16_chunk(): - random.seed(datetime.datetime.now()) - column_meta = [ - {"logicalType": "FIXED", "precision": "5", "scale": "0"}, - {"logicalType": "FIXED", "precision": "5", "scale": "0"} - ] - - def int16_generator(): - return random.randint(-32768, 32767) - - iterate_over_test_chunk([pyarrow.int16(), pyarrow.int16()], - column_meta, int16_generator) - -@pytest.mark.skipif( - no_arrow_iterator_ext, - reason="arrow_iterator extension is not built.") -def test_iterate_over_int8_chunk(): - random.seed(datetime.datetime.now()) - column_meta = [ - {"logicalType": "FIXED", "precision": "3", "scale": "0"}, - {"logicalType": "FIXED", "precision": "3", "scale": "0"} - ] - - def int8_generator(): - return random.randint(-128, 127) - - iterate_over_test_chunk([pyarrow.int8(), pyarrow.int8()], - column_meta, int8_generator) - -@pytest.mark.skipif( - no_arrow_iterator_ext, - reason="arrow_iterator extension is not built.") -def 
test_iterate_over_bool_chunk(): - random.seed(datetime.datetime.now()) - column_meta = {"logicalType": "BOOLEAN"} - - def bool_generator(): - return bool(random.getrandbits(1)) - - iterate_over_test_chunk([pyarrow.bool_(), pyarrow.bool_()], - [column_meta, column_meta], - bool_generator) - - -@pytest.mark.skipif( - no_arrow_iterator_ext, - reason="arrow_iterator extension is not built.") -def test_iterate_over_float_chunk(): - random.seed(datetime.datetime.now()) - column_meta = [ - {"logicalType": "REAL"}, - {"logicalType": "FLOAT"} - ] - - def float_generator(): - return random.uniform(-100.0, 100.0) - - iterate_over_test_chunk([pyarrow.float64(), pyarrow.float64()], - column_meta, float_generator) - - -@pytest.mark.skipif( - no_arrow_iterator_ext, - reason="arrow_iterator extension is not built.") -def test_iterate_over_decimal_chunk(): - random.seed(datetime.datetime.now()) - precision = random.randint(1, 38) - scale = random.randint(0, precision) - datatype = None - if precision <= 2: - datatype = pyarrow.int8() - elif precision <= 4: - datatype = pyarrow.int16() - elif precision <= 9: - datatype = pyarrow.int32() - elif precision <= 19: - datatype = pyarrow.int64() - else: - datatype = pyarrow.decimal128(precision, scale) - - def decimal_generator(_precision, _scale): - def decimal128_generator(precision, scale): - data = [] - for i in range(precision): - data.append(str(random.randint(0, 9))) - - if scale: - data.insert(-scale, '.') - return decimal.Decimal("".join(data)) - - def int64_generator(precision): - data = random.randint(-9223372036854775808, 9223372036854775807) - return int(str(data)[:precision if data >= 0 else precision + 1]) - - def int32_generator(precision): - data = random.randint(-2147483648, 2147483637) - return int(str(data)[:precision if data >= 0 else precision + 1]) - - def int16_generator(precision): - data = random.randint(-32768, 32767) - return int(str(data)[:precision if data >= 0 else precision + 1]) - - def int8_generator(precision): - data = random.randint(-128, 127) - return int(str(data)[:precision if data >= 0 else precision + 1]) - - if _precision <= 2: - return int8_generator(_precision) - elif _precision <= 4: - return int16_generator(_precision) - elif _precision <= 9: - return int32_generator(_precision) - elif _precision <= 19: - return int64_generator(_precision) - else: - return decimal128_generator(_precision, _scale) - - def expected_data_transform_decimal(_precision, _scale): - def expected_data_transform_decimal_impl(data, precision=_precision, scale=_scale): - if precision <= 19: - return decimal.Decimal(data).scaleb(-scale) - else: - return data - - return expected_data_transform_decimal_impl - - column_meta = { "logicalType" : "FIXED", "precision" : str(precision), "scale" : str(scale) } - iterate_over_test_chunk([datatype, datatype], [column_meta, column_meta], - lambda: decimal_generator(precision, scale), expected_data_transform_decimal(precision, scale)) - - -@pytest.mark.skipif( - no_arrow_iterator_ext, - reason="arrow_iterator extension is not built.") -def test_iterate_over_date_chunk(): - random.seed(datetime.datetime.now()) - column_meta = { - "byteLength": "4", - "logicalType": "DATE", - "precision": "38", - "scale": "0", - "charLength": "0" - } - - def date_generator(): - return datetime.date.fromordinal(random.randint(1, 1000000)) - - iterate_over_test_chunk([pyarrow.date32(), pyarrow.date32()], - [column_meta, column_meta], - date_generator) - - -@pytest.mark.skipif( - no_arrow_iterator_ext, - reason="arrow_iterator 
extension is not built.") -def test_iterate_over_binary_chunk(): - random.seed(datetime.datetime.now()) - column_meta = { - "byteLength": "100", - "logicalType": "BINARY", - "precision": "0", - "scale": "0", - "charLength": "0" - } - - def byte_array_generator(): - return bytearray(os.urandom(1000)) - - iterate_over_test_chunk([pyarrow.binary(), pyarrow.binary()], - [column_meta, column_meta], - byte_array_generator) - - -@pytest.mark.skipif( - no_arrow_iterator_ext, - reason="arrow_iterator extension is not built.") -def test_iterate_over_time_chunk(): - random.seed(datetime.datetime.now()) - column_meta_int64 = [ - {"logicalType": "TIME", "scale": "9"}, - {"logicalType": "TIME", "scale": "9"} - ] - - column_meta_int32 = [ - {"logicalType": "TIME", "scale": "4"}, - {"logicalType": "TIME", "scale": "4"} - ] - - def time_generator_int64(): - return random.randint(0, 86399999999999) - - def time_generator_int32(): - return random.randint(0, 863999999) - - def expected_data_transform_int64(data): - milisec = data % (10**9) - milisec //= 10**3 - data //= 10**9 - second = data % 60 - data //= 60 - minute = data % 60 - hour = data // 60 - return datetime.time(hour, minute, second, milisec) - - def expected_data_transform_int32(data): - milisec = data % (10**4) - milisec *= 10**2 - data //= 10**4 - second = data % 60 - data //= 60 - minute = data % 60 - hour = data // 60 - return datetime.time(hour, minute, second, milisec) - - iterate_over_test_chunk([pyarrow.int64(), pyarrow.int64()], - column_meta_int64, time_generator_int64, expected_data_transform_int64) - - iterate_over_test_chunk([pyarrow.int32(), pyarrow.int32()], - column_meta_int32, time_generator_int32, expected_data_transform_int32) - - -@pytest.mark.skipif( - no_arrow_iterator_ext, - reason="arrow_iterator extension is not built.") -def test_iterate_over_timestamp_ntz_chunk(): - random.seed(datetime.datetime.now()) - scale = random.randint(0, 9) - column_meta = [ - {"logicalType": "TIMESTAMP_NTZ", "scale": str(scale)}, - {"logicalType": "TIMESTAMP_NTZ", "scale": str(scale)} - ] - data_type = pyarrow.struct([pyarrow.field('epoch', pyarrow.int64()), - pyarrow.field('fraction', pyarrow.int32())]) if scale > 7 else pyarrow.int64() - - def timestamp_ntz_generator(scale): - epoch = random.randint(-621355968, 2534023007) - frac = random.randint(0, 10**scale - 1) * (10**(9 - scale)) if scale > 7 else random.randint(0, 10**scale - 1) - if scale > 7: - return {'epoch': epoch, 'fraction' : frac} - else: - epoch = str(epoch) - frac = str(frac) - ZEROFILL = '000000000' - frac = ZEROFILL[:scale - len(frac)] + frac - return int(epoch + frac) if scale else int(epoch) - - def expected_data_transform_ntz(_scale): - def expected_data_transform_ntz_impl(data, scale=_scale): - if scale > 7: - frac = data['fraction'] - epoch = data['epoch'] - if epoch < 0: - epoch += 1 - frac = 10**9 - frac - frac = str(int(frac / 10**(9 - scale))) - ZERO_FILL = '000000000' - frac = ZERO_FILL[:scale - len(frac)] + frac - data = int(str(epoch) + frac) - - microsec = str(data) - if scale > 6: - microsec = microsec[:-scale] + "." + microsec[-scale:-scale + 6] - else: - microsec = microsec[:-scale] + "." 
+ microsec[-scale:] if scale else microsec - - if platform.system() == 'Windows': - return datetime.datetime.utcfromtimestamp(0) + datetime.timedelta(seconds=(float(microsec))) - else: - return datetime.datetime.utcfromtimestamp(float(microsec)) - - return expected_data_transform_ntz_impl - - iterate_over_test_chunk([data_type, data_type], - column_meta, lambda: timestamp_ntz_generator(scale), expected_data_transform_ntz(scale)) - - -@pytest.mark.skipif( - no_arrow_iterator_ext, - reason="arrow_iterator extension is not built.") -def test_iterate_over_timestamp_ltz_chunk(): - random.seed(datetime.datetime.now()) - scale = random.randint(0, 9) - column_meta = [ - {"logicalType": "TIMESTAMP_LTZ", "scale": str(scale)}, - {"logicalType": "TIMESTAMP_LTZ", "scale": str(scale)} - ] - data_type = pyarrow.struct([pyarrow.field('epoch', pyarrow.int64()), - pyarrow.field('fraction', pyarrow.int32())]) if scale > 7 else pyarrow.int64() - - def timestamp_ltz_generator(scale): - epoch = random.randint(-621355968, 2534023007) - frac = random.randint(0, 10**scale - 1) * (10**(9 - scale)) if scale > 7 else random.randint(0, 10**scale - 1) - if scale > 7: - return {'epoch': epoch, 'fraction' : frac} - else: - epoch = str(epoch) - frac = str(frac) - ZEROFILL = '000000000' - frac = ZEROFILL[:scale - len(frac)] + frac - return int(epoch + frac) if scale else int(epoch) - - def expected_data_transform_ltz(_scale): - def expected_data_transform_ltz_impl(data, scale=_scale): - tzinfo = get_timezone() # can put a string parameter here in the future - if scale > 7: - frac = data['fraction'] - epoch = data['epoch'] - if epoch < 0: - epoch += 1 - frac = 10**9 - frac - frac = str(int(frac / 10**(9 - scale))) - ZERO_FILL = '000000000' - frac = ZERO_FILL[:scale - len(frac)] + frac - data = int(str(epoch) + frac) - - microsec = str(data) - if scale > 6: - microsec = microsec[:-scale] + "." + microsec[-scale:-scale + 6] - else: - microsec = microsec[:-scale] + "." 
+ microsec[-scale:] if scale else microsec - - if platform.system() == 'Windows': - t0 = datetime.datetime.utcfromtimestamp(0) + datetime.timedelta(seconds=(float(microsec))) - return pytz.utc.localize(t0, is_dst=False).astimezone(tzinfo) - else: - return datetime.datetime.fromtimestamp(float(microsec), tz=tzinfo) - - return expected_data_transform_ltz_impl - - iterate_over_test_chunk([data_type, data_type], - column_meta, lambda: timestamp_ltz_generator(scale), expected_data_transform_ltz(scale)) - - -@pytest.mark.skipif( - no_arrow_iterator_ext, - reason="arrow_iterator extension is not built.") -def test_iterate_over_timestamp_tz_chunk(): - random.seed(datetime.datetime.now()) - scale = random.randint(0, 9) - column_meta = [ - {"byteLength": "16" if scale > 3 else "8", "logicalType": "TIMESTAMP_TZ", "scale": str(scale)}, - {"byteLength": "16" if scale > 3 else "8", "logicalType": "TIMESTAMP_TZ", "scale": str(scale)} - ] - - type1 = pyarrow.struct([pyarrow.field('epoch', pyarrow.int64()), - pyarrow.field('timezone', pyarrow.int32()), - pyarrow.field('fraction', pyarrow.int32())]) - type2 = pyarrow.struct([pyarrow.field('epoch', pyarrow.int64()), - pyarrow.field('timezone', pyarrow.int32())]) - data_type = type1 if scale > 3 else type2 - - def timestamp_tz_generator(scale): - epoch = random.randint(-621355968, 2534023007) - frac = random.randint(0, 10**scale - 1) * (10**(9 - scale)) if scale > 3 else random.randint(0, 10**scale - 1) - timezone = random.randint(1, 2879) - if scale > 3: - return {'epoch': epoch, 'timezone': timezone, 'fraction' : frac} - else: - epoch = str(epoch) - frac = str(frac) - ZEROFILL = '000000000' - frac = ZEROFILL[:scale - len(frac)] + frac - return {'epoch': int(epoch + frac) if scale else int(epoch), 'timezone': timezone} - - def expected_data_transform_tz(_scale): - def expected_data_transform_tz_impl(data, scale=_scale): - timezone = data['timezone'] - tzinfo = _generate_tzinfo_from_tzoffset(timezone - 1440) - epoch = data['epoch'] - if scale > 3: - frac = data['fraction'] - if epoch < 0: - epoch += 1 - frac = 10**9 - frac - frac = str(int(frac / 10**(9 - scale))) - ZERO_FILL = '000000000' - frac = ZERO_FILL[:scale - len(frac)] + frac - epoch = int(str(epoch) + frac) - - microsec = str(epoch) - if scale > 6: - microsec = microsec[:-scale] + "." + microsec[-scale:-scale + 6] - else: - microsec = microsec[:-scale] + "." 
+ microsec[-scale:] if scale else microsec - - if platform.system() == 'Windows': - t = datetime.datetime.utcfromtimestamp(0) + datetime.timedelta(seconds=(float(microsec))) - if pytz.utc != tzinfo: - t += tzinfo.utcoffset(t) - return t.replace(tzinfo=tzinfo) - else: - return datetime.datetime.fromtimestamp(float(microsec), tz=tzinfo) - - return expected_data_transform_tz_impl - - iterate_over_test_chunk([data_type, data_type], - column_meta, lambda: timestamp_tz_generator(scale), expected_data_transform_tz(scale)) - - -def iterate_over_test_chunk(pyarrow_type, column_meta, source_data_generator, expected_data_transformer=None): - stream = BytesIO() - - assert len(pyarrow_type) == len(column_meta) - - column_size = len(pyarrow_type) - batch_row_count = 10 - batch_count = 9 - - fields = [] - for i in range(column_size): - fields.append(pyarrow.field("column_{}".format(i), pyarrow_type[i], True, column_meta[i])) - schema = pyarrow.schema(fields) - - expected_data = [] - writer = RecordBatchStreamWriter(stream, schema) - - for i in range(batch_count): - column_arrays = [] - py_arrays = [] - for j in range(column_size): - column_data = [] - not_none_cnt = 0 - while not_none_cnt == 0: - column_data = [] - for k in range(batch_row_count): - data = None if bool(random.getrandbits(1)) else source_data_generator() - if data != None: - not_none_cnt += 1 - column_data.append(data) - column_arrays.append(column_data) - py_arrays.append(pyarrow.array(column_data, type=pyarrow_type[j])) - - if expected_data_transformer: - for i in range(len(column_arrays)): - column_arrays[i] = [expected_data_transformer(data) if data is not None else None for data in column_arrays[i]] - expected_data.append(column_arrays) - - column_names = ["column_{}".format(i) for i in range(column_size)] - rb = RecordBatch.from_arrays(py_arrays, column_names) - writer.write_batch(rb) - - writer.close() - - # seek stream to begnning so that we can read from stream - stream.seek(0) - reader = RecordBatchStreamReader(stream) - context = ArrowConverterContext() - it = PyArrowIterator(reader, context) - - count = 0 - while True: - try: - val = next(it) - for i in range(column_size): - batch_index = int(count / batch_row_count) - assert val[i] == expected_data[batch_index][i][count - batch_row_count * batch_index] - count += 1 - except StopIteration: - assert count == (batch_count * batch_row_count) - break - - - -def get_timezone(timezone=None): - """ Get the session timezone or use the local computer's timezone. """ - try: - tz = 'UTC' if not timezone else timezone - return pytz.timezone(tz) - except pytz.exceptions.UnknownTimeZoneError: - logger.warning('converting to tzinfo failed') - if tzlocal is not None: - return tzlocal.get_localzone() - else: - try: - return datetime.datetime.timezone.utc - except AttributeError: - return pytz.timezone('UTC') diff --git a/test/test_unit_auth.py b/test/test_unit_auth.py deleted file mode 100644 index dfccee9d6..000000000 --- a/test/test_unit_auth.py +++ /dev/null @@ -1,222 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. 
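The core of `iterate_over_test_chunk` above is an in-memory Arrow IPC stream round-trip: write record batches to a `BytesIO`, rewind, and read them back. A self-contained sketch using only the public pyarrow API (the connector's `PyArrowIterator` is left out):

```
# Self-contained sketch of the in-memory Arrow stream round-trip performed
# by iterate_over_test_chunk above, public pyarrow API only.
from io import BytesIO

import pyarrow

arr = pyarrow.array([1, 2, None], type=pyarrow.int64())
batch = pyarrow.RecordBatch.from_arrays([arr], ['column_0'])

stream = BytesIO()
writer = pyarrow.RecordBatchStreamWriter(stream, batch.schema)
writer.write_batch(batch)
writer.close()

stream.seek(0)  # rewind so the reader starts at the beginning of the stream
reader = pyarrow.RecordBatchStreamReader(stream)
assert reader.read_next_batch().equals(batch)
```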
-# -import time - -from snowflake.connector.auth import Auth -from snowflake.connector.auth_default import AuthByDefault -from snowflake.connector.compat import PY2 -from snowflake.connector.constants import OCSPMode -from snowflake.connector.description import (CLIENT_NAME, CLIENT_VERSION) -from snowflake.connector.network import SnowflakeRestful - -if PY2: - from mock import MagicMock, Mock, PropertyMock -else: - from unittest.mock import MagicMock, Mock, PropertyMock - - -def _init_rest(application, post_requset): - connection = MagicMock() - connection._login_timeout = 120 - connection.errorhandler = Mock(return_value=None) - connection._ocsp_mode = Mock(return_value=OCSPMode.FAIL_OPEN) - type(connection).application = PropertyMock(return_value=application) - type(connection)._internal_application_name = PropertyMock( - return_value=CLIENT_NAME - ) - type(connection)._internal_application_version = PropertyMock( - return_value=CLIENT_VERSION - ) - - rest = SnowflakeRestful(host='testaccount.snowflakecomputing.com', - port=443, - connection=connection) - rest._post_request = post_requset - return rest - - -def _mock_auth_mfa_rest_response(url, headers, body, **kwargs): - """ - Success case - """ - global mock_cnt - _ = url - _ = headers - _ = body - _ = kwargs.get('dummy') - if mock_cnt == 0: - ret = { - u'success': True, - u'message': None, - u'data': { - u'nextAction': u'EXT_AUTHN_DUO_ALL', - u'inFlightCtx': u'inFlightCtx', - } - } - elif mock_cnt == 1: - ret = { - u'success': True, - u'message': None, - u'data': { - u'token': u'TOKEN', - u'masterToken': u'MASTER_TOKEN', - } - } - - mock_cnt += 1 - return ret - - -def _mock_auth_mfa_rest_response_failure(url, headers, body, **kwargs): - """ - Failure case - """ - global mock_cnt - _ = url - _ = headers - _ = body - _ = kwargs.get('dummy') - - if mock_cnt == 0: - ret = { - u'success': True, - u'message': None, - u'data': { - u'nextAction': u'EXT_AUTHN_DUO_ALL', - u'inFlightCtx': u'inFlightCtx', - } - } - elif mock_cnt == 1: - ret = { - u'success': True, - u'message': None, - u'data': { - u'nextAction': u'BAD', - u'inFlightCtx': u'inFlightCtx', - } - } - - mock_cnt += 1 - return ret - - -def _mock_auth_mfa_rest_response_timeout(url, headers, body, **kwargs): - """ - Timeout case - """ - global mock_cnt - _ = url - _ = headers - _ = body - _ = kwargs.get('dummy') - if mock_cnt == 0: - ret = { - u'success': True, - u'message': None, - u'data': { - u'nextAction': u'EXT_AUTHN_DUO_ALL', - u'inFlightCtx': u'inFlightCtx', - } - } - elif mock_cnt == 1: - time.sleep(10) # should timeout while here - ret = {} - - mock_cnt += 1 - return ret - - -def test_auth_mfa(): - """ - Authentication by MFA - """ - global mock_cnt - application = 'testapplication' - account = 'testaccount' - user = 'testuser' - password = 'testpassword' - - # success test case - mock_cnt = 0 - rest = _init_rest(application, _mock_auth_mfa_rest_response) - auth = Auth(rest) - auth_instance = AuthByDefault(password) - auth.authenticate(auth_instance, account, user) - assert not rest._connection.errorhandler.called # not error - assert rest.token == 'TOKEN' - assert rest.master_token == 'MASTER_TOKEN' - - # failure test case - mock_cnt = 0 - rest = _init_rest(application, _mock_auth_mfa_rest_response_failure) - auth = Auth(rest) - auth_instance = AuthByDefault(password) - auth.authenticate(auth_instance, account, user) - assert rest._connection.errorhandler.called # error - - # timeout 1 second - mock_cnt = 0 - rest = _init_rest(application, _mock_auth_mfa_rest_response_timeout) 
- auth = Auth(rest) - auth_instance = AuthByDefault(password) - auth.authenticate(auth_instance, account, user, timeout=1) - assert rest._connection.errorhandler.called # error - - -def _mock_auth_password_change_rest_response(url, headers, body, **kwargs): - """ - Success case - """ - global mock_cnt - _ = url - _ = headers - _ = body - _ = kwargs.get('dummy') - if mock_cnt == 0: - ret = { - u'success': True, - u'message': None, - u'data': { - u'nextAction': u'PWD_CHANGE', - u'inFlightCtx': u'inFlightCtx', - } - } - elif mock_cnt == 1: - ret = { - u'success': True, - u'message': None, - u'data': { - u'token': u'TOKEN', - u'masterToken': u'MASTER_TOKEN', - } - } - - mock_cnt += 1 - return ret - - -def test_auth_password_change(): - """ - Password change - """ - global mock_cnt - - def _password_callback(): - return "NEW_PASSWORD" - - application = 'testapplication' - account = 'testaccount' - user = 'testuser' - password = 'testpassword' - - # success test case - mock_cnt = 0 - rest = _init_rest(application, _mock_auth_password_change_rest_response) - auth = Auth(rest) - auth_instance = AuthByDefault(password) - auth.authenticate(auth_instance, account, user, - password_callback=_password_callback) - assert not rest._connection.errorhandler.called # not error diff --git a/test/test_unit_auth_oauth.py b/test/test_unit_auth_oauth.py deleted file mode 100644 index 29c5b9d7c..000000000 --- a/test/test_unit_auth_oauth.py +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. -# - -from snowflake.connector.auth_oauth import AuthByOAuth -from snowflake.connector.compat import PY2 - -if PY2: - from mock import MagicMock, Mock, PropertyMock -else: - from unittest.mock import MagicMock, Mock, PropertyMock - - -def test_auth_oauth(): - """ Simple OAuth test.""" - token = "oAuthToken" - auth = AuthByOAuth(token) - auth.authenticate(None, None, None, None, None) - body = {'data':{}} - auth.update_body(body) - assert body['data']['TOKEN'] == token, body - assert body['data']['AUTHENTICATOR'] == 'OAUTH', body diff --git a/test/test_unit_auth_webbrowser.py b/test/test_unit_auth_webbrowser.py deleted file mode 100644 index 00dfe2ce6..000000000 --- a/test/test_unit_auth_webbrowser.py +++ /dev/null @@ -1,208 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. 
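The auth tests above sequence their canned REST responses with a module-level `mock_cnt` counter. The same behavior falls out of `Mock(side_effect=[...])`, which returns each element in order without global state; a sketch of the alternative (not the connector's API surface):

```
# Alternative to the global mock_cnt counter used above: a Mock whose
# side_effect is a list returns each canned response in order (sketch).
try:
    from unittest.mock import Mock  # Python 3
except ImportError:
    from mock import Mock  # Python 2, matching the PY2 fallback above

post_request = Mock(side_effect=[
    {'success': True, 'message': None,
     'data': {'nextAction': 'EXT_AUTHN_DUO_ALL',
              'inFlightCtx': 'inFlightCtx'}},
    {'success': True, 'message': None,
     'data': {'token': 'TOKEN', 'masterToken': 'MASTER_TOKEN'}},
])

first = post_request('url', {}, 'body')
second = post_request('url', {}, 'body')
assert first['data']['nextAction'] == 'EXT_AUTHN_DUO_ALL'
assert second['data']['token'] == 'TOKEN'
```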
-# - -from snowflake.connector.auth_webbrowser import AuthByWebBrowser -from snowflake.connector.compat import PY2 -from snowflake.connector.constants import OCSPMode -from snowflake.connector.description import (CLIENT_NAME, CLIENT_VERSION) -from snowflake.connector.network import EXTERNAL_BROWSER_AUTHENTICATOR, SnowflakeRestful - -if PY2: - from mock import MagicMock, Mock, PropertyMock -else: - from unittest.mock import MagicMock, Mock, PropertyMock - -AUTHENTICATOR = 'https://testsso.snowflake.net/' -APPLICATION = 'testapplication' -ACCOUNT = 'testaccount' -USER = 'testuser' -PASSWORD = 'testpassword' -SERVICE_NAME = '' -REF_PROOF_KEY = 'MOCK_PROOF_KEY' -REF_SSO_URL = 'https://testsso.snowflake.net/sso' - - -def mock_webserver(target_instance, application, port): - _ = application - _ = port - target_instance._webserver_status = True - - -def test_auth_webbrowser_get(): - """ - Authentication by WebBrowser positive test case - """ - ref_token = "MOCK_TOKEN" - - rest = _init_rest(REF_SSO_URL, REF_PROOF_KEY) - - # mock webbrowser - mock_webbrowser = MagicMock() - mock_webbrowser.open_new.return_value = True - - # mock socket - mock_socket_instance = MagicMock() - mock_socket_instance.getsockname.return_value = [None, 12345] - - mock_socket_client = MagicMock() - mock_socket_client.recv.return_value = ('\r\n'.join([ - u"GET /?token={0}&confirm=true HTTP/1.1".format(ref_token), - u"User-Agent: snowflake-agent" - ])).encode('utf-8') - mock_socket_instance.accept.return_value = (mock_socket_client, None) - mock_socket = Mock(return_value=mock_socket_instance) - - auth = AuthByWebBrowser( - rest, APPLICATION, - webbrowser_pkg=mock_webbrowser, socket_pkg=mock_socket) - auth.authenticate( - AUTHENTICATOR, SERVICE_NAME, ACCOUNT, USER, PASSWORD) - assert not rest._connection.errorhandler.called # no error - assert auth.assertion_content == ref_token - body = {u'data': {}} - auth.update_body(body) - assert body[u'data'][u'TOKEN'] == ref_token - assert body[u'data'][u'PROOF_KEY'] == REF_PROOF_KEY - assert body[u'data'][u'AUTHENTICATOR'] == EXTERNAL_BROWSER_AUTHENTICATOR - - -def test_auth_webbrowser_post(): - """ - Authentication by WebBrowser positive test case with POST - """ - ref_token = "MOCK_TOKEN" - - rest = _init_rest(REF_SSO_URL, REF_PROOF_KEY) - - # mock webbrowser - mock_webbrowser = MagicMock() - mock_webbrowser.open_new.return_value = True - - # mock socket - mock_socket_instance = MagicMock() - mock_socket_instance.getsockname.return_value = [None, 12345] - - mock_socket_client = MagicMock() - mock_socket_client.recv.return_value = ('\r\n'.join([ - u"POST / HTTP/1.1", - u"User-Agent: snowflake-agent", - u"Host: localhost:12345", - u"", - u"token={0}&confirm=true".format(ref_token) - ])).encode('utf-8') - mock_socket_instance.accept.return_value = (mock_socket_client, None) - mock_socket = Mock(return_value=mock_socket_instance) - - auth = AuthByWebBrowser( - rest, APPLICATION, - webbrowser_pkg=mock_webbrowser, socket_pkg=mock_socket) - auth.authenticate( - AUTHENTICATOR, SERVICE_NAME, ACCOUNT, USER, PASSWORD) - assert not rest._connection.errorhandler.called # no error - assert auth.assertion_content == ref_token - body = {u'data': {}} - auth.update_body(body) - assert body[u'data'][u'TOKEN'] == ref_token - assert body[u'data'][u'PROOF_KEY'] == REF_PROOF_KEY - assert body[u'data'][u'AUTHENTICATOR'] == EXTERNAL_BROWSER_AUTHENTICATOR - - -def test_auth_webbrowser_fail_webbrowser(): - """ - Authentication by WebBrowser. 
fail to start web browser - """ - rest = _init_rest(REF_SSO_URL, REF_PROOF_KEY) - - # mock webbrowser - mock_webbrowser = MagicMock() - mock_webbrowser.open_new.return_value = False - - # mock socket - mock_socket_instance = MagicMock() - mock_socket_instance.getsockname.return_value = [None, 12345] - - mock_socket_client = MagicMock() - mock_socket_client.recv.return_value = ('\r\n'.join([ - u"GET /?token=MOCK_TOKEN HTTP/1.1", - u"User-Agent: snowflake-agent" - ])).encode('utf-8') - mock_socket_instance.accept.return_value = (mock_socket_client, None) - mock_socket = Mock(return_value=mock_socket_instance) - - auth = AuthByWebBrowser( - rest, APPLICATION, webbrowser_pkg=mock_webbrowser, - socket_pkg=mock_socket) - auth.authenticate( - AUTHENTICATOR, SERVICE_NAME, ACCOUNT, USER, PASSWORD) - assert rest._connection.errorhandler.called # an error - assert auth.assertion_content is None - - -def test_auth_webbrowser_fail_webserver(): - """ - Authentication by WebBrowser. fail to start web browser - """ - rest = _init_rest(REF_SSO_URL, REF_PROOF_KEY) - - # mock webbrowser - mock_webbrowser = MagicMock() - mock_webbrowser.open_new.return_value = True - - # mock socket - mock_socket_instance = MagicMock() - mock_socket_instance.getsockname.return_value = [None, 12345] - - mock_socket_client = MagicMock() - mock_socket_client.recv.return_value = ('\r\n'.join([ - u"GARBAGE", - u"User-Agent: snowflake-agent" - ])).encode('utf-8') - mock_socket_instance.accept.return_value = (mock_socket_client, None) - mock_socket = Mock(return_value=mock_socket_instance) - - # case 1: invalid HTTP request - auth = AuthByWebBrowser( - rest, APPLICATION, webbrowser_pkg=mock_webbrowser, - socket_pkg=mock_socket) - auth.authenticate( - AUTHENTICATOR, SERVICE_NAME, ACCOUNT, USER, PASSWORD) - assert rest._connection.errorhandler.called # an error - assert auth.assertion_content is None - - -def _init_rest(ref_sso_url, ref_proof_key, success=True, message=None): - def post_request(url, headers, body, **kwargs): - _ = url - _ = headers - _ = body - _ = kwargs.get('dummy') - return { - 'success': success, - 'message': message, - 'data': { - 'ssoUrl': ref_sso_url, - 'proofKey': ref_proof_key, - } - } - - connection = MagicMock() - connection._login_timeout = 120 - connection.errorhandler = Mock(return_value=None) - connection._ocsp_mode = Mock(return_value=OCSPMode.FAIL_OPEN) - type(connection).application = PropertyMock(return_value=CLIENT_NAME) - type(connection)._internal_application_name = PropertyMock( - return_value=CLIENT_NAME - ) - type(connection)._internal_application_version = PropertyMock( - return_value=CLIENT_VERSION - ) - - rest = SnowflakeRestful( - host='testaccount.snowflakecomputing.com', - port=443, - connection=connection) - rest._post_request = post_request - return rest diff --git a/test/test_unit_binaryformat.py b/test/test_unit_binaryformat.py deleted file mode 100644 index 1d64c0442..000000000 --- a/test/test_unit_binaryformat.py +++ /dev/null @@ -1,38 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. 
-# -from snowflake.connector.sfbinaryformat import ( - SnowflakeBinaryFormat, binary_to_python, binary_to_snowflake) - - -def test_basic(): - """Test hex and base64 formatting.""" - # Hex - fmt = SnowflakeBinaryFormat("heX") - assert fmt.format(b'') == '' - assert fmt.format(b'\x00') == '00' - assert fmt.format(b'\xAB\xCD\x12') == 'ABCD12' - assert fmt.format(b'\x00\xFF\x42\x01') == '00FF4201' - - # Base64 - fmt = SnowflakeBinaryFormat("BasE64") - assert fmt.format(b'') == '' - assert fmt.format(b'\x00') == 'AA==' - assert fmt.format(b'\xAB\xCD\x12') == 'q80S' - assert fmt.format(b'\x00\xFF\x42\x01') == 'AP9CAQ==' - - -def test_binary_to_python(): - """Test conversion to Python data type.""" - assert binary_to_python('') == b'' - assert binary_to_python('00') == b'\x00' - assert binary_to_python('ABCD12') == b'\xAB\xCD\x12' - - -def test_binary_to_snowflake(): - """Test conversion for passing to Snowflake.""" - assert binary_to_snowflake(b'') == b'' - assert binary_to_snowflake(b'\x00') == b'00' - assert binary_to_snowflake(b'\xAB\xCD\x12') == b'ABCD12' diff --git a/test/test_unit_connection.py b/test/test_unit_connection.py deleted file mode 100644 index 01b09ef93..000000000 --- a/test/test_unit_connection.py +++ /dev/null @@ -1,223 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. -# -import pytest -import os - -import snowflake.connector -from snowflake.connector.auth import ( - delete_temporary_credential_file, -) -from snowflake.connector.compat import PY2 - -if PY2: - from mock import patch -else: - from unittest.mock import patch - -@patch( - 'snowflake.connector.auth_webbrowser.AuthByWebBrowser.authenticate') -@patch( - 'snowflake.connector.network.SnowflakeRestful._post_request' -) -def test_connect_externalbrowser( - mockSnowflakeRestfulPostRequest, - mockAuthByBrowserAuthenticate): - """ - Connect with authentictor=externalbrowser mock. 
- """ - - os.environ['SF_TEMPORARY_CREDENTIAL_CACHE_DIR'] = os.getenv( - "WORKSPACE", os.path.expanduser("~")) - - def mock_post_request(url, headers, json_body, **kwargs): - global mock_cnt - ret = None - if mock_cnt == 0: - # return from /v1/login-request - ret = { - u'success': True, - u'message': None, - u'data': { - u'token': u'TOKEN', - u'masterToken': u'MASTER_TOKEN', - u'idToken': u'ID_TOKEN', - }} - elif mock_cnt == 1: - # return from /token-request - ret = { - u'success': True, - u'message': None, - u'data': { - u'sessionToken': u'NEW_TOKEN', - }} - elif mock_cnt == 2: - # return from USE WAREHOUSE TESTWH_NEW - ret = { - u'success': True, - u'message': None, - u'data': { - u'finalDatabase': 'TESTDB', - u'finalWarehouse': 'TESTWH_NEW', - }} - elif mock_cnt == 3: - # return from USE DATABASE TESTDB_NEW - ret = { - u'success': True, - u'message': None, - u'data': { - u'finalDatabase': 'TESTDB_NEW', - u'finalWarehouse': 'TESTWH_NEW', - }} - elif mock_cnt == 4: - # return from SELECT 1 - ret = { - u'success': True, - u'message': None, - u'data': { - u'finalDatabase': 'TESTDB_NEW', - u'finalWarehouse': 'TESTWH_NEW', - }} - mock_cnt += 1 - return ret - - global mock_cnt - mock_cnt = 0 - - # pre-authentication doesn't matter - mockAuthByBrowserAuthenticate.return_value = None - - # POST requests mock - mockSnowflakeRestfulPostRequest.side_effect = mock_post_request - - delete_temporary_credential_file() - - mock_cnt = 0 - - account = 'testaccount' - user = 'testuser' - authenticator = 'externalbrowser' - - # first connection - con = snowflake.connector.connect( - account=account, - user=user, - authenticator=authenticator, - database='TESTDB', - warehouse='TESTWH', - ) - assert con._rest.token == u'TOKEN' - assert con._rest.master_token == u'MASTER_TOKEN' - assert con._rest.id_token == u'ID_TOKEN' - - # second connection that uses the id token to get the session token - con = snowflake.connector.connect( - account=account, - user=user, - authenticator=authenticator, - database='TESTDB_NEW', # override the database - warehouse='TESTWH_NEW', # override the warehouse - ) - - assert con._rest.token == u'NEW_TOKEN' - assert con._rest.master_token is None - assert con._rest.id_token == 'ID_TOKEN' - assert con.database == 'TESTDB_NEW' - assert con.warehouse == 'TESTWH_NEW' - - -@patch( - 'snowflake.connector.network.SnowflakeRestful._post_request' -) -def test_connect_with_service_name(mockSnowflakeRestfulPostRequest): - def mock_post_request(url, headers, json_body, **kwargs): - global mock_cnt - ret = None - if mock_cnt == 0: - # return from /v1/login-request - ret = { - u'success': True, - u'message': None, - u'data': { - u'token': u'TOKEN', - u'masterToken': u'MASTER_TOKEN', - u'idToken': u'ID_TOKEN', - u'parameters': [ - {'name': 'SERVICE_NAME', 'value': "FAKE_SERVICE_NAME"} - ], - }} - return ret - - # POST requests mock - mockSnowflakeRestfulPostRequest.side_effect = mock_post_request - - global mock_cnt - mock_cnt = 0 - - account = 'testaccount' - user = 'testuser' - - # connection - con = snowflake.connector.connect( - account=account, - user=user, - password='testpassword', - database='TESTDB', - warehouse='TESTWH', - ) - assert con.service_name == 'FAKE_SERVICE_NAME' - - -@pytest.mark.skip(reason="Mock doesn't work as expected.") -@patch( - 'snowflake.connector.network.SnowflakeRestful._post_request' -) -def test_connection_ignore_exception(mockSnowflakeRestfulPostRequest): - def mock_post_request(url, headers, json_body, **kwargs): - global mock_cnt - ret = None - if mock_cnt == 0: 
- # return from /v1/login-request - ret = { - u'success': True, - u'message': None, - u'data': { - u'token': u'TOKEN', - u'masterToken': u'MASTER_TOKEN', - u'idToken': u'ID_TOKEN', - u'parameters': [ - {'name': 'SERVICE_NAME', 'value': "FAKE_SERVICE_NAME"} - ], - }} - elif mock_cnt == 1: - ret = { - u'success': False, - u'message': "Session gone", - u'data': None, - u'code': 390111 - } - mock_cnt += 1 - return ret - - # POST requests mock - mockSnowflakeRestfulPostRequest.side_effect = mock_post_request - - global mock_cnt - mock_cnt = 0 - - account = 'testaccount' - user = 'testuser' - - # connection - con = snowflake.connector.connect( - account=account, - user=user, - password='testpassword', - database='TESTDB', - warehouse='TESTWH', - ) - # Test to see if closing connection works or raises an exception. If an exception is raised, test will fail. - con.close() - diff --git a/test/test_unit_construct_hostname.py b/test/test_unit_construct_hostname.py deleted file mode 100644 index 81ef953dc..000000000 --- a/test/test_unit_construct_hostname.py +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. -# - -from snowflake.connector.util_text import construct_hostname - - -def test_construct_hostname_basic(): - assert construct_hostname('eu-central-1', 'account1') == \ - 'account1.eu-central-1.snowflakecomputing.com' - - assert construct_hostname('', 'account1') == \ - 'account1.snowflakecomputing.com' - - assert construct_hostname(None, 'account1') == \ - 'account1.snowflakecomputing.com' - - assert construct_hostname('as-east-3', 'account1') == \ - 'account1.as-east-3.snowflakecomputing.com' - - assert construct_hostname('as-east-3', 'account1.eu-central-1') == \ - 'account1.as-east-3.snowflakecomputing.com' - - assert construct_hostname('', 'account1.eu-central-1') == \ - 'account1.eu-central-1.snowflakecomputing.com' - - assert construct_hostname(None, 'account1.eu-central-1') == \ - 'account1.eu-central-1.snowflakecomputing.com' - - assert construct_hostname(None, 'account1-jkabfvdjisoa778wqfgeruishafeuw89q.global') == \ - 'account1-jkabfvdjisoa778wqfgeruishafeuw89q.global.snowflakecomputing.com' diff --git a/test/test_unit_converter.py b/test/test_unit_converter.py deleted file mode 100644 index 047934344..000000000 --- a/test/test_unit_converter.py +++ /dev/null @@ -1,75 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. 
-# - -from logging import getLogger - -import pytest - -from snowflake.connector.compat import (TO_UNICODE, PY2, PY34_EXACT) -from snowflake.connector.connection import DefaultConverterClass -from snowflake.connector.converter_snowsql import SnowflakeConverterSnowSQL - -logger = getLogger(__name__) - -ConverterSnowSQL = SnowflakeConverterSnowSQL - - -def test_is_dst(): - """ - SNOW-6020: Failed to convert to local time during DST is being - changed - """ - # DST to non-DST - convClass = DefaultConverterClass() - conv = convClass() - conv.set_parameter('TIMEZONE', 'America/Los_Angeles') - - col_meta = { - 'name': 'CREATED_ON', - 'type': 6, - 'length': None, - 'precision': None, - 'scale': 3, - 'nullable': True, - } - m = conv.to_python_method('TIMESTAMP_LTZ', col_meta) - ret = m('1414890189.000') - - assert TO_UNICODE(ret) == u'2014-11-01 18:03:09-07:00', \ - 'Timestamp during from DST to non-DST' - - # non-DST to DST - col_meta = { - 'name': 'CREATED_ON', - 'type': 6, - 'length': None, - 'precision': None, - 'scale': 3, - 'nullable': True, - } - m = conv.to_python_method('TIMESTAMP_LTZ', col_meta) - ret = m('1425780189.000') - - assert TO_UNICODE(ret) == u'2015-03-07 18:03:09-08:00', \ - 'Timestamp during from non-DST to DST' - - -@pytest.mark.skipif(PY2 or PY34_EXACT, reason='Snowsql runs on PY35+') -def test_more_timestamps(): - conv = ConverterSnowSQL() - conv.set_parameter('TIMESTAMP_NTZ_OUTPUT_FORMAT', - 'YYYY-MM-DD HH24:MI:SS.FF9') - m = conv.to_python_method('TIMESTAMP_NTZ', {'scale': 9}) - assert m('-2208943503.876543211') == '1900-01-01 12:34:56.123456789' - assert m('-2208943503.000000000') == '1900-01-01 12:34:57.000000000' - assert m('-2208943503.012000000') == '1900-01-01 12:34:56.988000000' - - conv.set_parameter('TIMESTAMP_NTZ_OUTPUT_FORMAT', - 'YYYY-MM-DD HH24:MI:SS.FF9') - m = conv.to_python_method('TIMESTAMP_NTZ', {'scale': 7}) - assert m('-2208943503.8765432') == '1900-01-01 12:34:56.123456800' - assert m('-2208943503.0000000') == '1900-01-01 12:34:57.000000000' - assert m('-2208943503.0120000') == '1900-01-01 12:34:56.988000000' diff --git a/test/test_unit_datetime.py b/test/test_unit_datetime.py deleted file mode 100644 index 5d4f87084..000000000 --- a/test/test_unit_datetime.py +++ /dev/null @@ -1,99 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. 
-# -import pytest -import time -from datetime import datetime -from snowflake.connector.compat import IS_WINDOWS -from snowflake.connector.sfdatetime import ( - SnowflakeDateTimeFormat, - SnowflakeDateTime -) - - -def test_basic_datetime_format(): - """ - Datetime format basic - """ - # date - value = datetime(2014, 11, 30) - formatter = SnowflakeDateTimeFormat(u'YYYY-MM-DD') - assert formatter.format(value) == u'2014-11-30' - - # date time => date - value = datetime(2014, 11, 30, 12, 31, 45) - formatter = SnowflakeDateTimeFormat(u'YYYY-MM-DD') - assert formatter.format(value) == u'2014-11-30' - - # date time => date time - value = datetime(2014, 11, 30, 12, 31, 45) - formatter = SnowflakeDateTimeFormat( - u'YYYY-MM-DD"T"HH24:MI:SS') - assert formatter.format(value) == u'2014-11-30T12:31:45' - - # date time => date time in microseconds with 4 precision - value = datetime(2014, 11, 30, 12, 31, 45, microsecond=987654) - formatter = SnowflakeDateTimeFormat( - u'YYYY-MM-DD"T"HH24:MI:SS.FF4') - assert formatter.format(value) == u'2014-11-30T12:31:45.9876' - - # date time => date time in microseconds with full precision up to - # microseconds - value = datetime(2014, 11, 30, 12, 31, 45, microsecond=987654) - formatter = SnowflakeDateTimeFormat( - u'YYYY-MM-DD"T"HH24:MI:SS.FF') - assert formatter.format(value) == u'2014-11-30T12:31:45.987654' - - -def test_datetime_with_smaller_milliseconds(): - # date time => date time in microseconds with full precision up to - # microseconds - value = datetime(2014, 11, 30, 12, 31, 45, microsecond=123) - formatter = SnowflakeDateTimeFormat( - u'YYYY-MM-DD"T"HH24:MI:SS.FF9') - assert formatter.format(value) == u'2014-11-30T12:31:45.000123' - - -def test_datetime_format_negative(): - u"""Datetime format negative""" - value = datetime(2014, 11, 30, 12, 31, 45, microsecond=987654) - formatter = SnowflakeDateTimeFormat( - u'YYYYYYMMMDDDDD"haha"hoho"hihi"H12HHH24MI') - assert formatter.format(value) == u'20141411M3030DhahaHOHOhihiH1212H2431' - - -def test_struct_time_format(): - # struct_time for general use - value = time.strptime("30 Sep 01 11:20:30", "%d %b %y %H:%M:%S") - formatter = SnowflakeDateTimeFormat( - u'YYYY-MM-DD"T"HH24:MI:SS.FF') - assert formatter.format(value) == '2001-09-30T11:20:30.0' - - # struct_time encapsulated in SnowflakeDateTime. Mainly used by SnowSQL - value = SnowflakeDateTime( - time.strptime("30 Sep 01 11:20:30", "%d %b %y %H:%M:%S"), - nanosecond=0, scale=1 - ) - formatter = SnowflakeDateTimeFormat( - u'YYYY-MM-DD"T"HH24:MI:SS.FF', - datetime_class=SnowflakeDateTime) - assert formatter.format(value) == '2001-09-30T11:20:30.0' - - # format without fraction of seconds - formatter = SnowflakeDateTimeFormat( - u'YYYY-MM-DD"T"HH24:MI:SS', - datetime_class=SnowflakeDateTime) - assert formatter.format(value) == '2001-09-30T11:20:30' - - -@pytest.mark.skipif(IS_WINDOWS, reason='not supported yet') -def test_struct_time_format_extreme_large(): - # extreme large epoch time - value = SnowflakeDateTime( - time.gmtime(14567890123567), nanosecond=0, scale=1) - formatter = SnowflakeDateTimeFormat( - u'YYYY-MM-DD"T"HH24:MI:SS.FF', - datetime_class=SnowflakeDateTime) - assert formatter.format(value) == '463608-01-23T09:26:07.0' diff --git a/test/test_unit_encryption_util.py b/test/test_unit_encryption_util.py deleted file mode 100644 index 81ca3e7d1..000000000 --- a/test/test_unit_encryption_util.py +++ /dev/null @@ -1,93 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. 
-# -import codecs -import glob -import os -import tempfile -from os import path - -from snowflake.connector.compat import PY2 -from snowflake.connector.constants import (UTF8) -from snowflake.connector.encryption_util import SnowflakeEncryptionUtil -from snowflake.connector.remote_storage_util import \ - SnowflakeFileEncryptionMaterial - -THIS_DIR = path.dirname(path.realpath(__file__)) - - -def test_encrypt_decrypt_file(): - """ - Encrypt and Decrypt a file - """ - - encryption_material = SnowflakeFileEncryptionMaterial( - query_stage_master_key='ztke8tIdVt1zmlQIZm0BMA==', - query_id='123873c7-3a66-40c4-ab89-e3722fbccce1', - smk_id=3112) - data = 'test data' - input_fd, input_file = tempfile.mkstemp() - encrypted_file = None - decrypted_file = None - try: - with codecs.open(input_file, 'w', encoding=UTF8) as fd: - fd.write(data) - - (metadata, encrypted_file) = SnowflakeEncryptionUtil.encrypt_file( - encryption_material, input_file) - decrypted_file = SnowflakeEncryptionUtil.decrypt_file( - metadata, encryption_material, encrypted_file) - - contents = '' - with codecs.open(decrypted_file, 'r', encoding=UTF8) as fd: - for line in fd: - contents += line - assert data == contents, "encrypted and decrypted contents" - finally: - os.close(input_fd) - os.remove(input_file) - if encrypted_file: - os.remove(encrypted_file) - if decrypted_file: - os.remove(decrypted_file) - - -def test_encrypt_decrypt_large_file(tmpdir, test_files): - """ - Encrypt and Decrypt a large file - """ - encryption_material = SnowflakeFileEncryptionMaterial( - query_stage_master_key='ztke8tIdVt1zmlQIZm0BMA==', - query_id='123873c7-3a66-40c4-ab89-e3722fbccce1', - smk_id=3112) - - # generates N files - number_of_files = 1 - number_of_lines = 10000 - tmp_dir = test_files(tmpdir, number_of_lines, number_of_files) - - files = glob.glob(os.path.join(tmp_dir, 'file*')) - input_file = files[0] - encrypted_file = None - decrypted_file = None - try: - (metadata, encrypted_file) = SnowflakeEncryptionUtil.encrypt_file( - encryption_material, input_file) - decrypted_file = SnowflakeEncryptionUtil.decrypt_file( - metadata, encryption_material, encrypted_file) - - contents = '' - cnt = 0 - with codecs.open(decrypted_file, 'r', encoding=UTF8) as fd: - for line in fd: - contents += line - cnt += 1 - assert cnt == number_of_lines, "number of lines" - finally: - os.remove(input_file) - if encrypted_file: - os.remove(encrypted_file) - if decrypted_file: - os.remove(decrypted_file) diff --git a/test/test_unit_ocsp.py b/test/test_unit_ocsp.py deleted file mode 100644 index c4f315754..000000000 --- a/test/test_unit_ocsp.py +++ /dev/null @@ -1,111 +0,0 @@ -# encoding=utf-8 -# !/usr/bin/env python -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. 
-# - -import os -from snowflake.connector.ocsp_snowflake import OCSPCache -from snowflake.connector.ocsp_snowflake import OCSPServer - - -def test_building_retry_url(): - # privatelink retry url - OCSP_SERVER = OCSPServer() - OCSPCache.ACTIVATE_SSD = False - OCSP_SERVER.OCSP_RETRY_URL = None - OCSP_SERVER.CACHE_SERVER_URL = \ - 'http://ocsp.us-east-1.snowflakecomputing.com/ocsp_response_cache.json' - OCSP_SERVER.reset_ocsp_dynamic_cache_server_url(None) - assert OCSP_SERVER.OCSP_RETRY_URL == \ - 'http://ocsp.us-east-1.snowflakecomputing.com/retry/{0}/{1}' - - # privatelink retry url with port - OCSPCache.ACTIVATE_SSD = False - OCSP_SERVER.OCSP_RETRY_URL = None - OCSP_SERVER.CACHE_SERVER_URL = \ - 'http://ocsp.us-east-1.snowflakecomputing.com:80/ocsp_response_cache' \ - '.json' - OCSP_SERVER.reset_ocsp_dynamic_cache_server_url(None) - assert OCSP_SERVER.OCSP_RETRY_URL == \ - 'http://ocsp.us-east-1.snowflakecomputing.com:80/retry/{0}/{1}' - - # non-privatelink retry url - OCSPCache.ACTIVATE_SSD = False - OCSP_SERVER.OCSP_RETRY_URL = None - OCSP_SERVER.CACHE_SERVER_URL = \ - 'http://ocsp.snowflakecomputing.com/ocsp_response_cache.json' - OCSP_SERVER.reset_ocsp_dynamic_cache_server_url(None) - assert OCSP_SERVER.OCSP_RETRY_URL is None - - # non-privatelink retry url with port - OCSPCache.ACTIVATE_SSD = False - OCSP_SERVER.OCSP_RETRY_URL = None - OCSP_SERVER.CACHE_SERVER_URL = \ - 'http://ocsp.snowflakecomputing.com:80/ocsp_response_cache.json' - OCSP_SERVER.reset_ocsp_dynamic_cache_server_url(None) - assert OCSP_SERVER.OCSP_RETRY_URL is None - - # ssd enabled for privatelink retry url - OCSPCache.ACTIVATE_SSD = True - OCSP_SERVER.OCSP_RETRY_URL = None - OCSP_SERVER.CACHE_SERVER_URL = \ - 'http://ocsp.us-east-1.snowflakecomputing.com/ocsp_response_cache.json' - OCSP_SERVER.reset_ocsp_dynamic_cache_server_url(None) - assert OCSP_SERVER.OCSP_RETRY_URL == \ - 'http://ocsp.us-east-1.snowflakecomputing.com/retry' - - # ssd enabled for privatelink retry url with port - OCSPCache.ACTIVATE_SSD = True - OCSP_SERVER.OCSP_RETRY_URL = None - OCSP_SERVER.CACHE_SERVER_URL = \ - 'http://ocsp.us-east-1.snowflakecomputing.com:80/ocsp_response_cache' \ - '.json' - OCSP_SERVER.reset_ocsp_dynamic_cache_server_url(None) - assert OCSP_SERVER.OCSP_RETRY_URL == \ - 'http://ocsp.us-east-1.snowflakecomputing.com:80/retry' - - # ssd enabled for non-privatelink - OCSPCache.ACTIVATE_SSD = True - OCSP_SERVER.OCSP_RETRY_URL = None - OCSP_SERVER.CACHE_SERVER_URL = \ - 'http://ocsp.snowflakecomputing.com/ocsp_response_cache.json' - OCSP_SERVER.reset_ocsp_dynamic_cache_server_url(None) - assert OCSP_SERVER.OCSP_RETRY_URL is None - - # ssd enabled for non-privatelink with port - OCSPCache.ACTIVATE_SSD = True - OCSP_SERVER.OCSP_RETRY_URL = None - OCSP_SERVER.CACHE_SERVER_URL = \ - 'http://ocsp.snowflakecomputing.com:80/ocsp_response_cache.json' - OCSP_SERVER.reset_ocsp_dynamic_cache_server_url(None) - assert OCSP_SERVER.OCSP_RETRY_URL is None - #Once SSD is active we would use hostname specific OCSP Endpoints. 
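The expectations asserted in the deleted `test_building_retry_url` above encode a small URL-rewriting rule that is easy to miss among the repeated setup: a retry URL is derived only for privatelink-style cache-server hosts, and its shape depends on whether SSD is active. The sketch below restates that rule as a standalone function, inferred purely from the asserted values; `derive_retry_url` and `DEFAULT_OCSP_HOST` are illustrative names for this note, not part of the connector's actual API.

```
from urllib.parse import urlsplit

# Inferred from the assertions above: the default, non-privatelink
# OCSP cache-server host never gets a dedicated retry URL.
DEFAULT_OCSP_HOST = "ocsp.snowflakecomputing.com"


def derive_retry_url(cache_server_url, activate_ssd):
    """Sketch (hypothetical helper) of the retry-URL rule the test pins down."""
    parts = urlsplit(cache_server_url)
    if parts.hostname == DEFAULT_OCSP_HOST:
        return None  # non-privatelink: no retry URL is derived
    base = "{scheme}://{netloc}/retry".format(
        scheme=parts.scheme, netloc=parts.netloc  # port, if any, is preserved
    )
    # With OCSPCache.ACTIVATE_SSD the bare /retry endpoint is used; otherwise
    # the cert-id/request placeholders are templated into the path.
    return base if activate_ssd else base + "/{0}/{1}"


# These reproduce the expectations asserted in the deleted test:
assert derive_retry_url(
    "http://ocsp.us-east-1.snowflakecomputing.com:80/ocsp_response_cache.json", False
) == "http://ocsp.us-east-1.snowflakecomputing.com:80/retry/{0}/{1}"
assert derive_retry_url(
    "http://ocsp.snowflakecomputing.com/ocsp_response_cache.json", True
) is None
```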
- - -def test_building_new_retry(): - OCSP_SERVER = OCSPServer() - OCSPCache.ACTIVATE_SSD = False - OCSP_SERVER.OCSP_RETRY_URL = None - hname = \ - "a1.us-east-1.snowflakecomputing.com" - os.environ["SF_OCSP_ACTIVATE_NEW_ENDPOINT"] = "true" - OCSP_SERVER.reset_ocsp_endpoint(hname) - assert OCSP_SERVER.CACHE_SERVER_URL == \ - "https://ocspssd.us-east-1.snowflakecomputing.com/ocsp/fetch" - - assert OCSP_SERVER.OCSP_RETRY_URL == "https://ocspssd.us-east-1.snowflakecomputing.com/ocsp/retry" - - hname = "a1-12345.global.snowflakecomputing.com" - OCSP_SERVER.reset_ocsp_endpoint(hname) - assert OCSP_SERVER.CACHE_SERVER_URL == "https://ocspssd-12345.global.snowflakecomputing.com/ocsp/fetch" - - assert OCSP_SERVER.OCSP_RETRY_URL == "https://ocspssd-12345.global.snowflakecomputing.com/ocsp/retry" - - hname = "snowflake.okta.com" - OCSP_SERVER.reset_ocsp_endpoint(hname) - assert OCSP_SERVER.CACHE_SERVER_URL == "https://ocspssd.snowflakecomputing.com/ocsp/fetch" - - assert OCSP_SERVER.OCSP_RETRY_URL == "https://ocspssd.snowflakecomputing.com/ocsp/retry" - - del os.environ['SF_OCSP_ACTIVATE_NEW_ENDPOINT'] diff --git a/test/test_unit_parse_account.py b/test/test_unit_parse_account.py deleted file mode 100644 index 92a4f4116..000000000 --- a/test/test_unit_parse_account.py +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. -# - -from snowflake.connector.util_text import parse_account - - -def test_parse_account_basic(): - assert parse_account('account1') == 'account1' - - assert parse_account('account1.eu-central-1') == 'account1' - - assert parse_account('account1-jkabfvdjisoa778wqfgeruishafeuw89q.global') == 'account1' diff --git a/test/test_unit_proxies.py b/test/test_unit_proxies.py deleted file mode 100644 index 664f5af88..000000000 --- a/test/test_unit_proxies.py +++ /dev/null @@ -1,32 +0,0 @@ -# encoding=utf-8 -# !/usr/bin/env python -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. -# - -import os - - -def test_set_proxies(): - from snowflake.connector.proxy import set_proxies - - assert set_proxies('proxyhost', '8080') == { - 'http': 'http://proxyhost:8080', - 'https': 'http://proxyhost:8080', - } - assert set_proxies('http://proxyhost', '8080') == { - 'http': 'http://proxyhost:8080', - 'https': 'http://proxyhost:8080', - } - assert set_proxies('http://proxyhost', '8080', 'testuser', 'testpass') == { - 'http': 'http://testuser:testpass@proxyhost:8080', - 'https': 'http://testuser:testpass@proxyhost:8080', - } - assert set_proxies('proxyhost', '8080', 'testuser', 'testpass') == { - 'http': 'http://testuser:testpass@proxyhost:8080', - 'https': 'http://testuser:testpass@proxyhost:8080', - } - - # NOTE environment variable is set if the proxy parameter is specified. - del os.environ['HTTP_PROXY'] - del os.environ['HTTPS_PROXY'] diff --git a/test/test_unit_put_get.py b/test/test_unit_put_get.py deleted file mode 100644 index d345806b4..000000000 --- a/test/test_unit_put_get.py +++ /dev/null @@ -1,71 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. 
- -import pytest -from os import path, chmod -from snowflake.connector.compat import PY2 -from snowflake.connector.errors import Error -from snowflake.connector.file_transfer_agent import SnowflakeFileTransferAgent -from snowflake.connector.compat import IS_WINDOWS - -if PY2: - from mock import MagicMock -else: - from unittest.mock import MagicMock - - -@pytest.mark.skipif(IS_WINDOWS, reason='permission model is different') -def test_put_error(tmpdir): - """ - Test for raise_put_get_error flag in SnowflakeFileTransferAgent - """ - tmp_dir = str(tmpdir.mkdir('putfiledir')) - file1 = path.join(tmp_dir, 'file1') - remote_location = path.join(tmp_dir, 'remote_loc') - with open(file1, 'w') as f: - f.write('test1') - - # nobody can read now. - chmod(file1, 0o000) - - con = MagicMock() - cursor = con.cursor() - cursor.errorhandler = Error.default_errorhandler - query = 'PUT something' - ret = { - 'data': { - 'command': 'UPLOAD', - 'autoCompress': False, - 'src_locations': [file1], - 'sourceCompression': 'none', - 'stageInfo': { - 'location': remote_location, - 'locationType': 'LOCAL_FS', - 'path': 'remote_loc', - } - }, - 'success': True, - } - - # no error is raised - sf_file_transfer_agent = SnowflakeFileTransferAgent( - cursor, - query, - ret, - raise_put_get_error=False) - sf_file_transfer_agent.execute() - sf_file_transfer_agent.result() - - # Permission error should be raised - sf_file_transfer_agent = SnowflakeFileTransferAgent( - cursor, - query, - ret, - raise_put_get_error=True) - sf_file_transfer_agent.execute() - with pytest.raises(Exception): - sf_file_transfer_agent.result() - - chmod(file1, 0o700) diff --git a/test/test_unit_s3_util.py b/test/test_unit_s3_util.py deleted file mode 100644 index 40a45a7fc..000000000 --- a/test/test_unit_s3_util.py +++ /dev/null @@ -1,219 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. 
-# -import errno -import os -from collections import defaultdict -from os import path - -import OpenSSL -import botocore -from boto3.exceptions import S3UploadFailedError - -from snowflake.connector.compat import PY2 -from snowflake.connector.constants import (SHA256_DIGEST, ResultStatus) -from snowflake.connector.remote_storage_util import ( - SnowflakeRemoteStorageUtil, DEFAULT_MAX_RETRY) -from snowflake.connector.s3_util import (SnowflakeS3Util, - ERRORNO_WSAECONNABORTED) - -THIS_DIR = path.dirname(path.realpath(__file__)) - -if PY2: - from mock import Mock, MagicMock, PropertyMock -else: - from unittest.mock import Mock, MagicMock, PropertyMock - - -def test_extract_bucket_name_and_path(): - """ - Extract bucket name and S3 path - """ - s3_util = SnowflakeS3Util - - s3_loc = s3_util.extract_bucket_name_and_path( - 'sfc-dev1-regression/test_sub_dir/') - assert s3_loc.bucket_name == 'sfc-dev1-regression' - assert s3_loc.s3path == 'test_sub_dir/' - - s3_loc = s3_util.extract_bucket_name_and_path( - 'sfc-dev1-regression/stakeda/test_stg/test_sub_dir/') - assert s3_loc.bucket_name == 'sfc-dev1-regression' - assert s3_loc.s3path == 'stakeda/test_stg/test_sub_dir/' - - s3_loc = s3_util.extract_bucket_name_and_path( - 'sfc-dev1-regression/') - assert s3_loc.bucket_name == 'sfc-dev1-regression' - assert s3_loc.s3path == '' - - s3_loc = s3_util.extract_bucket_name_and_path( - 'sfc-dev1-regression//') - assert s3_loc.bucket_name == 'sfc-dev1-regression' - assert s3_loc.s3path == '/' - - s3_loc = s3_util.extract_bucket_name_and_path( - 'sfc-dev1-regression///') - assert s3_loc.bucket_name == 'sfc-dev1-regression' - assert s3_loc.s3path == '//' - - -def test_upload_one_file_to_s3_wsaeconnaborted(): - """ - Tests Upload one file to S3 with retry on ERRORNO_WSAECONNABORTED. - The last attempted max_currency should be (initial_parallel/max_retry) - """ - upload_file = MagicMock( - side_effect=OpenSSL.SSL.SysCallError( - ERRORNO_WSAECONNABORTED, 'mock err. 
connection aborted')) - s3object = MagicMock(metadata=defaultdict(str), upload_file=upload_file) - client = Mock() - client.Object.return_value = s3object - initial_parallel = 100 - upload_meta = { - u'no_sleeping_time': True, - u'parallel': initial_parallel, - u'put_callback': None, - u'put_callback_output_stream': None, - u'existing_files': [], - u'client': client, - SHA256_DIGEST: '123456789abcdef', - u'stage_info': { - u'location': 'sfc-customer-stage/rwyi-testacco/users/9220/', - u'locationType': 'S3', - }, - u'dst_file_name': 'data1.txt.gz', - u'src_file_name': path.join(THIS_DIR, 'data', 'put_get_1.txt'), - } - upload_meta[u'real_src_file_name'] = upload_meta['src_file_name'] - upload_meta[u'upload_size'] = os.stat(upload_meta['src_file_name']).st_size - tmp_upload_meta = upload_meta.copy() - try: - SnowflakeRemoteStorageUtil.upload_one_file_to_s3(tmp_upload_meta) - raise Exception("Should fail with OpenSSL.SSL.SysCallError") - except OpenSSL.SSL.SysCallError: - assert upload_file.call_count == DEFAULT_MAX_RETRY - assert 'last_max_concurrency' in tmp_upload_meta - assert tmp_upload_meta[ - 'last_max_concurrency' - ] == initial_parallel / DEFAULT_MAX_RETRY - - # min parallel == 1 - upload_file.reset_mock() - initial_parallel = 4 - upload_meta[u'parallel'] = initial_parallel - tmp_upload_meta = upload_meta.copy() - try: - SnowflakeRemoteStorageUtil.upload_one_file_to_s3(tmp_upload_meta) - raise Exception("Should fail with OpenSSL.SSL.SysCallError") - except OpenSSL.SSL.SysCallError: - assert upload_file.call_count == DEFAULT_MAX_RETRY - assert 'last_max_concurrency' in tmp_upload_meta - assert tmp_upload_meta['last_max_concurrency'] == 1 - - -def test_upload_one_file_to_s3_econnreset(): - """ - Tests Upload one file to S3 with retry on errno.ECONNRESET. - The last attempted max_currency should not be changed. - """ - for error_code in [errno.ECONNRESET, - errno.ETIMEDOUT, - errno.EPIPE, - -1]: - upload_file = MagicMock( - side_effect=OpenSSL.SSL.SysCallError( - error_code, 'mock err. connection aborted')) - s3object = MagicMock(metadata=defaultdict(str), upload_file=upload_file) - client = Mock() - client.Object.return_value = s3object - initial_parallel = 100 - upload_meta = { - u'no_sleeping_time': True, - u'parallel': initial_parallel, - u'put_callback': None, - u'put_callback_output_stream': None, - u'existing_files': [], - SHA256_DIGEST: '123456789abcdef', - u'stage_info': { - u'location': 'sfc-teststage/rwyitestacco/users/1234/', - u'locationType': 'S3', - }, - u'client': client, - u'dst_file_name': 'data1.txt.gz', - u'src_file_name': path.join(THIS_DIR, 'data', 'put_get_1.txt'), - } - upload_meta[u'real_src_file_name'] = upload_meta['src_file_name'] - upload_meta[ - u'upload_size'] = os.stat(upload_meta['src_file_name']).st_size - try: - SnowflakeRemoteStorageUtil.upload_one_file_to_s3(upload_meta) - raise Exception("Should fail with OpenSSL.SSL.SysCallError") - except OpenSSL.SSL.SysCallError: - assert upload_file.call_count == DEFAULT_MAX_RETRY - assert 'last_max_concurrency' not in upload_meta - - -def test_get_s3_file_object_http_400_error(): - """ - Tests Get S3 file object with HTTP 400 error. Looks like HTTP 400 is - returned when AWS token expires and S3.Object.load is called. 
- """ - load_method = MagicMock( - side_effect=botocore.exceptions.ClientError( - {'Error': {'Code': u'400', 'Message': 'Bad Request'}}, - operation_name='mock load')) - s3object = MagicMock(load=load_method) - client = Mock() - client.Object.return_value = s3object - client.load.return_value = None - type(client).s3path = PropertyMock(return_value='s3://testbucket/') - meta = { - u'client': client, - u'stage_info': { - u'location': 'sfc-teststage/rwyitestacco/users/1234/', - u'locationType': 'S3', - } - } - filename = "/path1/file2.txt" - akey = SnowflakeS3Util.get_file_header(meta, filename) - assert akey is None - assert meta['result_status'] == ResultStatus.RENEW_TOKEN - - -def test_upload_file_with_s3_upload_failed_error(): - """ - Tests Upload file with S3UploadFailedError, which could indicate AWS - token expires. - """ - upload_file = MagicMock( - side_effect=S3UploadFailedError( - "An error occurred (ExpiredToken) when calling the " - "CreateMultipartUpload operation: The provided token has expired.")) - client = Mock() - client.Object.return_value = MagicMock( - metadata=defaultdict(str), upload_file=upload_file) - initial_parallel = 100 - upload_meta = { - u'no_sleeping_time': True, - u'parallel': initial_parallel, - u'put_callback': None, - u'put_callback_output_stream': None, - u'existing_files': [], - SHA256_DIGEST: '123456789abcdef', - u'stage_info': { - u'location': 'sfc-teststage/rwyitestacco/users/1234/', - u'locationType': 'S3', - }, - u'client': client, - u'dst_file_name': 'data1.txt.gz', - u'src_file_name': path.join(THIS_DIR, 'data', 'put_get_1.txt'), - } - upload_meta[u'real_src_file_name'] = upload_meta['src_file_name'] - upload_meta[ - u'upload_size'] = os.stat(upload_meta['src_file_name']).st_size - - akey = SnowflakeRemoteStorageUtil.upload_one_file_to_s3(upload_meta) - assert akey is None - assert upload_meta['result_status'] == ResultStatus.RENEW_TOKEN diff --git a/test/test_unit_secret_detector.py b/test/test_unit_secret_detector.py deleted file mode 100644 index 416851c41..000000000 --- a/test/test_unit_secret_detector.py +++ /dev/null @@ -1,141 +0,0 @@ -# encoding=utf-8 -# !/usr/bin/env python -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. 
-# - -import random -import string - -from snowflake.connector.secret_detector import SecretDetector - - -def test_mask_aws_secret(): - sql = "copy into 's3://xxxx/test' from \n" \ - "(select seq1(), random()\n" \ - ", random(), random(), random(), random()\n" \ - ", random(), random(), random(), random()\n" \ - ", random() , random(), random(), random()\n" \ - "\tfrom table(generator(rowcount => 10000)))\n" \ - "credentials=(\n" \ - " aws_key_id='xxdsdfsafds'\n" \ - " aws_secret_key='safas+asfsad+safasf'\n" \ - " )\n" \ - "OVERWRITE = TRUE \n" \ - "MAX_FILE_SIZE = 500000000 \n" \ - "HEADER = TRUE \n" \ - "FILE_FORMAT = (TYPE = PARQUET SNAPPY_COMPRESSION = TRUE )\n" \ - ";" - - correct = "copy into 's3://xxxx/test' from \n" \ - "(select seq1(), random()\n" \ - ", random(), random(), random(), random()\n" \ - ", random(), random(), random(), random()\n" \ - ", random() , random(), random(), random()\n" \ - "\tfrom table(generator(rowcount => 10000)))\n" \ - "credentials=(\n" \ - " aws_key_id='**********'\n" \ - " aws_secret_key='**********'\n" \ - " )\n" \ - "OVERWRITE = TRUE \n" \ - "MAX_FILE_SIZE = 500000000 \n" \ - "HEADER = TRUE \n" \ - "FILE_FORMAT = (TYPE = PARQUET SNAPPY_COMPRESSION = TRUE )\n" \ - ";" - - # Mask an aws key id and secret key - masked_sql = SecretDetector.mask_secrets(sql) - assert masked_sql == correct - - -def test_mask_sas_token(): - azure_sas_token = "https://someaccounts.blob.core.windows.net/results/018b90ab-0033-" \ - "5f8e-0000-14f1000bd376_0/main/data_0_0_1?sv=2015-07-08&" \ - "sig=iCvQmdZngZNW%2F4vw43j6%2BVz6fndHF5LI639QJba4r8o%3D&" \ - "spr=https&st=2016-04-12T03%3A24%3A31Z&" \ - "se=2016-04-13T03%3A29%3A31Z&srt=s&ss=bf&sp=rwl" - - masked_azure_sas_token = "https://someaccounts.blob.core.windows.net/results/018b90ab-0033-" \ - "5f8e-0000-14f1000bd376_0/main/data_0_0_1?sv=2015-07-08&" \ - "sig=**********&" \ - "spr=https&st=2016-04-12T03%3A24%3A31Z&" \ - "se=2016-04-13T03%3A29%3A31Z&srt=s&ss=bf&sp=rwl" - - s3_sas_token = "https://somebucket.s3.amazonaws.com/vzy1-s-va_demo0/results/018b92f3" \ - "-01c2-02dd-0000-03d5000c8066_0/main/data_0_0_1?" \ - "x-amz-server-side-encryption-customer-algorithm=AES256&" \ - "response-content-encoding=gzip&AWSAccessKeyId=AKIAIOSFODNN7EXAMPLE" \ - "&Expires=1555481960&Signature=zFiRkdB9RtRRYomppVes4fQ%2ByWw%3D" - - masked_s3_sas_token = "https://somebucket.s3.amazonaws.com/vzy1-s-va_demo0/results/018b92f3" \ - "-01c2-02dd-0000-03d5000c8066_0/main/data_0_0_1?" 
\ - "x-amz-server-side-encryption-customer-algorithm=AES256&" \ - "response-content-encoding=gzip&AWSAccessKeyId=**********" \ - "&Expires=1555481960&Signature=**********" - - # Mask azure token - masked_text = SecretDetector.mask_secrets(azure_sas_token) - assert masked_text == masked_azure_sas_token - - # Mask s3 token - masked_text = SecretDetector.mask_secrets(s3_sas_token) - assert masked_text == masked_s3_sas_token - - text = ''.join([random.choice(string.ascii_lowercase) for i in range(200)]) - masked_text = SecretDetector.mask_secrets(text) - # Randomly generated string should cause no substitutions - assert masked_text == text - - # Mask multiple azure tokens - masked_text = SecretDetector.mask_secrets(azure_sas_token + '\n' + azure_sas_token) - assert masked_text == masked_azure_sas_token + '\n' + masked_azure_sas_token - - # Mask multiple s3 tokens - masked_text = SecretDetector.mask_secrets(s3_sas_token + '\n' + s3_sas_token) - assert masked_text == masked_s3_sas_token + '\n' + masked_s3_sas_token - - # Mask azure and s3 token - masked_text = SecretDetector.mask_secrets(azure_sas_token + '\n' + s3_sas_token) - assert masked_text == masked_azure_sas_token + '\n' + masked_s3_sas_token - - -def test_mask_secrets(): - sql = "create stage mystage " \ - "URL = 's3://mybucket/mypath/' " \ - "credentials = (aws_key_id = 'AKIAIOSFODNN7EXAMPLE' " \ - "aws_secret_key = 'frJIUN8DYpKDtOLCwo//yllqDzg='); " \ - "create stage mystage2 " \ - "URL = 'azure//mystorage.blob.core.windows.net/cont' " \ - "credentials = (azure_sas_token = " \ - "'?sv=2016-05-31&ss=b&srt=sco&sp=rwdl&se=2018-06-27T10:05:50Z&" \ - "st=2017-06-27T02:05:50Z&spr=https,http&" \ - "sig=bgqQwoXwxzuD2GJfagRg7VOS8hzNr3QLT7rhS8OFRLQ%3D')" - - masked_sql = "create stage mystage " \ - "URL = 's3://mybucket/mypath/' " \ - "credentials = (aws_key_id='**********' " \ - "aws_secret_key='**********'); " \ - "create stage mystage2 " \ - "URL = 'azure//mystorage.blob.core.windows.net/cont' " \ - "credentials = (azure_sas_token = " \ - "'?sv=2016-05-31&ss=b&srt=sco&sp=rwdl&se=2018-06-27T10:05:50Z&" \ - "st=2017-06-27T02:05:50Z&spr=https,http&" \ - "sig=**********')" - - # Test masking all kinds of secrets - masked = SecretDetector.mask_secrets(sql) - assert masked == masked_sql - - text = ''.join([random.choice(string.ascii_lowercase) for i in range(500)]) - masked_text = SecretDetector.mask_secrets(text) - # Randomly generated string should cause no substitutions - assert masked_text == text - - -def test_mask_private_keys(): - text = "\"privateKeyData\": \"aslkjdflasjf\"" - - filtered_text = "\"privateKeyData\": \"XXXX\"" - - result = SecretDetector.mask_secrets(text) - assert result == filtered_text diff --git a/test/test_unit_telemetry.py b/test/test_unit_telemetry.py deleted file mode 100644 index 9734e0d57..000000000 --- a/test/test_unit_telemetry.py +++ /dev/null @@ -1,151 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2018-2019 Snowflake Computing Inc. All right reserved. 
-# -from snowflake.connector.compat import PY2 -from snowflake.connector.telemetry import * - -if PY2: - from mock import Mock -else: - from unittest.mock import Mock - - -def test_telemetry_data_to_dict(): - """ - Test that TelemetryData instances are properly converted to dicts - """ - assert TelemetryData({}, 2000).to_dict() == {'message': {}, 'timestamp': '2000'} - - d = {'type': 'test', 'query_id': '1', 'value': 20} - assert TelemetryData(d, 1234).to_dict() == {'message': d, 'timestamp': '1234'} - - -def get_client_and_mock(): - rest_call = Mock() - rest_call.return_value = {u'success': True} - rest = Mock() - rest.attach_mock(rest_call, 'request') - client = TelemetryClient(rest, 2) - return (client, rest_call) - - -def test_telemetry_simple_flush(): - """ - Test that metrics are properly enqueued and sent to telemetry - """ - client, rest_call = get_client_and_mock() - - client.add_log_to_batch(TelemetryData({}, 2000)) - assert rest_call.call_count == 0 - - client.add_log_to_batch(TelemetryData({}, 3000)) - assert rest_call.call_count == 1 - - -def test_telemetry_close(): - """ - Test that remaining metrics are flushed on close - """ - client, rest_call = get_client_and_mock() - - client.add_log_to_batch(TelemetryData({}, 2000)) - assert rest_call.call_count == 0 - - client.close() - assert rest_call.call_count == 1 - assert client.is_closed() - - -def test_telemetry_close_empty(): - """ - Test that no calls are made on close if there are no metrics to flush - """ - client, rest_call = get_client_and_mock() - - client.close() - assert rest_call.call_count == 0 - assert client.is_closed() - - -def test_telemetry_send_batch(): - """ - Test that metrics are sent with the send_batch method - """ - client, rest_call = get_client_and_mock() - - client.add_log_to_batch(TelemetryData({}, 2000)) - assert rest_call.call_count == 0 - - client.send_batch() - assert rest_call.call_count == 1 - - -def test_telemetry_send_batch_empty(): - """ - Test that send_batch does nothing when there are no metrics to send - """ - client, rest_call = get_client_and_mock() - - client.send_batch() - assert rest_call.call_count == 0 - - -def test_telemetry_send_batch_clear(): - """ - Test that send_batch clears the first batch and will not send anything - on a second call - """ - client, rest_call = get_client_and_mock() - - client.add_log_to_batch(TelemetryData({}, 2000)) - assert rest_call.call_count == 0 - - client.send_batch() - assert rest_call.call_count == 1 - - client.send_batch() - assert rest_call.call_count == 1 - - -def test_telemetry_auto_disable(): - """ - Test that the client will automatically disable itself if a request fails - """ - client, rest_call = get_client_and_mock() - rest_call.return_value = {u'success': False} - - client.add_log_to_batch(TelemetryData({}, 2000)) - assert client.is_enabled() - - client.send_batch() - assert not client.is_enabled() - - -def test_telemetry_add_batch_disabled(): - """ - Test that the client will not add logs if disabled - """ - client, _ = get_client_and_mock() - - client.disable() - client.add_log_to_batch(TelemetryData({}, 2000)) - - assert client.buffer_size() == 0 - - -def test_telemetry_send_batch_disabled(): - """ - Test that the client will not send logs if disabled - """ - client, rest_call = get_client_and_mock() - - client.add_log_to_batch(TelemetryData({}, 2000)) - assert client.buffer_size() == 1 - - client.disable() - - client.send_batch() - assert client.buffer_size() == 1 - assert rest_call.call_count == 0 \ No newline at end of file diff 
--git a/test/test_unit_telemetry_oob.py b/test/test_unit_telemetry_oob.py deleted file mode 100644 index 7ed46b17f..000000000 --- a/test/test_unit_telemetry_oob.py +++ /dev/null @@ -1,129 +0,0 @@ -# encoding=utf-8 -# !/usr/bin/env python -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. -# - -import pytest - -from snowflake.connector.telemetry_oob import TelemetryService -from snowflake.connector.errors import RevocationCheckError -from snowflake.connector.sqlstate import SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED -from snowflake.connector.errorcode import ER_FAILED_TO_REQUEST - -DEV_CONFIG = { - 'host': 'localhost', - 'port': 8080, - 'account': 'testAccount', - 'user': 'test', - 'password': 'ShouldNotShowUp', - 'protocol': 'http' -} -telemetry_data = {} -exception = RevocationCheckError("Test OCSP Revocation error") -event_type = "Test OCSP Exception" -stack_trace = [ - 'Traceback (most recent call last):\n', - ' File "", line 10, in \n lumberjack()\n', - ' File "", line 4, in lumberjack\n bright_side_of_death()\n', - ' File "", line 7, in bright_side_of_death\n return tuple()[0]\n', - 'IndexError: tuple index out of range\n' -] - -event_name = "HttpRetryTimeout" -url = "http://localhost:8080/queries/v1/query-request?request_guid=a54a3d70-abf2-4576-bb6f-ddf23999491a" -method = "POST" - - -@pytest.fixture() -def telemetry_setup(request): - """ - Sets up the telemetry service by enabling it and flushing any entries - """ - telemetry = TelemetryService.get_instance() - telemetry.update_context(DEV_CONFIG) - telemetry.enable() - telemetry.flush() - - -def test_telemetry_oob_simple_flush(telemetry_setup): - """ - Tests capturing and sending a simple OCSP Exception message - """ - telemetry = TelemetryService.get_instance() - - telemetry.log_ocsp_exception(event_type, telemetry_data, exception=exception, stack_trace=stack_trace) - assert telemetry.size() == 1 - telemetry.flush() - assert telemetry.size() == 0 - - -def test_telemetry_oob_urgent(telemetry_setup): - """ - Tests sending an urgent OCSP Exception message - """ - telemetry = TelemetryService.get_instance() - - telemetry.log_ocsp_exception(event_type, telemetry_data, exception=exception, stack_trace=stack_trace, urgent=True) - assert telemetry.size() == 0 - - -def test_telemetry_oob_close(telemetry_setup): - """ - Tests closing the Telemetry Service when there are still messages in the queue - """ - telemetry = TelemetryService.get_instance() - - telemetry.log_ocsp_exception(event_type, telemetry_data, exception=exception, stack_trace=stack_trace) - assert telemetry.size() == 1 - telemetry.close() - assert telemetry.size() == 0 - - -def test_telemetry_oob_close_empty(telemetry_setup): - """ - Tests closing the Telemetry Service when the queue is empty - """ - telemetry = TelemetryService.get_instance() - - assert telemetry.size() == 0 - telemetry.close() - assert telemetry.size() == 0 - - -def test_telemetry_oob_log_when_disabled(telemetry_setup): - """ - Tests trying to log to the telemetry service when it is disabled - """ - telemetry = TelemetryService.get_instance() - - assert telemetry.size() == 0 - telemetry.disable() - telemetry.log_ocsp_exception(event_type, telemetry_data, exception=exception, stack_trace=stack_trace) - assert telemetry.size() == 0 - telemetry.enable() - - -def test_telemetry_oob_http_log(telemetry_setup): - """ - Tests sending a simple HTTP request telemetry event - """ - telemetry = TelemetryService.get_instance() - - telemetry.log_http_request(event_name, url, method, 
SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED, ER_FAILED_TO_REQUEST, - exception=exception, stack_trace=stack_trace) - assert telemetry.size() == 1 - telemetry.flush() - assert telemetry.size() == 0 - - -def test_telemetry_oob_http_log_urgent(telemetry_setup): - """ - Tests sending an urgent HTTP request telemetry event - """ - telemetry = TelemetryService.get_instance() - - assert telemetry.size() == 0 - telemetry.log_http_request(event_name, url, method, SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED, ER_FAILED_TO_REQUEST, - exception=exception, stack_trace=stack_trace, urgent=True) - assert telemetry.size() == 0 diff --git a/test/unit/__init__.py b/test/unit/__init__.py new file mode 100644 index 000000000..d689e9c10 --- /dev/null +++ b/test/unit/__init__.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# diff --git a/test/unit/conftest.py b/test/unit/conftest.py new file mode 100644 index 000000000..fe16ac092 --- /dev/null +++ b/test/unit/conftest.py @@ -0,0 +1,18 @@ +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# +from __future__ import annotations + +import pytest + +from snowflake.connector.telemetry_oob import TelemetryService + + +@pytest.fixture(autouse=True, scope="session") +def disable_oob_telemetry(): + oob_telemetry_service = TelemetryService.get_instance() + original_state = oob_telemetry_service.enabled + oob_telemetry_service.disable() + yield None + if original_state: + oob_telemetry_service.enable() diff --git a/test/unit/test_auth.py b/test/unit/test_auth.py new file mode 100644 index 000000000..f699ab5b0 --- /dev/null +++ b/test/unit/test_auth.py @@ -0,0 +1,215 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +import time +from unittest.mock import MagicMock, Mock, PropertyMock + +import pytest + +from snowflake.connector.auth import Auth +from snowflake.connector.auth_default import AuthByDefault +from snowflake.connector.constants import OCSPMode +from snowflake.connector.description import CLIENT_NAME, CLIENT_VERSION +from snowflake.connector.network import SnowflakeRestful + + +def _init_rest(application, post_requset): + connection = MagicMock() + connection._login_timeout = 120 + connection._network_timeout = None + connection.errorhandler = Mock(return_value=None) + connection._ocsp_mode = Mock(return_value=OCSPMode.FAIL_OPEN) + type(connection).application = PropertyMock(return_value=application) + type(connection)._internal_application_name = PropertyMock(return_value=CLIENT_NAME) + type(connection)._internal_application_version = PropertyMock( + return_value=CLIENT_VERSION + ) + + rest = SnowflakeRestful( + host="testaccount.snowflakecomputing.com", port=443, connection=connection + ) + rest._post_request = post_requset + return rest + + +def _create_mock_auth_mfs_rest_response(next_action: str): + def _mock_auth_mfa_rest_response(url, headers, body, **kwargs): + """Tests successful case.""" + global mock_cnt + _ = url + _ = headers + _ = body + _ = kwargs.get("dummy") + if mock_cnt == 0: + ret = { + "success": True, + "message": None, + "data": { + "nextAction": next_action, + "inFlightCtx": "inFlightCtx", + }, + } + elif mock_cnt == 1: + ret = { + "success": True, + "message": None, + "data": { + "token": "TOKEN", + "masterToken": "MASTER_TOKEN", + }, + } + + mock_cnt += 1 + return ret + + return _mock_auth_mfa_rest_response + + +def _mock_auth_mfa_rest_response_failure(url, headers, body, 
**kwargs): + """Tests failed case.""" + global mock_cnt + _ = url + _ = headers + _ = body + _ = kwargs.get("dummy") + + if mock_cnt == 0: + ret = { + "success": True, + "message": None, + "data": { + "nextAction": "EXT_AUTHN_DUO_ALL", + "inFlightCtx": "inFlightCtx", + }, + } + elif mock_cnt == 1: + ret = { + "success": True, + "message": None, + "data": { + "nextAction": "BAD", + "inFlightCtx": "inFlightCtx", + }, + } + + mock_cnt += 1 + return ret + + +def _mock_auth_mfa_rest_response_timeout(url, headers, body, **kwargs): + """Tests timeout case.""" + global mock_cnt + _ = url + _ = headers + _ = body + _ = kwargs.get("dummy") + if mock_cnt == 0: + ret = { + "success": True, + "message": None, + "data": { + "nextAction": "EXT_AUTHN_DUO_ALL", + "inFlightCtx": "inFlightCtx", + }, + } + elif mock_cnt == 1: + time.sleep(10) # should timeout while here + ret = {} + + mock_cnt += 1 + return ret + + +@pytest.mark.parametrize( + "next_action", ("EXT_AUTHN_DUO_ALL", "EXT_AUTHN_DUO_PUSH_N_PASSCODE") +) +def test_auth_mfa(next_action: str): + """Authentication by MFA.""" + global mock_cnt + application = "testapplication" + account = "testaccount" + user = "testuser" + password = "testpassword" + + # success test case + mock_cnt = 0 + rest = _init_rest(application, _create_mock_auth_mfs_rest_response(next_action)) + auth = Auth(rest) + auth_instance = AuthByDefault(password) + auth.authenticate(auth_instance, account, user) + assert not rest._connection.errorhandler.called # not error + assert rest.token == "TOKEN" + assert rest.master_token == "MASTER_TOKEN" + + # failure test case + mock_cnt = 0 + rest = _init_rest(application, _mock_auth_mfa_rest_response_failure) + auth = Auth(rest) + auth_instance = AuthByDefault(password) + auth.authenticate(auth_instance, account, user) + assert rest._connection.errorhandler.called # error + + # timeout 1 second + mock_cnt = 0 + rest = _init_rest(application, _mock_auth_mfa_rest_response_timeout) + auth = Auth(rest) + auth_instance = AuthByDefault(password) + auth.authenticate(auth_instance, account, user, timeout=1) + assert rest._connection.errorhandler.called # error + + +def _mock_auth_password_change_rest_response(url, headers, body, **kwargs): + """Test successful case.""" + global mock_cnt + _ = url + _ = headers + _ = body + _ = kwargs.get("dummy") + if mock_cnt == 0: + ret = { + "success": True, + "message": None, + "data": { + "nextAction": "PWD_CHANGE", + "inFlightCtx": "inFlightCtx", + }, + } + elif mock_cnt == 1: + ret = { + "success": True, + "message": None, + "data": { + "token": "TOKEN", + "masterToken": "MASTER_TOKEN", + }, + } + + mock_cnt += 1 + return ret + + +def test_auth_password_change(): + """Tests password change.""" + global mock_cnt + + def _password_callback(): + return "NEW_PASSWORD" + + application = "testapplication" + account = "testaccount" + user = "testuser" + password = "testpassword" + + # success test case + mock_cnt = 0 + rest = _init_rest(application, _mock_auth_password_change_rest_response) + auth = Auth(rest) + auth_instance = AuthByDefault(password) + auth.authenticate( + auth_instance, account, user, password_callback=_password_callback + ) + assert not rest._connection.errorhandler.called # not error diff --git a/test/unit/test_auth_oauth.py b/test/unit/test_auth_oauth.py new file mode 100644 index 000000000..cee80a45c --- /dev/null +++ b/test/unit/test_auth_oauth.py @@ -0,0 +1,19 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
+# + +from __future__ import annotations + +from snowflake.connector.auth_oauth import AuthByOAuth + + +def test_auth_oauth(): + """Simple OAuth test.""" + token = "oAuthToken" + auth = AuthByOAuth(token) + auth.authenticate(None, None, None, None, None) + body = {"data": {}} + auth.update_body(body) + assert body["data"]["TOKEN"] == token, body + assert body["data"]["AUTHENTICATOR"] == "OAUTH", body diff --git a/test/test_unit_auth_okta.py b/test/unit/test_auth_okta.py similarity index 55% rename from test/test_unit_auth_okta.py rename to test/unit/test_auth_okta.py index 0885bf714..6b092cad8 100644 --- a/test/test_unit_auth_okta.py +++ b/test/unit/test_auth_okta.py @@ -1,45 +1,41 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. # +from __future__ import annotations + +from unittest.mock import MagicMock, Mock, PropertyMock + from snowflake.connector.auth_okta import AuthByOkta -from snowflake.connector.compat import PY2 from snowflake.connector.constants import OCSPMode -from snowflake.connector.description import (CLIENT_NAME, CLIENT_VERSION) +from snowflake.connector.description import CLIENT_NAME, CLIENT_VERSION from snowflake.connector.network import SnowflakeRestful -if PY2: - from mock import MagicMock, Mock, PropertyMock -else: - from unittest.mock import MagicMock, Mock, PropertyMock - def test_auth_okta(): - """ - Authentication by OKTA positive test case - """ - authenticator = 'https://testsso.snowflake.net/' - application = 'testapplication' - account = 'testaccount' - user = 'testuser' - password = 'testpassword' - service_name = '' - - ref_sso_url = 'https://testsso.snowflake.net/sso' - ref_token_url = 'https://testsso.snowflake.net/token' + """Authentication by OKTA positive test case.""" + authenticator = "https://testsso.snowflake.net/" + application = "testapplication" + account = "testaccount" + user = "testuser" + password = "testpassword" + service_name = "" + + ref_sso_url = "https://testsso.snowflake.net/sso" + ref_token_url = "https://testsso.snowflake.net/token" rest = _init_rest(ref_sso_url, ref_token_url) auth = AuthByOkta(rest, application) # step 1 headers, sso_url, token_url = auth._step1( - authenticator, service_name, account, user) + authenticator, service_name, account, user + ) assert not rest._connection.errorhandler.called # no error - assert headers.get('accept') is not None - assert headers.get('Content-Type') is not None - assert headers.get('User-Agent') is not None + assert headers.get("accept") is not None + assert headers.get("Content-Type") is not None + assert headers.get("User-Agent") is not None assert sso_url == ref_sso_url assert token_url == ref_token_url @@ -48,11 +44,11 @@ def test_auth_okta(): assert not rest._connection.errorhandler.called # no error # step 3 - ref_one_time_token = '1token1' + ref_one_time_token = "1token1" def fake_fetch(method, full_url, headers, **kwargs): return { - 'cookieToken': ref_one_time_token, + "cookieToken": ref_one_time_token, } rest.fetch = fake_fetch @@ -61,11 +57,11 @@ def fake_fetch(method, full_url, headers, **kwargs): assert one_time_token == ref_one_time_token # step 4 - ref_response_html = ''' + ref_response_html = """
-''' +""" def fake_fetch(method, full_url, headers, **kwargs): return ref_response_html @@ -75,8 +71,8 @@ def fake_fetch(method, full_url, headers, **kwargs): assert response_html == response_html # step 5 - rest._protocol = 'https' - rest._host = '{account}.snowflakecomputing.com'.format(account=account) + rest._protocol = "https" + rest._host = f"{account}.snowflakecomputing.com" rest._port = 443 auth._step5(ref_response_html) assert not rest._connection.errorhandler.called # no error @@ -84,83 +80,78 @@ def fake_fetch(method, full_url, headers, **kwargs): def test_auth_okta_step1_negative(): - """ - Authentication by OKTA step1 negative test case - """ - authenticator = 'https://testsso.snowflake.net/' - application = 'testapplication' - account = 'testaccount' - user = 'testuser' - service_name = '' + """Authentication by OKTA step1 negative test case.""" + authenticator = "https://testsso.snowflake.net/" + application = "testapplication" + account = "testaccount" + user = "testuser" + service_name = "" # not success status is returned - ref_sso_url = 'https://testsso.snowflake.net/sso' - ref_token_url = 'https://testsso.snowflake.net/token' - rest = _init_rest( - ref_sso_url, ref_token_url, success=False, message='error') + ref_sso_url = "https://testsso.snowflake.net/sso" + ref_token_url = "https://testsso.snowflake.net/token" + rest = _init_rest(ref_sso_url, ref_token_url, success=False, message="error") auth = AuthByOkta(rest, application) # step 1 - _, _, _ = auth._step1( - authenticator, service_name, account, user) + _, _, _ = auth._step1(authenticator, service_name, account, user) assert rest._connection.errorhandler.called # error should be raised def test_auth_okta_step2_negative(): - """ - Authentication by OKTA step2 negative test case - """ - authenticator = 'https://testsso.snowflake.net/' - application = 'testapplication' - account = 'testaccount' - user = 'testuser' - service_name = '' + """Authentication by OKTA step2 negative test case.""" + authenticator = "https://testsso.snowflake.net/" + application = "testapplication" + account = "testaccount" + user = "testuser" + service_name = "" # invalid SSO URL - ref_sso_url = 'https://testssoinvalid.snowflake.net/sso' - ref_token_url = 'https://testsso.snowflake.net/token' + ref_sso_url = "https://testssoinvalid.snowflake.net/sso" + ref_token_url = "https://testsso.snowflake.net/token" rest = _init_rest(ref_sso_url, ref_token_url) auth = AuthByOkta(rest, application) # step 1 headers, sso_url, token_url = auth._step1( - authenticator, service_name, account, user) + authenticator, service_name, account, user + ) # step 2 auth._step2(authenticator, sso_url, token_url) assert rest._connection.errorhandler.called # error # invalid TOKEN URL - ref_sso_url = 'https://testsso.snowflake.net/sso' - ref_token_url = 'https://testssoinvalid.snowflake.net/token' + ref_sso_url = "https://testsso.snowflake.net/sso" + ref_token_url = "https://testssoinvalid.snowflake.net/token" rest = _init_rest(ref_sso_url, ref_token_url) auth = AuthByOkta(rest, application) # step 1 headers, sso_url, token_url = auth._step1( - authenticator, service_name, account, user) + authenticator, service_name, account, user + ) # step 2 auth._step2(authenticator, sso_url, token_url) assert rest._connection.errorhandler.called # error def test_auth_okta_step3_negative(): - """ - Authentication by OKTA step3 negative test case - """ - authenticator = 'https://testsso.snowflake.net/' - application = 'testapplication' - account = 'testaccount' - user = 'testuser' - 
password = 'testpassword' - service_name = '' - - ref_sso_url = 'https://testsso.snowflake.net/sso' - ref_token_url = 'https://testsso.snowflake.net/token' + """Authentication by OKTA step3 negative test case.""" + authenticator = "https://testsso.snowflake.net/" + application = "testapplication" + account = "testaccount" + user = "testuser" + password = "testpassword" + service_name = "" + + ref_sso_url = "https://testsso.snowflake.net/sso" + ref_token_url = "https://testsso.snowflake.net/token" rest = _init_rest(ref_sso_url, ref_token_url) auth = AuthByOkta(rest, application) # step 1 headers, sso_url, token_url = auth._step1( - authenticator, service_name, account, user) + authenticator, service_name, account, user + ) # step 2 auth._step2(authenticator, sso_url, token_url) assert not rest._connection.errorhandler.called # no error @@ -168,7 +159,7 @@ def test_auth_okta_step3_negative(): # step 3: authentication by IdP failed. def fake_fetch(method, full_url, headers, **kwargs): return { - 'failed': 'auth failed', + "failed": "auth failed", } rest.fetch = fake_fetch @@ -177,34 +168,33 @@ def fake_fetch(method, full_url, headers, **kwargs): def test_auth_okta_step5_negative(): - """ - Authentication by OKTA step5 negative test case - """ - authenticator = 'https://testsso.snowflake.net/' - application = 'testapplication' - account = 'testaccount' - user = 'testuser' - password = 'testpassword' - service_name = '' - - ref_sso_url = 'https://testsso.snowflake.net/sso' - ref_token_url = 'https://testsso.snowflake.net/token' + """Authentication by OKTA step5 negative test case.""" + authenticator = "https://testsso.snowflake.net/" + application = "testapplication" + account = "testaccount" + user = "testuser" + password = "testpassword" + service_name = "" + + ref_sso_url = "https://testsso.snowflake.net/sso" + ref_token_url = "https://testsso.snowflake.net/token" rest = _init_rest(ref_sso_url, ref_token_url) auth = AuthByOkta(rest, application) # step 1 headers, sso_url, token_url = auth._step1( - authenticator, service_name, account, user) + authenticator, service_name, account, user + ) assert not rest._connection.errorhandler.called # no error # step 2 auth._step2(authenticator, sso_url, token_url) assert not rest._connection.errorhandler.called # no error # step 3 - ref_one_time_token = '1token1' + ref_one_time_token = "1token1" def fake_fetch(method, full_url, headers, **kwargs): return { - 'cookieToken': ref_one_time_token, + "cookieToken": ref_one_time_token, } rest.fetch = fake_fetch @@ -213,12 +203,12 @@ def fake_fetch(method, full_url, headers, **kwargs): # step 4 # HTML includes invalid account name - ref_response_html = ''' + ref_response_html = """
-''' +""" def fake_fetch(method, full_url, headers, **kwargs): return ref_response_html @@ -228,8 +218,8 @@ def fake_fetch(method, full_url, headers, **kwargs): assert response_html == ref_response_html # step 5 - rest._protocol = 'https' - rest._host = '{account}.snowflakecomputing.com'.format(account=account) + rest._protocol = "https" + rest._host = f"{account}.snowflakecomputing.com" rest._port = 443 auth._step5(ref_response_html) assert rest._connection.errorhandler.called # error @@ -240,30 +230,29 @@ def post_request(url, headers, body, **kwargs): _ = url _ = headers _ = body - _ = kwargs.get('dummy') + _ = kwargs.get("dummy") return { - 'success': success, - 'message': message, - 'data': { - 'ssoUrl': ref_sso_url, - 'tokenUrl': ref_token_url, - } + "success": success, + "message": message, + "data": { + "ssoUrl": ref_sso_url, + "tokenUrl": ref_token_url, + }, } connection = MagicMock() connection._login_timeout = 120 + connection._network_timeout = None connection.errorhandler = Mock(return_value=None) connection._ocsp_mode = Mock(return_value=OCSPMode.FAIL_OPEN) type(connection).application = PropertyMock(return_value=CLIENT_NAME) - type(connection)._internal_application_name = PropertyMock( - return_value=CLIENT_NAME - ) + type(connection)._internal_application_name = PropertyMock(return_value=CLIENT_NAME) type(connection)._internal_application_version = PropertyMock( return_value=CLIENT_VERSION ) - rest = SnowflakeRestful(host='testaccount.snowflakecomputing.com', - port=443, - connection=connection) + rest = SnowflakeRestful( + host="testaccount.snowflakecomputing.com", port=443, connection=connection + ) rest._post_request = post_request return rest diff --git a/test/unit/test_auth_webbrowser.py b/test/unit/test_auth_webbrowser.py new file mode 100644 index 000000000..5adec7db5 --- /dev/null +++ b/test/unit/test_auth_webbrowser.py @@ -0,0 +1,236 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
+# + +from __future__ import annotations + +from unittest.mock import MagicMock, Mock, PropertyMock, patch + +import pytest + +from snowflake.connector.auth_webbrowser import AuthByWebBrowser +from snowflake.connector.constants import OCSPMode +from snowflake.connector.description import CLIENT_NAME, CLIENT_VERSION +from snowflake.connector.network import EXTERNAL_BROWSER_AUTHENTICATOR, SnowflakeRestful + +AUTHENTICATOR = "https://testsso.snowflake.net/" +APPLICATION = "testapplication" +ACCOUNT = "testaccount" +USER = "testuser" +PASSWORD = "testpassword" +SERVICE_NAME = "" +REF_PROOF_KEY = "MOCK_PROOF_KEY" +REF_SSO_URL = "https://testsso.snowflake.net/sso" + + +def mock_webserver(target_instance, application, port): + _ = application + _ = port + target_instance._webserver_status = True + + +def test_auth_webbrowser_get(): + """Authentication by WebBrowser positive test case.""" + ref_token = "MOCK_TOKEN" + + rest = _init_rest(REF_SSO_URL, REF_PROOF_KEY) + + # mock webbrowser + mock_webbrowser = MagicMock() + mock_webbrowser.open_new.return_value = True + + # mock socket + mock_socket_instance = MagicMock() + mock_socket_instance.getsockname.return_value = [None, 12345] + + mock_socket_client = MagicMock() + mock_socket_client.recv.return_value = ( + "\r\n".join( + [ + f"GET /?token={ref_token}&confirm=true HTTP/1.1", + "User-Agent: snowflake-agent", + ] + ) + ).encode("utf-8") + mock_socket_instance.accept.return_value = (mock_socket_client, None) + mock_socket = Mock(return_value=mock_socket_instance) + + auth = AuthByWebBrowser( + rest, APPLICATION, webbrowser_pkg=mock_webbrowser, socket_pkg=mock_socket + ) + auth.authenticate(AUTHENTICATOR, SERVICE_NAME, ACCOUNT, USER, PASSWORD) + assert not rest._connection.errorhandler.called # no error + assert auth.assertion_content == ref_token + body = {"data": {}} + auth.update_body(body) + assert body["data"]["TOKEN"] == ref_token + assert body["data"]["PROOF_KEY"] == REF_PROOF_KEY + assert body["data"]["AUTHENTICATOR"] == EXTERNAL_BROWSER_AUTHENTICATOR + + +def test_auth_webbrowser_post(): + """Authentication by WebBrowser positive test case with POST.""" + ref_token = "MOCK_TOKEN" + + rest = _init_rest(REF_SSO_URL, REF_PROOF_KEY) + + # mock webbrowser + mock_webbrowser = MagicMock() + mock_webbrowser.open_new.return_value = True + + # mock socket + mock_socket_instance = MagicMock() + mock_socket_instance.getsockname.return_value = [None, 12345] + + mock_socket_client = MagicMock() + mock_socket_client.recv.return_value = ( + "\r\n".join( + [ + "POST / HTTP/1.1", + "User-Agent: snowflake-agent", + "Host: localhost:12345", + "", + f"token={ref_token}&confirm=true", + ] + ) + ).encode("utf-8") + mock_socket_instance.accept.return_value = (mock_socket_client, None) + mock_socket = Mock(return_value=mock_socket_instance) + + auth = AuthByWebBrowser( + rest, APPLICATION, webbrowser_pkg=mock_webbrowser, socket_pkg=mock_socket + ) + auth.authenticate(AUTHENTICATOR, SERVICE_NAME, ACCOUNT, USER, PASSWORD) + assert not rest._connection.errorhandler.called # no error + assert auth.assertion_content == ref_token + body = {"data": {}} + auth.update_body(body) + assert body["data"]["TOKEN"] == ref_token + assert body["data"]["PROOF_KEY"] == REF_PROOF_KEY + assert body["data"]["AUTHENTICATOR"] == EXTERNAL_BROWSER_AUTHENTICATOR + + +@pytest.mark.parametrize( + "input_text,expected_error", + [ + ("", True), + ("http://example.com/notokenurl", True), + ("http://example.com/sso?token=", True), + ("http://example.com/sso?token=MOCK_TOKEN", False), + ], +) +def 
test_auth_webbrowser_fail_webbrowser( + monkeypatch, capsys, input_text, expected_error +): + """Authentication by WebBrowser when the web browser fails to start.""" + rest = _init_rest(REF_SSO_URL, REF_PROOF_KEY) + ref_token = "MOCK_TOKEN" + + # mock webbrowser + mock_webbrowser = MagicMock() + mock_webbrowser.open_new.return_value = False + + # mock socket + mock_socket_instance = MagicMock() + mock_socket_instance.getsockname.return_value = [None, 12345] + + mock_socket_client = MagicMock() + mock_socket_client.recv.return_value = ( + "\r\n".join(["GET /?token=MOCK_TOKEN HTTP/1.1", "User-Agent: snowflake-agent"]) + ).encode("utf-8") + mock_socket_instance.accept.return_value = (mock_socket_client, None) + mock_socket = Mock(return_value=mock_socket_instance) + + auth = AuthByWebBrowser( + rest, APPLICATION, webbrowser_pkg=mock_webbrowser, socket_pkg=mock_socket + ) + with patch("builtins.input", return_value=input_text): + auth.authenticate(AUTHENTICATOR, SERVICE_NAME, ACCOUNT, USER, PASSWORD) + captured = capsys.readouterr() + assert captured.out == ( + "Initiating login request with your identity provider. A browser window " + "should have opened for you to complete the login. If you can't see it, " + "check existing browser windows, or your OS settings. Press CTRL+C to " + "abort and try again...\nWe were unable to open a browser window for " + "you, please open the following url manually then paste the URL you " + f"are redirected to into the terminal.\nURL: {REF_SSO_URL}\n" + ) + if expected_error: + assert rest._connection.errorhandler.called # an error + assert auth.assertion_content is None + else: + assert not rest._connection.errorhandler.called # no error + body = {"data": {}} + auth.update_body(body) + assert body["data"]["TOKEN"] == ref_token + assert body["data"]["PROOF_KEY"] == REF_PROOF_KEY + assert body["data"]["AUTHENTICATOR"] == EXTERNAL_BROWSER_AUTHENTICATOR + + +def test_auth_webbrowser_fail_webserver(capsys): + """Authentication by WebBrowser when the local web server receives an invalid request.""" + rest = _init_rest(REF_SSO_URL, REF_PROOF_KEY) + + # mock webbrowser + mock_webbrowser = MagicMock() + mock_webbrowser.open_new.return_value = True + + # mock socket + mock_socket_instance = MagicMock() + mock_socket_instance.getsockname.return_value = [None, 12345] + + mock_socket_client = MagicMock() + mock_socket_client.recv.return_value = ( + "\r\n".join(["GARBAGE", "User-Agent: snowflake-agent"]) + ).encode("utf-8") + mock_socket_instance.accept.return_value = (mock_socket_client, None) + mock_socket = Mock(return_value=mock_socket_instance) + + # case 1: invalid HTTP request + auth = AuthByWebBrowser( + rest, APPLICATION, webbrowser_pkg=mock_webbrowser, socket_pkg=mock_socket + ) + auth.authenticate(AUTHENTICATOR, SERVICE_NAME, ACCOUNT, USER, PASSWORD) + captured = capsys.readouterr() + assert captured.out == ( + "Initiating login request with your identity provider. A browser window " + "should have opened for you to complete the login. If you can't see it, " + "check existing browser windows, or your OS settings. 
Press CTRL+C to " + "abort and try again...\n" + ) + assert rest._connection.errorhandler.called # an error + assert auth.assertion_content is None + + +def _init_rest(ref_sso_url, ref_proof_key, success=True, message=None): + def post_request(url, headers, body, **kwargs): + _ = url + _ = headers + _ = body + _ = kwargs.get("dummy") + return { + "success": success, + "message": message, + "data": { + "ssoUrl": ref_sso_url, + "proofKey": ref_proof_key, + }, + } + + connection = MagicMock() + connection._login_timeout = 120 + connection._network_timeout = None + connection.errorhandler = Mock(return_value=None) + connection._ocsp_mode = Mock(return_value=OCSPMode.FAIL_OPEN) + type(connection).application = PropertyMock(return_value=CLIENT_NAME) + type(connection)._internal_application_name = PropertyMock(return_value=CLIENT_NAME) + type(connection)._internal_application_version = PropertyMock( + return_value=CLIENT_VERSION + ) + + rest = SnowflakeRestful( + host="testaccount.snowflakecomputing.com", port=443, connection=connection + ) + rest._post_request = post_request + return rest diff --git a/test/unit/test_binaryformat.py b/test/unit/test_binaryformat.py new file mode 100644 index 000000000..87455c077 --- /dev/null +++ b/test/unit/test_binaryformat.py @@ -0,0 +1,43 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +from snowflake.connector.sfbinaryformat import ( + SnowflakeBinaryFormat, + binary_to_python, + binary_to_snowflake, +) + + +def test_basic(): + """Test hex and base64 formatting.""" + # Hex + fmt = SnowflakeBinaryFormat("heX") + assert fmt.format(b"") == "" + assert fmt.format(b"\x00") == "00" + assert fmt.format(b"\xAB\xCD\x12") == "ABCD12" + assert fmt.format(b"\x00\xFF\x42\x01") == "00FF4201" + + # Base64 + fmt = SnowflakeBinaryFormat("BasE64") + assert fmt.format(b"") == "" + assert fmt.format(b"\x00") == "AA==" + assert fmt.format(b"\xAB\xCD\x12") == "q80S" + assert fmt.format(b"\x00\xFF\x42\x01") == "AP9CAQ==" + + +def test_binary_to_python(): + """Test conversion to Python data type.""" + assert binary_to_python("") == b"" + assert binary_to_python("00") == b"\x00" + assert binary_to_python("ABCD12") == b"\xAB\xCD\x12" + + +def test_binary_to_snowflake(): + """Test conversion for passing to Snowflake.""" + assert binary_to_snowflake(b"") == b"" + assert binary_to_snowflake(b"\x00") == b"00" + assert binary_to_snowflake(b"\xAB\xCD\x12") == b"ABCD12" diff --git a/test/unit/test_bind_upload_agent.py b/test/unit/test_bind_upload_agent.py new file mode 100644 index 000000000..e35342339 --- /dev/null +++ b/test/unit/test_bind_upload_agent.py @@ -0,0 +1,27 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
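For reference, the hex and base64 expectations in `test_basic` above line up with the standard library's own encoders; a quick cross-check (an equivalence note, not part of the connector's API):

```python
import base64
import binascii

data = b"\xAB\xCD\x12"
# SnowflakeBinaryFormat("HEX") output matches an upper-cased hexlify.
assert binascii.hexlify(data).decode().upper() == "ABCD12"
# SnowflakeBinaryFormat("BASE64") output matches the stdlib encoding.
assert base64.b64encode(data).decode() == "q80S"
```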
+# +from __future__ import annotations + +from unittest.mock import MagicMock + + +def test_bind_upload_agent_uploading_multiple_files(): + from snowflake.connector.bind_upload_agent import BindUploadAgent + + csr = MagicMock(auto_spec=True) + rows = [bytes(10)] * 10 + agent = BindUploadAgent(csr, rows, stream_buffer_size=10) + agent.upload() + assert csr.execute.call_count == 11 # 1 for stage creation + 10 files + + +def test_bind_upload_agent_row_size_exceed_buffer_size(): + from snowflake.connector.bind_upload_agent import BindUploadAgent + + csr = MagicMock(auto_spec=True) + rows = [bytes(15)] * 10 + agent = BindUploadAgent(csr, rows, stream_buffer_size=10) + agent.upload() + assert csr.execute.call_count == 11 # 1 for stage creation + 10 files diff --git a/test/unit/test_connection.py b/test/unit/test_connection.py new file mode 100644 index 000000000..87a1fad16 --- /dev/null +++ b/test/unit/test_connection.py @@ -0,0 +1,157 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# +from __future__ import annotations + +import json +import os +import sys +from unittest.mock import patch + +import pytest + +import snowflake.connector + +try: # pragma: no cover + from snowflake.connector.constants import ENV_VAR_PARTNER, QueryStatus +except ImportError: + ENV_VAR_PARTNER = "SF_PARTNER" + QueryStatus = None + + +def fake_connector() -> snowflake.connector.SnowflakeConnection: + return snowflake.connector.connect( + user="user", + account="account", + password="testpassword", + database="TESTDB", + warehouse="TESTWH", + ) + + +@pytest.fixture +def mock_post_requests(monkeypatch): + request_body = {} + + def mock_post_request(request, url, headers, json_body, **kwargs): + nonlocal request_body + request_body.update(json.loads(json_body)) + return { + "success": True, + "message": None, + "data": { + "token": "TOKEN", + "masterToken": "MASTER_TOKEN", + "idToken": None, + "parameters": [{"name": "SERVICE_NAME", "value": "FAKE_SERVICE_NAME"}], + }, + } + + monkeypatch.setattr( + snowflake.connector.network.SnowflakeRestful, "_post_request", mock_post_request + ) + + return request_body + + +def test_connect_with_service_name(mock_post_requests): + assert fake_connector().service_name == "FAKE_SERVICE_NAME" + + +@pytest.mark.skip(reason="Mock doesn't work as expected.") +@patch("snowflake.connector.network.SnowflakeRestful._post_request") +def test_connection_ignore_exception(mockSnowflakeRestfulPostRequest): + def mock_post_request(url, headers, json_body, **kwargs): + global mock_cnt + ret = None + if mock_cnt == 0: + # return from /v1/login-request + ret = { + "success": True, + "message": None, + "data": { + "token": "TOKEN", + "masterToken": "MASTER_TOKEN", + "idToken": None, + "parameters": [ + {"name": "SERVICE_NAME", "value": "FAKE_SERVICE_NAME"} + ], + }, + } + elif mock_cnt == 1: + ret = { + "success": False, + "message": "Session gone", + "data": None, + "code": 390111, + } + mock_cnt += 1 + return ret + + # POST requests mock + mockSnowflakeRestfulPostRequest.side_effect = mock_post_request + + global mock_cnt + mock_cnt = 0 + + account = "testaccount" + user = "testuser" + + # connection + con = snowflake.connector.connect( + account=account, + user=user, + password="testpassword", + database="TESTDB", + warehouse="TESTWH", + ) + # Test to see if closing connection works or raises an exception. If an exception is raised, test will fail. 
+ con.close() + + +@pytest.mark.skipolddriver +def test_is_still_running(): + """Checks that is_still_running returns expected results.""" + statuses = [ + (QueryStatus.RUNNING, True), + (QueryStatus.ABORTING, False), + (QueryStatus.SUCCESS, False), + (QueryStatus.FAILED_WITH_ERROR, False), + (QueryStatus.ABORTED, False), + (QueryStatus.QUEUED, True), + (QueryStatus.FAILED_WITH_INCIDENT, False), + (QueryStatus.DISCONNECTED, False), + (QueryStatus.RESUMING_WAREHOUSE, True), + (QueryStatus.QUEUED_REPARING_WAREHOUSE, True), + (QueryStatus.RESTARTED, False), + (QueryStatus.BLOCKED, True), + (QueryStatus.NO_DATA, True), + ] + for status, expected_result in statuses: + assert ( + snowflake.connector.SnowflakeConnection.is_still_running(status) + == expected_result + ) + + +@pytest.mark.skipolddriver +def test_partner_env_var(mock_post_requests): + PARTNER_NAME = "Amanda" + + with patch.dict(os.environ, {ENV_VAR_PARTNER: PARTNER_NAME}): + assert fake_connector().application == PARTNER_NAME + + assert ( + mock_post_requests["data"]["CLIENT_ENVIRONMENT"]["APPLICATION"] == PARTNER_NAME + ) + + +@pytest.mark.skipolddriver +def test_imported_module(mock_post_requests): + with patch.dict(sys.modules, {"streamlit": "foo"}): + assert fake_connector().application == "streamlit" + + assert ( + mock_post_requests["data"]["CLIENT_ENVIRONMENT"]["APPLICATION"] == "streamlit" + ) diff --git a/test/unit/test_construct_hostname.py b/test/unit/test_construct_hostname.py new file mode 100644 index 000000000..3935bc70e --- /dev/null +++ b/test/unit/test_construct_hostname.py @@ -0,0 +1,44 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +from snowflake.connector.util_text import construct_hostname + + +def test_construct_hostname_basic(): + assert ( + construct_hostname("eu-central-1", "account1") + == "account1.eu-central-1.snowflakecomputing.com" + ) + + assert construct_hostname("", "account1") == "account1.snowflakecomputing.com" + + assert construct_hostname(None, "account1") == "account1.snowflakecomputing.com" + + assert ( + construct_hostname("as-east-3", "account1") + == "account1.as-east-3.snowflakecomputing.com" + ) + + assert ( + construct_hostname("as-east-3", "account1.eu-central-1") + == "account1.as-east-3.snowflakecomputing.com" + ) + + assert ( + construct_hostname("", "account1.eu-central-1") + == "account1.eu-central-1.snowflakecomputing.com" + ) + + assert ( + construct_hostname(None, "account1.eu-central-1") + == "account1.eu-central-1.snowflakecomputing.com" + ) + + assert ( + construct_hostname(None, "account1-jkabfvdjisoa778wqfgeruishafeuw89q.global") + == "account1-jkabfvdjisoa778wqfgeruishafeuw89q.global.snowflakecomputing.com" + ) diff --git a/test/unit/test_converter.py b/test/unit/test_converter.py new file mode 100644 index 000000000..b4fd5724f --- /dev/null +++ b/test/unit/test_converter.py @@ -0,0 +1,90 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
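The `construct_hostname` assertions above pin down a simple rule: an explicit region overrides any region embedded in the account name, and otherwise the account string is used as-is. A behavioral sketch consistent with those assertions (an illustration, not the connector's actual implementation):

```python
from __future__ import annotations


def construct_hostname_sketch(region: str | None, account: str) -> str:
    # An explicit region wins over a region embedded in the account name.
    if region:
        account = account.split(".")[0]
        return f"{account}.{region}.snowflakecomputing.com"
    return f"{account}.snowflakecomputing.com"


assert (
    construct_hostname_sketch("as-east-3", "account1.eu-central-1")
    == "account1.as-east-3.snowflakecomputing.com"
)
```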
+# + +from __future__ import annotations + +from logging import getLogger + +import pytest + +from snowflake.connector import ProgrammingError +from snowflake.connector.connection import DefaultConverterClass +from snowflake.connector.converter import SnowflakeConverter +from snowflake.connector.converter_snowsql import SnowflakeConverterSnowSQL + +logger = getLogger(__name__) + +ConverterSnowSQL = SnowflakeConverterSnowSQL + + +def test_is_dst(): + """SNOW-6020: Failed to convert to local time while DST is changing.""" + # DST to non-DST + convClass = DefaultConverterClass() + conv = convClass() + conv.set_parameter("TIMEZONE", "America/Los_Angeles") + + col_meta = { + "name": "CREATED_ON", + "type": 6, + "length": None, + "precision": None, + "scale": 3, + "nullable": True, + } + m = conv.to_python_method("TIMESTAMP_LTZ", col_meta) + ret = m("1414890189.000") + + assert ( + str(ret) == "2014-11-01 18:03:09-07:00" + ), "Timestamp during the DST to non-DST transition" + + # non-DST to DST + col_meta = { + "name": "CREATED_ON", + "type": 6, + "length": None, + "precision": None, + "scale": 3, + "nullable": True, + } + m = conv.to_python_method("TIMESTAMP_LTZ", col_meta) + ret = m("1425780189.000") + + assert ( + str(ret) == "2015-03-07 18:03:09-08:00" + ), "Timestamp during the non-DST to DST transition" + + +def test_more_timestamps(): + conv = ConverterSnowSQL() + conv.set_parameter("TIMESTAMP_NTZ_OUTPUT_FORMAT", "YYYY-MM-DD HH24:MI:SS.FF9") + m = conv.to_python_method("TIMESTAMP_NTZ", {"scale": 9}) + assert m("-2208943503.876543211") == "1900-01-01 12:34:56.123456789" + assert m("-2208943503.000000000") == "1900-01-01 12:34:57.000000000" + assert m("-2208943503.012000000") == "1900-01-01 12:34:56.988000000" + + conv.set_parameter("TIMESTAMP_NTZ_OUTPUT_FORMAT", "YYYY-MM-DD HH24:MI:SS.FF9") + m = conv.to_python_method("TIMESTAMP_NTZ", {"scale": 7}) + assert m("-2208943503.8765432") == "1900-01-01 12:34:56.123456800" + assert m("-2208943503.0000000") == "1900-01-01 12:34:57.000000000" + assert m("-2208943503.0120000") == "1900-01-01 12:34:56.988000000" + + +def test_converter_to_snowflake_error(): + converter = SnowflakeConverter() + with pytest.raises( + ProgrammingError, match=r"Binding data in type \(bogus\) is not supported" + ): + converter._bogus_to_snowflake("Bogus") + + +def test_converter_to_snowflake_bindings_error(): + converter = SnowflakeConverter() + with pytest.raises( + ProgrammingError, + match=r"Binding data in type \(somethingsomething\) is not supported", + ): + converter._somethingsomething_to_snowflake_bindings("Bogus") diff --git a/test/unit/test_cursor.py b/test/unit/test_cursor.py new file mode 100644 index 000000000..f054fd074 --- /dev/null +++ b/test/unit/test_cursor.py @@ -0,0 +1,29 @@ +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
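The negative epochs in `test_more_timestamps` above can be sanity-checked by hand: "-2208943503.876543211" denotes minus the whole value, so the integer seconds round down to -2208943504 and the fraction becomes .123456789 (1900-01-01 00:00:00 UTC is epoch -2208988800, and 45296.123456789 seconds later is 12:34:56.123456789). A float-precision cross-check with the standard library:

```python
from datetime import datetime, timedelta

# Floats only carry roughly microsecond precision at this magnitude;
# the converter itself keeps all nine fractional digits.
ts = datetime(1970, 1, 1) + timedelta(seconds=-2208943503.876543211)
assert str(ts).startswith("1900-01-01 12:34:56.123")
```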
+# +from __future__ import annotations + +import pytest + +from snowflake.connector.cursor import SnowflakeCursor + +try: + from snowflake.connector.constants import FileTransferType +except ImportError: + from enum import Enum + + class FileTransferType(Enum): + GET = "get" + PUT = "put" + + +@pytest.mark.parametrize( + "sql,_type", + ( + ("PUT file:///tmp/data/mydata.csv @my_int_stage;", FileTransferType.PUT), + ("GET @%mytable file:///tmp/data/;", FileTransferType.GET), + ("select 1;", None), + ), +) +def test_get_filetransfer_type(sql, _type): + assert SnowflakeCursor.get_file_transfer_type(sql) == _type diff --git a/test/unit/test_datetime.py b/test/unit/test_datetime.py new file mode 100644 index 000000000..9a89f9373 --- /dev/null +++ b/test/unit/test_datetime.py @@ -0,0 +1,90 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +import time +from datetime import datetime + +import pytest + +from snowflake.connector.compat import IS_WINDOWS +from snowflake.connector.sfdatetime import SnowflakeDateTime, SnowflakeDateTimeFormat + + +def test_basic_datetime_format(): + """Datetime format basic tests.""" + # date + value = datetime(2014, 11, 30) + formatter = SnowflakeDateTimeFormat("YYYY-MM-DD") + assert formatter.format(value) == "2014-11-30" + + # date time => date + value = datetime(2014, 11, 30, 12, 31, 45) + formatter = SnowflakeDateTimeFormat("YYYY-MM-DD") + assert formatter.format(value) == "2014-11-30" + + # date time => date time + value = datetime(2014, 11, 30, 12, 31, 45) + formatter = SnowflakeDateTimeFormat('YYYY-MM-DD"T"HH24:MI:SS') + assert formatter.format(value) == "2014-11-30T12:31:45" + + # date time => date time in microseconds with 4-digit precision + value = datetime(2014, 11, 30, 12, 31, 45, microsecond=987654) + formatter = SnowflakeDateTimeFormat('YYYY-MM-DD"T"HH24:MI:SS.FF4') + assert formatter.format(value) == "2014-11-30T12:31:45.9876" + + # date time => date time in microseconds with full precision up to + # microseconds + value = datetime(2014, 11, 30, 12, 31, 45, microsecond=987654) + formatter = SnowflakeDateTimeFormat('YYYY-MM-DD"T"HH24:MI:SS.FF') + assert formatter.format(value) == "2014-11-30T12:31:45.987654" + + +def test_datetime_with_smaller_milliseconds(): + # date time => date time in microseconds with full precision up to + # microseconds + value = datetime(2014, 11, 30, 12, 31, 45, microsecond=123) + formatter = SnowflakeDateTimeFormat('YYYY-MM-DD"T"HH24:MI:SS.FF9') + assert formatter.format(value) == "2014-11-30T12:31:45.000123" + + +def test_datetime_format_negative(): + """Datetime format negative.""" + value = datetime(2014, 11, 30, 12, 31, 45, microsecond=987654) + formatter = SnowflakeDateTimeFormat('YYYYYYMMMDDDDD"haha"hoho"hihi"H12HHH24MI') + assert formatter.format(value) == "20141411M3030DhahaHOHOhihiH1212H2431" + + +def test_struct_time_format(): + # struct_time for general use + value = time.strptime("30 Sep 01 11:20:30", "%d %b %y %H:%M:%S") + formatter = SnowflakeDateTimeFormat('YYYY-MM-DD"T"HH24:MI:SS.FF') + assert formatter.format(value) == "2001-09-30T11:20:30.0" + + # struct_time encapsulated in SnowflakeDateTime. 
Mainly used by SnowSQL + value = SnowflakeDateTime( + time.strptime("30 Sep 01 11:20:30", "%d %b %y %H:%M:%S"), nanosecond=0, scale=1 + ) + formatter = SnowflakeDateTimeFormat( + 'YYYY-MM-DD"T"HH24:MI:SS.FF', datetime_class=SnowflakeDateTime + ) + assert formatter.format(value) == "2001-09-30T11:20:30.0" + + # format without fraction of seconds + formatter = SnowflakeDateTimeFormat( + 'YYYY-MM-DD"T"HH24:MI:SS', datetime_class=SnowflakeDateTime + ) + assert formatter.format(value) == "2001-09-30T11:20:30" + + +@pytest.mark.skipif(IS_WINDOWS, reason="not supported yet") +def test_struct_time_format_extreme_large(): + # extreme large epoch time + value = SnowflakeDateTime(time.gmtime(14567890123567), nanosecond=0, scale=1) + formatter = SnowflakeDateTimeFormat( + 'YYYY-MM-DD"T"HH24:MI:SS.FF', datetime_class=SnowflakeDateTime + ) + assert formatter.format(value) == "463608-01-23T09:26:07.0" diff --git a/test/unit/test_dbapi.py b/test/unit/test_dbapi.py new file mode 100644 index 000000000..7b8fdd969 --- /dev/null +++ b/test/unit/test_dbapi.py @@ -0,0 +1,12 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +from snowflake.connector.dbapi import Binary + + +def test_Binary(): + assert Binary(b"foo") == b"foo" diff --git a/test/unit/test_encryption_util.py b/test/unit/test_encryption_util.py new file mode 100644 index 000000000..09b31ed7c --- /dev/null +++ b/test/unit/test_encryption_util.py @@ -0,0 +1,102 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +import codecs +import glob +import os +from os import path + +from snowflake.connector.constants import UTF8 +from snowflake.connector.encryption_util import SnowflakeEncryptionUtil + +try: # pragma: no cover + from snowflake.connector.storage_client import SnowflakeFileEncryptionMaterial +except ImportError: # keep olddrivertest from breaking + from snowflake.connector.remote_storage_util import ( + SnowflakeFileEncryptionMaterial, + ) + +from ..generate_test_files import generate_k_lines_of_n_files + +THIS_DIR = path.dirname(path.realpath(__file__)) + + +def test_encrypt_decrypt_file(tmp_path): + """Encrypts and Decrypts a file.""" + encryption_material = SnowflakeFileEncryptionMaterial( + query_stage_master_key="ztke8tIdVt1zmlQIZm0BMA==", + query_id="123873c7-3a66-40c4-ab89-e3722fbccce1", + smk_id=3112, + ) + data = "test data" + input_file = tmp_path / "test_encrypt_decrypt_file" + encrypted_file = None + decrypted_file = None + try: + with input_file.open("w", encoding=UTF8) as fd: + fd.write(data) + + (metadata, encrypted_file) = SnowflakeEncryptionUtil.encrypt_file( + encryption_material, input_file + ) + decrypted_file = SnowflakeEncryptionUtil.decrypt_file( + metadata, encryption_material, encrypted_file + ) + + contents = "" + with codecs.open(decrypted_file, "r", encoding=UTF8) as fd: + for line in fd: + contents += line + assert data == contents, "encrypted and decrypted contents" + finally: + input_file.unlink() + if encrypted_file: + os.remove(encrypted_file) + if decrypted_file: + os.remove(decrypted_file) + + +def test_encrypt_decrypt_large_file(tmpdir): + """Encrypts and Decrypts a large file.""" + encryption_material = SnowflakeFileEncryptionMaterial( + query_stage_master_key="ztke8tIdVt1zmlQIZm0BMA==", + query_id="123873c7-3a66-40c4-ab89-e3722fbccce1", + smk_id=3112, + ) + + # generates N files + number_of_files = 1 + number_of_lines = 
10000 + tmp_dir = generate_k_lines_of_n_files( + number_of_lines, number_of_files, tmp_dir=str(tmpdir.mkdir("data")) + ) + + files = glob.glob(os.path.join(tmp_dir, "file*")) + input_file = files[0] + encrypted_file = None + decrypted_file = None + try: + (metadata, encrypted_file) = SnowflakeEncryptionUtil.encrypt_file( + encryption_material, input_file + ) + decrypted_file = SnowflakeEncryptionUtil.decrypt_file( + metadata, encryption_material, encrypted_file + ) + + contents = "" + cnt = 0 + with codecs.open(decrypted_file, "r", encoding=UTF8) as fd: + for line in fd: + contents += line + cnt += 1 + assert cnt == number_of_lines, "number of lines" + finally: + os.remove(input_file) + if encrypted_file: + os.remove(encrypted_file) + if decrypted_file: + os.remove(decrypted_file) diff --git a/test/unit/test_errors.py b/test/unit/test_errors.py new file mode 100644 index 000000000..3aee8f8d7 --- /dev/null +++ b/test/unit/test_errors.py @@ -0,0 +1,40 @@ +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +import re +import uuid + +from snowflake.connector import errors + + +def test_detecting_duplicate_detail_insertion(): + sfqid = str(uuid.uuid4()) + sqlstate = "24000" + errno = 123456 + msg = "Some error happened" + expected_msg = re.compile(rf"{errno} \({sqlstate}\): {sfqid}: {msg}") + original_ex = errors.ProgrammingError( + sqlstate=sqlstate, + sfqid=sfqid, + errno=errno, + msg=msg, + ) + # Test whether regular exception confirms to what we expect to see + assert expected_msg.fullmatch(original_ex.msg) + + # Test whether exception with flag confirms to what we expect to see + assert errors.ProgrammingError( + msg=original_ex.msg, + done_format_msg=True, + ) + # Test whether exception with auto detection confirms to what we expect to see + assert errors.ProgrammingError( + msg=original_ex.msg, + ) + + +def test_args(): + assert errors.Error("msg").args == ("msg",) diff --git a/test/unit/test_gcs_client.py b/test/unit/test_gcs_client.py new file mode 100644 index 000000000..ef645586a --- /dev/null +++ b/test/unit/test_gcs_client.py @@ -0,0 +1,348 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +import logging +from os import path +from unittest import mock +from unittest.mock import Mock + +import pytest + +from snowflake.connector import SnowflakeConnection +from snowflake.connector.constants import SHA256_DIGEST, ResultStatus + +from ..randomize import random_string + +try: + from snowflake.connector.errors import RequestExceedMaxRetryError + from snowflake.connector.file_transfer_agent import ( + SnowflakeFileMeta, + SnowflakeFileTransferAgent, + StorageCredential, + ) + from snowflake.connector.storage_client import METHODS + from snowflake.connector.vendored.requests import Response +except ImportError: + # Compatibility for olddriver tests + from requests import Response + + SnowflakeFileMeta = dict + RequestExceedMaxRetryError = None + METHODS = {} + megabytes = 1024 * 1024 + +try: # pragma: no cover + from snowflake.connector.gcs_storage_client import SnowflakeGCSRestClient +except ImportError: + SnowflakeGCSRestClient = None + +# We need these for our OldDriver tests. 
We run the most up-to-date tests with the oldest supported driver version +try: + from snowflake.connector.vendored import requests + + vendored_request = True +except ImportError: # pragma: no cover + import requests + + vendored_request = False + +THIS_DIR = path.dirname(path.realpath(__file__)) + + +@pytest.mark.parametrize("errno", [408, 429, 500, 503]) +def test_upload_retry_errors(errno, tmpdir): + """Tests whether retryable errors are handled correctly when uploading.""" + f_name = str(tmpdir.join("some_file.txt")) + resp = requests.Response() + resp.status_code = errno + meta = SnowflakeFileMeta( + name=f_name, + src_file_name=f_name, + stage_location_type="GCS", + presigned_url="some_url", + sha256_digest="asd", + ) + if RequestExceedMaxRetryError is not None: + mock_connection = mock.create_autospec(SnowflakeConnection) + client = SnowflakeGCSRestClient( + meta, + StorageCredential({}, mock_connection, ""), + {}, + mock_connection, + "", + ) + with open(f_name, "w") as f: + f.write(random_string(15)) + if RequestExceedMaxRetryError is None: + with mock.patch( + "snowflake.connector.vendored.requests.put" + if vendored_request + else "requests.put", + side_effect=requests.exceptions.HTTPError(response=resp), + ): + SnowflakeGCSUtil.upload_file(f_name, meta, None, 99, 64000) + assert isinstance(meta.last_error, requests.exceptions.HTTPError) + assert meta.result_status == ResultStatus.NEED_RETRY + else: + client.data_file = f_name + + if vendored_request: + with mock.patch.dict( + METHODS, + {"PUT": lambda *a, **kw: resp}, + ): + with pytest.raises(RequestExceedMaxRetryError): + # Retry quickly during unit tests + client.SLEEP_UNIT = 0.0 + client.upload_chunk(0) + else: + # Old Driver test specific code + with mock.patch("requests.put"): + SnowflakeGCSUtil.upload_file(f_name, meta, None, 99, 64000) + assert isinstance(meta.last_error, requests.exceptions.HTTPError) + assert meta.result_status == ResultStatus.NEED_RETRY + + +def test_upload_uncaught_exception(tmpdir): + """Tests whether non-retryable errors are handled correctly when uploading.""" + f_name = str(tmpdir.join("some_file.txt")) + resp = requests.Response() + resp.status_code = 501 + exc = requests.exceptions.HTTPError(response=resp) + with open(f_name, "w") as f: + f.write(random_string(15)) + agent = SnowflakeFileTransferAgent( + mock.MagicMock(), + f"put {f_name} @~", + { + "data": { + "command": "UPLOAD", + "src_locations": [f_name], + "stageInfo": { + "locationType": "GCS", + "location": "", + "creds": {"AWS_SECRET_KEY": "", "AWS_KEY_ID": ""}, + "region": "test", + "endPoint": None, + }, + "localLocation": "/tmp", + } + }, + ) + with mock.patch( + "snowflake.connector.gcs_storage_client.SnowflakeGCSRestClient.get_file_header", + ), mock.patch( + "snowflake.connector.gcs_storage_client.SnowflakeGCSRestClient._upload_chunk", + side_effect=exc, + ): + agent.execute() + assert agent._file_metadata[0].error_details is exc + + +@pytest.mark.parametrize("errno", [403, 408, 429, 500, 503]) +def test_download_retry_errors(errno, tmp_path): + """Tests whether retryable errors are handled correctly when downloading.""" + resp = requests.Response() + resp.status_code = errno + if errno == 403: + pytest.skip("This behavior has changed in the move from SDKs") + meta_info = { + "name": "data1.txt.gz", + "stage_location_type": "S3", + "no_sleeping_time": True, + "put_callback": None, + "put_callback_output_stream": None, + SHA256_DIGEST: "123456789abcdef", + "dst_file_name": "data1.txt.gz", + "src_file_name": path.join(THIS_DIR, 
"../data", "put_get_1.txt"), + "overwrite": True, + } + meta = SnowflakeFileMeta(**meta_info) + creds = {"AWS_SECRET_KEY": "", "AWS_KEY_ID": ""} + cnx = mock.MagicMock(autospec=SnowflakeConnection) + rest_client = SnowflakeGCSRestClient( + meta, + StorageCredential( + creds, + cnx, + "GET file:/tmp/file.txt @~", + ), + { + "locationType": "AWS", + "location": "bucket/path", + "creds": creds, + "region": "test", + "endPoint": None, + }, + cnx, + "GET file:///tmp/file.txt @~", + ) + from snowflake.connector.storage_client import METHODS + + rest_client.SLEEP_UNIT = 0 + with mock.patch.dict(METHODS, GET=mock.MagicMock(return_value=resp)): + with pytest.raises( + RequestExceedMaxRetryError, + match="GET with url .* failed for exceeding maximum retries", + ): + rest_client.download_chunk(0) + + +@pytest.mark.parametrize("errno", (501, 403)) +def test_download_uncaught_exception(tmp_path, errno): + """Tests whether non-retryable errors are handled correctly when downloading.""" + resp = requests.Response() + resp.status_code = errno + meta_info = { + "name": "data1.txt.gz", + "stage_location_type": "S3", + "no_sleeping_time": True, + "put_callback": None, + "put_callback_output_stream": None, + SHA256_DIGEST: "123456789abcdef", + "dst_file_name": "data1.txt.gz", + "src_file_name": path.join(THIS_DIR, "../data", "put_get_1.txt"), + "overwrite": True, + } + meta = SnowflakeFileMeta(**meta_info) + creds = {"AWS_SECRET_KEY": "", "AWS_KEY_ID": ""} + cnx = mock.MagicMock(autospec=SnowflakeConnection) + rest_client = SnowflakeGCSRestClient( + meta, + StorageCredential( + creds, + cnx, + "GET file:/tmp/file.txt @~", + ), + { + "locationType": "AWS", + "location": "bucket/path", + "creds": creds, + "region": "test", + "endPoint": None, + }, + cnx, + "GET file:///tmp/file.txt @~", + ) + from snowflake.connector.storage_client import METHODS + + rest_client.SLEEP_UNIT = 0 + with mock.patch.dict(METHODS, GET=mock.MagicMock(return_value=resp)): + with pytest.raises( + requests.exceptions.HTTPError, + ): + rest_client.download_chunk(0) + + +def test_upload_put_timeout(tmp_path, caplog): + """Tests whether timeout error is handled correctly when uploading.""" + caplog.set_level(logging.DEBUG, "snowflake.connector") + f_name = str(tmp_path / "some_file.txt") + resp = requests.Response() + with open(f_name, "w") as f: + f.write(random_string(15)) + agent = SnowflakeFileTransferAgent( + mock.Mock(autospec=SnowflakeConnection, connection=None), + f"put {f_name} @~", + { + "data": { + "command": "UPLOAD", + "src_locations": [f_name], + "stageInfo": { + "locationType": "GCS", + "location": "", + "creds": {"AWS_SECRET_KEY": "", "AWS_KEY_ID": ""}, + "region": "test", + "endPoint": None, + }, + "localLocation": "/tmp", + } + }, + ) + mocked_put, mocked_head = mock.MagicMock(), mock.MagicMock() + mocked_put.side_effect = requests.exceptions.Timeout(response=resp) + resp = Response() + resp.status_code = 404 + mocked_head.return_value = resp + SnowflakeGCSRestClient.SLEEP_UNIT = 0 + from snowflake.connector.storage_client import METHODS + + with mock.patch.dict(METHODS, {"PUT": mocked_put, "HEAD": mocked_head}): + agent.execute() + assert ( + "snowflake.connector.storage_client", + logging.WARNING, + "PUT with url https://storage.googleapis.com//some_file.txt.gz failed for transient error: ", + ) in caplog.record_tuples + assert ( + "snowflake.connector.file_transfer_agent", + logging.DEBUG, + "Chunk 0 of file some_file.txt failed to transfer for unexpected exception PUT with url 
https://storage.googleapis.com//some_file.txt.gz failed for exceeding maximum retries.", + ) in caplog.record_tuples + + +def test_download_timeout(tmp_path, caplog): + """Tests whether timeout error is handled correctly when downloading.""" + timeout_exc = requests.exceptions.Timeout(response=requests.Response()) + meta_info = { + "name": "data1.txt.gz", + "stage_location_type": "S3", + "no_sleeping_time": True, + "put_callback": None, + "put_callback_output_stream": None, + SHA256_DIGEST: "123456789abcdef", + "dst_file_name": "data1.txt.gz", + "src_file_name": path.join(THIS_DIR, "../data", "put_get_1.txt"), + "overwrite": True, + } + meta = SnowflakeFileMeta(**meta_info) + creds = {"AWS_SECRET_KEY": "", "AWS_KEY_ID": ""} + cnx = mock.MagicMock(autospec=SnowflakeConnection) + rest_client = SnowflakeGCSRestClient( + meta, + StorageCredential( + creds, + cnx, + "GET file:/tmp/file.txt @~", + ), + { + "locationType": "AWS", + "location": "bucket/path", + "creds": creds, + "region": "test", + "endPoint": None, + }, + cnx, + "GET file:///tmp/file.txt @~", + ) + from snowflake.connector.storage_client import METHODS + + rest_client.SLEEP_UNIT = 0 + with mock.patch.dict(METHODS, GET=mock.MagicMock(side_effect=timeout_exc)): + exc = Exception("stop execution") + with mock.patch.object(rest_client.credentials, "update", side_effect=exc): + with pytest.raises(RequestExceedMaxRetryError): + rest_client.download_chunk(0) + + +def test_get_file_header_none_with_presigned_url(tmp_path): + """Tests whether the default file header created by get_file_header is as expected.""" + meta = SnowflakeFileMeta( + name=str(tmp_path / "some_file"), + src_file_name=str(tmp_path / "some_file"), + stage_location_type="GCS", + presigned_url="www.example.com", + ) + storage_credentials = Mock() + storage_credentials.creds = {} + stage_info = Mock() + connection = Mock() + client = SnowflakeGCSRestClient( + meta, storage_credentials, stage_info, connection, "" + ) + file_header = client.get_file_header(meta.name) + assert file_header is None diff --git a/test/unit/test_linux_local_file_cache.py b/test/unit/test_linux_local_file_cache.py new file mode 100644 index 000000000..66ac90fff --- /dev/null +++ b/test/unit/test_linux_local_file_cache.py @@ -0,0 +1,93 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
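The GCS tests above set `SLEEP_UNIT = 0` so the storage client's bounded retry loop runs instantly. The shape of that loop, sketched generically (an illustration of the pattern the tests exercise, not the connector's code; all names here are made up):

```python
import time


def request_with_retries(send, is_transient, max_retries=5, sleep_unit=1.0):
    """Retry transient failures with growing backoff; raise after the cap."""
    for attempt in range(1, max_retries + 1):
        response = send()
        if not is_transient(response):
            return response
        # With sleep_unit = 0, as in the tests, retries complete immediately.
        time.sleep(sleep_unit * attempt)
    raise RuntimeError(f"request failed after {max_retries} retries")
```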
+# + +from __future__ import annotations + +import os + +import pytest + +import snowflake.connector.auth as auth +from snowflake.connector.compat import IS_LINUX + +HOST_0 = "host_0" +HOST_1 = "host_1" +USER_0 = "user_0" +USER_1 = "user_1" +CRED_0 = "cred_0" +CRED_1 = "cred_1" + +CRED_TYPE_0 = "ID_TOKEN" +CRED_TYPE_1 = "MFA_TOKEN" + + +def get_credential(sys, user): + return auth.TEMPORARY_CREDENTIAL.get(sys.upper(), {}).get(user.upper()) + + +@pytest.mark.skipif(not IS_LINUX, reason="The test is only for Linux platform") +def test_basic_store(tmpdir): + os.environ["SF_TEMPORARY_CREDENTIAL_CACHE_DIR"] = str(tmpdir) + + auth.delete_temporary_credential_file() + auth.TEMPORARY_CREDENTIAL.clear() + + auth.read_temporary_credential_file() + assert not auth.TEMPORARY_CREDENTIAL + + auth.write_temporary_credential_file(HOST_0, USER_0, CRED_0) + auth.write_temporary_credential_file(HOST_1, USER_1, CRED_1) + auth.write_temporary_credential_file(HOST_0, USER_1, CRED_1) + + auth.read_temporary_credential_file() + assert auth.TEMPORARY_CREDENTIAL + assert get_credential(HOST_0, USER_0) == CRED_0 + assert get_credential(HOST_1, USER_1) == CRED_1 + assert get_credential(HOST_0, USER_1) == CRED_1 + + auth.delete_temporary_credential_file() + + +def test_delete_specific_item(): + """The old deletion behavior removed the whole cache file; now individual cache entries can be deleted.""" + auth.write_temporary_credential_file( + HOST_0, + auth.build_temporary_credential_name(HOST_0, USER_0, CRED_TYPE_0), + CRED_0, + ) + auth.write_temporary_credential_file( + HOST_0, + auth.build_temporary_credential_name(HOST_0, USER_0, CRED_TYPE_1), + CRED_1, + ) + auth.read_temporary_credential_file() + + assert auth.TEMPORARY_CREDENTIAL + assert ( + get_credential( + HOST_0, auth.build_temporary_credential_name(HOST_0, USER_0, CRED_TYPE_0) + ) + == CRED_0 + ) + assert ( + get_credential( + HOST_0, auth.build_temporary_credential_name(HOST_0, USER_0, CRED_TYPE_1) + ) + == CRED_1 + ) + + auth.temporary_credential_file_delete_password(HOST_0, USER_0, CRED_TYPE_0) + auth.read_temporary_credential_file() + assert not get_credential( + HOST_0, auth.build_temporary_credential_name(HOST_0, USER_0, CRED_TYPE_0) + ) + assert ( + get_credential( + HOST_0, auth.build_temporary_credential_name(HOST_0, USER_0, CRED_TYPE_1) + ) + == CRED_1 + ) + + auth.delete_temporary_credential_file() diff --git a/test/unit/test_log_secret_detector.py b/test/unit/test_log_secret_detector.py new file mode 100644 index 000000000..369548500 --- /dev/null +++ b/test/unit/test_log_secret_detector.py @@ -0,0 +1,272 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
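The cache tests above rely on `SF_TEMPORARY_CREDENTIAL_CACHE_DIR` to redirect the on-disk credential file. Condensed into a minimal Linux-only usage sketch, reusing the calls from the tests (the directory path and host/user/credential values are placeholders, and the upper-cased keys follow what the `get_credential` helper above implies):

```python
import os

import snowflake.connector.auth as auth

cache_dir = "/tmp/sf_cred_cache_demo"  # placeholder location
os.makedirs(cache_dir, exist_ok=True)
# Redirect the cache file so nothing outside this directory is touched.
os.environ["SF_TEMPORARY_CREDENTIAL_CACHE_DIR"] = cache_dir

auth.delete_temporary_credential_file()
auth.write_temporary_credential_file("host_0", "user_0", "cred_0")
auth.read_temporary_credential_file()
assert auth.TEMPORARY_CREDENTIAL["HOST_0"]["USER_0"] == "cred_0"
auth.delete_temporary_credential_file()
```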
+# + +from __future__ import annotations + +import logging +from unittest import mock + +from snowflake.connector.secret_detector import SecretDetector + + +def basic_masking(test_str): + masked, masked_str, err_str = SecretDetector.mask_secrets(test_str) + assert not masked + assert err_str is None + assert masked_str == test_str + + +def test_none_string(): + basic_masking(None) + + +def test_empty_string(): + basic_masking("") + + +def test_no_masking(): + basic_masking("This string is innocuous") + + +@mock.patch.object( + SecretDetector, + "mask_connection_token", + mock.Mock(side_effect=Exception("Test exception")), +) +def test_exception_in_masking(): + test_str = "This string will raise an exception" + masked, masked_str, err_str = SecretDetector.mask_secrets(test_str) + assert masked + assert err_str == "Test exception" + assert masked_str == "Test exception" + + +def exception_in_log_masking(): + test_str = "This string will raise an exception" + log_record = logging.LogRecord( + SecretDetector.__name__, + logging.DEBUG, + "test_unit_log_secret_detector.py", + 45, + test_str, + list(), + None, + ) + log_record.asctime = "2003-07-08 16:49:45,896" + secret_detector = SecretDetector() + sanitized_log = secret_detector.format(log_record) + assert "Test exception" in sanitized_log + assert "secret_detector.py" in sanitized_log + assert "sanitize_log_str" in sanitized_log + assert test_str not in sanitized_log + + +@mock.patch.object( + SecretDetector, + "mask_connection_token", + mock.Mock(side_effect=Exception("Test exception")), +) +def test_exception_in_secret_detector_while_log_masking(): + exception_in_log_masking() + + +@mock.patch.object( + SecretDetector, "mask_secrets", mock.Mock(side_effect=Exception("Test exception")) +) +def test_exception_while_log_masking(): + exception_in_log_masking() + + +def test_mask_token(): + long_token = ( + "_Y1ZNETTn5/qfUWj3Jedby7gipDzQs=U" + "KyJH9DS=nFzzWnfZKGV+C7GopWCGD4Lj" + "OLLFZKOE26LXHDt3pTi4iI1qwKuSpf/F" + "mClCMBSissVsU3Ei590FP0lPQQhcSGcD" + "u69ZL_1X6e9h5z62t/iY7ZkII28n2qU=" + "nrBJUgPRCIbtJQkVJXIuOHjX4G5yUEKj" + "ZBAx4w6=_lqtt67bIA=o7D=oUSjfywsR" + "FoloNIkBPXCwFTv+1RVUHgVA2g8A9Lw5" + "XdJYuI8vhg=f0bKSq7AhQ2Bh" + ) + + token_str_w_prefix = "Token =" + long_token + masked, masked_str, err_str = SecretDetector.mask_secrets(token_str_w_prefix) + assert masked + assert err_str is None + assert masked_str == "Token =****" + + id_token_str_w_prefix = "idToken : " + long_token + masked, masked_str, err_str = SecretDetector.mask_secrets(id_token_str_w_prefix) + assert masked + assert err_str is None + assert masked_str == "idToken : ****" + + session_token_w_prefix = "sessionToken : " + long_token + masked, masked_str, err_str = SecretDetector.mask_secrets(session_token_w_prefix) + assert masked + assert err_str is None + assert masked_str == "sessionToken : ****" + + master_token_w_prefix = "masterToken : " + long_token + masked, masked_str, err_str = SecretDetector.mask_secrets(master_token_w_prefix) + assert masked + assert err_str is None + assert masked_str == "masterToken : ****" + + assertion_w_prefix = "assertion content:" + long_token + masked, masked_str, err_str = SecretDetector.mask_secrets(assertion_w_prefix) + assert masked + assert err_str is None + assert masked_str == "assertion content:****" + + +def test_token_false_positives(): + false_positive_token_str = ( + "2020-04-30 23:06:04,069 - MainThread auth.py:397" + " - write_temporary_credential() - DEBUG - no ID " + "token is given when try to store temporary credential" + 
) + + masked, masked_str, err_str = SecretDetector.mask_secrets(false_positive_token_str) + assert not masked + assert err_str is None + assert masked_str == false_positive_token_str + + +def test_password(): + random_password = "Fh[+2J~AcqeqW%?" + random_password_w_prefix = "password:" + random_password + masked, masked_str, err_str = SecretDetector.mask_secrets(random_password_w_prefix) + assert masked + assert err_str is None + assert masked_str == "password:****" + + random_password_caps = "PASSWORD:" + random_password + masked, masked_str, err_str = SecretDetector.mask_secrets(random_password_caps) + assert masked + assert err_str is None + assert masked_str == "PASSWORD:****" + + random_password_mix_case = "PassWorD:" + random_password + masked, masked_str, err_str = SecretDetector.mask_secrets(random_password_mix_case) + assert masked + assert err_str is None + assert masked_str == "PassWorD:****" + + random_password_equal_sign = "password = " + random_password + masked, masked_str, err_str = SecretDetector.mask_secrets( + random_password_equal_sign + ) + assert masked + assert err_str is None + assert masked_str == "password = ****" + + random_password = "Fh[+2J~AcqeqW%?" + random_password_w_prefix = "pwd:" + random_password + masked, masked_str, err_str = SecretDetector.mask_secrets(random_password_w_prefix) + assert masked + assert err_str is None + assert masked_str == "pwd:****" + + +def test_token_password(): + long_token = ( + "_Y1ZNETTn5/qfUWj3Jedby7gipDzQs=U" + "KyJH9DS=nFzzWnfZKGV+C7GopWCGD4Lj" + "OLLFZKOE26LXHDt3pTi4iI1qwKuSpf/F" + "mClCMBSissVsU3Ei590FP0lPQQhcSGcD" + "u69ZL_1X6e9h5z62t/iY7ZkII28n2qU=" + "nrBJUgPRCIbtJQkVJXIuOHjX4G5yUEKj" + "ZBAx4w6=_lqtt67bIA=o7D=oUSjfywsR" + "FoloNIkBPXCwFTv+1RVUHgVA2g8A9Lw5" + "XdJYuI8vhg=f0bKSq7AhQ2Bh" + ) + + long_token2 = ( + "ktL57KJemuq4-M+Q0pdRjCIMcf1mzcr" + "MwKteDS5DRE/Pb+5MzvWjDH7LFPV5b_" + "/tX/yoLG3b4TuC6Q5qNzsARPPn_zs/j" + "BbDOEg1-IfPpdsbwX6ETeEnhxkHIL4H" + "sP-V" + ) + + random_pwd = "Fh[+2J~AcqeqW%?" 
+ random_pwd2 = random_pwd + "vdkav13" + + test_string_w_prefix = ( + "token=" + long_token + " random gibberish " + "password:" + random_pwd + ) + masked, masked_str, err_str = SecretDetector.mask_secrets(test_string_w_prefix) + assert masked + assert err_str is None + assert masked_str == "token=****" + " random gibberish " + "password:****" + + # order reversed + test_string_w_prefix = ( + "password:" + random_pwd + " random gibberish " + "token=" + long_token + ) + + masked, masked_str, err_str = SecretDetector.mask_secrets(test_string_w_prefix) + assert masked + assert err_str is None + assert masked_str == "password:****" + " random gibberish " + "token=****" + + # multiple tokens and password + test_string_w_prefix = ( + "token=" + + long_token + + " random gibberish " + + "password:" + + random_pwd + + " random gibberish " + + "idToken:" + + long_token2 + ) + masked, masked_str, err_str = SecretDetector.mask_secrets(test_string_w_prefix) + assert masked + assert err_str is None + assert ( + masked_str + == "token=****" + + " random gibberish " + + "password:****" + + " random gibberish " + + "idToken:****" + ) + + # multiple passwords + test_string_w_prefix = ( + "password=" + random_pwd + " random gibberish " + "pwd:" + random_pwd2 + ) + masked, masked_str, err_str = SecretDetector.mask_secrets(test_string_w_prefix) + assert masked + assert err_str is None + assert masked_str == "password=" + "****" + " random gibberish " + "pwd:" + "****" + + test_string_w_prefix = ( + "password=" + + random_pwd + + " random gibberish " + + "password=" + + random_pwd2 + + " random gibberish " + + "password=" + + random_pwd + ) + masked, masked_str, err_str = SecretDetector.mask_secrets(test_string_w_prefix) + assert masked + assert err_str is None + assert ( + masked_str + == "password=" + + "****" + + " random gibberish " + + "password=" + + "****" + + " random gibberish " + + "password=" + + "****" + ) diff --git a/test/unit/test_mfa_no_cache.py b/test/unit/test_mfa_no_cache.py new file mode 100644 index 000000000..a9171b269 --- /dev/null +++ b/test/unit/test_mfa_no_cache.py @@ -0,0 +1,110 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
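The tests above drive `SecretDetector.format()` directly on a `LogRecord`; in application code the same class is installed as a handler's formatter so secrets are masked on the way out. A minimal wiring sketch (constructed with no arguments, as in `exception_in_log_masking` above):

```python
import logging

from snowflake.connector.secret_detector import SecretDetector

handler = logging.StreamHandler()
# SecretDetector masks token/password patterns while formatting.
handler.setFormatter(SecretDetector())
logger = logging.getLogger("demo")
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
logger.debug("password:hunter2")  # emitted with the secret masked
```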
+#
+
+from __future__ import annotations
+
+import json
+from unittest.mock import patch
+
+import pytest
+
+import snowflake.connector
+from snowflake.connector.compat import IS_LINUX
+
+try:
+    from snowflake.connector.options import installed_keyring
+except ImportError:
+    # if installed_keyring is unavailable, set it to True so that the test is skipped
+    installed_keyring = True
+try:
+    from snowflake.connector.auth import delete_temporary_credential
+except ImportError:
+    delete_temporary_credential = None
+
+MFA_TOKEN = "MFATOKEN"
+
+
+@pytest.mark.skipif(
+    IS_LINUX or installed_keyring or not delete_temporary_credential,
+    reason="Required test env is Mac/Win with no pre-installed keyring package "
+    "and available delete_temporary_credential.",
+)
+@patch("snowflake.connector.network.SnowflakeRestful._post_request")
+def test_mfa_no_local_secure_storage(mockSnowflakeRestfulPostRequest):
+    """Test whether the username_password_mfa authenticator works when no local secure storage is available."""
+    global mock_post_req_cnt
+    mock_post_req_cnt = 0
+
+    # This test requires Mac/Win and no keyring lib installed
+    assert not installed_keyring
+
+    def mock_post_request(url, headers, json_body, **kwargs):
+        global mock_post_req_cnt
+        ret = None
+        body = json.loads(json_body)
+        if mock_post_req_cnt == 0:
+            # issue an MFA token for a successful login
+            assert (
+                body["data"]["SESSION_PARAMETERS"].get("CLIENT_REQUEST_MFA_TOKEN")
+                is True
+            )
+            ret = {
+                "success": True,
+                "message": None,
+                "data": {
+                    "token": "TOKEN",
+                    "masterToken": "MASTER_TOKEN",
+                    "mfaToken": "MFA_TOKEN",
+                },
+            }
+        elif mock_post_req_cnt == 2:
+            # No local secure storage is available, so no MFA cache token should be provided
+            assert (
+                body["data"]["SESSION_PARAMETERS"].get("CLIENT_REQUEST_MFA_TOKEN")
+                is True
+            )
+            assert "TOKEN" not in body["data"]
+            ret = {
+                "success": True,
+                "message": None,
+                "data": {
+                    "token": "NEW_TOKEN",
+                    "masterToken": "NEW_MASTER_TOKEN",
+                },
+            }
+        elif mock_post_req_cnt in [1, 3]:
+            # connection.close()
+            ret = {"success": True}
+        mock_post_req_cnt += 1
+        return ret
+
+    # POST requests mock
+    mockSnowflakeRestfulPostRequest.side_effect = mock_post_request
+
+    conn_cfg = {
+        "account": "testaccount",
+        "user": "testuser",
+        "password": "testpwd",
+        "authenticator": "username_password_mfa",
+        "host": "testaccount.snowflakecomputing.com",
+    }
+
+    delete_temporary_credential(
+        host=conn_cfg["host"], user=conn_cfg["user"], cred_type=MFA_TOKEN
+    )
+
+    # first connection: no MFA token cache
+    con = snowflake.connector.connect(**conn_cfg)
+    assert con._rest.token == "TOKEN"
+    assert con._rest.master_token == "MASTER_TOKEN"
+    assert con._rest.mfa_token == "MFA_TOKEN"
+    con.close()
+
+    # second connection: again no MFA token should be issued, since no local secure storage is available
+    con = snowflake.connector.connect(**conn_cfg)
+    assert con._rest.token == "NEW_TOKEN"
+    assert con._rest.master_token == "NEW_MASTER_TOKEN"
+    assert not con._rest.mfa_token
+    con.close()
diff --git a/test/unit/test_ocsp.py b/test/unit/test_ocsp.py
new file mode 100644
index 000000000..920cde876
--- /dev/null
+++ b/test/unit/test_ocsp.py
@@ -0,0 +1,539 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved.
+# + +from __future__ import annotations + +import codecs +import json +import logging +import os +import time +from concurrent.futures.thread import ThreadPoolExecutor +from os import environ, path + +import pytest + +from snowflake.connector import OperationalError +from snowflake.connector.errors import RevocationCheckError +from snowflake.connector.ocsp_asn1crypto import SnowflakeOCSPAsn1Crypto as SFOCSP +from snowflake.connector.ocsp_snowflake import OCSPCache, OCSPServer, SnowflakeOCSP +from snowflake.connector.ssl_wrap_socket import _openssl_connect + +from ..randomize import random_string + +try: + from snowflake.connector.errorcode import ( + ER_OCSP_RESPONSE_CERT_STATUS_REVOKED, + ER_OCSP_RESPONSE_FETCH_FAILURE, + ) +except ImportError: + ER_OCSP_RESPONSE_CERT_STATUS_REVOKED = None + ER_OCSP_RESPONSE_FETCH_FAILURE = None + +TARGET_HOSTS = [ + "ocspssd.us-east-1.snowflakecomputing.com", + "sqs.us-west-2.amazonaws.com", + "sfcsupport.us-east-1.snowflakecomputing.com", + "sfcsupport.eu-central-1.snowflakecomputing.com", + "sfc-dev1-regression.s3.amazonaws.com", + "sfctest0.snowflakecomputing.com", + "sfc-ds2-customer-stage.s3.amazonaws.com", + "snowflake.okta.com", + "sfcdev1.blob.core.windows.net", + "sfc-aus-ds1-customer-stage.s3-ap-southeast-2.amazonaws.com", +] + +THIS_DIR = path.dirname(path.realpath(__file__)) + + +@pytest.fixture(autouse=True) +def ocsp_reset(tmpdir): + # Reset OCSP cache location before each test + if "SF_OCSP_RESPONSE_CACHE_DIR" in os.environ: + del os.environ["SF_OCSP_RESPONSE_CACHE_DIR"] + os.environ["SF_OCSP_RESPONSE_CACHE_DIR"] = str(tmpdir.join(random_string(5))) + OCSPCache.reset_cache_dir() + + +def test_ocsp(): + """OCSP tests.""" + # reset the memory cache + SnowflakeOCSP.clear_cache() + ocsp = SFOCSP() + for url in TARGET_HOSTS: + connection = _openssl_connect(url, timeout=5) + assert ocsp.validate(url, connection), f"Failed to validate: {url}" + + +def test_ocsp_wo_cache_server(): + """OCSP Tests with Cache Server Disabled.""" + SnowflakeOCSP.clear_cache() + ocsp = SFOCSP(use_ocsp_cache_server=False) + for url in TARGET_HOSTS: + connection = _openssl_connect(url) + assert ocsp.validate(url, connection), f"Failed to validate: {url}" + + +def test_ocsp_wo_cache_file(): + """OCSP tests without File cache. + + Notes: + Use /etc as a readonly directory such that no cache file is used. 
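+        (This assumes the test is not running as root, since root can still write to /etc.)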
+ """ + # reset the memory cache + SnowflakeOCSP.clear_cache() + OCSPCache.del_cache_file() + environ["SF_OCSP_RESPONSE_CACHE_DIR"] = "/etc" + OCSPCache.reset_cache_dir() + + try: + ocsp = SFOCSP() + for url in TARGET_HOSTS: + connection = _openssl_connect(url) + assert ocsp.validate(url, connection), f"Failed to validate: {url}" + finally: + del environ["SF_OCSP_RESPONSE_CACHE_DIR"] + OCSPCache.reset_cache_dir() + + +def test_ocsp_fail_open_w_single_endpoint(): + SnowflakeOCSP.clear_cache() + + OCSPCache.del_cache_file() + + environ["SF_OCSP_TEST_MODE"] = "true" + environ["SF_TEST_OCSP_URL"] = "http://httpbin.org/delay/10" + environ["SF_TEST_CA_OCSP_RESPONDER_CONNECTION_TIMEOUT"] = "5" + + ocsp = SFOCSP(use_ocsp_cache_server=False) + connection = _openssl_connect("snowflake.okta.com") + + try: + assert ocsp.validate( + "snowflake.okta.com", connection + ), "Failed to validate: {}".format("snowflake.okta.com") + finally: + del environ["SF_OCSP_TEST_MODE"] + del environ["SF_TEST_OCSP_URL"] + del environ["SF_TEST_CA_OCSP_RESPONDER_CONNECTION_TIMEOUT"] + + +@pytest.mark.skipif( + ER_OCSP_RESPONSE_CERT_STATUS_REVOKED is None, + reason="No ER_OCSP_RESPONSE_CERT_STATUS_REVOKED is available.", +) +def test_ocsp_fail_close_w_single_endpoint(): + SnowflakeOCSP.clear_cache() + + environ["SF_OCSP_TEST_MODE"] = "true" + environ["SF_TEST_OCSP_URL"] = "http://httpbin.org/delay/10" + environ["SF_TEST_CA_OCSP_RESPONDER_CONNECTION_TIMEOUT"] = "5" + + OCSPCache.del_cache_file() + + ocsp = SFOCSP(use_ocsp_cache_server=False, use_fail_open=False) + connection = _openssl_connect("snowflake.okta.com") + + with pytest.raises(RevocationCheckError) as ex: + ocsp.validate("snowflake.okta.com", connection) + + try: + assert ( + ex.value.errno == ER_OCSP_RESPONSE_FETCH_FAILURE + ), "Connection should have failed" + finally: + del environ["SF_OCSP_TEST_MODE"] + del environ["SF_TEST_OCSP_URL"] + del environ["SF_TEST_CA_OCSP_RESPONDER_CONNECTION_TIMEOUT"] + + +def test_ocsp_bad_validity(): + SnowflakeOCSP.clear_cache() + + environ["SF_OCSP_TEST_MODE"] = "true" + environ["SF_TEST_OCSP_FORCE_BAD_RESPONSE_VALIDITY"] = "true" + + OCSPCache.del_cache_file() + + ocsp = SFOCSP(use_ocsp_cache_server=False) + connection = _openssl_connect("snowflake.okta.com") + + assert ocsp.validate( + "snowflake.okta.com", connection + ), "Connection should have passed with fail open" + del environ["SF_OCSP_TEST_MODE"] + del environ["SF_TEST_OCSP_FORCE_BAD_RESPONSE_VALIDITY"] + + +def test_ocsp_single_endpoint(): + environ["SF_OCSP_ACTIVATE_NEW_ENDPOINT"] = "True" + SnowflakeOCSP.clear_cache() + ocsp = SFOCSP() + ocsp.OCSP_CACHE_SERVER.NEW_DEFAULT_CACHE_SERVER_BASE_URL = "https://snowflake.preprod3.us-west-2-dev.external-zone.snowflakecomputing.com:8085/ocsp/" + connection = _openssl_connect("snowflake.okta.com") + assert ocsp.validate( + "snowflake.okta.com", connection + ), "Failed to validate: {}".format("snowflake.okta.com") + + del environ["SF_OCSP_ACTIVATE_NEW_ENDPOINT"] + + +def test_ocsp_by_post_method(): + """OCSP tests.""" + # reset the memory cache + SnowflakeOCSP.clear_cache() + ocsp = SFOCSP(use_post_method=True) + for url in TARGET_HOSTS: + connection = _openssl_connect(url) + assert ocsp.validate(url, connection), f"Failed to validate: {url}" + + +def test_ocsp_with_file_cache(tmpdir): + """OCSP tests and the cache server and file.""" + tmp_dir = str(tmpdir.mkdir("ocsp_response_cache")) + cache_file_name = path.join(tmp_dir, "cache_file.txt") + + # reset the memory cache + SnowflakeOCSP.clear_cache() + ocsp = 
SFOCSP(ocsp_response_cache_uri="file://" + cache_file_name)
+    for url in TARGET_HOSTS:
+        connection = _openssl_connect(url)
+        assert ocsp.validate(url, connection), f"Failed to validate: {url}"
+
+
+def test_ocsp_with_bogus_cache_files(tmpdir):
+    """Attempts to use bogus OCSP response data."""
+    cache_file_name, target_hosts = _store_cache_in_file(tmpdir)
+
+    ocsp = SFOCSP()
+    OCSPCache.read_ocsp_response_cache_file(ocsp, cache_file_name)
+    cache_data = OCSPCache.CACHE
+    assert cache_data, "at least one cache entry should be stored."
+
+    # setting bogus data
+    current_time = int(time.time())
+    for k, _ in cache_data.items():
+        cache_data[k] = (current_time, b"bogus")
+
+    # write back the cache file
+    OCSPCache.CACHE = cache_data
+    OCSPCache.write_ocsp_response_cache_file(ocsp, cache_file_name)
+
+    # force the use of the bogus cache file; validation should still succeed
+    SnowflakeOCSP.clear_cache()
+    ocsp = SFOCSP()
+    for hostname in target_hosts:
+        connection = _openssl_connect(hostname)
+        assert ocsp.validate(hostname, connection), "Failed to validate: {}".format(
+            hostname
+        )
+
+
+def test_ocsp_with_outdated_cache(tmpdir):
+    """Attempts to use an outdated OCSP response cache file."""
+    cache_file_name, target_hosts = _store_cache_in_file(tmpdir)
+
+    ocsp = SFOCSP()
+
+    # reading cache file
+    OCSPCache.read_ocsp_response_cache_file(ocsp, cache_file_name)
+    cache_data = OCSPCache.CACHE
+    assert cache_data, "at least one cache entry should be stored."
+
+    # setting outdated data
+    current_time = int(time.time())
+    for k, v in cache_data.items():
+        cache_data[k] = (current_time - 144 * 60 * 60, v[1])
+
+    # write back the cache file
+    OCSPCache.CACHE = cache_data
+    OCSPCache.write_ocsp_response_cache_file(ocsp, cache_file_name)
+
+    # force the use of the outdated cache file; its entries must not be loaded
+    SnowflakeOCSP.clear_cache()  # reset the memory cache
+    SFOCSP()
+    assert (
+        SnowflakeOCSP.cache_size() == 0
+    ), "must be empty. outdated cache should not be loaded"
+
+
+def _store_cache_in_file(tmpdir, target_hosts=None):
+    if target_hosts is None:
+        target_hosts = TARGET_HOSTS
+    os.environ["SF_OCSP_RESPONSE_CACHE_DIR"] = str(tmpdir)
+    OCSPCache.reset_cache_dir()
+    filename = path.join(str(tmpdir), "ocsp_response_cache.json")
+
+    # cache OCSP response
+    SnowflakeOCSP.clear_cache()
+    ocsp = SFOCSP(
+        ocsp_response_cache_uri="file://" + filename, use_ocsp_cache_server=False
+    )
+    for hostname in target_hosts:
+        connection = _openssl_connect(hostname)
+        assert ocsp.validate(hostname, connection), "Failed to validate: {}".format(
+            hostname
+        )
+    assert path.exists(filename), "OCSP response cache file should exist"
+    return filename, target_hosts
+
+
+def test_ocsp_with_invalid_cache_file():
+    """OCSP tests with an invalid cache file."""
+    SnowflakeOCSP.clear_cache()  # reset the memory cache
+    ocsp = SFOCSP(ocsp_response_cache_uri="NEVER_EXISTS")
+    for url in TARGET_HOSTS[0:1]:
+        connection = _openssl_connect(url)
+        assert ocsp.validate(url, connection), f"Failed to validate: {url}"
+
+
+def test_concurrent_ocsp_requests(tmpdir):
+    """Run OCSP revocation checks in parallel.
+    The memory and file caches are deleted randomly."""
+    cache_file_name = path.join(str(tmpdir), "cache_file.txt")
+    SnowflakeOCSP.clear_cache()  # reset the memory cache
+
+    target_hosts = TARGET_HOSTS * 5
+    pool = ThreadPoolExecutor(len(target_hosts))
+    for hostname in target_hosts:
+        pool.submit(_validate_certs_using_ocsp, hostname, cache_file_name)
+    pool.shutdown()
+
+
+def _validate_certs_using_ocsp(url, cache_file_name):
+    """Validate an OCSP response, deleting the memory cache and file cache randomly."""
+    logger = logging.getLogger("test")
+    import random
+    import time
+
+    time.sleep(random.randint(0, 3))
+    if random.random() < 0.2:
+        logger.info("clearing up cache: OCSP_VALIDATION_CACHE")
+        SnowflakeOCSP.clear_cache()
+    if random.random() < 0.05:
+        logger.info("deleting a cache file: %s", cache_file_name)
+        SnowflakeOCSP.delete_cache_file()
+
+    connection = _openssl_connect(url)
+    ocsp = SFOCSP(ocsp_response_cache_uri="file://" + cache_file_name)
+    ocsp.validate(url, connection)
+
+
+@pytest.mark.skip(reason="certificate expired.")
+def test_ocsp_revoked_certificate():
+    """Tests a revoked certificate."""
+    revoked_cert = path.join(THIS_DIR, "../data", "cert_tests", "revoked_certs.pem")
+
+    SnowflakeOCSP.clear_cache()  # reset the memory cache
+    ocsp = SFOCSP()
+
+    with pytest.raises(OperationalError) as ex:
+        ocsp.validate_certfile(revoked_cert)
+    assert ex.value.errno == ER_OCSP_RESPONSE_CERT_STATUS_REVOKED
+
+
+def test_ocsp_incomplete_chain():
+    """Tests an incomplete certificate chain."""
+    incomplete_chain_cert = path.join(
+        THIS_DIR, "../data", "cert_tests", "incomplete-chain.pem"
+    )
+
+    SnowflakeOCSP.clear_cache()  # reset the memory cache
+    ocsp = SFOCSP()
+
+    with pytest.raises(OperationalError) as ex:
+        ocsp.validate_certfile(incomplete_chain_cert)
+    assert "CA certificate is NOT found" in ex.value.msg
+
+
+def test_ocsp_cache_merge(tmpdir):
+    """
+    Merges two OCSP response cache files.
+
+    First create the entire cache for all hosts.
+    Split the created cache into two caches.
+    Merge the two caches.
+ This is to prevent us from doing multiple cache + creations as the results are not predictable + due to OCSP responder shenanigans + """ + cache_folder = tmpdir.mkdir("caches") + cache_filename, _ = _store_cache_in_file(cache_folder, target_hosts=TARGET_HOSTS) + + previous_folder = tmpdir.mkdir("previous") + previous_cache_filename = path.join( + str(previous_folder), "ocsp_response_cache.json" + ) + + current_folder = tmpdir.mkdir("current") + current_cache_filename = path.join(str(current_folder), "ocsp_response_cache.json") + + prev_cache = {} + current_cache = {} + with codecs.open(cache_filename) as cf: + orig_cache = json.load(cf) + counter = 0 + for certid, arr in orig_cache.items(): + if counter < 1: + prev_cache.update({certid: arr}) + else: + current_cache.update({certid: arr}) + counter += 1 + + with open(previous_cache_filename, "w") as prev_cache_fp: + json.dump(prev_cache, prev_cache_fp) + + with open(current_cache_filename, "w") as curr_cache_fp: + json.dump(current_cache, curr_cache_fp) + + latest_folder = tmpdir.mkdir("latest") + latest_cache_filename = path.join(str(latest_folder), "cache_file.txt") + + SnowflakeOCSP.clear_cache() # reset the memory cache + ocsp = SFOCSP() + OCSPCache.merge_cache( + ocsp, previous_cache_filename, current_cache_filename, latest_cache_filename + ) + + with codecs.open(previous_cache_filename) as f: + prev = json.load(f) + with codecs.open(current_cache_filename) as f: + curr = json.load(f) + with codecs.open(latest_cache_filename) as f: + latest = json.load(f) + + assert len(latest) > len(prev) + assert len(latest) > len(curr) + + +def test_building_retry_url(): + # privatelink retry url + OCSP_SERVER = OCSPServer() + OCSPCache.ACTIVATE_SSD = False + OCSP_SERVER.OCSP_RETRY_URL = None + OCSP_SERVER.CACHE_SERVER_URL = ( + "http://ocsp.us-east-1.snowflakecomputing.com/ocsp_response_cache.json" + ) + OCSP_SERVER.reset_ocsp_dynamic_cache_server_url(None) + assert ( + OCSP_SERVER.OCSP_RETRY_URL + == "http://ocsp.us-east-1.snowflakecomputing.com/retry/{0}/{1}" + ) + + # privatelink retry url with port + OCSPCache.ACTIVATE_SSD = False + OCSP_SERVER.OCSP_RETRY_URL = None + OCSP_SERVER.CACHE_SERVER_URL = ( + "http://ocsp.us-east-1.snowflakecomputing.com:80/ocsp_response_cache" ".json" + ) + OCSP_SERVER.reset_ocsp_dynamic_cache_server_url(None) + assert ( + OCSP_SERVER.OCSP_RETRY_URL + == "http://ocsp.us-east-1.snowflakecomputing.com:80/retry/{0}/{1}" + ) + + # non-privatelink retry url + OCSPCache.ACTIVATE_SSD = False + OCSP_SERVER.OCSP_RETRY_URL = None + OCSP_SERVER.CACHE_SERVER_URL = ( + "http://ocsp.snowflakecomputing.com/ocsp_response_cache.json" + ) + OCSP_SERVER.reset_ocsp_dynamic_cache_server_url(None) + assert OCSP_SERVER.OCSP_RETRY_URL is None + + # non-privatelink retry url with port + OCSPCache.ACTIVATE_SSD = False + OCSP_SERVER.OCSP_RETRY_URL = None + OCSP_SERVER.CACHE_SERVER_URL = ( + "http://ocsp.snowflakecomputing.com:80/ocsp_response_cache.json" + ) + OCSP_SERVER.reset_ocsp_dynamic_cache_server_url(None) + assert OCSP_SERVER.OCSP_RETRY_URL is None + + # ssd enabled for privatelink retry url + OCSPCache.ACTIVATE_SSD = True + OCSP_SERVER.OCSP_RETRY_URL = None + OCSP_SERVER.CACHE_SERVER_URL = ( + "http://ocsp.us-east-1.snowflakecomputing.com/ocsp_response_cache.json" + ) + OCSP_SERVER.reset_ocsp_dynamic_cache_server_url(None) + assert ( + OCSP_SERVER.OCSP_RETRY_URL + == "http://ocsp.us-east-1.snowflakecomputing.com/retry" + ) + + # ssd enabled for privatelink retry url with port + OCSPCache.ACTIVATE_SSD = True + 
OCSP_SERVER.OCSP_RETRY_URL = None + OCSP_SERVER.CACHE_SERVER_URL = ( + "http://ocsp.us-east-1.snowflakecomputing.com:80/ocsp_response_cache" ".json" + ) + OCSP_SERVER.reset_ocsp_dynamic_cache_server_url(None) + assert ( + OCSP_SERVER.OCSP_RETRY_URL + == "http://ocsp.us-east-1.snowflakecomputing.com:80/retry" + ) + + # ssd enabled for non-privatelink + OCSPCache.ACTIVATE_SSD = True + OCSP_SERVER.OCSP_RETRY_URL = None + OCSP_SERVER.CACHE_SERVER_URL = ( + "http://ocsp.snowflakecomputing.com/ocsp_response_cache.json" + ) + OCSP_SERVER.reset_ocsp_dynamic_cache_server_url(None) + assert OCSP_SERVER.OCSP_RETRY_URL is None + + # ssd enabled for non-privatelink with port + OCSPCache.ACTIVATE_SSD = True + OCSP_SERVER.OCSP_RETRY_URL = None + OCSP_SERVER.CACHE_SERVER_URL = ( + "http://ocsp.snowflakecomputing.com:80/ocsp_response_cache.json" + ) + OCSP_SERVER.reset_ocsp_dynamic_cache_server_url(None) + assert OCSP_SERVER.OCSP_RETRY_URL is None + # Once SSD is active we would use hostname specific OCSP Endpoints. + + +def test_building_new_retry(): + OCSP_SERVER = OCSPServer() + OCSPCache.ACTIVATE_SSD = False + OCSP_SERVER.OCSP_RETRY_URL = None + hname = "a1.us-east-1.snowflakecomputing.com" + os.environ["SF_OCSP_ACTIVATE_NEW_ENDPOINT"] = "true" + OCSP_SERVER.reset_ocsp_endpoint(hname) + assert ( + OCSP_SERVER.CACHE_SERVER_URL + == "https://ocspssd.us-east-1.snowflakecomputing.com/ocsp/fetch" + ) + + assert ( + OCSP_SERVER.OCSP_RETRY_URL + == "https://ocspssd.us-east-1.snowflakecomputing.com/ocsp/retry" + ) + + hname = "a1-12345.global.snowflakecomputing.com" + OCSP_SERVER.reset_ocsp_endpoint(hname) + assert ( + OCSP_SERVER.CACHE_SERVER_URL + == "https://ocspssd-12345.global.snowflakecomputing.com/ocsp/fetch" + ) + + assert ( + OCSP_SERVER.OCSP_RETRY_URL + == "https://ocspssd-12345.global.snowflakecomputing.com/ocsp/retry" + ) + + hname = "snowflake.okta.com" + OCSP_SERVER.reset_ocsp_endpoint(hname) + assert ( + OCSP_SERVER.CACHE_SERVER_URL + == "https://ocspssd.snowflakecomputing.com/ocsp/fetch" + ) + + assert ( + OCSP_SERVER.OCSP_RETRY_URL + == "https://ocspssd.snowflakecomputing.com/ocsp/retry" + ) + + del os.environ["SF_OCSP_ACTIVATE_NEW_ENDPOINT"] diff --git a/test/unit/test_oob_secret_detector.py b/test/unit/test_oob_secret_detector.py new file mode 100644 index 000000000..d6bb1b901 --- /dev/null +++ b/test/unit/test_oob_secret_detector.py @@ -0,0 +1,162 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
+# + +from __future__ import annotations + +import random +import string + +from snowflake.connector.secret_detector import SecretDetector + + +def test_mask_aws_secret(): + sql = ( + "copy into 's3://xxxx/test' from \n" + "(select seq1(), random()\n" + ", random(), random(), random(), random()\n" + ", random(), random(), random(), random()\n" + ", random() , random(), random(), random()\n" + "\tfrom table(generator(rowcount => 10000)))\n" + "credentials=(\n" + " aws_key_id='xxdsdfsafds'\n" + " aws_secret_key='safas+asfsad+safasf'\n" + " )\n" + "OVERWRITE = TRUE \n" + "MAX_FILE_SIZE = 500000000 \n" + "HEADER = TRUE \n" + "FILE_FORMAT = (TYPE = PARQUET SNAPPY_COMPRESSION = TRUE )\n" + ";" + ) + + correct = ( + "copy into 's3://xxxx/test' from \n" + "(select seq1(), random()\n" + ", random(), random(), random(), random()\n" + ", random(), random(), random(), random()\n" + ", random() , random(), random(), random()\n" + "\tfrom table(generator(rowcount => 10000)))\n" + "credentials=(\n" + " aws_key_id='****'\n" + " aws_secret_key='****'\n" + " )\n" + "OVERWRITE = TRUE \n" + "MAX_FILE_SIZE = 500000000 \n" + "HEADER = TRUE \n" + "FILE_FORMAT = (TYPE = PARQUET SNAPPY_COMPRESSION = TRUE )\n" + ";" + ) + + # Mask an aws key id and secret key + _, masked_sql, _ = SecretDetector.mask_secrets(sql) + assert masked_sql == correct + + +def test_mask_sas_token(): + azure_sas_token = ( + "https://someaccounts.blob.core.windows.net/results/018b90ab-0033-" + "5f8e-0000-14f1000bd376_0/main/data_0_0_1?sv=2015-07-08&" + "sig=iCvQmdZngZNW%2F4vw43j6%2BVz6fndHF5LI639QJba4r8o%3D&" + "spr=https&st=2016-04-12T03%3A24%3A31Z&" + "se=2016-04-13T03%3A29%3A31Z&srt=s&ss=bf&sp=rwl" + ) + + masked_azure_sas_token = ( + "https://someaccounts.blob.core.windows.net/results/018b90ab-0033-" + "5f8e-0000-14f1000bd376_0/main/data_0_0_1?sv=2015-07-08&" + "sig=****&" + "spr=https&st=2016-04-12T03%3A24%3A31Z&" + "se=2016-04-13T03%3A29%3A31Z&srt=s&ss=bf&sp=rwl" + ) + + s3_sas_token = ( + "https://somebucket.s3.amazonaws.com/vzy1-s-va_demo0/results/018b92f3" + "-01c2-02dd-0000-03d5000c8066_0/main/data_0_0_1?" + "x-amz-server-side-encryption-customer-algorithm=AES256&" + "response-content-encoding=gzip&AWSAccessKeyId=AKIAIOSFODNN7EXAMPLE" + "&Expires=1555481960&Signature=zFiRkdB9RtRRYomppVes4fQ%2ByWw%3D" + ) + + masked_s3_sas_token = ( + "https://somebucket.s3.amazonaws.com/vzy1-s-va_demo0/results/018b92f3" + "-01c2-02dd-0000-03d5000c8066_0/main/data_0_0_1?" 
+ "x-amz-server-side-encryption-customer-algorithm=AES256&" + "response-content-encoding=gzip&AWSAccessKeyId=****" + "&Expires=1555481960&Signature=****" + ) + + # Mask azure token + _, masked_text, _ = SecretDetector.mask_secrets(azure_sas_token) + assert masked_text == masked_azure_sas_token + + # Mask s3 token + _, masked_text, _ = SecretDetector.mask_secrets(s3_sas_token) + assert masked_text == masked_s3_sas_token + + text = "".join([random.choice(string.ascii_lowercase) for i in range(200)]) + _, masked_text, _ = SecretDetector.mask_secrets(text) + # Randomly generated string should cause no substitutions + assert masked_text == text + + # Mask multiple azure tokens + _, masked_text, _ = SecretDetector.mask_secrets( + azure_sas_token + "\n" + azure_sas_token + ) + assert masked_text == masked_azure_sas_token + "\n" + masked_azure_sas_token + + # Mask multiple s3 tokens + _, masked_text, _ = SecretDetector.mask_secrets(s3_sas_token + "\n" + s3_sas_token) + assert masked_text == masked_s3_sas_token + "\n" + masked_s3_sas_token + + # Mask azure and s3 token + _, masked_text, _ = SecretDetector.mask_secrets( + azure_sas_token + "\n" + s3_sas_token + ) + assert masked_text == masked_azure_sas_token + "\n" + masked_s3_sas_token + + +def test_mask_secrets(): + sql = ( + "create stage mystage " + "URL = 's3://mybucket/mypath/' " + "credentials = (aws_key_id = 'AKIAIOSFODNN7EXAMPLE' " + "aws_secret_key = 'frJIUN8DYpKDtOLCwo//yllqDzg='); " + "create stage mystage2 " + "URL = 'azure//mystorage.blob.core.windows.net/cont' " + "credentials = (azure_sas_token = " + "'?sv=2016-05-31&ss=b&srt=sco&sp=rwdl&se=2018-06-27T10:05:50Z&" + "st=2017-06-27T02:05:50Z&spr=https,http&" + "sig=bgqQwoXwxzuD2GJfagRg7VOS8hzNr3QLT7rhS8OFRLQ%3D')" + ) + + masked_sql = ( + "create stage mystage " + "URL = 's3://mybucket/mypath/' " + "credentials = (aws_key_id='****' " + "aws_secret_key='****'); " + "create stage mystage2 " + "URL = 'azure//mystorage.blob.core.windows.net/cont' " + "credentials = (azure_sas_token = " + "'?sv=2016-05-31&ss=b&srt=sco&sp=rwdl&se=2018-06-27T10:05:50Z&" + "st=2017-06-27T02:05:50Z&spr=https,http&" + "sig=****')" + ) + + # Test masking all kinds of secrets + _, masked_text, _ = SecretDetector.mask_secrets(sql) + assert masked_text == masked_sql + + text = "".join([random.choice(string.ascii_lowercase) for i in range(500)]) + _, masked_text, _ = SecretDetector.mask_secrets(text) + # Randomly generated string should cause no substitutions + assert masked_text == text + + +def test_mask_private_keys(): + text = '"privateKeyData": "aslkjdflasjf"' + + filtered_text = '"privateKeyData": "XXXX"' + + _, result, _ = SecretDetector.mask_secrets(text) + assert result == filtered_text diff --git a/test/unit/test_parse_account.py b/test/unit/test_parse_account.py new file mode 100644 index 000000000..ceb1b55db --- /dev/null +++ b/test/unit/test_parse_account.py @@ -0,0 +1,18 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
+# + +from __future__ import annotations + +from snowflake.connector.util_text import parse_account + + +def test_parse_account_basic(): + assert parse_account("account1") == "account1" + + assert parse_account("account1.eu-central-1") == "account1" + + assert ( + parse_account("account1-jkabfvdjisoa778wqfgeruishafeuw89q.global") == "account1" + ) diff --git a/test/unit/test_proxies.py b/test/unit/test_proxies.py new file mode 100644 index 000000000..105ff4691 --- /dev/null +++ b/test/unit/test_proxies.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# +from __future__ import annotations + +import os + + +def test_set_proxies(): + from snowflake.connector.proxy import set_proxies + + assert set_proxies("proxyhost", "8080") == { + "http": "http://proxyhost:8080", + "https": "http://proxyhost:8080", + } + assert set_proxies("http://proxyhost", "8080") == { + "http": "http://proxyhost:8080", + "https": "http://proxyhost:8080", + } + assert set_proxies("http://proxyhost", "8080", "testuser", "testpass") == { + "http": "http://testuser:testpass@proxyhost:8080", + "https": "http://testuser:testpass@proxyhost:8080", + } + assert set_proxies("proxyhost", "8080", "testuser", "testpass") == { + "http": "http://testuser:testpass@proxyhost:8080", + "https": "http://testuser:testpass@proxyhost:8080", + } + + # NOTE environment variable is set if the proxy parameter is specified. + del os.environ["HTTP_PROXY"] + del os.environ["HTTPS_PROXY"] diff --git a/test/unit/test_put_get.py b/test/unit/test_put_get.py new file mode 100644 index 000000000..1845a4b1c --- /dev/null +++ b/test/unit/test_put_get.py @@ -0,0 +1,167 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# +from __future__ import annotations + +from os import chmod, path +from unittest import mock + +import pytest + +from snowflake.connector import OperationalError +from snowflake.connector.compat import IS_WINDOWS +from snowflake.connector.cursor import SnowflakeCursor +from snowflake.connector.errors import Error +from snowflake.connector.file_transfer_agent import ( + SnowflakeAzureProgressPercentage, + SnowflakeFileTransferAgent, + SnowflakeS3ProgressPercentage, +) + + +@pytest.mark.skipif(IS_WINDOWS, reason="permission model is different") +def test_put_error(tmpdir): + """Tests for raise_put_get_error flag (now turned on by default) in SnowflakeFileTransferAgent.""" + tmp_dir = str(tmpdir.mkdir("putfiledir")) + file1 = path.join(tmp_dir, "file1") + remote_location = path.join(tmp_dir, "remote_loc") + with open(file1, "w") as f: + f.write("test1") + + con = mock.MagicMock() + cursor = con.cursor() + cursor.errorhandler = Error.default_errorhandler + query = "PUT something" + ret = { + "data": { + "command": "UPLOAD", + "autoCompress": False, + "src_locations": [file1], + "sourceCompression": "none", + "stageInfo": { + "creds": {}, + "location": remote_location, + "locationType": "LOCAL_FS", + "path": "remote_loc", + }, + }, + "success": True, + } + + agent_class = SnowflakeFileTransferAgent + + # no error is raised + sf_file_transfer_agent = agent_class(cursor, query, ret, raise_put_get_error=False) + sf_file_transfer_agent.execute() + sf_file_transfer_agent.result() + + # nobody can read now. 
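+    # chmod 0o000 clears read/write/execute for owner, group and other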
+ chmod(file1, 0o000) + # Permission error should be raised + sf_file_transfer_agent = agent_class(cursor, query, ret, raise_put_get_error=True) + sf_file_transfer_agent.execute() + with pytest.raises(Exception): + sf_file_transfer_agent.result() + + # unspecified, should fail because flag is on by default now + sf_file_transfer_agent = agent_class(cursor, query, ret) + sf_file_transfer_agent.execute() + with pytest.raises(Exception): + sf_file_transfer_agent.result() + + chmod(file1, 0o700) + + +def test_get_empty_file(tmpdir): + """Tests for error message when retrieving missing file.""" + tmp_dir = str(tmpdir.mkdir("getfiledir")) + + con = mock.MagicMock() + cursor = con.cursor() + cursor.errorhandler = Error.default_errorhandler + query = f"GET something file:\\{tmp_dir}" + ret = { + "data": { + "localLocation": tmp_dir, + "command": "DOWNLOAD", + "autoCompress": False, + "src_locations": [], + "sourceCompression": "none", + "stageInfo": { + "creds": {}, + "location": "", + "locationType": "S3", + "path": "remote_loc", + }, + }, + "success": True, + } + + sf_file_transfer_agent = SnowflakeFileTransferAgent( + cursor, query, ret, raise_put_get_error=True + ) + with pytest.raises(OperationalError, match=".*the file does not exist.*$"): + sf_file_transfer_agent.execute() + assert not sf_file_transfer_agent.result()["rowset"] + + +@pytest.mark.skipolddriver +def test_percentage(tmp_path): + """Tests for ProgressPercentage classes.""" + from snowflake.connector.file_transfer_agent import percent + + assert 1.0 == percent(0, 0) + assert 1.0 == percent(20, 0) + assert 1.0 == percent(40, 20) + assert 0.5 == percent(14, 28) + + file_path = tmp_path / "zero_file1" + file_path.touch() + func_callback = SnowflakeS3ProgressPercentage(str(file_path), 0) + func_callback(1) + func_callback = SnowflakeAzureProgressPercentage(str(file_path), 0) + func_callback(1) + + +@pytest.mark.skipolddriver +def test_upload_file_with_azure_upload_failed_error(tmp_path): + """Tests Upload file with expired Azure storage token.""" + file1 = tmp_path / "file1" + with file1.open("w") as f: + f.write("test1") + rest_client = SnowflakeFileTransferAgent( + mock.MagicMock(autospec=SnowflakeCursor), + "PUT some_file.txt", + { + "data": { + "command": "UPLOAD", + "src_locations": [file1], + "sourceCompression": "none", + "stageInfo": { + "creds": { + "AZURE_SAS_TOKEN": "sas_token", + }, + "location": "some_bucket", + "region": "no_region", + "locationType": "AZURE", + "path": "remote_loc", + "endPoint": "", + "storageAccount": "storage_account", + }, + }, + "success": True, + }, + ) + exc = Exception("Stop executing") + with mock.patch( + "snowflake.connector.azure_storage_client.SnowflakeAzureRestClient._has_expired_token", + return_value=True, + ): + with mock.patch( + "snowflake.connector.file_transfer_agent.StorageCredential.update", + side_effect=exc, + ) as mock_update: + rest_client.execute() + assert mock_update.called + assert rest_client._results[0].error_details is exc diff --git a/test/test_unit_renew_session.py b/test/unit/test_renew_session.py similarity index 59% rename from test/test_unit_renew_session.py rename to test/unit/test_renew_session.py index ee20caef0..60e70ca5b 100644 --- a/test/test_unit_renew_session.py +++ b/test/unit/test_renew_session.py @@ -1,17 +1,13 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
# -from snowflake.connector.compat import ( - PY2) -from snowflake.connector.network import SnowflakeRestful +from __future__ import annotations + +from unittest.mock import MagicMock, Mock, PropertyMock -if PY2: - from mock import MagicMock, Mock, PropertyMock -else: - from unittest.mock import MagicMock, Mock, PropertyMock +from snowflake.connector.network import SnowflakeRestful def test_renew_session(): @@ -21,22 +17,23 @@ def test_renew_session(): NEW_MASTER_TOKEN = "new_master_token" connection = MagicMock() connection.errorhandler = Mock(return_value=None) - type(connection)._probe_connection = PropertyMock( - return_value=False) + type(connection)._probe_connection = PropertyMock(return_value=False) rest = SnowflakeRestful( - host='testaccount.snowflakecomputing.com', - port=443, - connection=connection) + host="testaccount.snowflakecomputing.com", port=443, connection=connection + ) rest._token = OLD_SESSION_TOKEN rest._master_token = OLD_MASTER_TOKEN # inject a fake method (success) def fake_request_exec(**_): - return {u'success': True, - u'data': { - u"sessionToken": NEW_SESSION_TOKEN, - u"masterToken": NEW_MASTER_TOKEN}} + return { + "success": True, + "data": { + "sessionToken": NEW_SESSION_TOKEN, + "masterToken": NEW_MASTER_TOKEN, + }, + } rest._request_exec = fake_request_exec @@ -47,9 +44,7 @@ def fake_request_exec(**_): # inject a fake method (failure) def fake_request_exec(**_): - return {u'success': False, - u'message': "failed to renew session", - u'code': 987654} + return {"success": False, "message": "failed to renew session", "code": 987654} rest._request_exec = fake_request_exec diff --git a/test/unit/test_result_batch.py b/test/unit/test_result_batch.py new file mode 100644 index 000000000..0480f8e25 --- /dev/null +++ b/test/unit/test_result_batch.py @@ -0,0 +1,152 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
+# + +from __future__ import annotations + +from collections import namedtuple +from http import HTTPStatus +from test.helpers import create_mock_response +from unittest import mock + +import pytest + +from snowflake.connector import DatabaseError, InterfaceError +from snowflake.connector.compat import ( + BAD_GATEWAY, + BAD_REQUEST, + FORBIDDEN, + GATEWAY_TIMEOUT, + INTERNAL_SERVER_ERROR, + METHOD_NOT_ALLOWED, + OK, + REQUEST_TIMEOUT, + SERVICE_UNAVAILABLE, + UNAUTHORIZED, +) +from snowflake.connector.errorcode import ( + ER_FAILED_TO_CONNECT_TO_DB, + ER_FAILED_TO_REQUEST, +) +from snowflake.connector.errors import ( + BadGatewayError, + BadRequest, + ForbiddenError, + GatewayTimeoutError, + InternalServerError, + MethodNotAllowed, + OtherHTTPRetryableError, + ServiceUnavailableError, +) + +try: + from snowflake.connector.result_batch import MAX_DOWNLOAD_RETRY, JSONResultBatch +except ImportError: + MAX_DOWNLOAD_RETRY = None + JSONResultBatch = None +from snowflake.connector.sqlstate import ( + SQLSTATE_CONNECTION_REJECTED, + SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED, +) + +try: + from snowflake.connector.vendored import requests # NOQA + + REQUEST_MODULE_PATH = "snowflake.connector.vendored.requests" +except ImportError: + REQUEST_MODULE_PATH = "requests" + + +MockRemoteChunkInfo = namedtuple("MockRemoteChunkInfo", "url") +chunk_info = MockRemoteChunkInfo("http://www.chunk-url.com") +result_batch = ( + JSONResultBatch(100, None, chunk_info, [], [], True) if JSONResultBatch else None +) + + +@mock.patch(REQUEST_MODULE_PATH + ".get") +def test_ok_response_download(mock_get): + mock_get.return_value = create_mock_response(200) + + response = result_batch._download() + + # successful on first try + assert mock_get.call_count == 1 + assert response.status_code == 200 + + +@pytest.mark.parametrize( + "errcode,error_class", + [ + (BAD_REQUEST, BadRequest), # 400 + (FORBIDDEN, ForbiddenError), # 403 + (METHOD_NOT_ALLOWED, MethodNotAllowed), # 405 + (REQUEST_TIMEOUT, OtherHTTPRetryableError), # 408 + (INTERNAL_SERVER_ERROR, InternalServerError), # 500 + (BAD_GATEWAY, BadGatewayError), # 502 + (SERVICE_UNAVAILABLE, ServiceUnavailableError), # 503 + (GATEWAY_TIMEOUT, GatewayTimeoutError), # 504 + (555, OtherHTTPRetryableError), # random 5xx error + ], +) +def test_retryable_response_download(errcode, error_class): + """This test checks that responses which are deemed 'retryable' are handled correctly.""" + # retryable exceptions + with mock.patch(REQUEST_MODULE_PATH + ".get") as mock_get: + mock_get.return_value = create_mock_response(errcode) + + with mock.patch("time.sleep", return_value=None): + with pytest.raises(error_class) as ex: + _ = result_batch._download() + err_msg = ex.value.msg + if isinstance(errcode, HTTPStatus): + assert str(errcode.value) in err_msg + else: + assert str(errcode) in err_msg + assert mock_get.call_count == MAX_DOWNLOAD_RETRY + + +def test_unauthorized_response_download(): + """This tests that the Unauthorized response (401 status code) is handled correctly.""" + with mock.patch(REQUEST_MODULE_PATH + ".get") as mock_get: + mock_get.return_value = create_mock_response(UNAUTHORIZED) + + with mock.patch("time.sleep", return_value=None): + with pytest.raises(DatabaseError) as ex: + _ = result_batch._download() + error = ex.value + assert error.errno == ER_FAILED_TO_CONNECT_TO_DB + assert error.sqlstate == SQLSTATE_CONNECTION_REJECTED + assert "401" in error.msg + assert mock_get.call_count == MAX_DOWNLOAD_RETRY + + +@pytest.mark.parametrize("status_code", [201, 302]) +def 
test_non_200_response_download(status_code): + """This test checks that "success" codes which are not 200 still retry.""" + with mock.patch(REQUEST_MODULE_PATH + ".get") as mock_get: + mock_get.return_value = create_mock_response(status_code) + + with mock.patch("time.sleep", return_value=None): + with pytest.raises(InterfaceError) as ex: + _ = result_batch._download() + error = ex.value + assert error.errno == ER_FAILED_TO_REQUEST + assert error.sqlstate == SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED + assert mock_get.call_count == MAX_DOWNLOAD_RETRY + + +def test_retries_until_success(): + with mock.patch(REQUEST_MODULE_PATH + ".get") as mock_get: + error_codes = [BAD_REQUEST, UNAUTHORIZED, 201] + # There is an OK added to the list of responses so that there is a success + # and the retry loop ends. + mock_responses = [create_mock_response(code) for code in error_codes + [OK]] + mock_get.side_effect = mock_responses + + with mock.patch("time.sleep", return_value=None): + res = result_batch._download() + assert res.raw == "success" + # call `get` once for each error and one last time when it succeeds + assert mock_get.call_count == len(error_codes) + 1 diff --git a/test/test_unit_retry_network.py b/test/unit/test_retry_network.py similarity index 56% rename from test/test_unit_retry_network.py rename to test/unit/test_retry_network.py index 4b747f77b..ba983bdf6 100644 --- a/test/test_unit_retry_network.py +++ b/test/unit/test_retry_network.py @@ -1,74 +1,64 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. # +from __future__ import annotations + import errno import os -import tempfile import time -from logging import getLogger -from os import path +from unittest.mock import MagicMock, Mock, PropertyMock import OpenSSL.SSL import pytest -from botocore.vendored.requests.exceptions import ( - ConnectionError, ConnectTimeout, ReadTimeout, SSLError) -from botocore.vendored.requests.packages.urllib3.exceptions import ( - ProtocolError, ReadTimeoutError) from snowflake.connector.compat import ( - PY2, OK, INTERNAL_SERVER_ERROR, FORBIDDEN, - SERVICE_UNAVAILABLE, - GATEWAY_TIMEOUT, - BAD_REQUEST, BAD_GATEWAY, + BAD_REQUEST, + FORBIDDEN, + GATEWAY_TIMEOUT, + INTERNAL_SERVER_ERROR, + OK, + SERVICE_UNAVAILABLE, UNAUTHORIZED, BadStatusLine, - IncompleteRead) + IncompleteRead, +) from snowflake.connector.errors import ( - InterfaceError, DatabaseError, OtherHTTPRetryableError) + DatabaseError, + InterfaceError, + OtherHTTPRetryableError, +) from snowflake.connector.network import ( - RetryRequest, SnowflakeRestful, STATUS_TO_EXCEPTION) - -if PY2: - from mock import MagicMock, PropertyMock, Mock -else: - from unittest.mock import MagicMock, PropertyMock, Mock + STATUS_TO_EXCEPTION, + RetryRequest, + SnowflakeRestful, +) + +# We need these for our OldDriver tests. 
We run most up to date tests with the oldest supported driver version +try: + from snowflake.connector.vendored import requests, urllib3 +except ImportError: # pragma: no cover + import requests + import urllib3 THIS_DIR = os.path.dirname(os.path.realpath(__file__)) -import logging - -for logger_name in ['test', 'snowflake.connector', 'botocore']: - logger = logging.getLogger(logger_name) - logger.setLevel(logging.DEBUG) - ch = logging.FileHandler( - path.join(tempfile.gettempdir(), 'python_connector.log')) - ch.setLevel(logging.DEBUG) - ch.setFormatter(logging.Formatter( - '%(asctime)s - %(threadName)s %(filename)s:%(lineno)d - %(funcName)s() - %(levelname)s - %(message)s')) - logger.addHandler(ch) - -logger = getLogger(__name__) - def test_request_exec(): - rest = SnowflakeRestful( - host='testaccount.snowflakecomputing.com', - port=443) + rest = SnowflakeRestful(host="testaccount.snowflakecomputing.com", port=443) default_parameters = { - 'method': "POST", - 'full_url': "https://testaccount.snowflakecomputing.com/", - 'headers': {}, - 'data': '{"code": 12345}', - 'token': None + "method": "POST", + "full_url": "https://testaccount.snowflakecomputing.com/", + "headers": {}, + "data": '{"code": 12345}', + "token": None, } # request mock - output_data = {'success': True, 'code': 12345} + output_data = {"success": True, "code": 12345} request_mock = MagicMock() type(request_mock).status_code = PropertyMock(return_value=OK) request_mock.json.return_value = output_data @@ -79,7 +69,7 @@ def test_request_exec(): # success ret = rest._request_exec(session=session, **default_parameters) - assert ret == output_data, 'output data' + assert ret == output_data, "output data" # retryable exceptions for errcode in [ @@ -93,30 +83,24 @@ def test_request_exec(): ]: type(request_mock).status_code = PropertyMock(return_value=errcode) try: - rest._request_exec( - session=session, **default_parameters) - pytest.fail('should fail') + rest._request_exec(session=session, **default_parameters) + pytest.fail("should fail") except RetryRequest as e: - cls = STATUS_TO_EXCEPTION.get( - errcode, - OtherHTTPRetryableError) - assert isinstance( - e.args[0], - cls), "must be internal error exception" + cls = STATUS_TO_EXCEPTION.get(errcode, OtherHTTPRetryableError) + assert isinstance(e.args[0], cls), "must be internal error exception" # unauthorized type(request_mock).status_code = PropertyMock(return_value=UNAUTHORIZED) with pytest.raises(InterfaceError): - rest._request_exec( - session=session, **default_parameters) + rest._request_exec(session=session, **default_parameters) # unauthorized with catch okta unauthorized error # TODO: what is the difference to InterfaceError? 
type(request_mock).status_code = PropertyMock(return_value=UNAUTHORIZED) with pytest.raises(DatabaseError): rest._request_exec( - session=session, catch_okta_unauthorized_error=True, - **default_parameters) + session=session, catch_okta_unauthorized_error=True, **default_parameters + ) class IncompleteReadMock(IncompleteRead): def __init__(self): @@ -124,21 +108,19 @@ def __init__(self): # handle retryable exception for exc in [ - ConnectTimeout, - ReadTimeout, + requests.exceptions.ConnectTimeout, + requests.exceptions.ReadTimeout, IncompleteReadMock, - SSLError, - ProtocolError, - ConnectionError, + urllib3.exceptions.ProtocolError, + requests.exceptions.ConnectionError, AttributeError, ]: session = MagicMock() session.request = Mock(side_effect=exc) try: - rest._request_exec( - session=session, **default_parameters) - pytest.fail('should fail') + rest._request_exec(session=session, **default_parameters) + pytest.fail("should fail") except RetryRequest as e: cause = e.args[0] assert isinstance(cause, exc), "same error class" @@ -149,15 +131,15 @@ def __init__(self): OpenSSL.SSL.SysCallError(errno.ETIMEDOUT), OpenSSL.SSL.SysCallError(errno.EPIPE), OpenSSL.SSL.SysCallError(-1), # unknown - ReadTimeoutError(None, None, None), - BadStatusLine('fake') + # TODO: should we keep this? + # urllib3.exceptions.ReadTimeoutError(None, None, None), + BadStatusLine("fake"), ]: session = MagicMock() session.request = Mock(side_effect=exc) try: - rest._request_exec( - session=session, **default_parameters) - pytest.fail('should fail') + rest._request_exec(session=session, **default_parameters) + pytest.fail("should fail") except RetryRequest as e: assert e.args[0] == exc, "same error instance" @@ -167,11 +149,10 @@ def test_fetch(): connection.errorhandler = Mock(return_value=None) rest = SnowflakeRestful( - host='testaccount.snowflakecomputing.com', - port=443, - connection=connection) + host="testaccount.snowflakecomputing.com", port=443, connection=connection + ) - class Cnt(object): + class Cnt: def __init__(self): self.c = 0 @@ -183,10 +164,10 @@ def reset(self): cnt = Cnt() default_parameters = { - 'method': "POST", - 'full_url': "https://testaccount.snowflakecomputing.com/", - 'headers': {'cnt': cnt}, - 'data': '{"code": 12345}', + "method": "POST", + "full_url": "https://testaccount.snowflakecomputing.com/", + "headers": {"cnt": cnt}, + "data": '{"code": 12345}', } NOT_RETRYABLE = 1000 @@ -195,19 +176,19 @@ class NotRetryableException(Exception): pass def fake_request_exec(**kwargs): - headers = kwargs.get('headers') - cnt = headers['cnt'] + headers = kwargs.get("headers") + cnt = headers["cnt"] time.sleep(3) if cnt.c <= 1: # the first two raises failure cnt.c += 1 - raise RetryRequest(Exception('can retry')) + raise RetryRequest(Exception("can retry")) elif cnt.c == NOT_RETRYABLE: # not retryable exception - raise NotRetryableException('cannot retry') + raise NotRetryableException("cannot retry") else: # return success in the third attempt - return {'success': True, 'data': "valid data"} + return {"success": True, "data": "valid data"} # inject a fake method rest._request_exec = fake_request_exec @@ -215,7 +196,7 @@ def fake_request_exec(**kwargs): # first two attempts will fail but third will success cnt.reset() ret = rest.fetch(timeout=10, **default_parameters) - assert ret == {'success': True, 'data': "valid data"} + assert ret == {"success": True, "data": "valid data"} assert not rest._connection.errorhandler.called # no error # first attempt to reach timeout even if the exception is 
retryable
@@ -228,3 +209,11 @@ def fake_request_exec(**kwargs):
     cnt.set(NOT_RETRYABLE)
     with pytest.raises(NotRetryableException):
         rest.fetch(timeout=7, **default_parameters)
+
+    # first attempt fails and will not retry
+    cnt.reset()
+    default_parameters["no_retry"] = True
+    ret = rest.fetch(timeout=10, **default_parameters)
+    assert ret == {}
+    assert cnt.c == 1  # failed on first call - did not retry
+    assert rest._connection.errorhandler.called  # error
diff --git a/test/unit/test_s3_util.py b/test/unit/test_s3_util.py
new file mode 100644
index 000000000..9cf2582be
--- /dev/null
+++ b/test/unit/test_s3_util.py
@@ -0,0 +1,450 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved.
+#
+
+from __future__ import annotations
+
+import logging
+import re
+from os import path
+from unittest import mock
+from unittest.mock import MagicMock
+
+import pytest
+
+from snowflake.connector import SnowflakeConnection
+from snowflake.connector.constants import SHA256_DIGEST
+from snowflake.connector.cursor import SnowflakeCursor
+from snowflake.connector.file_transfer_agent import SnowflakeFileTransferAgent
+
+from ..helpers import verify_log_tuple
+
+try:
+    from snowflake.connector.constants import megabyte
+    from snowflake.connector.errors import RequestExceedMaxRetryError
+    from snowflake.connector.file_transfer_agent import (
+        SnowflakeFileMeta,
+        StorageCredential,
+    )
+    from snowflake.connector.s3_storage_client import (
+        ERRORNO_WSAECONNABORTED,
+        EXPIRED_TOKEN,
+        SnowflakeS3RestClient,
+    )
+    from snowflake.connector.vendored.requests import HTTPError, Response
+except ImportError:
+    # Compatibility for olddriver tests
+    from requests import HTTPError, Response
+
+    from snowflake.connector.s3_util import ERRORNO_WSAECONNABORTED  # NOQA
+
+    SnowflakeFileMeta = dict
+    SnowflakeS3RestClient = None
+    RequestExceedMaxRetryError = None
+    StorageCredential = None
+    megabyte = 1024 * 1024  # match the name imported in the try branch
+    DEFAULT_MAX_RETRY = 5
+
+THIS_DIR = path.dirname(path.realpath(__file__))
+MINIMAL_METADATA = SnowflakeFileMeta(
+    name="file.txt",
+    stage_location_type="S3",
+    src_file_name="file.txt",
+)
+
+
+@pytest.mark.parametrize(
+    "input, bucket_name, s3path",
+    [
+        ("sfc-dev1-regression/test_sub_dir/", "sfc-dev1-regression", "test_sub_dir/"),
+        (
+            "sfc-dev1-regression/stakeda/test_stg/test_sub_dir/",
+            "sfc-dev1-regression",
+            "stakeda/test_stg/test_sub_dir/",
+        ),
+        ("sfc-dev1-regression/", "sfc-dev1-regression", ""),
+        ("sfc-dev1-regression//", "sfc-dev1-regression", "/"),
+        ("sfc-dev1-regression///", "sfc-dev1-regression", "//"),
+    ],
+)
+def test_extract_bucket_name_and_path(input, bucket_name, s3path):
+    """Extracts bucket name and S3 path."""
+    s3_loc = SnowflakeS3RestClient._extract_bucket_name_and_path(input)
+    assert s3_loc.bucket_name == bucket_name
+    assert s3_loc.path == s3path
+
+
+def test_upload_file_with_s3_upload_failed_error(tmp_path):
+    """Tests uploading a file with S3UploadFailedError, which could indicate that the AWS token has expired."""
+    file1 = tmp_path / "file1"
+    with file1.open("w") as f:
+        f.write("test1")
+    rest_client = SnowflakeFileTransferAgent(
+        MagicMock(autospec=SnowflakeCursor),
+        "PUT some_file.txt",
+        {
+            "data": {
+                "command": "UPLOAD",
+                "autoCompress": False,
+                "src_locations": [file1],
+                "sourceCompression": "none",
+                "stageInfo": {
+                    "creds": {
+                        "AWS_SECRET_KEY": "secret key",
+                        "AWS_KEY_ID": "secret id",
+                        "AWS_TOKEN": "",
+                    },
+                    "location": "some_bucket",
+                    "region": "no_region",
+                    "locationType": "S3",
+                    "path": "remote_loc",
+                    "endPoint": "",
+                },
+            },
+            "success": True,
+        },
+    )
+    exc = Exception("Stop executing")
+
+    def mock_transfer_accelerate_config(
+        self: SnowflakeS3RestClient,
+        use_accelerate_endpoint: bool | None = None,
+    ) -> bool:
+        self.endpoint = f"https://{self.s3location.bucket_name}.s3.amazonaws.com"
+        return False
+
+    with mock.patch(
+        "snowflake.connector.s3_storage_client.SnowflakeS3RestClient._has_expired_token",
+        return_value=True,
+    ):
+        with mock.patch(
+            "snowflake.connector.s3_storage_client.SnowflakeS3RestClient.transfer_accelerate_config",
+            mock_transfer_accelerate_config,
+        ):
+            with mock.patch(
+                "snowflake.connector.file_transfer_agent.StorageCredential.update",
+                side_effect=exc,
+            ) as mock_update:
+                rest_client.execute()
+                assert mock_update.called
+                assert rest_client._results[0].error_details is exc
+
+
+def test_get_header_expiry_error():
+    """Tests whether a token expiry error is handled as expected when getting the file header."""
+    meta_info = {
+        "name": "data1.txt.gz",
+        "stage_location_type": "S3",
+        "no_sleeping_time": True,
+        "put_callback": None,
+        "put_callback_output_stream": None,
+        SHA256_DIGEST: "123456789abcdef",
+        "dst_file_name": "data1.txt.gz",
+        "src_file_name": path.join(THIS_DIR, "../data", "put_get_1.txt"),
+        "overwrite": True,
+    }
+    meta = SnowflakeFileMeta(**meta_info)
+    creds = {"AWS_SECRET_KEY": "", "AWS_KEY_ID": "", "AWS_TOKEN": ""}
+    rest_client = SnowflakeS3RestClient(
+        meta,
+        StorageCredential(
+            creds,
+            MagicMock(autospec=SnowflakeConnection),
+            "PUT file:/tmp/file.txt @~",
+        ),
+        {
+            "locationType": "AWS",
+            "location": "bucket/path",
+            "creds": creds,
+            "region": "test",
+            "endPoint": None,
+        },
+        8 * megabyte,
+    )
+    resp = MagicMock(
+        autospec=Response,
+        status_code=400,
+        text=f"{EXPIRED_TOKEN}",
+    )
+    from snowflake.connector.storage_client import METHODS
+
+    with mock.patch.dict(METHODS, HEAD=MagicMock(return_value=resp)):
+        exc = Exception("stop execution")
+        with mock.patch.object(rest_client.credentials, "update", side_effect=exc):
+            with pytest.raises(Exception) as caught_exc:
+                rest_client.get_file_header("file.txt")
+            assert caught_exc.value is exc
+
+
+def test_get_header_unknown_error(caplog):
+    """Tests whether unexpected errors are handled as expected when getting the file header."""
+    caplog.set_level(logging.DEBUG, "snowflake.connector")
+    meta_info = {
+        "name": "data1.txt.gz",
+        "stage_location_type": "S3",
+        "no_sleeping_time": True,
+        "put_callback": None,
+        "put_callback_output_stream": None,
+        SHA256_DIGEST: "123456789abcdef",
+        "dst_file_name": "data1.txt.gz",
+        "src_file_name": path.join(THIS_DIR, "../data", "put_get_1.txt"),
+        "overwrite": True,
+    }
+    meta = SnowflakeFileMeta(**meta_info)
+    creds = {"AWS_SECRET_KEY": "", "AWS_KEY_ID": "", "AWS_TOKEN": ""}
+    rest_client = SnowflakeS3RestClient(
+        meta,
+        StorageCredential(
+            creds,
+            MagicMock(autospec=SnowflakeConnection),
+            "PUT file:/tmp/file.txt @~",
+        ),
+        {
+            "locationType": "AWS",
+            "location": "bucket/path",
+            "creds": creds,
+            "region": "test",
+            "endPoint": None,
+        },
+        8 * megabyte,
+    )
+    resp = Response()
+    # don't use transient error codes
+    resp.status_code = 555
+    from snowflake.connector.storage_client import METHODS
+
+    with mock.patch.dict(METHODS, HEAD=MagicMock(return_value=resp)):
+        with pytest.raises(HTTPError, match="555 Server Error"):
+            rest_client.get_file_header("file.txt")
+
+
+def test_upload_expiry_error():
+    """Tests whether a token expiry error is handled as expected when uploading."""
+    meta_info = {
+        "name": "data1.txt.gz",
+        "stage_location_type": "S3",
"no_sleeping_time": True, + "put_callback": None, + "put_callback_output_stream": None, + SHA256_DIGEST: "123456789abcdef", + "dst_file_name": "data1.txt.gz", + "src_file_name": path.join(THIS_DIR, "../data", "put_get_1.txt"), + "overwrite": True, + } + meta = SnowflakeFileMeta(**meta_info) + creds = {"AWS_SECRET_KEY": "", "AWS_KEY_ID": "", "AWS_TOKEN": ""} + rest_client = SnowflakeS3RestClient( + meta, + StorageCredential( + creds, + MagicMock(autospec=SnowflakeConnection), + "PUT file:/tmp/file.txt @~", + ), + { + "locationType": "AWS", + "location": "bucket/path", + "creds": creds, + "region": "test", + "endPoint": None, + }, + 8 * megabyte, + ) + resp = MagicMock( + autospec=Response, + status_code=400, + text=f"{EXPIRED_TOKEN}", + ) + from snowflake.connector.storage_client import METHODS + + with mock.patch.dict(METHODS, PUT=MagicMock(return_value=resp)): + exc = Exception("stop execution") + with mock.patch.object(rest_client.credentials, "update", side_effect=exc): + with mock.patch( + "snowflake.connector.storage_client.SnowflakeStorageClient.preprocess" + ): + rest_client.prepare_upload() + with pytest.raises(Exception) as caught_exc: + rest_client.upload_chunk(0) + assert caught_exc.value is exc + + +def test_upload_unknown_error(): + """Tests whether unknown errors are handled as expected when uploading.""" + meta_info = { + "name": "data1.txt.gz", + "stage_location_type": "S3", + "no_sleeping_time": True, + "put_callback": None, + "put_callback_output_stream": None, + SHA256_DIGEST: "123456789abcdef", + "dst_file_name": "data1.txt.gz", + "src_file_name": path.join(THIS_DIR, "../data", "put_get_1.txt"), + "overwrite": True, + } + meta = SnowflakeFileMeta(**meta_info) + creds = {"AWS_SECRET_KEY": "", "AWS_KEY_ID": "", "AWS_TOKEN": ""} + rest_client = SnowflakeS3RestClient( + meta, + StorageCredential( + creds, + MagicMock(autospec=SnowflakeConnection), + "PUT file:/tmp/file.txt @~", + ), + { + "locationType": "AWS", + "location": "bucket/path", + "creds": creds, + "region": "test", + "endPoint": None, + }, + 8 * megabyte, + ) + resp = Response() + resp.status_code = 555 + from snowflake.connector.storage_client import METHODS + + with mock.patch.dict(METHODS, PUT=MagicMock(return_value=resp)): + exc = Exception("stop execution") + with mock.patch.object(rest_client.credentials, "update", side_effect=exc): + with mock.patch( + "snowflake.connector.storage_client.SnowflakeStorageClient.preprocess" + ): + rest_client.prepare_upload() + with pytest.raises(HTTPError, match="555 Server Error"): + rest_client.upload_chunk(0) + + +def test_download_expiry_error(): + """Tests whether token expiry error is handled as expected when downloading.""" + meta_info = { + "name": "data1.txt.gz", + "stage_location_type": "S3", + "no_sleeping_time": True, + "put_callback": None, + "put_callback_output_stream": None, + SHA256_DIGEST: "123456789abcdef", + "dst_file_name": "data1.txt.gz", + "src_file_name": "path/to/put_get_1.txt", + "overwrite": True, + } + meta = SnowflakeFileMeta(**meta_info) + creds = {"AWS_SECRET_KEY": "", "AWS_KEY_ID": "", "AWS_TOKEN": ""} + rest_client = SnowflakeS3RestClient( + meta, + StorageCredential( + creds, + MagicMock(autospec=SnowflakeConnection), + "GET file:/tmp/file.txt @~", + ), + { + "locationType": "AWS", + "location": "bucket/path", + "creds": creds, + "region": "test", + "endPoint": None, + }, + 8 * megabyte, + ) + resp = MagicMock( + autospec=Response, + status_code=400, + text=f"{EXPIRED_TOKEN}", + ) + from snowflake.connector.storage_client import METHODS + 
+ with mock.patch.dict(METHODS, GET=MagicMock(return_value=resp)): + exc = Exception("stop execution") + with mock.patch.object(rest_client.credentials, "update", side_effect=exc): + with pytest.raises(Exception) as caught_exc: + rest_client.download_chunk(0) + assert caught_exc.value is exc + + +def test_download_unknown_error(caplog): + """Tests whether an unknown error is handled as expected when downloading.""" + caplog.set_level(logging.DEBUG, "snowflake.connector") + agent = SnowflakeFileTransferAgent( + MagicMock(), + "get @~/f /tmp", + { + "data": { + "command": "DOWNLOAD", + "src_locations": ["/tmp/a"], + "stageInfo": { + "locationType": "S3", + "location": "", + "creds": {"AWS_SECRET_KEY": "", "AWS_KEY_ID": "", "AWS_TOKEN": ""}, + "region": "", + "endPoint": None, + }, + "localLocation": "/tmp", + } + }, + ) + resp = Response() + resp.status_code = 400 + resp.reason = "No, just chuck testing..." + with mock.patch( + "snowflake.connector.s3_storage_client.SnowflakeS3RestClient._send_request_with_authentication_and_retry", + return_value=resp, + ), mock.patch( + "snowflake.connector.file_transfer_agent.SnowflakeFileTransferAgent._transfer_accelerate_config", + side_effect=None, + ): + agent.execute() + assert ( + str(agent._file_metadata[0].error_details) + == "400 Client Error: No, just chuck testing... for url: None" + ) + assert verify_log_tuple( + "snowflake.connector.storage_client", + logging.ERROR, + re.compile("Failed to download a file: .*a"), + caplog.record_tuples, + ) + + +def test_download_retry_exceeded_error(): + """Tests whether a retry exceeded error is handled as expected when downloading.""" + meta_info = { + "name": "data1.txt.gz", + "stage_location_type": "S3", + "no_sleeping_time": True, + "put_callback": None, + "put_callback_output_stream": None, + SHA256_DIGEST: "123456789abcdef", + "dst_file_name": "data1.txt.gz", + "src_file_name": "path/to/put_get_1.txt", + "overwrite": True, + } + meta = SnowflakeFileMeta(**meta_info) + creds = {"AWS_SECRET_KEY": "", "AWS_KEY_ID": "", "AWS_TOKEN": ""} + rest_client = SnowflakeS3RestClient( + meta, + StorageCredential( + creds, + MagicMock(autospec=SnowflakeConnection), + "GET file:/tmp/file.txt @~", + ), + { + "locationType": "AWS", + "location": "bucket/path", + "creds": creds, + "region": "test", + "endPoint": None, + }, + 8 * megabyte, + ) + rest_client.SLEEP_UNIT = 0 + resp = Response() + resp.status_code = 500 # Use a transient error code + from snowflake.connector.storage_client import METHODS + + with mock.patch.dict(METHODS, GET=MagicMock(return_value=resp)): + with mock.patch.object(rest_client.credentials, "update"): + with pytest.raises( + RequestExceedMaxRetryError, + match=r"GET with url .* failed for exceeding maximum retries", + ): + rest_client.download_chunk(0) diff --git a/test/unit/test_session_manager.py b/test/unit/test_session_manager.py new file mode 100644 index 000000000..3c8ccc654 --- /dev/null +++ b/test/unit/test_session_manager.py @@ -0,0 +1,112 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. 
+#
+
+from __future__ import annotations
+
+from enum import Enum
+from unittest import mock
+
+from snowflake.connector.network import SnowflakeRestful
+
+try:
+    from snowflake.connector.ssl_wrap_socket import DEFAULT_OCSP_MODE
+except ImportError:
+
+    class OCSPMode(Enum):
+        FAIL_OPEN = "FAIL_OPEN"
+
+    DEFAULT_OCSP_MODE = OCSPMode.FAIL_OPEN
+
+hostname_1 = "sfctest0.snowflakecomputing.com"
+url_1 = f"https://{hostname_1}:443/session/v1/login-request"
+
+hostname_2 = "sfc-ds2-customer-stage.s3.amazonaws.com"
+url_2 = f"https://{hostname_2}/rgm1-s-sfctest0/stages/"
+url_3 = f"https://{hostname_2}/rgm1-s-sfctst0/stages/another-url"
+
+
+mock_conn = mock.Mock()
+mock_conn.disable_request_pooling = False
+mock_conn._ocsp_mode = lambda: DEFAULT_OCSP_MODE
+
+
+def close_sessions(rest: SnowflakeRestful, num_session_pools: int) -> None:
+    """Helper function to call SnowflakeRestful.close(). Asserts close was called on all SessionPools."""
+    with mock.patch("snowflake.connector.network.SessionPool.close") as close_mock:
+        rest.close()
+    assert close_mock.call_count == num_session_pools
+
+
+def create_session(
+    rest: SnowflakeRestful, num_sessions: int = 1, url: str | None = None
+) -> None:
+    """
+    Creates 'num_sessions' sessions to 'url'. The calls nest recursively so that every
+    session stays checked out at once; otherwise idle sessions would be reused.
+    """
+    if num_sessions == 0:
+        return
+    with rest._use_requests_session(url):
+        create_session(rest, num_sessions - 1, url)
+
+
+@mock.patch("snowflake.connector.network.SnowflakeRestful.make_requests_session")
+def test_no_url_multiple_sessions(make_session_mock):
+    rest = SnowflakeRestful(connection=mock_conn)
+
+    create_session(rest, 2)
+
+    assert make_session_mock.call_count == 2
+
+    assert list(rest._sessions_map.keys()) == [None]
+
+    session_pool = rest._sessions_map[None]
+    assert len(session_pool._idle_sessions) == 2
+    assert len(session_pool._active_sessions) == 0
+
+    close_sessions(rest, 1)
+
+
+@mock.patch("snowflake.connector.network.SnowflakeRestful.make_requests_session")
+def test_multiple_urls_multiple_sessions(make_session_mock):
+    rest = SnowflakeRestful(connection=mock_conn)
+
+    for url in [url_1, url_2, None]:
+        create_session(rest, num_sessions=2, url=url)
+
+    assert make_session_mock.call_count == 6
+
+    hostnames = list(rest._sessions_map.keys())
+    for hostname in [hostname_1, hostname_2, None]:
+        assert hostname in hostnames
+
+    for pool in rest._sessions_map.values():
+        assert len(pool._idle_sessions) == 2
+        assert len(pool._active_sessions) == 0
+
+    close_sessions(rest, 3)
+
+
+@mock.patch("snowflake.connector.network.SnowflakeRestful.make_requests_session")
+def test_multiple_urls_reuse_sessions(make_session_mock):
+    rest = SnowflakeRestful(connection=mock_conn)
+    for url in [url_1, url_2, url_3, None]:
+        # create 10 sessions, one after another
+        for _ in range(10):
+            create_session(rest, url=url)
+
+    # only one session per hostname is created and reused thereafter
+    # (url_2 and url_3 share hostname_2, so three pools exist in total)
+    assert make_session_mock.call_count == 3
+
+    hostnames = list(rest._sessions_map.keys())
+    assert len(hostnames) == 3
+    for hostname in [hostname_1, hostname_2, None]:
+        assert hostname in hostnames
+
+    for pool in rest._sessions_map.values():
+        assert len(pool._idle_sessions) == 1
+        assert len(pool._active_sessions) == 0
+
+    close_sessions(rest, 3)
diff --git a/test/test_unit_split_statement.py b/test/unit/test_split_statement.py
similarity index 53%
rename from test/test_unit_split_statement.py
rename to test/unit/test_split_statement.py
index ab48b9576..d2db79b2a 100644
--- a/test/test_unit_split_statement.py
+++ 
b/test/unit/test_split_statement.py @@ -1,124 +1,126 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. # + +from __future__ import annotations + from io import StringIO import pytest -from snowflake.connector.compat import PY2 -from snowflake.connector.util_text import split_statements - +try: + from snowflake.connector.util_text import split_statements +except ImportError: + split_statements = None -def _to_unicode(sql): - return sql.decode('utf-8') if PY2 and isinstance(sql, str) else sql +try: + from snowflake.connector.util_text import SQLDelimiter +except ImportError: + SQLDelimiter = None +@pytest.mark.skipif(split_statements is None, reason="No split_statements is available") def test_simple_sql(): - with StringIO(_to_unicode("show tables")) as f: + with StringIO("show tables") as f: itr = split_statements(f) - assert next(itr) == ('show tables', False) + assert next(itr) == ("show tables", False) with pytest.raises(StopIteration): next(itr) - with StringIO(_to_unicode("show tables;")) as f: + with StringIO("show tables;") as f: itr = split_statements(f) - assert next(itr) == ('show tables;', False) + assert next(itr) == ("show tables;", False) with pytest.raises(StopIteration): next(itr) - with StringIO(_to_unicode("select 1;select 2")) as f: + with StringIO("select 1;select 2") as f: itr = split_statements(f) - assert next(itr) == ('select 1;', False) - assert next(itr) == ('select 2', False) + assert next(itr) == ("select 1;", False) + assert next(itr) == ("select 2", False) with pytest.raises(StopIteration): next(itr) - with StringIO(_to_unicode("select 1;select 2;")) as f: + with StringIO("select 1;select 2;") as f: itr = split_statements(f) - assert next(itr) == ('select 1;', False) - assert next(itr) == ('select 2;', False) + assert next(itr) == ("select 1;", False) + assert next(itr) == ("select 2;", False) with pytest.raises(StopIteration): next(itr) s = "select 1; -- test" - with StringIO(_to_unicode(s)) as f: + with StringIO(s) as f: itr = split_statements(f) - assert next(itr) == ('select 1; -- test', False) + assert next(itr) == ("select 1; -- test", False) with pytest.raises(StopIteration): next(itr) - with StringIO(_to_unicode(s)) as f: + with StringIO(s) as f: itr = split_statements(f, remove_comments=True) - assert next(itr) == ('select 1;', False) + assert next(itr) == ("select 1;", False) with pytest.raises(StopIteration): next(itr) s = "select /* test */ 1; -- test comment select 1;" - with StringIO(_to_unicode(s)) as f: + with StringIO(s) as f: itr = split_statements(f) - assert next(itr) == ( - 'select /* test */ 1; -- test comment select 1;', False) + assert next(itr) == ("select /* test */ 1; -- test comment select 1;", False) with pytest.raises(StopIteration): next(itr) - with StringIO(_to_unicode(s)) as f: + with StringIO(s) as f: itr = split_statements(f, remove_comments=True) - assert next(itr) == ('select 1;', False) + assert next(itr) == ("select 1;", False) with pytest.raises(StopIteration): next(itr) +@pytest.mark.skipif(split_statements is None, reason="No split_statements is available") def test_multiple_line_sql(): - s = """select /* test */ 1; -- test comment -select 23;""" + s = "select /* test */ 1; -- test comment\nselect 23;" - with StringIO(_to_unicode(s)) as f: + with StringIO(s) as f: itr = split_statements(f) - assert next(itr) == ( - ('select /* test */ 1; -- test comment', False)) - assert 
next(itr) == ('select 23;', False) + assert next(itr) == (("select /* test */ 1; -- test comment", False)) + assert next(itr) == ("select 23;", False) with pytest.raises(StopIteration): next(itr) - with StringIO(_to_unicode(s)) as f: + with StringIO(s) as f: itr = split_statements(f, remove_comments=True) - assert next(itr) == ('select 1;', False) - assert next(itr) == ('select 23;', False) + assert next(itr) == ("select 1;", False) + assert next(itr) == ("select 23;", False) with pytest.raises(StopIteration): next(itr) s = """select /* test */ 1; -- test comment select 23; -- test comment 2""" - with StringIO(_to_unicode(s)) as f: + with StringIO(s) as f: itr = split_statements(f) - assert next(itr) == ( - 'select /* test */ 1; -- test comment', False) - assert next(itr) == ('select 23; -- test comment 2', False) + assert next(itr) == ("select /* test */ 1; -- test comment", False) + assert next(itr) == ("select 23; -- test comment 2", False) with pytest.raises(StopIteration): next(itr) - with StringIO(_to_unicode(s)) as f: + with StringIO(s) as f: itr = split_statements(f, remove_comments=True) - assert next(itr) == ('select 1;', False) - assert next(itr) == ('select 23;', False) + assert next(itr) == ("select 1;", False) + assert next(itr) == ("select 23;", False) with pytest.raises(StopIteration): next(itr) s = """select /* test */ 1; -- test comment select 23; /* test comment 2 */ select 3""" - with StringIO(_to_unicode(s)) as f: + with StringIO(s) as f: itr = split_statements(f) - assert next(itr) == ( - 'select /* test */ 1; -- test comment', False) - assert next(itr) == ('select 23;', False) - assert next(itr) == ('/* test comment 2 */ select 3', False) + assert next(itr) == ("select /* test */ 1; -- test comment", False) + assert next(itr) == ("select 23;", False) + assert next(itr) == ("/* test comment 2 */ select 3", False) with pytest.raises(StopIteration): next(itr) - with StringIO(_to_unicode(s)) as f: + with StringIO(s) as f: itr = split_statements(f, remove_comments=True) - assert next(itr) == ('select 1;', False) - assert next(itr) == ('select 23;', False) - assert next(itr) == ('select 3', False) + assert next(itr) == ("select 1;", False) + assert next(itr) == ("select 23;", False) + assert next(itr) == ("select 3", False) with pytest.raises(StopIteration): next(itr) @@ -126,15 +128,14 @@ def test_multiple_line_sql(): select 23; /* test comment 2 */ select 3;""" - with StringIO(_to_unicode(s)) as f: + with StringIO(s) as f: itr = split_statements(f) - assert next(itr) == ( - "select /* test */ 1; -- test comment", False) + assert next(itr) == ("select /* test */ 1; -- test comment", False) assert next(itr) == ("select 23;", False) assert next(itr) == ("/* test comment 2\n*/ select 3;", False) with pytest.raises(StopIteration): next(itr) - with StringIO(_to_unicode(s)) as f: + with StringIO(s) as f: itr = split_statements(f, remove_comments=True) assert next(itr) == ("select 1;", False) assert next(itr) == ("select 23;", False) @@ -149,17 +150,20 @@ def test_multiple_line_sql(): select 23; /* test comment 2 */ select 3;""" - with StringIO(_to_unicode(s)) as f: + with StringIO(s) as f: itr = split_statements(f) - assert next(itr) == ("select /* test\n" - " continued comments 1\n" - " continued comments 2\n" - " */ 1; -- test comment", False) + assert next(itr) == ( + "select /* test\n" + " continued comments 1\n" + " continued comments 2\n" + " */ 1; -- test comment", + False, + ) assert next(itr) == ("select 23;", False) assert next(itr) == ("/* test comment 2\n*/ select 
3;", False) with pytest.raises(StopIteration): next(itr) - with StringIO(_to_unicode(s)) as f: + with StringIO(s) as f: itr = split_statements(f, remove_comments=True) assert next(itr) == ("select 1;", False) assert next(itr) == ("select 23;", False) @@ -168,18 +172,17 @@ def test_multiple_line_sql(): next(itr) +@pytest.mark.skipif(split_statements is None, reason="No split_statements is available") def test_quotes(): - s = """select 'hello', 1; -- test comment -select 23,'hello""" + s = "select 'hello', 1; -- test comment\nselect 23,'hello" - with StringIO(_to_unicode(s)) as f: + with StringIO(s) as f: itr = split_statements(f) - assert next(itr) == ( - "select 'hello', 1; -- test comment", False) + assert next(itr) == ("select 'hello', 1; -- test comment", False) assert next(itr) == ("select 23,'hello", False) with pytest.raises(StopIteration): next(itr) - with StringIO(_to_unicode(s)) as f: + with StringIO(s) as f: itr = split_statements(f, remove_comments=True) assert next(itr) == ("select 'hello', 1;", False) assert next(itr) == ("select 23,'hello", False) @@ -189,17 +192,16 @@ def test_quotes(): s = """select 'he"llo', 1; -- test comment select "23,'hello" """ - with StringIO(_to_unicode(s)) as f: + with StringIO(s) as f: itr = split_statements(f) - assert next(itr) == ( - "select 'he\"llo', 1; -- test comment", False) - assert next(itr) == ("select \"23,'hello\"", False) + assert next(itr) == ("select 'he\"llo', 1; -- test comment", False) + assert next(itr) == ('select "23,\'hello"', False) with pytest.raises(StopIteration): next(itr) - with StringIO(_to_unicode(s)) as f: + with StringIO(s) as f: itr = split_statements(f, remove_comments=True) assert next(itr) == ("select 'he\"llo', 1;", False) - assert next(itr) == ("select \"23,'hello\"", False) + assert next(itr) == ('select "23,\'hello"', False) with pytest.raises(StopIteration): next(itr) @@ -207,17 +209,16 @@ def test_quotes(): ', 1; -- test comment select "23,'hello" """ - with StringIO(_to_unicode(s)) as f: + with StringIO(s) as f: itr = split_statements(f) - assert next(itr) == ( - "select 'hello\n', 1; -- test comment", False) - assert next(itr) == ("select \"23,'hello\"", False) + assert next(itr) == ("select 'hello\n', 1; -- test comment", False) + assert next(itr) == ('select "23,\'hello"', False) with pytest.raises(StopIteration): next(itr) - with StringIO(_to_unicode(s)) as f: + with StringIO(s) as f: itr = split_statements(f, remove_comments=True) assert next(itr) == ("select 'hello\n', 1;", False) - assert next(itr) == ("select \"23,'hello\"", False) + assert next(itr) == ('select "23,\'hello"', False) with pytest.raises(StopIteration): next(itr) @@ -225,14 +226,13 @@ def test_quotes(): ', 1; -- test comment select "23,'','hello" """ - with StringIO(_to_unicode(s)) as f: + with StringIO(s) as f: itr = split_statements(f) - assert next(itr) == ( - "select 'hello''\n', 1; -- test comment", False) + assert next(itr) == ("select 'hello''\n', 1; -- test comment", False) assert next(itr) == ("select \"23,'','hello\"", False) with pytest.raises(StopIteration): next(itr) - with StringIO(_to_unicode(s)) as f: + with StringIO(s) as f: itr = split_statements(f, remove_comments=True) assert next(itr) == ("select 'hello''\n', 1;", False) assert next(itr) == ("select \"23,'','hello\"", False) @@ -240,37 +240,37 @@ def test_quotes(): next(itr) +@pytest.mark.skipif(split_statements is None, reason="No split_statements is available") def test_quotes_in_comments(): - s = """select 'hello'; -- test comment 'hello2' in comment -/* 
comment 'quote'*/ select true -""" - with StringIO(_to_unicode(s)) as f: + s = "select 'hello'; -- test comment 'hello2' in comment\n/* comment 'quote'*/ select true\n" + with StringIO(s) as f: itr = split_statements(f) assert next(itr) == ( - "select 'hello'; -- test comment 'hello2' in comment", False) - assert next(itr) == ( - "/* comment 'quote'*/ select true", False) + "select 'hello'; -- test comment 'hello2' in comment", + False, + ) + assert next(itr) == ("/* comment 'quote'*/ select true", False) with pytest.raises(StopIteration): next(itr) +@pytest.mark.skipif(split_statements is None, reason="No split_statements is available") def test_backslash(): + """Tests backslash in a literal. + + Notes: + The backslash is escaped in a Python string literal. Double backslashes in a string literal represents a + single backslash. """ - Test backslash in a literal. - Note the backslash is escaped in a Python string literal. Double backslashes - in a string literal represents a single backslash. - """ - s = """select 'hello\\\\', 1; -- test comment -select 23,'\nhello""" + s = "select 'hello\\\\', 1; -- test comment\nselect 23,'\nhello" - with StringIO(_to_unicode(s)) as f: + with StringIO(s) as f: itr = split_statements(f) - assert next(itr) == ( - "select 'hello\\\\', 1; -- test comment", False) + assert next(itr) == ("select 'hello\\\\', 1; -- test comment", False) assert next(itr) == ("select 23,'\nhello", False) with pytest.raises(StopIteration): next(itr) - with StringIO(_to_unicode(s)) as f: + with StringIO(s) as f: itr = split_statements(f, remove_comments=True) assert next(itr) == ("select 'hello\\\\', 1;", False) assert next(itr) == ("select 23,'\nhello", False) @@ -278,17 +278,17 @@ def test_backslash(): next(itr) +@pytest.mark.skipif(split_statements is None, reason="No split_statements is available") def test_file_with_slash_star(): - s = """put file:///tmp/* @%tmp; -ls @%tmp;""" + s = "put file:///tmp/* @%tmp;\nls @%tmp;" - with StringIO(_to_unicode(s)) as f: + with StringIO(s) as f: itr = split_statements(f) assert next(itr) == ("put file:///tmp/* @%tmp;", True) assert next(itr) == ("ls @%tmp;", False) with pytest.raises(StopIteration): next(itr) - with StringIO(_to_unicode(s)) as f: + with StringIO(s) as f: itr = split_statements(f, remove_comments=True) assert next(itr) == ("put file:///tmp/* @%tmp;", True) assert next(itr) == ("ls @%tmp;", False) @@ -309,24 +309,32 @@ def test_file_with_slash_star(): list @~; """ - with StringIO(_to_unicode(s)) as f: + with StringIO(s) as f: itr = split_statements(f) assert next(itr) == ("list @~;", False) # no comment line is returned assert next(itr) == ( - "-- first half\n" - "put file://$SELF_DIR/staging-test-data/*.csv.gz @~;", True) + "-- first half\n" "put file://$SELF_DIR/staging-test-data/*.csv.gz @~;", + True, + ) assert next(itr) == ( - "put file://$SELF_DIR/staging-test-data/foo.csv.gz @~;", True) + "put file://$SELF_DIR/staging-test-data/foo.csv.gz @~;", + True, + ) assert next(itr) == ( - "put file://$SELF_DIR/staging-test-data/foo.csv.gz @~ " - "overwrite=true;", True) + "put file://$SELF_DIR/staging-test-data/foo.csv.gz @~ " "overwrite=true;", + True, + ) # no comment line is returned assert next(itr) == ( "-- second half\n" - "put file://$SELF_DIR/staging-test-data/foo.csv.gz @~/foo;", True) + "put file://$SELF_DIR/staging-test-data/foo.csv.gz @~/foo;", + True, + ) assert next(itr) == ( - "put file://$SELF_DIR/staging-test-data/bar.csv.gz @~/bar;", True) + "put file://$SELF_DIR/staging-test-data/bar.csv.gz @~/bar;", + True, 
+ ) # no empty line is returned assert next(itr) == ("list @~;", False) assert next(itr) == ("remove @~ pattern='.*.csv.gz';", False) @@ -334,22 +342,31 @@ def test_file_with_slash_star(): # last raises StopIteration with pytest.raises(StopIteration): next(itr) - with StringIO(_to_unicode(s)) as f: + with StringIO(s) as f: itr = split_statements(f, remove_comments=True) assert next(itr) == ("list @~;", False) # no comment line is returned assert next(itr) == ( - "put file://$SELF_DIR/staging-test-data/*.csv.gz @~;", True) + "put file://$SELF_DIR/staging-test-data/*.csv.gz @~;", + True, + ) assert next(itr) == ( - "put file://$SELF_DIR/staging-test-data/foo.csv.gz @~;", True) + "put file://$SELF_DIR/staging-test-data/foo.csv.gz @~;", + True, + ) assert next(itr) == ( - "put file://$SELF_DIR/staging-test-data/foo.csv.gz @~ " - "overwrite=true;", True) + "put file://$SELF_DIR/staging-test-data/foo.csv.gz @~ " "overwrite=true;", + True, + ) # no comment line is returned assert next(itr) == ( - "put file://$SELF_DIR/staging-test-data/foo.csv.gz @~/foo;", True) + "put file://$SELF_DIR/staging-test-data/foo.csv.gz @~/foo;", + True, + ) assert next(itr) == ( - "put file://$SELF_DIR/staging-test-data/bar.csv.gz @~/bar;", True) + "put file://$SELF_DIR/staging-test-data/bar.csv.gz @~/bar;", + True, + ) # no empty line is returned assert next(itr) == ("list @~;", False) assert next(itr) == ("remove @~ pattern='.*.csv.gz';", False) @@ -359,19 +376,25 @@ def test_file_with_slash_star(): next(itr) +@pytest.mark.skipif(split_statements is None, reason="No split_statements is available") def test_sql_with_commands(): - with StringIO(_to_unicode("""create or replace view aaa + with StringIO( + """create or replace view aaa as select * from LINEITEM limit 1000; !spool $outfile show views like 'AAA'; !spool off drop view if exists aaa; -show tables""")) as f: +show tables""" + ) as f: itr = split_statements(f) - assert next(itr) == ("""create or replace view aaa + assert next(itr) == ( + """create or replace view aaa as select * from - LINEITEM limit 1000;""", False) + LINEITEM limit 1000;""", + False, + ) assert next(itr) == ("""!spool $outfile""", False) assert next(itr) == ("show views like 'AAA';", False) @@ -382,18 +405,20 @@ def test_sql_with_commands(): next(itr) +@pytest.mark.skipif(split_statements is None, reason="No split_statements is available") def test_sql_example1(): - with StringIO(_to_unicode(""" + with StringIO( + """ create or replace table a(aa int, bb string); truncate a; rm @%a; put file://a.txt @%a; copy into a; select * from a; -drop table if exists a;""")) as f: +drop table if exists a;""" + ) as f: itr = split_statements(f) - assert next(itr) == ( - "create or replace table a(aa int, bb string);", False) + assert next(itr) == ("create or replace table a(aa int, bb string);", False) assert next(itr) == ("truncate a;", False) assert next(itr) == ("rm @%a;", False) assert next(itr) == ("put file://a.txt @%a;", True) @@ -405,15 +430,20 @@ def test_sql_example1(): def test_space_before_put(): - with StringIO(_to_unicode(""" + with StringIO( + """ -- sample data uploads PUT file:///tmp/data.txt @%ab; SELECT 1; /* 134 */ select /* 567*/ 345;> GET @%bcd file:///tmp/aaa.txt; -""")) as f: +""" + ) as f: itr = split_statements(f) - assert next(itr) == ("""-- sample data uploads - PUT file:///tmp/data.txt @%ab;""", True) + assert next(itr) == ( + """-- sample data uploads + PUT file:///tmp/data.txt @%ab;""", + True, + ) assert next(itr) == ("""SELECT 1;""", False) assert next(itr) == ("""/* 134 
*/ select /* 567*/ 345;>""", False) assert next(itr) == ("""GET @%bcd file:///tmp/aaa.txt;""", True) @@ -421,109 +451,190 @@ def test_space_before_put(): next(itr) +@pytest.mark.skipif(split_statements is None, reason="No split_statements is available") def test_empty_statement(): - with StringIO(_to_unicode("""select 1; + with StringIO( + """select 1; -- tail comment1 -- tail comment2 -""")) as f: +""" + ) as f: itr = split_statements(f) assert next(itr) == ("""select 1;""", False) - assert next(itr) == ("""-- tail comment1 --- tail comment2""", None) + assert next(itr) == ( + """-- tail comment1 +-- tail comment2""", + None, + ) with pytest.raises(StopIteration): next(itr) +@pytest.mark.skipif(split_statements is None, reason="No split_statements is available") def test_multiple_comments(): s = """--- test comment 1 select /*another test comments*/ 1; -- test comment 2 -- test comment 3 select 2; """ - with StringIO(_to_unicode(s)) as f: + with StringIO(s) as f: itr = split_statements(f, remove_comments=False) assert next(itr) == ( "--- test comment 1\n" - "select /*another test comments*/ 1; -- test comment 2", False) + "select /*another test comments*/ 1; -- test comment 2", + False, + ) assert next(itr) == ("-- test comment 3\nselect 2;", False) +@pytest.mark.skipif(split_statements is None, reason="No split_statements is available") def test_comments_with_semicolon(): s = """--test ; select 1; """ - with StringIO(_to_unicode(s)) as f: + with StringIO(s) as f: itr = split_statements(f, remove_comments=False) - assert next(itr) == ( - "--test ;\n" - "select 1;", False - ) + assert next(itr) == ("--test ;\n" "select 1;", False) with pytest.raises(StopIteration): next(itr) +@pytest.mark.skipif(split_statements is None, reason="No split_statements is available") def test_comment_in_values(): - """ - SNOW-51297: SnowSQL -o remove_comments=True breaks the query - """ + """SNOW-51297: SnowSQL -o remove_comments=True breaks the query.""" # no space before a comment s = """INSERT INTO foo VALUES (/*TIMEOUT*/ 10);""" - with StringIO(_to_unicode(s)) as f: + with StringIO(s) as f: itr = split_statements(f, remove_comments=True) - assert next(itr) == ( - "INSERT INTO foo\nVALUES ( 10);", False - ) + assert next(itr) == ("INSERT INTO foo\nVALUES ( 10);", False) # no space before and after a comment s = """INSERT INTO foo VALUES (/*TIMEOUT*/10);""" - with StringIO(_to_unicode(s)) as f: + with StringIO(s) as f: itr = split_statements(f, remove_comments=True) - assert next(itr) == ( - "INSERT INTO foo\nVALUES (10);", False - ) + assert next(itr) == ("INSERT INTO foo\nVALUES (10);", False) # workaround s = """INSERT INTO foo VALUES ( /*TIMEOUT*/ 10);""" - with StringIO(_to_unicode(s)) as f: + with StringIO(s) as f: itr = split_statements(f, remove_comments=True) - assert next(itr) == ( - "INSERT INTO foo\nVALUES ( 10);", False - ) + assert next(itr) == ("INSERT INTO foo\nVALUES ( 10);", False) # a comment start from the beginning of the line s = """INSERT INTO foo VALUES ( /*TIMEOUT*/ 10);""" - with StringIO(_to_unicode(s)) as f: + with StringIO(s) as f: itr = split_statements(f, remove_comments=True) - assert next(itr) == ( - "INSERT INTO foo VALUES (\n\n10);", False - ) + assert next(itr) == ("INSERT INTO foo VALUES (\n\n10);", False) +@pytest.mark.skipif(split_statements is None, reason="No split_statements is available") def test_multiline_double_dollar_experssion_with_removed_comments(): s = """CREATE FUNCTION mean(a FLOAT, b FLOAT) RETURNS FLOAT LANGUAGE JAVASCRIPT AS $$ var c = a + b; return(c / 
2); $$;"""
-    with StringIO(_to_unicode(s)) as f:
+    with StringIO(s) as f:
         itr = split_statements(f, remove_comments=True)
         assert next(itr) == (
             "CREATE FUNCTION mean(a FLOAT, b FLOAT)\n"
             "  RETURNS FLOAT LANGUAGE JAVASCRIPT AS $$\n"
-            "  var c = a + b;\n  return(c / 2);\n  $$;", False)
+            "  var c = a + b;\n  return(c / 2);\n  $$;",
+            False,
+        )
 
 
+@pytest.mark.skipif(split_statements is None, reason="No split_statements is available")
 def test_backslash_quote_escape():
     s = """
 SELECT 1 'Snowflake\\'s 1';
-SELECT 2 'Snowflake\\'s 2' 
+SELECT 2 'Snowflake\\'s 2'
 """
-    with StringIO(_to_unicode(s)) as f:
+    with StringIO(s) as f:
         itr = split_statements(f)
         assert next(itr) == ("SELECT 1 'Snowflake\\'s 1';", False)
         assert next(itr) == ("SELECT 2 'Snowflake\\'s 2'", False)
+
+
+@pytest.mark.skipif(split_statements is None, reason="No split_statements is available")
+def test_sql_delimiter():
+    """Copy of test_sql_with_commands but with an unconventional sql_delimiter.
+
+    This test should verify not only that an arbitrary delimiter splits SQL commands
+    correctly, but also that a semicolon is substituted for the custom delimiter in the
+    split statements.
+
+    Since split_statements is a generator function, the sql_delimiter cannot be passed
+    in as a plain string, as it might change during execution; SnowSQL therefore passes
+    in its cli class. This test makes sure that this behaviour is not broken by mistake.
+    """
+    delimiter = SQLDelimiter("imi")
+    with StringIO(
+        (
+            "create or replace view aaa\n"
+            "    as select * from\n"
+            "    LINEITEM limit 1000 {delimiter}\n"
+            "!spool $outfile\n"
+            "show views like 'AAA'{delimiter}\n"
+            "!spool off\n"
+            "drop view if exists aaa {delimiter}\n"
+            "show tables"
+        ).format(delimiter=delimiter.sql_delimiter)
+    ) as f:
+        itr = split_statements(f, delimiter=delimiter)
+        assert next(itr) == (
+            """create or replace view aaa
+    as select * from
+    LINEITEM limit 1000 ;""",
+            False,
+        )
+
+        assert next(itr) == ("""!spool $outfile""", False)
+        assert next(itr) == ("show views like 'AAA';", False)
+        assert next(itr) == ("!spool off", False)
+        assert next(itr) == ("drop view if exists aaa ;", False)
+        assert next(itr) == ("show tables", False)
+        with pytest.raises(StopIteration):
+            next(itr)
+
+
+@pytest.mark.skipif(split_statements is None, reason="No split_statements is available")
+def test_sql_splitting_tokenization():
+    """This tests that sql_delimiter is token sensitive."""
+    raw_sql = "select 123 as asd"
+    for c in set(raw_sql.replace(" ", "")):
+        sql = raw_sql + " " + c + " " + raw_sql
+        with StringIO(sql) as sqlio:
+            s = split_statements(sqlio, delimiter=SQLDelimiter(c))
+            assert next(s)[0] == raw_sql + " ;"
+            assert next(s)[0] == raw_sql
+
+
+@pytest.mark.skipif(
+    split_statements is None or SQLDelimiter is None,
+    reason="No split_statements or SQLDelimiter is available",
+)
+@pytest.mark.parametrize(
+    "sql, delimiter, split_stmnts",
+    [
+        ("select 1 as a__b __ select 1", "__", ["select 1 as a__b ;", "select 1"]),
+        ("select 1 as a__b/", "/", ["select 1 as a__b;"]),
+        ('select 1 as "ab" ab', "ab", ['select 1 as "ab" ;']),
+        ('select 1 as "ab"ab', "ab", ['select 1 as "ab";']),
+        ("select 1 as abab", "ab", ["select 1 as abab"]),
+        ("insert into table t1 values (1)/", "/", ["insert into table t1 values (1);"]),
+        ("select 1 as a$_", "_", ["select 1 as a$_"]),
+        ("select 1 as _$", "_", ["select 1 as _$"]),
+    ],
+)
+def test_sql_splitting_various(sql, delimiter, split_stmnts):
+    """This tests various smaller sql splitting pitfalls."""
+    with StringIO(sql) as sqlio:
statements = list( + s[0] for s in split_statements(sqlio, delimiter=SQLDelimiter(delimiter)) + ) + assert statements == split_stmnts diff --git a/test/unit/test_storage_client.py b/test/unit/test_storage_client.py new file mode 100644 index 000000000..d689e9c10 --- /dev/null +++ b/test/unit/test_storage_client.py @@ -0,0 +1,3 @@ +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# diff --git a/test/unit/test_telemetry.py b/test/unit/test_telemetry.py new file mode 100644 index 000000000..b1d9b0405 --- /dev/null +++ b/test/unit/test_telemetry.py @@ -0,0 +1,134 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# + +from __future__ import annotations + +from unittest.mock import Mock + +import snowflake.connector.telemetry + + +def test_telemetry_data_to_dict(): + """Tests that TelemetryData instances are properly converted to dicts.""" + assert snowflake.connector.telemetry.TelemetryData({}, 2000).to_dict() == { + "message": {}, + "timestamp": "2000", + } + + d = {"type": "test", "query_id": "1", "value": 20} + assert snowflake.connector.telemetry.TelemetryData(d, 1234).to_dict() == { + "message": d, + "timestamp": "1234", + } + + +def get_client_and_mock(): + rest_call = Mock() + rest_call.return_value = {"success": True} + rest = Mock() + rest.attach_mock(rest_call, "request") + client = snowflake.connector.telemetry.TelemetryClient(rest, 2) + return client, rest_call + + +def test_telemetry_simple_flush(): + """Tests that metrics are properly enqueued and sent to telemetry.""" + client, rest_call = get_client_and_mock() + + client.add_log_to_batch(snowflake.connector.telemetry.TelemetryData({}, 2000)) + assert rest_call.call_count == 0 + + client.add_log_to_batch(snowflake.connector.telemetry.TelemetryData({}, 3000)) + assert rest_call.call_count == 1 + + +def test_telemetry_close(): + """Tests that remaining metrics are flushed on close.""" + client, rest_call = get_client_and_mock() + + client.add_log_to_batch(snowflake.connector.telemetry.TelemetryData({}, 2000)) + assert rest_call.call_count == 0 + + client.close() + assert rest_call.call_count == 1 + assert client.is_closed + + +def test_telemetry_close_empty(): + """Tests that no calls are made on close if there are no metrics to flush.""" + client, rest_call = get_client_and_mock() + + client.close() + assert rest_call.call_count == 0 + assert client.is_closed + + +def test_telemetry_send_batch(): + """Tests that metrics are sent with the send_batch method.""" + client, rest_call = get_client_and_mock() + + client.add_log_to_batch(snowflake.connector.telemetry.TelemetryData({}, 2000)) + assert rest_call.call_count == 0 + + client.send_batch() + assert rest_call.call_count == 1 + + +def test_telemetry_send_batch_empty(): + """Tests that send_batch does nothing when there are no metrics to send.""" + client, rest_call = get_client_and_mock() + + client.send_batch() + assert rest_call.call_count == 0 + + +def test_telemetry_send_batch_clear(): + """Tests that send_batch clears the first batch and will not send anything on a second call.""" + client, rest_call = get_client_and_mock() + + client.add_log_to_batch(snowflake.connector.telemetry.TelemetryData({}, 2000)) + assert rest_call.call_count == 0 + + client.send_batch() + assert rest_call.call_count == 1 + + client.send_batch() + assert rest_call.call_count == 1 + + +def test_telemetry_auto_disable(): + """Tests that the client will automatically disable itself if a request fails.""" + client, rest_call 
= get_client_and_mock() + rest_call.return_value = {"success": False} + + client.add_log_to_batch(snowflake.connector.telemetry.TelemetryData({}, 2000)) + assert client.is_enabled() + + client.send_batch() + assert not client.is_enabled() + + +def test_telemetry_add_batch_disabled(): + """Tests that the client will not add logs if disabled.""" + client, _ = get_client_and_mock() + + client.disable() + client.add_log_to_batch(snowflake.connector.telemetry.TelemetryData({}, 2000)) + + assert client.buffer_size() == 0 + + +def test_telemetry_send_batch_disabled(): + """Tests that the client will not send logs if disabled.""" + client, rest_call = get_client_and_mock() + + client.add_log_to_batch(snowflake.connector.telemetry.TelemetryData({}, 2000)) + assert client.buffer_size() == 1 + + client.disable() + + client.send_batch() + assert client.buffer_size() == 1 + assert rest_call.call_count == 0 diff --git a/test/unit/test_telemetry_oob.py b/test/unit/test_telemetry_oob.py new file mode 100644 index 000000000..06b3d2aff --- /dev/null +++ b/test/unit/test_telemetry_oob.py @@ -0,0 +1,153 @@ +#!/usr/bin/env python +# +# Copyright (c) 2012-2021 Snowflake Computing Inc. All rights reserved. +# +from __future__ import annotations + +import pytest + +import snowflake.connector.errorcode +from snowflake.connector.errorcode import ER_FAILED_TO_REQUEST +from snowflake.connector.errors import RevocationCheckError +from snowflake.connector.ocsp_snowflake import OCSPTelemetryData +from snowflake.connector.sqlstate import SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED +from snowflake.connector.telemetry_oob import TelemetryService + +DEV_CONFIG = { + "host": "localhost", + "port": 8080, + "account": "testAccount", + "user": "test", + "password": "ShouldNotShowUp", + "protocol": "http", +} +telemetry_data = {} +exception = RevocationCheckError("Test OCSP Revocation error") +event_type = "Test OCSP Exception" +stack_trace = [ + "Traceback (most recent call last):\n", + ' File "", line 10, in \n lumberjack()\n', + ' File "", line 4, in lumberjack\n bright_side_of_death()\n', + ' File "", line 7, in bright_side_of_death\n return tuple()[0]\n', + "IndexError: tuple index out of range\n", +] + +event_name = "HttpRetryTimeout" +url = "http://localhost:8080/queries/v1/query-request?request_guid=a54a3d70-abf2-4576-bb6f-ddf23999491a" +method = "POST" + + +@pytest.fixture() +def telemetry_setup(request): + """Sets up the telemetry service by enabling it and flushing any entries.""" + telemetry = TelemetryService.get_instance() + telemetry.update_context(DEV_CONFIG) + telemetry.enable() + telemetry.flush() + + +def test_telemetry_oob_simple_flush(telemetry_setup): + """Tests capturing and sending a simple OCSP Exception message.""" + telemetry = TelemetryService.get_instance() + + telemetry.log_ocsp_exception( + event_type, telemetry_data, exception=exception, stack_trace=stack_trace + ) + assert telemetry.size() == 1 + telemetry.flush() + assert telemetry.size() == 0 + + +@pytest.mark.flaky(reruns=3) +def test_telemetry_oob_urgent(telemetry_setup): + """Tests sending an urgent OCSP Exception message.""" + telemetry = TelemetryService.get_instance() + + telemetry.log_ocsp_exception( + event_type, + telemetry_data, + exception=exception, + stack_trace=stack_trace, + urgent=True, + ) + assert telemetry.size() == 0 + + +def test_telemetry_oob_close(telemetry_setup): + """Tests closing the Telemetry Service when there are still messages in the queue.""" + telemetry = TelemetryService.get_instance() + + 
telemetry.log_ocsp_exception( + event_type, telemetry_data, exception=exception, stack_trace=stack_trace + ) + assert telemetry.size() == 1 + telemetry.close() + assert telemetry.size() == 0 + + +def test_telemetry_oob_close_empty(telemetry_setup): + """Tests closing the Telemetry Service when the queue is empty.""" + telemetry = TelemetryService.get_instance() + + assert telemetry.size() == 0 + telemetry.close() + assert telemetry.size() == 0 + + +def test_telemetry_oob_log_when_disabled(telemetry_setup): + """Tests trying to log to the telemetry service when it is disabled.""" + telemetry = TelemetryService.get_instance() + + assert telemetry.size() == 0 + telemetry.disable() + telemetry.log_ocsp_exception( + event_type, telemetry_data, exception=exception, stack_trace=stack_trace + ) + assert telemetry.size() == 0 + telemetry.enable() + + +def test_telemetry_oob_http_log(telemetry_setup): + """Tests sending a simple HTTP request telemetry event.""" + telemetry = TelemetryService.get_instance() + + telemetry.log_http_request_error( + event_name, + url, + method, + SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED, + ER_FAILED_TO_REQUEST, + exception=exception, + stack_trace=stack_trace, + ) + assert telemetry.size() == 1 + telemetry.flush() + assert telemetry.size() == 0 + + +def test_telemetry_oob_error_code_mapping(): + """Tests that all OCSP error codes have a corresponding Telemetry sub event type.""" + ec_dict = snowflake.connector.errorcode.__dict__ + for ec, ec_val in ec_dict.items(): + if not ec.startswith("__") and ec not in ("annotations",): + if 254000 <= ec_val < 255000: + assert ec_val in OCSPTelemetryData.ERROR_CODE_MAP + + +@pytest.mark.flaky(reruns=3) +def test_telemetry_oob_http_log_urgent(telemetry_setup): + """Tests sending an urgent HTTP request telemetry event.""" + telemetry = TelemetryService.get_instance() + + assert telemetry.size() == 0 + telemetry.log_http_request_error( + event_name, + url, + method, + SQLSTATE_CONNECTION_WAS_NOT_ESTABLISHED, + ER_FAILED_TO_REQUEST, + exception=exception, + stack_trace=stack_trace, + urgent=True, + ) + assert telemetry.size() == 0 diff --git a/tested_requirements/README.md b/tested_requirements/README.md new file mode 100644 index 000000000..00eefaa05 --- /dev/null +++ b/tested_requirements/README.md @@ -0,0 +1,12 @@ +# Tested requirements files + +## Usage + +These requirements files can be used by customers to reproduce the exact environment +Snowflake used to run tests with. 
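+Each file pins the exact package versions that passed Snowflake's test runs on one
+Python minor version (see the `# Generated on:` header inside each file), so pick the
+file whose suffix matches your interpreter.
+
+For a faithful reproduction it may also help to install into a fresh virtual
+environment first; a minimal sketch using only standard tooling (the 3.9 file here is
+just an example):
+
+```shell
+# Create and activate a clean Python 3.9 environment, then install the pinned set.
+python3.9 -m venv tested-env
+. tested-env/bin/activate
+python -m pip install -r tested_requirements/requirements_39.reqs
+```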
+ +Use it like a regular requirements file, with: + +```shell +python -m pip install -r requirements_37.reqs +``` diff --git a/tested_requirements/requirements_310.reqs b/tested_requirements/requirements_310.reqs new file mode 100644 index 000000000..c6162d4dc --- /dev/null +++ b/tested_requirements/requirements_310.reqs @@ -0,0 +1,17 @@ +# Generated on: Python 3.10.4 +asn1crypto==1.5.1 +certifi==2021.10.8 +cffi==1.15.0 +charset-normalizer==2.0.12 +cryptography==36.0.2 +idna==3.3 +oscrypto==1.3.0 +pycparser==2.21 +pycryptodomex==3.14.1 +PyJWT==2.3.0 +pyOpenSSL==21.0.0 +pytz==2022.1 +requests==2.27.1 +six==1.16.0 +urllib3==1.26.9 +snowflake-connector-python==2.7.7 diff --git a/tested_requirements/requirements_37.reqs b/tested_requirements/requirements_37.reqs new file mode 100644 index 000000000..21d9192f7 --- /dev/null +++ b/tested_requirements/requirements_37.reqs @@ -0,0 +1,17 @@ +# Generated on: Python 3.7.12 +asn1crypto==1.5.1 +certifi==2021.10.8 +cffi==1.15.0 +charset-normalizer==2.0.12 +cryptography==36.0.2 +idna==3.3 +oscrypto==1.3.0 +pycparser==2.21 +pycryptodomex==3.14.1 +PyJWT==2.3.0 +pyOpenSSL==21.0.0 +pytz==2022.1 +requests==2.27.1 +six==1.16.0 +urllib3==1.26.9 +snowflake-connector-python==2.7.7 diff --git a/tested_requirements/requirements_38.reqs b/tested_requirements/requirements_38.reqs new file mode 100644 index 000000000..94ad3dc32 --- /dev/null +++ b/tested_requirements/requirements_38.reqs @@ -0,0 +1,17 @@ +# Generated on: Python 3.8.12 +asn1crypto==1.5.1 +certifi==2021.10.8 +cffi==1.15.0 +charset-normalizer==2.0.12 +cryptography==36.0.2 +idna==3.3 +oscrypto==1.3.0 +pycparser==2.21 +pycryptodomex==3.14.1 +PyJWT==2.3.0 +pyOpenSSL==21.0.0 +pytz==2022.1 +requests==2.27.1 +six==1.16.0 +urllib3==1.26.9 +snowflake-connector-python==2.7.7 diff --git a/tested_requirements/requirements_39.reqs b/tested_requirements/requirements_39.reqs new file mode 100644 index 000000000..24ae60378 --- /dev/null +++ b/tested_requirements/requirements_39.reqs @@ -0,0 +1,17 @@ +# Generated on: Python 3.9.12 +asn1crypto==1.5.1 +certifi==2021.10.8 +cffi==1.15.0 +charset-normalizer==2.0.12 +cryptography==36.0.2 +idna==3.3 +oscrypto==1.3.0 +pycparser==2.21 +pycryptodomex==3.14.1 +PyJWT==2.3.0 +pyOpenSSL==21.0.0 +pytz==2022.1 +requests==2.27.1 +six==1.16.0 +urllib3==1.26.9 +snowflake-connector-python==2.7.7 diff --git a/time_util.py b/time_util.py deleted file mode 100644 index 3b460aa60..000000000 --- a/time_util.py +++ /dev/null @@ -1,56 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2018-2019 Snowflake Computing Inc. All right reserved. 
-# -import random -import time - -from logging import getLogger - -logger = getLogger(__name__) - -try: - from threading import _Timer as Timer -except ImportError: - from threading import Timer - -DEFAULT_MASTER_VALIDITY_IN_SECONDS = 4 * 60 * 60 # seconds - - -class HeartBeatTimer(Timer): - """ - A thread which executes a function every - client_session_keep_alive_heartbeat_frequency seconds - """ - - def __init__(self, client_session_keep_alive_heartbeat_frequency, f): - interval = client_session_keep_alive_heartbeat_frequency - super(HeartBeatTimer, self).__init__(interval, f) - - def run(self): - while not self.finished.is_set(): - self.finished.wait(self.interval) - if not self.finished.is_set(): - try: - self.function() - except Exception as e: - logger.debug('failed to heartbeat: %s', e) - - -def get_time_millis(): - """ - Return the current time in millis - """ - return int(time.time() * 1000) - - -class DecorrelateJitterBackoff(object): - # Decorrelate Jitter backoff - # https://www.awsarchitectureblog.com/2015/03/backoff.html - def __init__(self, base, cap): - self._base = base - self._cap = cap - - def next_sleep(self, _, sleep): - return min(self._cap, random.randint(self._base, sleep * 3)) diff --git a/tool/__init__.py b/tool/__init__.py deleted file mode 100644 index 8b1378917..000000000 --- a/tool/__init__.py +++ /dev/null @@ -1 +0,0 @@ - diff --git a/tool/dump_ocsp_response.py b/tool/dump_ocsp_response.py deleted file mode 100644 index c4873f875..000000000 --- a/tool/dump_ocsp_response.py +++ /dev/null @@ -1,127 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. -# -import time -from os import path -from time import gmtime, strftime - -from asn1crypto import ocsp as asn1crypto_ocsp - -from snowflake.connector.compat import (urlsplit) -from snowflake.connector.ocsp_asn1crypto \ - import SnowflakeOCSPAsn1Crypto as SFOCSP -from snowflake.connector.ssl_wrap_socket import _openssl_connect - - -def main(): - """ - Internal Tool: OCSP response dumper - """ - - def help(): - print( - "Dump OCSP Response for the URL. ") - print(""" -Usage: {0} [ ...] 
-""".format(path.basename(sys.argv[0]))) - sys.exit(2) - - import sys - if len(sys.argv) < 2: - help() - - urls = sys.argv[1:] - dump_ocsp_response(urls, output_filename=None) - - -def dump_good_status(current_time, single_response): - print("This Update: {0}".format(single_response['this_update'].native)) - print("Next Update: {0}".format(single_response['next_update'].native)) - this_update = ( - single_response['this_update'].native.replace(tzinfo=None) - - SFOCSP.ZERO_EPOCH).total_seconds() - next_update = ( - single_response['next_update'].native.replace(tzinfo=None) - - SFOCSP.ZERO_EPOCH).total_seconds() - - tolerable_validity = SFOCSP._calculate_tolerable_validity( - this_update, - next_update) - print("Tolerable Update: {0}".format( - strftime('%Y%m%d%H%M%SZ', gmtime( - next_update + tolerable_validity)) - )) - if SFOCSP._is_validaity_range(current_time, this_update, next_update): - print("OK") - else: - print(SFOCSP._validity_error_message( - current_time, this_update, next_update)) - - -def dump_revoked_status(single_response): - revoked_info = single_response['cert_status'] - revocation_time = revoked_info.native['revocation_time'] - revocation_reason = revoked_info.native['revocation_reason'] - print("Revoked Time: {0}".format( - revocation_time.strftime( - SFOCSP.OUTPUT_TIMESTAMP_FORMAT))) - print("Revoked Reason: {0}".format(revocation_reason)) - - -def dump_ocsp_response(urls, output_filename): - ocsp = SFOCSP() - for url in urls: - if not url.startswith('http'): - url = 'https://' + url - parsed_url = urlsplit(url) - hostname = parsed_url.hostname - port = parsed_url.port or 443 - connection = _openssl_connect(hostname, port) - cert_data = ocsp.extract_certificate_chain(connection) - current_time = int(time.time()) - print("Target URL: {0}".format(url)) - print("Current Time: {0}".format( - strftime('%Y%m%d%H%M%SZ', gmtime(current_time)))) - for issuer, subject in cert_data: - cert_id, _ = ocsp.create_ocsp_request(issuer, subject) - _, _, _, cert_id, ocsp_response_der = \ - ocsp.validate_by_direct_connection(issuer, subject) - ocsp_response = asn1crypto_ocsp.OCSPResponse.load(ocsp_response_der) - print( - "------------------------------------------------------------") - print("Subject Name: {0}".format(subject.subject.native)) - print("Issuer Name: {0}".format(issuer.subject.native)) - print("OCSP URI: {0}".format(subject.ocsp_urls)) - print("CRL URI: {0}".format( - subject.crl_distribution_points[0].native)) - print("Issuer Name Hash: {0}".format(subject.issuer.sha1)) - print("Issuer Key Hash: {0}".format(issuer.public_key.sha1)) - print("Serial Number: {0}".format(subject.serial_number)) - print("Response Status: {0}".format( - ocsp_response['response_status'].native)) - basic_ocsp_response = ocsp_response.basic_ocsp_response - tbs_response_data = basic_ocsp_response['tbs_response_data'] - print("Responder ID: {0}".format( - tbs_response_data['responder_id'].name)) - current_time = int(time.time()) - for single_response in tbs_response_data['responses']: - cert_status = single_response['cert_status'].name - if cert_status == 'good': - dump_good_status(current_time, single_response) - elif cert_status == 'revoked': - dump_revoked_status(single_response) - else: - print("Unknown") - print('') - - if output_filename: - SFOCSP.OCSP_CACHE.write_ocsp_response_cache_file( - ocsp, - output_filename) - return SFOCSP.OCSP_CACHE.CACHE - - -if __name__ == '__main__': - main() diff --git a/tox.ini b/tox.ini new file mode 100644 index 000000000..2e7156e00 --- /dev/null +++ b/tox.ini @@ 
-0,0 +1,182 @@ +[coverage:report] +skip_covered = False +show_missing = True +[coverage:run] +branch = true +parallel = true +omit = */snowflake/connector/tool/* + */snowflake/connector/vendored/* + src/snowflake/connector/incident.py +[coverage:paths] +source = src/snowflake/connector + */.tox/*/lib/python*/site-packages/snowflake/connector + */.tox\*\Lib\site-packages\snowflake\connector + */src/snowflake/connector + *\src\snowflake\connector + */fips_env/lib/python*/site-packages/snowflake/connector + +[tox] +minversion = 3.7 +envlist = fix_lint, + py{37,38,39,310}-{unit-parallel,integ,pandas,sso}, + coverage +skip_missing_interpreters = true +requires = + tox-external-wheels>=0.1.6 + +[testenv] +description = run the tests with pytest under {basepython} +extras = + development + pandas: pandas + sso: secure-local-storage +deps = + pip >= 19.3.1 +install_command = python -m pip install -U {opts} {packages} +external_wheels = + py37-ci: dist/*cp37*.whl + py38-ci: dist/*cp38*.whl + py39-ci: dist/*cp39*.whl + py310-ci: dist/*cp310*.whl +setenv = + COVERAGE_FILE = {env:COVERAGE_FILE:{toxworkdir}/.coverage.{envname}} + ci: SNOWFLAKE_PYTEST_OPTS = -vvv + # Set test type, either notset, unit, integ, or both + unit-integ: SNOWFLAKE_TEST_TYPE = (unit or integ) + !unit-!integ: SNOWFLAKE_TEST_TYPE = (unit or integ) + unit: SNOWFLAKE_TEST_TYPE = unit + integ: SNOWFLAKE_TEST_TYPE = integ + parallel: SNOWFLAKE_PYTEST_OPTS = {env:SNOWFLAKE_PYTEST_OPTS:} -n auto + # Add common parts into pytest command + SNOWFLAKE_PYTEST_COV_LOCATION = {env:JUNIT_REPORT_DIR:{toxworkdir}}/junit.{envname}-{env:cloud_provider:dev}.xml + SNOWFLAKE_PYTEST_COV_CMD = --cov snowflake.connector --junitxml {env:SNOWFLAKE_PYTEST_COV_LOCATION} --cov-report= + SNOWFLAKE_PYTEST_CMD = pytest {env:SNOWFLAKE_PYTEST_OPTS:} {env:SNOWFLAKE_PYTEST_COV_CMD} +passenv = + AWS_ACCESS_KEY_ID + AWS_SECRET_ACCESS_KEY + SF_PROJECT_ROOT + cloud_provider + SF_REGRESS_LOGS + ; Github Actions provided environmental variables + GITHUB_ACTIONS + JENKINS_HOME + ; This is required on windows. 
Otherwise pwd module won't be imported successfully, + ; see https://github.com/tox-dev/tox/issues/1455 + USERNAME + CLIENT_LOG_DIR_PATH_DOCKER + PYTEST_ADDOPTS +commands = + # Test environments + # Note: make sure to have a default env and all the other special ones + !pandas-!sso-!lambda: {env:SNOWFLAKE_PYTEST_CMD} -m "{env:SNOWFLAKE_TEST_TYPE} and not sso and not pandas and not lambda" {posargs:} test + pandas: {env:SNOWFLAKE_PYTEST_CMD} -m "{env:SNOWFLAKE_TEST_TYPE} and pandas" {posargs:} test + sso: {env:SNOWFLAKE_PYTEST_CMD} -m "{env:SNOWFLAKE_TEST_TYPE} and sso" {posargs:} test + lambda: {env:SNOWFLAKE_PYTEST_CMD} -m "{env:SNOWFLAKE_TEST_TYPE} and lambda" {posargs:} test + +[testenv:olddriver] +basepython = python3.7 +description = run the old driver tests with pytest under {basepython} +deps = + pip >= 19.3.1 + snowflake-connector-python==1.9.1 + azure-storage-blob==2.1.0 + pandas + pendulum!=2.1.1 + pytest<6.1.0 + pytest-cov + pytest-rerunfailures + pytest-timeout + pytest-xdist + mock +skip_install = True +setenv = {[testenv]setenv} +passenv = {[testenv]passenv} +commands = + {env:SNOWFLAKE_PYTEST_CMD} -m "not skipolddriver" -vvv {posargs:} test + +[testenv:coverage] +description = [run locally after tests]: combine coverage data and create report +; generates a diff coverage against origin/master (can be changed by setting DIFF_AGAINST env var) +deps = {[testenv]deps} + coverage +; diff_cover +skip_install = True +passenv = DIFF_AGAINST +setenv = COVERAGE_FILE={toxworkdir}/.coverage +commands = coverage combine + coverage report -m + coverage xml -o {env:COV_REPORT_DIR:{toxworkdir}}/coverage.xml + coverage html -d {env:COV_REPORT_DIR:{toxworkdir}}/htmlcov +; diff-cover --compare-branch {env:DIFF_AGAINST:origin/master} {toxworkdir}/coverage.xml +depends = py37, py38, py39, py310 + +[testenv:py{37,38,39,310}-coverage] +# I hate doing this, but this env is for Jenkins, please keep it up-to-date with the one env above it if necessary +description = [run locally after tests]: combine coverage data and create report specifically with {basepython} +deps = {[testenv:coverage]deps} +skip_install = {[testenv:coverage]skip_install} +passenv = {[testenv:coverage]passenv} +setenv = {[testenv:coverage]setenv} +commands = {[testenv:coverage]commands} +depends = {[testenv:coverage]depends} + +[testenv:flake8] +; DEPRECATED +description = check code style with flake8 +skip_install = true +deps = flake8 +commands = flake8 {posargs} + +[testenv:fix_lint] +description = format the code base to adhere to our styles, and complain about what we cannot do automatically +basepython = python3.7 +passenv = + PROGRAMDATA +deps = + {[testenv]deps} + pre-commit >= 2.9.0 +skip_install = True +commands = pre-commit run + python -c 'import pathlib; print("hint: run \{\} install to add checks as pre-commit hook".format(pathlib.Path(r"{envdir}") / "bin" / "pre-commit"))' + +[pytest] +log_level = info +addopts = -ra --strict-markers +junit_family = legacy +markers = + # Optional dependency groups markers + lambda: AWS lambda tests + pandas: tests for pandas integration + sso: tests for sso optional dependency integration + # Cloud provider markers + aws: tests for Amazon Cloud storage + azure: tests for Azure Cloud storage + gcp: tests for Google Cloud storage + # Test type markers + integ: integration tests + unit: unit tests + skipolddriver: skip for old driver tests + # Other markers + timeout: tests that need a timeout time + internal: tests that could but should only run on our internal CI + external: tests 
that could but should only run on our external CI + +[isort] +multi_line_output = 3 +include_trailing_comma = True +force_grid_wrap = 0 +use_parentheses = True +ensure_newline_before_comments = True +line_length = 88 +known_first_party =snowflake,parameters,generate_test_files + +[flake8] +# Notes on ignores: +# - all ignored Ds mean doc issues, these should be cleaned up +ignore = B011,C901,D100,D101,D102,D103,D104,D105,D107,D401,E203,E402,E501,F821,W503 +exclude= + build,tool,.tox,parameters.py,parameters_jenkins.py, +# Disable checking virtualenv contents + *venv* +max-line-length = 88 +show-source = true diff --git a/util_text.py b/util_text.py deleted file mode 100644 index 1719f64c3..000000000 --- a/util_text.py +++ /dev/null @@ -1,219 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2012-2019 Snowflake Computing Inc. All right reserved. -# -import logging -import re - -import ijson - -COMMENT_PATTERN_RE = re.compile(r'^\s*\-\-') -EMPTY_LINE_RE = re.compile(r'^\s*$') - -_logger = logging.getLogger(__name__) - - -def split_statements(buf, remove_comments=False): - """ - Splits a stream into SQL statements (ends with a semicolon) or - commands (!...) - :param buf: Unicode data stream - :param remove_comments: True removes all comments - :return: yields a SQL statement or a command - """ - in_quote = False - ch_quote = None - in_comment = False - in_double_dollars = False - - line = buf.readline() - if isinstance(line, bytes): - raise TypeError("Input data must not be binary type.") - - statement = [] - while line != '': - col = 0 - col0 = 0 - len_line = len(line) - while True: - if col >= len_line: - if col0 < col: - if not in_comment and not in_quote \ - and not in_double_dollars: - statement.append((line[col0:col], True)) - if len(statement) == 1 and statement[0][0] == '': - statement = [] - break - elif not in_comment and (in_quote or in_double_dollars): - statement.append((line[col0:col], True)) - elif not remove_comments: - statement.append((line[col0:col], False)) - break - elif in_comment: - if line[col:].startswith("*/"): - in_comment = False - if not remove_comments: - statement.append((line[col0:col + 2], False)) - col += 2 - col0 = col - else: - col += 1 - elif in_double_dollars: - if line[col:].startswith("$$"): - in_double_dollars = False - statement.append((line[col0:col + 2], False)) - col += 2 - col0 = col - else: - col += 1 - elif in_quote: - if line[col] == '\\' and col < len_line - 1 and \ - line[col + 1] in (ch_quote, '\\'): - col += 2 - elif line[col] == ch_quote: - if col < len_line - 1 and line[col + 1] != ch_quote or \ - col == len_line - 1: - # exits quote - in_quote = False - statement.append((line[col0:col + 1], True)) - col += 1 - col0 = col - else: - # escaped quote and still in quote - col += 2 - else: - col += 1 - else: - if line[col] in ("'", '"'): - in_quote = True - ch_quote = line[col] - col += 1 - elif line[col] in (' ', '\t'): - statement.append((line[col0:col + 1], True)) - col += 1 - col0 = col - elif line[col:].startswith("--"): - statement.append((line[col0:col], True)) - if not remove_comments: - # keep the comment - statement.append((line[col:], False)) - col = len_line + 1 - col0 = col - elif line[col:].startswith("/*") and \ - not line[col0:].startswith("file://"): - if not remove_comments: - statement.append((line[col0:col + 2], False)) - else: - statement.append((line[col0:col], False)) - col += 2 - col0 = col - in_comment = True - elif line[col:].startswith("$$"): - statement.append((line[col0:col + 2], True)) - 
col += 2 - col0 = col - in_double_dollars = True - elif line[col] == ';': - statement.append((line[col0:col + 1], True)) - col += 1 - try: - if line[col] == '>': - col += 1 - statement[-1] = (statement[-1][0] + '>', - statement[-1][1]) - except IndexError: - pass - if COMMENT_PATTERN_RE.match(line[col:]) or \ - EMPTY_LINE_RE.match(line[col:]): - if not remove_comments: - # keep the comment - statement.append((line[col:], False)) - col = len_line - while col < len_line and line[col] in (' ', '\t'): - col += 1 - yield _concatenate_statements(statement) - col0 = col - statement = [] - elif col == 0 and line[col] == '!': # command - if len(statement) > 0: - yield _concatenate_statements(statement) - statement = [] - yield line.rstrip(';').strip(), False - break - else: - col += 1 - line = buf.readline() - - if len(statement) > 0: - yield _concatenate_statements(statement) - - -def _concatenate_statements(statement_list): - """ - concatenate statements - - is_put_or_get is set to True if the statement is PUT or GET otherwise - False for valid statement. None is set if the statement is empty or - comment only. - :return: a statement, is_put_or_get - """ - valid_statement_list = [] - is_put_or_get = None - for text, is_statement in statement_list: - valid_statement_list.append(text) - if is_put_or_get is None and is_statement and len(text.strip()) >= 3: - is_put_or_get = text[:3].upper() in ('PUT', 'GET') - return u''.join(valid_statement_list).strip(), is_put_or_get - - -def split_rows_from_stream(stream): - """ - Splits into rows from a stream object. Generator. - """ - row = [] - in_row = False - for prefix, event, value in ijson.parse(stream): - if prefix == '': - continue - if in_row: - if event == 'end_array': - yield row - row = [] - in_row = False - else: - row.append(value) - elif event == 'start_array': - in_row = True - - -def construct_hostname(region, account): - """ - Constructs hostname from region and account - """ - if region == u'us-west-2': - region = '' - if region: - if account.find(u'.') > 0: - account = account[0:account.find(u'.')] - host = u'{0}.{1}.snowflakecomputing.com'.format(account, region) - else: - host = u'{0}.snowflakecomputing.com'.format(account) - return host - - -def parse_account(account): - url_parts = account.split(u'.') - # if this condition is true, then we have some extra - # stuff in the account field. - if len(url_parts) > 1: - if url_parts[1] == u'global': - # remove external ID from account - parsed_account = url_parts[0][0:url_parts[0].rfind(u'-')] - else: - # remove region subdomain - parsed_account = url_parts[0] - else: - parsed_account = account - - return parsed_account
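The deleted root-level `util_text.py` above held the original `split_statements` implementation; the module now lives at `snowflake.connector.util_text` and keeps the same generator contract exercised by `test/unit/test_split_statement.py`: it yields `(statement, is_put_or_get)` tuples from a text stream. A minimal consumption sketch (the sample SQL is illustrative, not taken from the test suite):

```python
from io import StringIO

from snowflake.connector.util_text import split_statements

buf = StringIO("select 1; -- comment\nput file:///tmp/a.txt @%t;")
for stmt, is_put_or_get in split_statements(buf, remove_comments=True):
    # PUT/GET statements are flagged so a caller can route them to the
    # file-transfer path instead of sending them as regular queries.
    print(repr(stmt), is_put_or_get)
# Per the behavior covered by the tests above, this should print:
# 'select 1;' False
# 'put file:///tmp/a.txt @%t;' True
```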